| body_hash (string, 64 chars) | body (string, 23-109k chars) | docstring (string, 1-57k chars) | path (string, 4-198 chars) | name (string, 1-115 chars) | repository_name (string, 7-111 chars) | repository_stars (float64, 0-191k) | lang (string, 1 value) | body_without_docstring (string, 14-108k chars) | unified (string, 45-133k chars) |
|---|---|---|---|---|---|---|---|---|---|
c5f74a00aff7c990dab246ebc3bfc2d7bfb585611bce64bc0b1b0a42a7b7eb96
|
def delete_dns_record(self, fqdn, record_type):
'Removes an existing DNS record'
existing_record = self.get_dns_record(fqdn, record_type)
if existing_record:
self._request('delete_dns_settings', {'record_id': existing_record['id']})
|
Removes an existing DNS record
|
kasserver/__init__.py
|
delete_dns_record
|
Lightweb-Media/kasserver
| 0 |
python
|
def delete_dns_record(self, fqdn, record_type):
existing_record = self.get_dns_record(fqdn, record_type)
if existing_record:
self._request('delete_dns_settings', {'record_id': existing_record['id']})
|
def delete_dns_record(self, fqdn, record_type):
existing_record = self.get_dns_record(fqdn, record_type)
if existing_record:
self._request('delete_dns_settings', {'record_id': existing_record['id']})<|docstring|>Removes an existing DNS record<|endoftext|>
|
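The record above shows kasserver's delete_dns_record, which looks up the record with get_dns_record and only issues a delete_dns_settings request when a match exists. A minimal usage sketch, assuming a KasServer client class exposing these methods (the class name, import path, and credential handling are assumptions, not taken from the record):

```python
# Hypothetical usage of the delete_dns_record pattern shown above.
# Only delete_dns_record(fqdn, record_type) appears in the record itself;
# the KasServer class and its zero-argument constructor are assumed.
from kasserver import KasServer  # assumed import path

client = KasServer()  # credentials typically come from the environment
# Remove the A record for a host; a no-op if no such record exists,
# because delete_dns_record checks get_dns_record() first.
client.delete_dns_record("www.example.com.", "A")
```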
f0857a70c9e778f75552447fdf932a2b8acc7e8721a40929e4e32985fc3e1f72
|
def list_account(self):
'Removes an existing DNS record'
res = self._request('get_accounts', {})
print(res)
|
Removes an existing DNS record
|
kasserver/__init__.py
|
list_account
|
Lightweb-Media/kasserver
| 0 |
python
|
def list_account(self):
res = self._request('get_accounts', {})
print(res)
|
def list_account(self):
res = self._request('get_accounts', {})
print(res)<|docstring|>Removes an existing DNS record<|endoftext|>
|
43aa6aad880952c72e34033db12180fbb132dbade0c3d94bd748db8f07eeff73
|
def add_account(self):
'Removes an existing DNS record'
res = self._request('get_accounts', {})
print(res)
|
Removes an existing DNS record
|
kasserver/__init__.py
|
add_account
|
Lightweb-Media/kasserver
| 0 |
python
|
def add_account(self):
res = self._request('get_accounts', {})
print(res)
|
def add_account(self):
res = self._request('get_accounts', {})
print(res)<|docstring|>Removes an existing DNS record<|endoftext|>
|
13eecc88b0f3bfccf22a7d2c45237460aa504d9d7cd22ca5e0a2dbca91359a2e
|
def add_subaccount(self, account_kas_password, account_ftp_password, hostname_art, hostname_part1, hostname_part2):
'add Subaccount'
account_comment = 'test'
hostname_art = 'subdomain'
hostname_part2 = 'dev-wp.de'
max_subdomain = 1
res = self._request('add_account', {'max_subdomain': max_subdomain, 'account_kas_password': account_kas_password, 'account_ftp_password': account_ftp_password, 'hostname_art': hostname_art, 'hostname_part1': hostname_part1, 'hostname_part2': hostname_part2, 'account_comment': account_comment})
print(res)
|
add Subaccount
|
kasserver/__init__.py
|
add_subaccount
|
Lightweb-Media/kasserver
| 0 |
python
|
def add_subaccount(self, account_kas_password, account_ftp_password, hostname_art, hostname_part1, hostname_part2):
account_comment = 'test'
hostname_art = 'subdomain'
hostname_part2 = 'dev-wp.de'
max_subdomain = 1
res = self._request('add_account', {'max_subdomain': max_subdomain, 'account_kas_password': account_kas_password, 'account_ftp_password': account_ftp_password, 'hostname_art': hostname_art, 'hostname_part1': hostname_part1, 'hostname_part2': hostname_part2, 'account_comment': account_comment})
print(res)
|
def add_subaccount(self, account_kas_password, account_ftp_password, hostname_art, hostname_part1, hostname_part2):
account_comment = 'test'
hostname_art = 'subdomain'
hostname_part2 = 'dev-wp.de'
max_subdomain = 1
res = self._request('add_account', {'max_subdomain': max_subdomain, 'account_kas_password': account_kas_password, 'account_ftp_password': account_ftp_password, 'hostname_art': hostname_art, 'hostname_part1': hostname_part1, 'hostname_part2': hostname_part2, 'account_comment': account_comment})
print(res)<|docstring|>add Subaccount<|endoftext|>
|
a486fa77a7a3b70fa338541163ae3046ed49c8a88532cd1205e340fbfbd67770
|
def make_mosaic_pb(vis_dataset, gcf_dataset, img_dataset, sel_parms, grid_parms, storage_parms):
"\n The make_pb function currently supports rotationally symmetric airy disk primary beams. Primary beams can be generated for any number of dishes.\n The make_pb_parms['list_dish_diameters'] and make_pb_parms['list_blockage_diameters'] must be specified for each dish.\n \n Parameters\n ----------\n vis_dataset : xarray.core.dataset.Dataset\n Input visibility dataset.\n gcf_dataset : xarray.core.dataset.Dataset\n Input gridding convolution function dataset.\n img_dataset : xarray.core.dataset.Dataset\n Input image dataset. ()\n make_pb_parms : dictionary\n make_pb_parms['function'] : {'airy'}, default='airy'\n Only the airy disk function is currently supported.\n grid_parms['imsize'] : list of int, length = 2\n The image size (no padding).\n grid_parms['cell'] : list of number, length = 2, units = arcseconds\n The image cell size.\n make_pb_parms['list_dish_diameters'] : list of number\n The list of dish diameters.\n make_pb_parms['list_blockage_diameters'] = list of number\n The list of blockage diameters for each dish.\n make_pb_parms['pb_name'] = 'PB'\n The created PB name.\n storage_parms : dictionary\n storage_parms['to_disk'] : bool, default = False\n If true the dask graph is executed and saved to disk in the zarr format.\n storage_parms['append'] : bool, default = False\n If storage_parms['to_disk'] is True only the dask graph associated with the function is executed and the resulting data variables are saved to an existing zarr file on disk.\n Note that graphs on unrelated data to this function will not be executed or saved.\n storage_parms['outfile'] : str\n The zarr file to create or append to.\n storage_parms['chunks_on_disk'] : dict of int, default = {}\n The chunk size to use when writing to disk. This is ignored if storage_parms['append'] is True. The default will use the chunking of the input dataset.\n storage_parms['chunks_return'] : dict of int, default = {}\n The chunk size of the dataset that is returned. The default will use the chunking of the input dataset.\n storage_parms['graph_name'] : str\n The time to compute and save the data is stored in the attribute section of the dataset and storage_parms['graph_name'] is used in the label.\n storage_parms['compressor'] : numcodecs.blosc.Blosc,default=Blosc(cname='zstd', clevel=2, shuffle=0)\n The compression algorithm to use. Available compression algorithms can be found at https://numcodecs.readthedocs.io/en/stable/blosc.html.\n \n Returns\n -------\n img_xds : xarray.core.dataset.Dataset\n "
print('######################### Start make_mosaic_pb #########################')
from ngcasa._ngcasa_utils._store import _store
from ngcasa._ngcasa_utils._check_parms import _check_storage_parms, _check_sel_parms, _check_existence_sel_parms
from ._imaging_utils._check_imaging_parms import _check_grid_parms, _check_mosaic_pb_parms
from ._imaging_utils._aperture_grid import _graph_aperture_grid
import dask.array.fft as dafft
import matplotlib.pylab as plt
import numpy as np
import dask.array as da
import copy
import xarray as xr
from ._imaging_utils._remove_padding import _remove_padding
from ._imaging_utils._normalize import _normalize
_sel_parms = copy.deepcopy(sel_parms)
_grid_parms = copy.deepcopy(grid_parms)
_storage_parms = copy.deepcopy(storage_parms)
assert _check_sel_parms(_sel_parms, {'pb': 'PB', 'weight_pb': 'WEIGHT_PB', 'weight_pb_sum_weight': 'WEIGHT_PB_SUM_WEIGHT'}), '######### ERROR: sel_parms checking failed'
assert _check_grid_parms(_grid_parms), '######### ERROR: grid_parms checking failed'
assert _check_storage_parms(_storage_parms, 'mosaic_pb.img.zarr', 'make_mosaic_pb'), '######### ERROR: storage_parms checking failed'
_grid_parms['grid_weights'] = True
_grid_parms['do_psf'] = False
_grid_parms['oversampling'] = np.array(gcf_dataset.attrs['oversampling'])
grids_and_sum_weights = _graph_aperture_grid(vis_dataset, gcf_dataset, _grid_parms, _sel_parms)
weight_image = (_remove_padding(dafft.fftshift(dafft.ifft2(dafft.ifftshift(grids_and_sum_weights[0], axes=(0, 1)), axes=(0, 1)), axes=(0, 1)), _grid_parms['image_size']).real * (_grid_parms['image_size_padded'][0] * _grid_parms['image_size_padded'][1]))
def correct_image(weight_image, sum_weights):
sum_weights_copy = copy.deepcopy(sum_weights)
sum_weights_copy[(sum_weights_copy == 0)] = 1
weight_image = (weight_image / sum_weights_copy[(None, None, :, :)])
return weight_image
weight_image = da.map_blocks(correct_image, weight_image, grids_and_sum_weights[1], dtype=np.double)
mosaic_primary_beam = da.sqrt(np.abs(weight_image))
if (_grid_parms['chan_mode'] == 'continuum'):
freq_coords = [da.mean(vis_dataset.coords['chan'].values)]
chan_width = da.from_array([da.mean(vis_dataset['chan_width'].data)], chunks=(1,))
imag_chan_chunk_size = 1
elif (_grid_parms['chan_mode'] == 'cube'):
freq_coords = vis_dataset.coords['chan'].values
chan_width = vis_dataset['chan_width'].data
imag_chan_chunk_size = vis_dataset.DATA.chunks[2][0]
chunks = vis_dataset.DATA.chunks
n_imag_pol = chunks[3][0]
image_dict = {}
coords = {'d0': np.arange(_grid_parms['image_size'][0]), 'd1': np.arange(_grid_parms['image_size'][1]), 'chan': freq_coords, 'pol': np.arange(n_imag_pol), 'chan_width': ('chan', chan_width)}
img_dataset = img_dataset.assign_coords(coords)
img_dataset[_sel_parms['pb']] = xr.DataArray(mosaic_primary_beam, dims=['d0', 'd1', 'chan', 'pol'])
img_dataset[_sel_parms['weight']] = xr.DataArray(weight_image, dims=['d0', 'd1', 'chan', 'pol'])
img_dataset[_sel_parms['weight_pb_sum_weight']] = xr.DataArray(grids_and_sum_weights[1], dims=['chan', 'pol'])
list_xarray_data_variables = [img_dataset[_sel_parms['pb']], img_dataset[_sel_parms['weight']]]
return _store(img_dataset, list_xarray_data_variables, _storage_parms)
'\n \n ## Add PB to img_dataset\n\n #coords = {\'d0\': np.arange(pb_parms[\'imsize\'][0]), \'d1\': np.arange(_pb_parms[\'imsize\'][1]),\n # \'chan\': freq_coords.compute(), \'pol\': pol,\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))}\n \n img_dataset[_pb_mosaic_parms[\'mosaic_weight_name\']] = xr.DataArray(weight_image, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset[_pb_mosaic_parms[\'mosaic_pb_name\']] = xr.DataArray(, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset = img_dataset.assign_coords({\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))})\n\n list_xarray_data_variables = [img_dataset[_pb_parms[\'pb_name\']]]\n return _store(img_dataset,list_xarray_data_variables,_storage_parms)\n\n\n\n from ngcasa._ngcasa_utils._store import _store\n from ngcasa._ngcasa_utils._check_parms import _check_storage_parms\n from ._imaging_utils._check_imaging_parms import _check_pb_parms\n import numpy as np\n import dask.array as da\n import copy, os\n import xarray as xr\n\n import matplotlib.pylab as plt\n\n _pb_parms = copy.deepcopy(pb_parms)\n _storage_parms = copy.deepcopy(storage_parms)\n\n assert(_check_pb_parms(img_dataset,_pb_parms)), "######### ERROR: user_imaging_weights_parms checking failed"\n assert(_check_storage_parms(_storage_parms,\'dataset.img.zarr\',\'make_pb\')), "######### ERROR: user_storage_parms checking failed"\n\n #parameter check\n #cube continuum check\n\n\n if _pb_parms[\'function\'] == \'airy\':\n from ._imaging_utils._make_pb_1d import _airy_disk\n pb_func = _airy_disk\n else:\n print(\'Only the airy function has been implemented\')\n\n _pb_parms[\'ipower\'] = 2\n _pb_parms[\'center_indx\'] = []\n\n\n chan_chunk_size = img_dataset.chan_width.chunks[0][0]\n freq_coords = da.from_array(img_dataset.coords[\'chan\'].values, chunks=(chan_chunk_size))\n\n pol = img_dataset.pol.values #don\'t want chunking here\n\n chunksize = (_pb_parms[\'imsize\'][0],_pb_parms[\'imsize\'][1]) + freq_coords.chunksize + (len(pol),) + (len(_pb_parms[\'list_dish_diameters\']),)\n\n pb = da.map_blocks(pb_func, freq_coords, pol, _pb_parms, chunks=chunksize ,new_axis=[0,1,3,4], dtype=np.double)\n\n ## Add PB to img_dataset\n\n coords = {\'d0\': np.arange(pb_parms[\'imsize\'][0]), \'d1\': np.arange(_pb_parms[\'imsize\'][1]),\n \'chan\': freq_coords.compute(), \'pol\': pol,\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))}\n\n\n img_dataset[_pb_parms[\'pb_name\']] = xr.DataArray(pb, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset = img_dataset.assign_coords({\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))})\n\n list_xarray_data_variables = [img_dataset[_pb_parms[\'pb_name\']]]\n return _store(img_dataset,list_xarray_data_variables,_storage_parms)\n '
|
The make_pb function currently supports rotationally symmetric airy disk primary beams. Primary beams can be generated for any number of dishes.
The make_pb_parms['list_dish_diameters'] and make_pb_parms['list_blockage_diameters'] must be specified for each dish.
Parameters
----------
vis_dataset : xarray.core.dataset.Dataset
Input visibility dataset.
gcf_dataset : xarray.core.dataset.Dataset
Input gridding convolution function dataset.
img_dataset : xarray.core.dataset.Dataset
Input image dataset. ()
make_pb_parms : dictionary
make_pb_parms['function'] : {'airy'}, default='airy'
Only the airy disk function is currently supported.
grid_parms['imsize'] : list of int, length = 2
The image size (no padding).
grid_parms['cell'] : list of number, length = 2, units = arcseconds
The image cell size.
make_pb_parms['list_dish_diameters'] : list of number
The list of dish diameters.
make_pb_parms['list_blockage_diameters'] = list of number
The list of blockage diameters for each dish.
make_pb_parms['pb_name'] = 'PB'
The created PB name.
storage_parms : dictionary
storage_parms['to_disk'] : bool, default = False
If true the dask graph is executed and saved to disk in the zarr format.
storage_parms['append'] : bool, default = False
If storage_parms['to_disk'] is True only the dask graph associated with the function is executed and the resulting data variables are saved to an existing zarr file on disk.
Note that graphs on unrelated data to this function will not be executed or saved.
storage_parms['outfile'] : str
The zarr file to create or append to.
storage_parms['chunks_on_disk'] : dict of int, default = {}
The chunk size to use when writing to disk. This is ignored if storage_parms['append'] is True. The default will use the chunking of the input dataset.
storage_parms['chunks_return'] : dict of int, default = {}
The chunk size of the dataset that is returned. The default will use the chunking of the input dataset.
storage_parms['graph_name'] : str
The time to compute and save the data is stored in the attribute section of the dataset and storage_parms['graph_name'] is used in the label.
storage_parms['compressor'] : numcodecs.blosc.Blosc,default=Blosc(cname='zstd', clevel=2, shuffle=0)
The compression algorithm to use. Available compression algorithms can be found at https://numcodecs.readthedocs.io/en/stable/blosc.html.
Returns
-------
img_xds : xarray.core.dataset.Dataset
|
ngcasa/imaging/make_mosaic_pb.py
|
make_mosaic_pb
|
FedeMPouzols/cngi_prototype
| 0 |
python
|
def make_mosaic_pb(vis_dataset, gcf_dataset, img_dataset, sel_parms, grid_parms, storage_parms):
"\n The make_pb function currently supports rotationally symmetric airy disk primary beams. Primary beams can be generated for any number of dishes.\n The make_pb_parms['list_dish_diameters'] and make_pb_parms['list_blockage_diameters'] must be specified for each dish.\n \n Parameters\n ----------\n vis_dataset : xarray.core.dataset.Dataset\n Input visibility dataset.\n gcf_dataset : xarray.core.dataset.Dataset\n Input gridding convolution function dataset.\n img_dataset : xarray.core.dataset.Dataset\n Input image dataset. ()\n make_pb_parms : dictionary\n make_pb_parms['function'] : {'airy'}, default='airy'\n Only the airy disk function is currently supported.\n grid_parms['imsize'] : list of int, length = 2\n The image size (no padding).\n grid_parms['cell'] : list of number, length = 2, units = arcseconds\n The image cell size.\n make_pb_parms['list_dish_diameters'] : list of number\n The list of dish diameters.\n make_pb_parms['list_blockage_diameters'] = list of number\n The list of blockage diameters for each dish.\n make_pb_parms['pb_name'] = 'PB'\n The created PB name.\n storage_parms : dictionary\n storage_parms['to_disk'] : bool, default = False\n If true the dask graph is executed and saved to disk in the zarr format.\n storage_parms['append'] : bool, default = False\n If storage_parms['to_disk'] is True only the dask graph associated with the function is executed and the resulting data variables are saved to an existing zarr file on disk.\n Note that graphs on unrelated data to this function will not be executed or saved.\n storage_parms['outfile'] : str\n The zarr file to create or append to.\n storage_parms['chunks_on_disk'] : dict of int, default = {}\n The chunk size to use when writing to disk. This is ignored if storage_parms['append'] is True. The default will use the chunking of the input dataset.\n storage_parms['chunks_return'] : dict of int, default = {}\n The chunk size of the dataset that is returned. The default will use the chunking of the input dataset.\n storage_parms['graph_name'] : str\n The time to compute and save the data is stored in the attribute section of the dataset and storage_parms['graph_name'] is used in the label.\n storage_parms['compressor'] : numcodecs.blosc.Blosc,default=Blosc(cname='zstd', clevel=2, shuffle=0)\n The compression algorithm to use. Available compression algorithms can be found at https://numcodecs.readthedocs.io/en/stable/blosc.html.\n \n Returns\n -------\n img_xds : xarray.core.dataset.Dataset\n "
print('######################### Start make_mosaic_pb #########################')
from ngcasa._ngcasa_utils._store import _store
from ngcasa._ngcasa_utils._check_parms import _check_storage_parms, _check_sel_parms, _check_existence_sel_parms
from ._imaging_utils._check_imaging_parms import _check_grid_parms, _check_mosaic_pb_parms
from ._imaging_utils._aperture_grid import _graph_aperture_grid
import dask.array.fft as dafft
import matplotlib.pylab as plt
import numpy as np
import dask.array as da
import copy
import xarray as xr
from ._imaging_utils._remove_padding import _remove_padding
from ._imaging_utils._normalize import _normalize
_sel_parms = copy.deepcopy(sel_parms)
_grid_parms = copy.deepcopy(grid_parms)
_storage_parms = copy.deepcopy(storage_parms)
assert _check_sel_parms(_sel_parms, {'pb': 'PB', 'weight_pb': 'WEIGHT_PB', 'weight_pb_sum_weight': 'WEIGHT_PB_SUM_WEIGHT'}), '######### ERROR: sel_parms checking failed'
assert _check_grid_parms(_grid_parms), '######### ERROR: grid_parms checking failed'
assert _check_storage_parms(_storage_parms, 'mosaic_pb.img.zarr', 'make_mosaic_pb'), '######### ERROR: storage_parms checking failed'
_grid_parms['grid_weights'] = True
_grid_parms['do_psf'] = False
_grid_parms['oversampling'] = np.array(gcf_dataset.attrs['oversampling'])
grids_and_sum_weights = _graph_aperture_grid(vis_dataset, gcf_dataset, _grid_parms, _sel_parms)
weight_image = (_remove_padding(dafft.fftshift(dafft.ifft2(dafft.ifftshift(grids_and_sum_weights[0], axes=(0, 1)), axes=(0, 1)), axes=(0, 1)), _grid_parms['image_size']).real * (_grid_parms['image_size_padded'][0] * _grid_parms['image_size_padded'][1]))
def correct_image(weight_image, sum_weights):
sum_weights_copy = copy.deepcopy(sum_weights)
sum_weights_copy[(sum_weights_copy == 0)] = 1
weight_image = (weight_image / sum_weights_copy[(None, None, :, :)])
return weight_image
weight_image = da.map_blocks(correct_image, weight_image, grids_and_sum_weights[1], dtype=np.double)
mosaic_primary_beam = da.sqrt(np.abs(weight_image))
if (_grid_parms['chan_mode'] == 'continuum'):
freq_coords = [da.mean(vis_dataset.coords['chan'].values)]
chan_width = da.from_array([da.mean(vis_dataset['chan_width'].data)], chunks=(1,))
imag_chan_chunk_size = 1
elif (_grid_parms['chan_mode'] == 'cube'):
freq_coords = vis_dataset.coords['chan'].values
chan_width = vis_dataset['chan_width'].data
imag_chan_chunk_size = vis_dataset.DATA.chunks[2][0]
chunks = vis_dataset.DATA.chunks
n_imag_pol = chunks[3][0]
image_dict = {}
coords = {'d0': np.arange(_grid_parms['image_size'][0]), 'd1': np.arange(_grid_parms['image_size'][1]), 'chan': freq_coords, 'pol': np.arange(n_imag_pol), 'chan_width': ('chan', chan_width)}
img_dataset = img_dataset.assign_coords(coords)
img_dataset[_sel_parms['pb']] = xr.DataArray(mosaic_primary_beam, dims=['d0', 'd1', 'chan', 'pol'])
img_dataset[_sel_parms['weight']] = xr.DataArray(weight_image, dims=['d0', 'd1', 'chan', 'pol'])
img_dataset[_sel_parms['weight_pb_sum_weight']] = xr.DataArray(grids_and_sum_weights[1], dims=['chan', 'pol'])
list_xarray_data_variables = [img_dataset[_sel_parms['pb']], img_dataset[_sel_parms['weight']]]
return _store(img_dataset, list_xarray_data_variables, _storage_parms)
'\n \n ## Add PB to img_dataset\n\n #coords = {\'d0\': np.arange(pb_parms[\'imsize\'][0]), \'d1\': np.arange(_pb_parms[\'imsize\'][1]),\n # \'chan\': freq_coords.compute(), \'pol\': pol,\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))}\n \n img_dataset[_pb_mosaic_parms[\'mosaic_weight_name\']] = xr.DataArray(weight_image, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset[_pb_mosaic_parms[\'mosaic_pb_name\']] = xr.DataArray(, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset = img_dataset.assign_coords({\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))})\n\n list_xarray_data_variables = [img_dataset[_pb_parms[\'pb_name\']]]\n return _store(img_dataset,list_xarray_data_variables,_storage_parms)\n\n\n\n from ngcasa._ngcasa_utils._store import _store\n from ngcasa._ngcasa_utils._check_parms import _check_storage_parms\n from ._imaging_utils._check_imaging_parms import _check_pb_parms\n import numpy as np\n import dask.array as da\n import copy, os\n import xarray as xr\n\n import matplotlib.pylab as plt\n\n _pb_parms = copy.deepcopy(pb_parms)\n _storage_parms = copy.deepcopy(storage_parms)\n\n assert(_check_pb_parms(img_dataset,_pb_parms)), "######### ERROR: user_imaging_weights_parms checking failed"\n assert(_check_storage_parms(_storage_parms,\'dataset.img.zarr\',\'make_pb\')), "######### ERROR: user_storage_parms checking failed"\n\n #parameter check\n #cube continuum check\n\n\n if _pb_parms[\'function\'] == \'airy\':\n from ._imaging_utils._make_pb_1d import _airy_disk\n pb_func = _airy_disk\n else:\n print(\'Only the airy function has been implemented\')\n\n _pb_parms[\'ipower\'] = 2\n _pb_parms[\'center_indx\'] = []\n\n\n chan_chunk_size = img_dataset.chan_width.chunks[0][0]\n freq_coords = da.from_array(img_dataset.coords[\'chan\'].values, chunks=(chan_chunk_size))\n\n pol = img_dataset.pol.values #don\'t want chunking here\n\n chunksize = (_pb_parms[\'imsize\'][0],_pb_parms[\'imsize\'][1]) + freq_coords.chunksize + (len(pol),) + (len(_pb_parms[\'list_dish_diameters\']),)\n\n pb = da.map_blocks(pb_func, freq_coords, pol, _pb_parms, chunks=chunksize ,new_axis=[0,1,3,4], dtype=np.double)\n\n ## Add PB to img_dataset\n\n coords = {\'d0\': np.arange(pb_parms[\'imsize\'][0]), \'d1\': np.arange(_pb_parms[\'imsize\'][1]),\n \'chan\': freq_coords.compute(), \'pol\': pol,\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))}\n\n\n img_dataset[_pb_parms[\'pb_name\']] = xr.DataArray(pb, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset = img_dataset.assign_coords({\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))})\n\n list_xarray_data_variables = [img_dataset[_pb_parms[\'pb_name\']]]\n return _store(img_dataset,list_xarray_data_variables,_storage_parms)\n '
|
def make_mosaic_pb(vis_dataset, gcf_dataset, img_dataset, sel_parms, grid_parms, storage_parms):
"\n The make_pb function currently supports rotationally symmetric airy disk primary beams. Primary beams can be generated for any number of dishes.\n The make_pb_parms['list_dish_diameters'] and make_pb_parms['list_blockage_diameters'] must be specified for each dish.\n \n Parameters\n ----------\n vis_dataset : xarray.core.dataset.Dataset\n Input visibility dataset.\n gcf_dataset : xarray.core.dataset.Dataset\n Input gridding convolution function dataset.\n img_dataset : xarray.core.dataset.Dataset\n Input image dataset. ()\n make_pb_parms : dictionary\n make_pb_parms['function'] : {'airy'}, default='airy'\n Only the airy disk function is currently supported.\n grid_parms['imsize'] : list of int, length = 2\n The image size (no padding).\n grid_parms['cell'] : list of number, length = 2, units = arcseconds\n The image cell size.\n make_pb_parms['list_dish_diameters'] : list of number\n The list of dish diameters.\n make_pb_parms['list_blockage_diameters'] = list of number\n The list of blockage diameters for each dish.\n make_pb_parms['pb_name'] = 'PB'\n The created PB name.\n storage_parms : dictionary\n storage_parms['to_disk'] : bool, default = False\n If true the dask graph is executed and saved to disk in the zarr format.\n storage_parms['append'] : bool, default = False\n If storage_parms['to_disk'] is True only the dask graph associated with the function is executed and the resulting data variables are saved to an existing zarr file on disk.\n Note that graphs on unrelated data to this function will not be executed or saved.\n storage_parms['outfile'] : str\n The zarr file to create or append to.\n storage_parms['chunks_on_disk'] : dict of int, default = {}\n The chunk size to use when writing to disk. This is ignored if storage_parms['append'] is True. The default will use the chunking of the input dataset.\n storage_parms['chunks_return'] : dict of int, default = {}\n The chunk size of the dataset that is returned. The default will use the chunking of the input dataset.\n storage_parms['graph_name'] : str\n The time to compute and save the data is stored in the attribute section of the dataset and storage_parms['graph_name'] is used in the label.\n storage_parms['compressor'] : numcodecs.blosc.Blosc,default=Blosc(cname='zstd', clevel=2, shuffle=0)\n The compression algorithm to use. Available compression algorithms can be found at https://numcodecs.readthedocs.io/en/stable/blosc.html.\n \n Returns\n -------\n img_xds : xarray.core.dataset.Dataset\n "
print('######################### Start make_mosaic_pb #########################')
from ngcasa._ngcasa_utils._store import _store
from ngcasa._ngcasa_utils._check_parms import _check_storage_parms, _check_sel_parms, _check_existence_sel_parms
from ._imaging_utils._check_imaging_parms import _check_grid_parms, _check_mosaic_pb_parms
from ._imaging_utils._aperture_grid import _graph_aperture_grid
import dask.array.fft as dafft
import matplotlib.pylab as plt
import numpy as np
import dask.array as da
import copy
import xarray as xr
from ._imaging_utils._remove_padding import _remove_padding
from ._imaging_utils._normalize import _normalize
_sel_parms = copy.deepcopy(sel_parms)
_grid_parms = copy.deepcopy(grid_parms)
_storage_parms = copy.deepcopy(storage_parms)
assert _check_sel_parms(_sel_parms, {'pb': 'PB', 'weight_pb': 'WEIGHT_PB', 'weight_pb_sum_weight': 'WEIGHT_PB_SUM_WEIGHT'}), '######### ERROR: sel_parms checking failed'
assert _check_grid_parms(_grid_parms), '######### ERROR: grid_parms checking failed'
assert _check_storage_parms(_storage_parms, 'mosaic_pb.img.zarr', 'make_mosaic_pb'), '######### ERROR: storage_parms checking failed'
_grid_parms['grid_weights'] = True
_grid_parms['do_psf'] = False
_grid_parms['oversampling'] = np.array(gcf_dataset.attrs['oversampling'])
grids_and_sum_weights = _graph_aperture_grid(vis_dataset, gcf_dataset, _grid_parms, _sel_parms)
weight_image = (_remove_padding(dafft.fftshift(dafft.ifft2(dafft.ifftshift(grids_and_sum_weights[0], axes=(0, 1)), axes=(0, 1)), axes=(0, 1)), _grid_parms['image_size']).real * (_grid_parms['image_size_padded'][0] * _grid_parms['image_size_padded'][1]))
def correct_image(weight_image, sum_weights):
sum_weights_copy = copy.deepcopy(sum_weights)
sum_weights_copy[(sum_weights_copy == 0)] = 1
weight_image = (weight_image / sum_weights_copy[(None, None, :, :)])
return weight_image
weight_image = da.map_blocks(correct_image, weight_image, grids_and_sum_weights[1], dtype=np.double)
mosaic_primary_beam = da.sqrt(np.abs(weight_image))
if (_grid_parms['chan_mode'] == 'continuum'):
freq_coords = [da.mean(vis_dataset.coords['chan'].values)]
chan_width = da.from_array([da.mean(vis_dataset['chan_width'].data)], chunks=(1,))
imag_chan_chunk_size = 1
elif (_grid_parms['chan_mode'] == 'cube'):
freq_coords = vis_dataset.coords['chan'].values
chan_width = vis_dataset['chan_width'].data
imag_chan_chunk_size = vis_dataset.DATA.chunks[2][0]
chunks = vis_dataset.DATA.chunks
n_imag_pol = chunks[3][0]
image_dict = {}
coords = {'d0': np.arange(_grid_parms['image_size'][0]), 'd1': np.arange(_grid_parms['image_size'][1]), 'chan': freq_coords, 'pol': np.arange(n_imag_pol), 'chan_width': ('chan', chan_width)}
img_dataset = img_dataset.assign_coords(coords)
img_dataset[_sel_parms['pb']] = xr.DataArray(mosaic_primary_beam, dims=['d0', 'd1', 'chan', 'pol'])
img_dataset[_sel_parms['weight']] = xr.DataArray(weight_image, dims=['d0', 'd1', 'chan', 'pol'])
img_dataset[_sel_parms['weight_pb_sum_weight']] = xr.DataArray(grids_and_sum_weights[1], dims=['chan', 'pol'])
list_xarray_data_variables = [img_dataset[_sel_parms['pb']], img_dataset[_sel_parms['weight']]]
return _store(img_dataset, list_xarray_data_variables, _storage_parms)
'\n \n ## Add PB to img_dataset\n\n #coords = {\'d0\': np.arange(pb_parms[\'imsize\'][0]), \'d1\': np.arange(_pb_parms[\'imsize\'][1]),\n # \'chan\': freq_coords.compute(), \'pol\': pol,\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))}\n \n img_dataset[_pb_mosaic_parms[\'mosaic_weight_name\']] = xr.DataArray(weight_image, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset[_pb_mosaic_parms[\'mosaic_pb_name\']] = xr.DataArray(, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset = img_dataset.assign_coords({\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))})\n\n list_xarray_data_variables = [img_dataset[_pb_parms[\'pb_name\']]]\n return _store(img_dataset,list_xarray_data_variables,_storage_parms)\n\n\n\n from ngcasa._ngcasa_utils._store import _store\n from ngcasa._ngcasa_utils._check_parms import _check_storage_parms\n from ._imaging_utils._check_imaging_parms import _check_pb_parms\n import numpy as np\n import dask.array as da\n import copy, os\n import xarray as xr\n\n import matplotlib.pylab as plt\n\n _pb_parms = copy.deepcopy(pb_parms)\n _storage_parms = copy.deepcopy(storage_parms)\n\n assert(_check_pb_parms(img_dataset,_pb_parms)), "######### ERROR: user_imaging_weights_parms checking failed"\n assert(_check_storage_parms(_storage_parms,\'dataset.img.zarr\',\'make_pb\')), "######### ERROR: user_storage_parms checking failed"\n\n #parameter check\n #cube continuum check\n\n\n if _pb_parms[\'function\'] == \'airy\':\n from ._imaging_utils._make_pb_1d import _airy_disk\n pb_func = _airy_disk\n else:\n print(\'Only the airy function has been implemented\')\n\n _pb_parms[\'ipower\'] = 2\n _pb_parms[\'center_indx\'] = []\n\n\n chan_chunk_size = img_dataset.chan_width.chunks[0][0]\n freq_coords = da.from_array(img_dataset.coords[\'chan\'].values, chunks=(chan_chunk_size))\n\n pol = img_dataset.pol.values #don\'t want chunking here\n\n chunksize = (_pb_parms[\'imsize\'][0],_pb_parms[\'imsize\'][1]) + freq_coords.chunksize + (len(pol),) + (len(_pb_parms[\'list_dish_diameters\']),)\n\n pb = da.map_blocks(pb_func, freq_coords, pol, _pb_parms, chunks=chunksize ,new_axis=[0,1,3,4], dtype=np.double)\n\n ## Add PB to img_dataset\n\n coords = {\'d0\': np.arange(pb_parms[\'imsize\'][0]), \'d1\': np.arange(_pb_parms[\'imsize\'][1]),\n \'chan\': freq_coords.compute(), \'pol\': pol,\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))}\n\n\n img_dataset[_pb_parms[\'pb_name\']] = xr.DataArray(pb, dims=[\'d0\', \'d1\', \'chan\', \'pol\',\'dish_type\'])\n img_dataset = img_dataset.assign_coords({\'dish_type\': np.arange(len(_pb_parms[\'list_dish_diameters\']))})\n\n list_xarray_data_variables = [img_dataset[_pb_parms[\'pb_name\']]]\n return _store(img_dataset,list_xarray_data_variables,_storage_parms)\n '<|docstring|>The make_pb function currently supports rotationally symmetric airy disk primary beams. Primary beams can be generated for any number of dishes.
The make_pb_parms['list_dish_diameters'] and make_pb_parms['list_blockage_diameters'] must be specified for each dish.
Parameters
----------
vis_dataset : xarray.core.dataset.Dataset
Input visibility dataset.
gcf_dataset : xarray.core.dataset.Dataset
Input gridding convolution function dataset.
img_dataset : xarray.core.dataset.Dataset
Input image dataset. ()
make_pb_parms : dictionary
make_pb_parms['function'] : {'airy'}, default='airy'
Only the airy disk function is currently supported.
grid_parms['imsize'] : list of int, length = 2
The image size (no padding).
grid_parms['cell'] : list of number, length = 2, units = arcseconds
The image cell size.
make_pb_parms['list_dish_diameters'] : list of number
The list of dish diameters.
make_pb_parms['list_blockage_diameters'] = list of number
The list of blockage diameters for each dish.
make_pb_parms['pb_name'] = 'PB'
The created PB name.
storage_parms : dictionary
storage_parms['to_disk'] : bool, default = False
If true the dask graph is executed and saved to disk in the zarr format.
storage_parms['append'] : bool, default = False
If storage_parms['to_disk'] is True only the dask graph associated with the function is executed and the resulting data variables are saved to an existing zarr file on disk.
Note that graphs on unrelated data to this function will not be executed or saved.
storage_parms['outfile'] : str
The zarr file to create or append to.
storage_parms['chunks_on_disk'] : dict of int, default = {}
The chunk size to use when writing to disk. This is ignored if storage_parms['append'] is True. The default will use the chunking of the input dataset.
storage_parms['chunks_return'] : dict of int, default = {}
The chunk size of the dataset that is returned. The default will use the chunking of the input dataset.
storage_parms['graph_name'] : str
The time to compute and save the data is stored in the attribute section of the dataset and storage_parms['graph_name'] is used in the label.
storage_parms['compressor'] : numcodecs.blosc.Blosc,default=Blosc(cname='zstd', clevel=2, shuffle=0)
The compression algorithm to use. Available compression algorithms can be found at https://numcodecs.readthedocs.io/en/stable/blosc.html.
Returns
-------
img_xds : xarray.core.dataset.Dataset<|endoftext|>
|
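The make_mosaic_pb record documents a dictionary-driven API: selection, gridding, and storage options arrive as plain dicts that are validated by _check_* helpers before the dask graph is built (note that the docstring and the body disagree on some key names, e.g. imsize versus image_size). A hedged sketch of a call, using only keys visible in the record; the input datasets are placeholders and the values are not a validated configuration:

```python
# vis_xds, gcf_xds and img_xds stand in for xarray Datasets produced by
# earlier pipeline steps; they are placeholders, not constructed here.
from ngcasa.imaging import make_mosaic_pb   # import path assumed from the record's file path

sel_parms = {}                       # falls back to PB / WEIGHT_PB / WEIGHT_PB_SUM_WEIGHT
grid_parms = {
    'chan_mode': 'cube',             # or 'continuum', per the branch in the body
    'image_size': [200, 200],        # key name taken from the function body
}
storage_parms = {'to_disk': False}   # keep the result as a lazy dask graph (docstring default)

img_xds = make_mosaic_pb(vis_xds, gcf_xds, img_xds, sel_parms, grid_parms, storage_parms)
```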
efde42e3e91b8db9404361fd209c250f7fe28bbebf8149566427ac8ce5075eeb
|
def __init__(self, initial_class_observations, perceptron_node=None, random_state=None):
' InactiveLearningNodePerceptron class constructor.'
super().__init__(initial_class_observations)
self.random_state = check_random_state(random_state)
self.samples_seen = 0
if (perceptron_node is None):
self.perceptron_weight = None
else:
self.perceptron_weight = deepcopy(perceptron_node.perceptron_weight)
self.samples_seen = perceptron_node.samples_seen
|
InactiveLearningNodePerceptron class constructor.
|
src/skmultiflow/trees/nodes/inactive_learning_node_perceptron.py
|
__init__
|
nuwangunasekara/scikit-multiflow
| 1 |
python
|
def __init__(self, initial_class_observations, perceptron_node=None, random_state=None):
' '
super().__init__(initial_class_observations)
self.random_state = check_random_state(random_state)
self.samples_seen = 0
if (perceptron_node is None):
self.perceptron_weight = None
else:
self.perceptron_weight = deepcopy(perceptron_node.perceptron_weight)
self.samples_seen = perceptron_node.samples_seen
|
def __init__(self, initial_class_observations, perceptron_node=None, random_state=None):
' '
super().__init__(initial_class_observations)
self.random_state = check_random_state(random_state)
self.samples_seen = 0
if (perceptron_node is None):
self.perceptron_weight = None
else:
self.perceptron_weight = deepcopy(perceptron_node.perceptron_weight)
self.samples_seen = perceptron_node.samples_seen<|docstring|>InactiveLearningNodePerceptron class constructor.<|endoftext|>
|
8f57458af0574e8dc3a619b539227743990278569b1e179703b60e2aaca37141
|
def learn_from_instance(self, X, y, weight, rht):
'Update the node with the provided instance.\n\n Parameters\n ----------\n X: numpy.ndarray of length equal to the number of features.\n Instance attributes for updating the node.\n y: double\n Instance target value.\n weight: float\n Instance weight.\n rht: HoeffdingTreeRegressor\n Regression Hoeffding Tree to update.\n\n '
if (self.perceptron_weight is None):
self.perceptron_weight = self.random_state.uniform((- 1), 1, (len(X) + 1))
try:
self._observed_class_distribution[0] += weight
self._observed_class_distribution[1] += (y * weight)
self._observed_class_distribution[2] += ((y * y) * weight)
except KeyError:
self._observed_class_distribution[0] = weight
self._observed_class_distribution[1] = (y * weight)
self._observed_class_distribution[2] = ((y * y) * weight)
self.samples_seen = self._observed_class_distribution[0]
if rht.learning_ratio_const:
learning_ratio = rht.learning_ratio_perceptron
else:
learning_ratio = (rht.learning_ratio_perceptron / (1 + (self.samples_seen * rht.learning_ratio_decay)))
for i in range(int(weight)):
self.update_weights(X, y, learning_ratio, rht)
|
Update the node with the provided instance.
Parameters
----------
X: numpy.ndarray of length equal to the number of features.
Instance attributes for updating the node.
y: double
Instance target value.
weight: float
Instance weight.
rht: HoeffdingTreeRegressor
Regression Hoeffding Tree to update.
|
src/skmultiflow/trees/nodes/inactive_learning_node_perceptron.py
|
learn_from_instance
|
nuwangunasekara/scikit-multiflow
| 1 |
python
|
def learn_from_instance(self, X, y, weight, rht):
'Update the node with the provided instance.\n\n Parameters\n ----------\n X: numpy.ndarray of length equal to the number of features.\n Instance attributes for updating the node.\n y: double\n Instance target value.\n weight: float\n Instance weight.\n rht: HoeffdingTreeRegressor\n Regression Hoeffding Tree to update.\n\n '
if (self.perceptron_weight is None):
self.perceptron_weight = self.random_state.uniform((- 1), 1, (len(X) + 1))
try:
self._observed_class_distribution[0] += weight
self._observed_class_distribution[1] += (y * weight)
self._observed_class_distribution[2] += ((y * y) * weight)
except KeyError:
self._observed_class_distribution[0] = weight
self._observed_class_distribution[1] = (y * weight)
self._observed_class_distribution[2] = ((y * y) * weight)
self.samples_seen = self._observed_class_distribution[0]
if rht.learning_ratio_const:
learning_ratio = rht.learning_ratio_perceptron
else:
learning_ratio = (rht.learning_ratio_perceptron / (1 + (self.samples_seen * rht.learning_ratio_decay)))
for i in range(int(weight)):
self.update_weights(X, y, learning_ratio, rht)
|
def learn_from_instance(self, X, y, weight, rht):
'Update the node with the provided instance.\n\n Parameters\n ----------\n X: numpy.ndarray of length equal to the number of features.\n Instance attributes for updating the node.\n y: double\n Instance target value.\n weight: float\n Instance weight.\n rht: HoeffdingTreeRegressor\n Regression Hoeffding Tree to update.\n\n '
if (self.perceptron_weight is None):
self.perceptron_weight = self.random_state.uniform((- 1), 1, (len(X) + 1))
try:
self._observed_class_distribution[0] += weight
self._observed_class_distribution[1] += (y * weight)
self._observed_class_distribution[2] += ((y * y) * weight)
except KeyError:
self._observed_class_distribution[0] = weight
self._observed_class_distribution[1] = (y * weight)
self._observed_class_distribution[2] = ((y * y) * weight)
self.samples_seen = self._observed_class_distribution[0]
if rht.learning_ratio_const:
learning_ratio = rht.learning_ratio_perceptron
else:
learning_ratio = (rht.learning_ratio_perceptron / (1 + (self.samples_seen * rht.learning_ratio_decay)))
for i in range(int(weight)):
self.update_weights(X, y, learning_ratio, rht)<|docstring|>Update the node with the provided instance.
Parameters
----------
X: numpy.ndarray of length equal to the number of features.
Instance attributes for updating the node.
y: double
Instance target value.
weight: float
Instance weight.
rht: HoeffdingTreeRegressor
Regression Hoeffding Tree to update.<|endoftext|>
|
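learn_from_instance keeps only three running sums in _observed_class_distribution: the total weight, the weighted sum of targets, and the weighted sum of squared targets. Those three numbers are enough to recover the weighted mean and variance of y at any point without storing instances. A small self-contained illustration of that bookkeeping (plain Python, independent of scikit-multiflow):

```python
# Running sufficient statistics, mirroring the three slots used above:
# [0] -> sum of weights, [1] -> weighted sum of y, [2] -> weighted sum of y*y
stats = {0: 0.0, 1: 0.0, 2: 0.0}

def observe(y, weight=1.0):
    stats[0] += weight
    stats[1] += y * weight
    stats[2] += y * y * weight

for y in [2.0, 3.0, 5.0, 7.0]:
    observe(y)

mean = stats[1] / stats[0]
variance = stats[2] / stats[0] - mean ** 2   # E[y^2] - E[y]^2
print(mean, variance)                        # 4.25 3.6875
```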
70569e63f39b524491d7eb20ea719e3cf40e10fdd2c26765f3ceeaece94f5687
|
def update_weights(self, X, y, learning_ratio, ht):
'\n Update the perceptron weights\n Parameters\n ----------\n X: numpy.ndarray of length equal to the number of features.\n Instance attributes for updating the node.\n y: float\n Instance target value.\n learning_ratio: float\n perceptron learning ratio\n rht: HoeffdingTreeRegressor\n Regression Hoeffding Tree to update.\n '
normalized_sample = ht.normalize_sample(X)
normalized_pred = np.dot(self.perceptron_weight, normalized_sample)
normalized_target_value = ht.normalize_target_value(y)
delta = (normalized_target_value - normalized_pred)
self.perceptron_weight = (self.perceptron_weight + ((learning_ratio * delta) * normalized_sample))
self.perceptron_weight = (self.perceptron_weight / np.sum(np.abs(self.perceptron_weight)))
|
Update the perceptron weights
Parameters
----------
X: numpy.ndarray of length equal to the number of features.
Instance attributes for updating the node.
y: float
Instance target value.
learning_ratio: float
perceptron learning ratio
rht: HoeffdingTreeRegressor
Regression Hoeffding Tree to update.
|
src/skmultiflow/trees/nodes/inactive_learning_node_perceptron.py
|
update_weights
|
nuwangunasekara/scikit-multiflow
| 1 |
python
|
def update_weights(self, X, y, learning_ratio, ht):
'\n Update the perceptron weights\n Parameters\n ----------\n X: numpy.ndarray of length equal to the number of features.\n Instance attributes for updating the node.\n y: float\n Instance target value.\n learning_ratio: float\n perceptron learning ratio\n rht: HoeffdingTreeRegressor\n Regression Hoeffding Tree to update.\n '
normalized_sample = ht.normalize_sample(X)
normalized_pred = np.dot(self.perceptron_weight, normalized_sample)
normalized_target_value = ht.normalize_target_value(y)
delta = (normalized_target_value - normalized_pred)
self.perceptron_weight = (self.perceptron_weight + ((learning_ratio * delta) * normalized_sample))
self.perceptron_weight = (self.perceptron_weight / np.sum(np.abs(self.perceptron_weight)))
|
def update_weights(self, X, y, learning_ratio, ht):
'\n Update the perceptron weights\n Parameters\n ----------\n X: numpy.ndarray of length equal to the number of features.\n Instance attributes for updating the node.\n y: float\n Instance target value.\n learning_ratio: float\n perceptron learning ratio\n rht: HoeffdingTreeRegressor\n Regression Hoeffding Tree to update.\n '
normalized_sample = ht.normalize_sample(X)
normalized_pred = np.dot(self.perceptron_weight, normalized_sample)
normalized_target_value = ht.normalize_target_value(y)
delta = (normalized_target_value - normalized_pred)
self.perceptron_weight = (self.perceptron_weight + ((learning_ratio * delta) * normalized_sample))
self.perceptron_weight = (self.perceptron_weight / np.sum(np.abs(self.perceptron_weight)))<|docstring|>Update the perceptron weights
Parameters
----------
X: numpy.ndarray of length equal to the number of features.
Instance attributes for updating the node.
y: float
Instance target value.
learning_ratio: float
perceptron learning ratio
rht: HoeffdingTreeRegressor
Regression Hoeffding Tree to update.<|endoftext|>
|
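update_weights applies a plain delta rule on normalized inputs and then rescales the weight vector to unit L1 norm. The arithmetic is easy to reproduce in isolation; the snippet below mimics one update step with made-up numbers and does not use the tree's normalize_sample/normalize_target_value helpers, which need dataset statistics:

```python
import numpy as np

rng = np.random.default_rng(0)
w = rng.uniform(-1, 1, 4)             # weights, including a bias slot
x = np.array([0.2, -0.5, 1.0, 1.0])   # already-normalized sample, bias term last
y = 0.3                               # already-normalized target
learning_ratio = 0.02

pred = np.dot(w, x)                   # perceptron prediction
delta = y - pred                      # prediction error
w = w + learning_ratio * delta * x    # delta-rule update
w = w / np.sum(np.abs(w))             # L1 normalization, as in update_weights
print(w, np.sum(np.abs(w)))           # the absolute weights now sum to 1
```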
6da412d4abdb9ed05efb6a533d48d9e24f74f0e679bd55e764a74684302b1f97
|
def __init__(self, piezas):
'Inicializador de clase Posiciones.\n \n Parameters\n ----------\n piezas : list of Piezas\n Piezas actuales dentro del tablero'
self.bk = []
self.wh = []
for i in piezas:
if (i.team == 'bk'):
self.bk.append(i.posicion)
else:
self.wh.append(i.posicion)
|
Inicializador de clase Posiciones.
Parameters
----------
piezas : list of Piezas
Piezas actuales dentro del tablero
|
CLI/tablero.py
|
__init__
|
EnriqueMC557/POO_Ajedrez
| 0 |
python
|
def __init__(self, piezas):
'Inicializador de clase Posiciones.\n \n Parameters\n ----------\n piezas : list of Piezas\n Piezas actuales dentro del tablero'
self.bk = []
self.wh = []
for i in piezas:
if (i.team == 'bk'):
self.bk.append(i.posicion)
else:
self.wh.append(i.posicion)
|
def __init__(self, piezas):
'Inicializador de clase Posiciones.\n \n Parameters\n ----------\n piezas : list of Piezas\n Piezas actuales dentro del tablero'
self.bk = []
self.wh = []
for i in piezas:
if (i.team == 'bk'):
self.bk.append(i.posicion)
else:
self.wh.append(i.posicion)<|docstring|>Inicializador de clase Posiciones.
Parameters
----------
piezas : list of Piezas
Piezas actuales dentro del tablero<|endoftext|>
|
18f8302f087c51ebe9bae4a457a1537ebcb939c393a934b00ae489c5b832ed43
|
def __init__(self, piezas):
'Inicializador de clase Tablero.\n \n Parameteres\n ----------\n piezas : list of Piezas\n Piezas con las cuales se jugará ajedrez'
self.fondo = np.array([[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0]])
self.posiciones = Posiciones(piezas)
self.mostrar(piezas)
|
Inicializador de clase Tablero.
Parameteres
----------
piezas : list of Piezas
Piezas con las cuales se jugará ajedrez
|
CLI/tablero.py
|
__init__
|
EnriqueMC557/POO_Ajedrez
| 0 |
python
|
def __init__(self, piezas):
'Inicializador de clase Tablero.\n \n Parameteres\n ----------\n piezas : list of Piezas\n Piezas con las cuales se jugará ajedrez'
self.fondo = np.array([[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0]])
self.posiciones = Posiciones(piezas)
self.mostrar(piezas)
|
def __init__(self, piezas):
'Inicializador de clase Tablero.\n \n Parameteres\n ----------\n piezas : list of Piezas\n Piezas con las cuales se jugará ajedrez'
self.fondo = np.array([[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0], [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0], [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0]])
self.posiciones = Posiciones(piezas)
self.mostrar(piezas)<|docstring|>Inicializador de clase Tablero.
Parameteres
----------
piezas : list of Piezas
Piezas con las cuales se jugará ajedrez<|endoftext|>
|
2b7f7ca94bab6267197436372c44e3549cbd980d6a251ad9aca360f03c405590
|
def mostrar(self, piezas):
'Método que permite realizar el despliegue del tablero con las piezas\n con las que se jugará ajedrez.\n \n Parameters\n ----------\n piezas : list of Piezas\n Piezas a desplegar en tablero'
(fig, ax) = plt.subplots()
ax.set_xticks(np.arange(8))
ax.set_xticklabels(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'])
ax.set_yticks(np.arange(8))
ax.set_yticklabels(['1', '2', '3', '4', '5', '6', '7', '8'])
ax.imshow(self.fondo, cmap='binary', alpha=0.5)
for i in piezas:
if (i.team == 'wh'):
plt.plot((i.posicion[0] - 1), (i.posicion[1] - 1), marker=i.marker, mfc='white', mec='silver', ls='', ms=15)
else:
plt.plot((i.posicion[0] - 1), (i.posicion[1] - 1), marker=i.marker, mfc='white', mec='black', ls='', ms=15)
plt.show()
|
Método que permite realizar el despliegue del tablero con las piezas
con las que se jugará ajedrez.
Parameters
----------
piezas : list of Piezas
Piezas a desplegar en tablero
|
CLI/tablero.py
|
mostrar
|
EnriqueMC557/POO_Ajedrez
| 0 |
python
|
def mostrar(self, piezas):
'Método que permite realizar el despliegue del tablero con las piezas\n con las que se jugará ajedrez.\n \n Parameters\n ----------\n piezas : list of Piezas\n Piezas a desplegar en tablero'
(fig, ax) = plt.subplots()
ax.set_xticks(np.arange(8))
ax.set_xticklabels(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'])
ax.set_yticks(np.arange(8))
ax.set_yticklabels(['1', '2', '3', '4', '5', '6', '7', '8'])
ax.imshow(self.fondo, cmap='binary', alpha=0.5)
for i in piezas:
if (i.team == 'wh'):
plt.plot((i.posicion[0] - 1), (i.posicion[1] - 1), marker=i.marker, mfc='white', mec='silver', ls='', ms=15)
else:
plt.plot((i.posicion[0] - 1), (i.posicion[1] - 1), marker=i.marker, mfc='white', mec='black', ls='', ms=15)
plt.show()
|
def mostrar(self, piezas):
'Método que permite realizar el despliegue del tablero con las piezas\n con las que se jugará ajedrez.\n \n Parameters\n ----------\n piezas : list of Piezas\n Piezas a desplegar en tablero'
(fig, ax) = plt.subplots()
ax.set_xticks(np.arange(8))
ax.set_xticklabels(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'])
ax.set_yticks(np.arange(8))
ax.set_yticklabels(['1', '2', '3', '4', '5', '6', '7', '8'])
ax.imshow(self.fondo, cmap='binary', alpha=0.5)
for i in piezas:
if (i.team == 'wh'):
plt.plot((i.posicion[0] - 1), (i.posicion[1] - 1), marker=i.marker, mfc='white', mec='silver', ls='', ms=15)
else:
plt.plot((i.posicion[0] - 1), (i.posicion[1] - 1), marker=i.marker, mfc='white', mec='black', ls='', ms=15)
plt.show()<|docstring|>Método que permite realizar el despliegue del tablero con las piezas
con las que se jugará ajedrez.
Parameters
----------
piezas : list of Piezas
Piezas a desplegar en tablero<|endoftext|>
|
1869dcd6b1b1bbe08e6a6052afc16313923c05db6e1f4da2ab9aa2b26be3d3d4
|
def get_payload(self):
'\n Function to access the payload information. If using a DB that\n supports JSON this function should be rewritten (to be transparent).\n :return: The JSON structure with the payload\n '
if (self.payload == ''):
return {}
return json.loads(self.payload)
|
Function to access the payload information. If using a DB that
supports JSON this function should be rewritten (to be transparent).
:return: The JSON structure with the payload
|
src/logs/models.py
|
get_payload
|
Lukahm/ontask
| 3 |
python
|
def get_payload(self):
'\n Function to access the payload information. If using a DB that\n supports JSON this function should be rewritten (to be transparent).\n :return: The JSON structure with the payload\n '
if (self.payload == ''):
return {}
return json.loads(self.payload)
|
def get_payload(self):
'\n Function to access the payload information. If using a DB that\n supports JSON this function should be rewritten (to be transparent).\n :return: The JSON structure with the payload\n '
if (self.payload == ''):
return {}
return json.loads(self.payload)<|docstring|>Function to access the payload information. If using a DB that
supports JSON this function should be rewritten (to be transparent).
:return: The JSON structure with the payload<|endoftext|>
|
4deb6a917eda95bbad0fc391601766df278e742656a81edf50b6aa64ddf1eaaf
|
def set_payload(self, payload):
'\n Save the payload structure as text. If using a DB that supports JSON,\n this function should be rewritten.\n :return: Nothing.\n '
self.payload = json.dumps(payload)
|
Save the payload structure as text. If using a DB that supports JSON,
this function should be rewritten.
:return: Nothing.
|
src/logs/models.py
|
set_payload
|
Lukahm/ontask
| 3 |
python
|
def set_payload(self, payload):
'\n Save the payload structure as text. If using a DB that supports JSON,\n this function should be rewritten.\n :return: Nothing.\n '
self.payload = json.dumps(payload)
|
def set_payload(self, payload):
'\n Save the payload structure as text. If using a DB that supports JSON,\n this function should be rewritten.\n :return: Nothing.\n '
self.payload = json.dumps(payload)<|docstring|>Save the payload structure as text. If using a DB that supports JSON,
this function should be rewritten.
:return: Nothing.<|endoftext|>
|
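Taken together, get_payload and set_payload implement the usual "JSON serialized into a text column" workaround for databases without native JSON support. A minimal stand-alone class with the same round-trip behavior (plain Python, not the actual ontask Django model):

```python
import json

class Log:
    """Toy stand-in for a model that keeps its payload as text."""
    def __init__(self):
        self.payload = ''            # text column; empty string means "no payload"

    def get_payload(self):
        if self.payload == '':
            return {}
        return json.loads(self.payload)

    def set_payload(self, payload):
        self.payload = json.dumps(payload)

log = Log()
log.set_payload({'action': 'email', 'rows': 12})
assert log.get_payload()['rows'] == 12
```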
491b458ea145eefc2f8e73484e13a03c4bd076336c583b1235f94062c2ca9666
|
def findMaterials(self, memo=None):
"Yield all materials present.\n\n Parameters\n ----------\n memo : set, optional\n Set containing ids of previously visited materials. Don't\n pass unless you know what you're doing. If given, will\n be modified with :attr:`hydep.Material.id` of discovered\n materials\n\n Yields\n ------\n hydep.Material\n The first occurance of this material.\n\n "
memo = (set() if (memo is None) else memo)
for (_r, mat) in self:
hid = id(mat)
if (hid in memo):
continue
memo.add(hid)
(yield mat)
|
Yield all materials present.
Parameters
----------
memo : set, optional
Set containing ids of previously visited materials. Don't
pass unless you know what you're doing. If given, will
be modified with :attr:`hydep.Material.id` of discovered
materials
Yields
------
hydep.Material
The first occurance of this material.
|
src/hydep/pin.py
|
findMaterials
|
CORE-GATECH-GROUP/hydep
| 2 |
python
|
def findMaterials(self, memo=None):
"Yield all materials present.\n\n Parameters\n ----------\n memo : set, optional\n Set containing ids of previously visited materials. Don't\n pass unless you know what you're doing. If given, will\n be modified with :attr:`hydep.Material.id` of discovered\n materials\n\n Yields\n ------\n hydep.Material\n The first occurance of this material.\n\n "
memo = (set() if (memo is None) else memo)
for (_r, mat) in self:
hid = id(mat)
if (hid in memo):
continue
memo.add(hid)
(yield mat)
|
def findMaterials(self, memo=None):
"Yield all materials present.\n\n Parameters\n ----------\n memo : set, optional\n Set containing ids of previously visited materials. Don't\n pass unless you know what you're doing. If given, will\n be modified with :attr:`hydep.Material.id` of discovered\n materials\n\n Yields\n ------\n hydep.Material\n The first occurance of this material.\n\n "
memo = (set() if (memo is None) else memo)
for (_r, mat) in self:
hid = id(mat)
if (hid in memo):
continue
memo.add(hid)
(yield mat)<|docstring|>Yield all materials present.
Parameters
----------
memo : set, optional
Set containing ids of previously visited materials. Don't
pass unless you know what you're doing. If given, will
be modified with :attr:`hydep.Material.id` of discovered
materials
Yields
------
hydep.Material
The first occurance of this material.<|endoftext|>
|
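findMaterials deduplicates by object identity: the memo set stores id(mat), so a Material reached through several regions is yielded once, while distinct but equal materials are still all reported, and passing the same memo across calls extends the deduplication over multiple containers. The generic pattern, separated from hydep (names here are illustrative):

```python
def unique_by_identity(items, memo=None):
    """Yield each distinct object once; share `memo` across calls to dedupe globally."""
    memo = set() if memo is None else memo
    for item in items:
        if id(item) in memo:
            continue
        memo.add(id(item))
        yield item

fuel = {'name': 'fuel'}
clad = {'name': 'clad'}
print(list(unique_by_identity([fuel, clad, fuel, clad, fuel])))  # each dict appears once
```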
9aa434ff219bad60c08ea7543bc64a60d31f712a59aee06c1c36105cb6d264c7
|
def countBurnableMaterials(self, _memo=None):
'Count all occurances of burnable materials\n\n Useful prior to cloning new burnable materials, so\n that volumes can be properly scaled.\n\n Parameters\n ----------\n memo : dict of str to [hydep.BurnableMaterial, int], optional\n Previously visited universes will populate this as they\n traverse the geometry. Needed for internal use, and modified\n through the traversal. Keys indicate ids of universes\n as they are discovered and will be updated.\n\n Returns\n -------\n Mapping[str, [hydep.BurnableMaterial, int]]\n Map of unique hashable IDs for unique burnable materials to\n the material and the number of instances. Should only contain\n information on this specific instance.\n '
local = {}
for (_r, mat) in self:
if (not isinstance(mat, BurnableMaterial)):
continue
hid = mat.id
repeat = local.get(hid)
if (repeat is None):
local[hid] = [mat, 1]
else:
repeat[1] += 1
return local
|
Count all occurances of burnable materials
Useful prior to cloning new burnable materials, so
that volumes can be properly scaled.
Parameters
----------
memo : dict of str to [hydep.BurnableMaterial, int], optional
Previously visited universes will populate this as they
traverse the geometry. Needed for internal use, and modified
through the traversal. Keys indicate ids of universes
as they are discovered and will be updated.
Returns
-------
Mapping[str, [hydep.BurnableMaterial, int]]
Map of unique hashable IDs for unique burnable materials to
the material and the number of instances. Should only contain
information on this specific instance.
|
src/hydep/pin.py
|
countBurnableMaterials
|
CORE-GATECH-GROUP/hydep
| 2 |
python
|
def countBurnableMaterials(self, _memo=None):
'Count all occurances of burnable materials\n\n Useful prior to cloning new burnable materials, so\n that volumes can be properly scaled.\n\n Parameters\n ----------\n memo : dict of str to [hydep.BurnableMaterial, int], optional\n Previously visited universes will populate this as they\n traverse the geometry. Needed for internal use, and modified\n through the traversal. Keys indicate ids of universes\n as they are discovered and will be updated.\n\n Returns\n -------\n Mapping[str, [hydep.BurnableMaterial, int]]\n Map of unique hashable IDs for unique burnable materials to\n the material and the number of instances. Should only contain\n information on this specific instance.\n '
local = {}
for (_r, mat) in self:
if (not isinstance(mat, BurnableMaterial)):
continue
hid = mat.id
repeat = local.get(hid)
if (repeat is None):
local[hid] = [mat, 1]
else:
repeat[1] += 1
return local
|
def countBurnableMaterials(self, _memo=None):
'Count all occurances of burnable materials\n\n Useful prior to cloning new burnable materials, so\n that volumes can be properly scaled.\n\n Parameters\n ----------\n memo : dict of str to [hydep.BurnableMaterial, int], optional\n Previously visited universes will populate this as they\n traverse the geometry. Needed for internal use, and modified\n through the traversal. Keys indicate ids of universes\n as they are discovered and will be updated.\n\n Returns\n -------\n Mapping[str, [hydep.BurnableMaterial, int]]\n Map of unique hashable IDs for unique burnable materials to\n the material and the number of instances. Should only contain\n information on this specific instance.\n '
local = {}
for (_r, mat) in self:
if (not isinstance(mat, BurnableMaterial)):
continue
hid = mat.id
repeat = local.get(hid)
if (repeat is None):
local[hid] = [mat, 1]
else:
repeat[1] += 1
return local<|docstring|>Count all occurrences of burnable materials
Useful prior to cloning new burnable materials, so
that volumes can be properly scaled.
Parameters
----------
memo : dict of str to [hydep.BurnableMaterial, int], optional
Previously visited universes will populate this as they
traverse the geometry. Needed for internal use, and modified
through the traversal. Keys indicate ids of universes
as they are discovered and will be updated.
Returns
-------
Mapping[str, [hydep.BurnableMaterial, int]]
Map of unique hashable IDs for unique burnable materials to
the material and the number of instances. Should only contain
information on this specific instance.<|endoftext|>
|
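A minimal standalone sketch of the id -> [object, count] bookkeeping used by countBurnableMaterials; the Material class here is an illustrative stand-in, not hydep's BurnableMaterial:

class Material:
    _counter = 0
    def __init__(self, name):
        self.name = name
        Material._counter += 1
        self.id = Material._counter  # stand-in for hydep's unique material id

def count_occurrences(materials):
    # Map each material id to [material, number of appearances], as above.
    local = {}
    for mat in materials:
        entry = local.get(mat.id)
        if entry is None:
            local[mat.id] = [mat, 1]
        else:
            entry[1] += 1
    return local

fuel, clad = Material("fuel"), Material("clad")
counts = count_occurrences([fuel, clad, fuel, fuel])
print({m.name: n for m, n in counts.values()})  # {'fuel': 3, 'clad': 1}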
fe6a79d5cbb2a101b268d195a7eea9c8afcc9a228c6acf1a702fd66cda99b749
|
def differentiateBurnableMaterials(self, memo=None):
'Create new burnable materials and potentially mimic this pin\n\n This routine is important to create unique burnable materials\n that can be depleted using spatially correct fluxes and\n reaction rates.\n\n This method digs through contained materials and creates unique\n :class:`hydep.BurnedMaterial` objects.This material itself may\n be cloned if the following conditions are met:\n\n 1. At least one contained material was cloned\n 2. This object has been encountered before\n\n If at least one contained universe was cloned but this\n is the first time encountering this universe, the\n modifications will be made in-place, and the original\n returned.\n\n Parameters\n ----------\n memo : set of str, optional\n Set containing unique ids of previously found universes.\n Needed for internal use and not necessary for most end\n users.\n\n Returns\n -------\n Pin\n Either the originating universe or a near clone, but with\n one or more underlying materials changed.\n\n '
memo = (set() if (memo is None) else memo)
updates = {}
for (index, (_r, mat)) in enumerate(self):
if (not isinstance(mat, BurnableMaterial)):
continue
if (id(mat) in memo):
mat = updates[index] = mat.copy()
demangled = mat.name.split('_copy')[0]
mat.name = f'{demangled}_copy{mat.id}'
memo.add(id(mat))
if (not updates):
memo.add(id(self))
return self
outer = updates.pop(len(self.materials), self.outer)
materials = [updates.get(ix, mat) for (ix, mat) in enumerate(self.materials)]
if (id(self) not in memo):
memo.add(id(self))
self.outer = outer
self.materials = materials
return self
new = self.__class__(self.radii, materials, outer)
if (self.name is not None):
demangled = self.name.split('_copy')[0]
new.name = f'{demangled}_copy{mat.id}'
return new
|
Create new burnable materials and potentially mimic this pin
This routine is important to create unique burnable materials
that can be depleted using spatially correct fluxes and
reaction rates.
This method digs through contained materials and creates unique
:class:`hydep.BurnedMaterial` objects. This material itself may
be cloned if the following conditions are met:
1. At least one contained material was cloned
2. This object has been encountered before
If at least one contained universe was cloned but this
is the first time encountering this universe, the
modifications will be made in-place, and the original
returned.
Parameters
----------
memo : set of str, optional
Set containing unique ids of previously found universes.
Needed for internal use and not necessary for most end
users.
Returns
-------
Pin
Either the originating universe or a near clone, but with
one or more underlying materials changed.
|
src/hydep/pin.py
|
differentiateBurnableMaterials
|
CORE-GATECH-GROUP/hydep
| 2 |
python
|
def differentiateBurnableMaterials(self, memo=None):
'Create new burnable materials and potentially mimic this pin\n\n This routine is important to create unique burnable materials\n that can be depleted using spatially correct fluxes and\n reaction rates.\n\n This method digs through contained materials and creates unique\n :class:`hydep.BurnedMaterial` objects.This material itself may\n be cloned if the following conditions are met:\n\n 1. At least one contained material was cloned\n 2. This object has been encountered before\n\n If at least one contained universe was cloned but this\n is the first time encountering this universe, the\n modifications will be made in-place, and the original\n returned.\n\n Parameters\n ----------\n memo : set of str, optional\n Set containing unique ids of previously found universes.\n Needed for internal use and not necessary for most end\n users.\n\n Returns\n -------\n Pin\n Either the originating universe or a near clone, but with\n one or more underlying materials changed.\n\n '
memo = (set() if (memo is None) else memo)
updates = {}
for (index, (_r, mat)) in enumerate(self):
if (not isinstance(mat, BurnableMaterial)):
continue
if (id(mat) in memo):
mat = updates[index] = mat.copy()
demangled = mat.name.split('_copy')[0]
mat.name = f'{demangled}_copy{mat.id}'
memo.add(id(mat))
if (not updates):
memo.add(id(self))
return self
outer = updates.pop(len(self.materials), self.outer)
materials = [updates.get(ix, mat) for (ix, mat) in enumerate(self.materials)]
if (id(self) not in memo):
memo.add(id(self))
self.outer = outer
self.materials = materials
return self
new = self.__class__(self.radii, materials, outer)
if (self.name is not None):
demangled = self.name.split('_copy')[0]
new.name = f'{demangled}_copy{mat.id}'
return new
|
def differentiateBurnableMaterials(self, memo=None):
'Create new burnable materials and potentially mimic this pin\n\n This routine is important to create unique burnable materials\n that can be depleted using spatially correct fluxes and\n reaction rates.\n\n This method digs through contained materials and creates unique\n :class:`hydep.BurnedMaterial` objects.This material itself may\n be cloned if the following conditions are met:\n\n 1. At least one contained material was cloned\n 2. This object has been encountered before\n\n If at least one contained universe was cloned but this\n is the first time encountering this universe, the\n modifications will be made in-place, and the original\n returned.\n\n Parameters\n ----------\n memo : set of str, optional\n Set containing unique ids of previously found universes.\n Needed for internal use and not necessary for most end\n users.\n\n Returns\n -------\n Pin\n Either the originating universe or a near clone, but with\n one or more underlying materials changed.\n\n '
memo = (set() if (memo is None) else memo)
updates = {}
for (index, (_r, mat)) in enumerate(self):
if (not isinstance(mat, BurnableMaterial)):
continue
if (id(mat) in memo):
mat = updates[index] = mat.copy()
demangled = mat.name.split('_copy')[0]
mat.name = f'{demangled}_copy{mat.id}'
memo.add(id(mat))
if (not updates):
memo.add(id(self))
return self
outer = updates.pop(len(self.materials), self.outer)
materials = [updates.get(ix, mat) for (ix, mat) in enumerate(self.materials)]
if (id(self) not in memo):
memo.add(id(self))
self.outer = outer
self.materials = materials
return self
new = self.__class__(self.radii, materials, outer)
if (self.name is not None):
demangled = self.name.split('_copy')[0]
new.name = f'{demangled}_copy{mat.id}'
return new<|docstring|>Create new burnable materials and potentially mimic this pin
This routine is important to create unique burnable materials
that can be depleted using spatially correct fluxes and
reaction rates.
This method digs through contained materials and creates unique
:class:`hydep.BurnedMaterial` objects. This material itself may
be cloned if the following conditions are met:
1. At least one contained material was cloned
2. This object has been encountered before
If at least one contained universe was cloned but this
is the first time encountering this universe, the
modifications will be made in-place, and the original
returned.
Parameters
----------
memo : set of str, optional
Set containing unique ids of previously found universes.
Needed for internal use and not necessary for most end
users.
Returns
-------
Pin
Either the originating universe or a near clone, but with
one or more underlying materials changed.<|endoftext|>
|
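A rough sketch of the copy-on-revisit renaming convention ("_copy" suffix plus the clone's id) that differentiateBurnableMaterials applies; Mat and its copy() are hypothetical stand-ins for hydep materials:

import copy
import itertools

_ids = itertools.count(1)

class Mat:
    def __init__(self, name):
        self.name = name
        self.id = next(_ids)
    def copy(self):
        clone = copy.copy(self)
        clone.id = next(_ids)
        return clone

def differentiate(mats, memo=None):
    # First encounter keeps the original; repeats are cloned and renamed by
    # stripping any earlier "_copy" suffix and appending the clone's id.
    memo = set() if memo is None else memo
    out = []
    for mat in mats:
        if id(mat) in memo:
            mat = mat.copy()
            base = mat.name.split("_copy")[0]
            mat.name = f"{base}_copy{mat.id}"
        memo.add(id(mat))
        out.append(mat)
    return out

fuel = Mat("fuel")
print([m.name for m in differentiate([fuel, fuel, fuel])])  # ['fuel', 'fuel_copy2', 'fuel_copy3']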
d24bac0ce020ecbf38ee8bafda82714e13a461561c794409fb6fe0a1bf877a47
|
def radialComponentSelection(mesh, center, radius=1.0):
'\n\tBuild component selection from a point and radial distance.\n\t@param mesh: Geometry to build component selection from.\n\t@type mesh: str\n\t@param center: Radial center to build selection from.\n\t@type center: str or list\n\t@param radius: Radial distance to build selection from.\n\t@type radius: float\n\t'
if (not mc.objExists(mesh)):
raise Exception((('Mesh object "' + mesh) + '" does not exist!!'))
pt = glTools.utils.base.getMPoint(center)
ptList = glTools.utils.base.getMPointArray(mesh)
sel = []
for i in range(ptList.length()):
dist = (pt - ptList[i]).length()
if (dist <= radius):
sel.append((((mesh + '.vtx[') + str(i)) + ']'))
return sel
|
Build component selection from a point and radial distance.
@param mesh: Geometry to build component selection from.
@type mesh: str
@param center: Radial center to build selection from.
@type center: str or list
@param radius: Radial distance to build selection from.
@type radius: float
|
tools/volumeSelection.py
|
radialComponentSelection
|
obrakeo/glTools
| 165 |
python
|
def radialComponentSelection(mesh, center, radius=1.0):
'\n\tBuild component selection from a point and radial distance.\n\t@param mesh: Geometry to build component selection from.\n\t@type mesh: str\n\t@param center: Radial center to build selection from.\n\t@type center: str or list\n\t@param radius: Radial distance to build selection from.\n\t@type radius: float\n\t'
if (not mc.objExists(mesh)):
raise Exception((('Mesh object "' + mesh) + '" does not exist!!'))
pt = glTools.utils.base.getMPoint(center)
ptList = glTools.utils.base.getMPointArray(mesh)
sel = []
for i in range(ptList.length()):
dist = (pt - ptList[i]).length()
if (dist <= radius):
sel.append((((mesh + '.vtx[') + str(i)) + ']'))
return sel
|
def radialComponentSelection(mesh, center, radius=1.0):
'\n\tBuild component selection from a point and radial distance.\n\t@param mesh: Geometry to build component selection from.\n\t@type mesh: str\n\t@param center: Radial center to build selection from.\n\t@type center: str or list\n\t@param radius: Radial distance to build selection from.\n\t@type radius: float\n\t'
if (not mc.objExists(mesh)):
raise Exception((('Mesh object "' + mesh) + '" does not exist!!'))
pt = glTools.utils.base.getMPoint(center)
ptList = glTools.utils.base.getMPointArray(mesh)
sel = []
for i in range(ptList.length()):
dist = (pt - ptList[i]).length()
if (dist <= radius):
sel.append((((mesh + '.vtx[') + str(i)) + ']'))
return sel<|docstring|>Build component selection from a point and radial distance.
@param mesh: Geometry to build component selection from.
@type mesh: str
@param center: Radial center to build selection from.
@type center: str or list
@param radius: Radial distance to build selection from.
@type radius: float<|endoftext|>
|
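Setting the Maya API calls aside, the selection above reduces to a Euclidean distance test per vertex. A dependency-free sketch of that core, assuming plain (x, y, z) tuples rather than Maya point arrays:

import math

def points_within_radius(points, center, radius=1.0):
    # Indices of points whose Euclidean distance to center is <= radius,
    # mirroring the vertex loop in radialComponentSelection.
    selected = []
    for i, point in enumerate(points):
        if math.dist(point, center) <= radius:
            selected.append(i)
    return selected

pts = [(0.0, 0.0, 0.0), (0.5, 0.0, 0.0), (2.0, 0.0, 0.0)]
print(points_within_radius(pts, center=(0.0, 0.0, 0.0), radius=1.0))  # [0, 1]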
928d3d3023f2c81631f9550bd2c2c63821bb5f9d7c47bc753ad58b8fd90bd95b
|
def volumeComponentSelection(mesh, volume):
'\n\tBuild component selection from volume.\n\t@param mesh: Geometry to build component selection from.\n\t@type mesh: str\n\t@param volume: Volume shape to build component selection from.\n\t@type volume: str\n\t'
if (not mc.objExists(mesh)):
raise Exception((('Mesh object "' + mesh) + '" does not exist!!'))
if (not mc.objExists(volume)):
raise Exception((('Volume object "' + volume) + '" does not exist!!'))
volumeShape = volume
if (mc.objectType(volumeShape) == 'transform'):
volumeShape = mc.listRelatives(volume, s=True, ni=True)
if (not volumeShape):
raise Exception((('Volume object "' + mesh) + '" does not exist!!'))
else:
volumeShape = volumeShape[0]
volumeType = mc.objectType(volumeShape)
nurbsToPolyConvert = None
if (volumeType == 'nurbsSurface'):
nurbsToPolyConvert = mc.nurbsToPoly(volumeShape, ch=0, f=1, pt=1, ft=0.01, mel=0.001, d=0.1)
nurbsToPolyShape = mc.listRelatives(nurbsToPolyConvert, s=True, ni=True)
volumeShape = nurbsToPolyShape[0]
volumeFn = glTools.utils.mesh.getMeshFn(volume)
volumeBBox = glTools.utils.base.getMBoundingBox(volume)
pntList = glTools.utils.base.getMPointArray(mesh)
sel = []
point = OpenMaya.MPoint()
normal = OpenMaya.MVector()
for i in range(pntList.length()):
if (not volumeBBox.contains(pntList[i])):
continue
volumeFn.getClosestPointAndNormal(pntList[i], point, normal)
dotVal = (normal * (point - pntList[i]).normal())
if (dotVal > 0.0):
sel.append((((mesh + '.vtx[') + str(i)) + ']'))
if nurbsToPolyConvert:
mc.delete(nurbsToPolyConvert)
return sel
|
Build component selection from volume.
@param mesh: Geometry to build component selection from.
@type mesh: str
@param volume: Volume shape to build component selection from.
@type volume: str
|
tools/volumeSelection.py
|
volumeComponentSelection
|
obrakeo/glTools
| 165 |
python
|
def volumeComponentSelection(mesh, volume):
'\n\tBuild component selection from volume.\n\t@param mesh: Geometry to build component selection from.\n\t@type mesh: str\n\t@param volume: Volume shape to build component selection from.\n\t@type volume: str\n\t'
if (not mc.objExists(mesh)):
raise Exception((('Mesh object "' + mesh) + '" does not exist!!'))
if (not mc.objExists(volume)):
raise Exception((('Volume object "' + volume) + '" does not exist!!'))
volumeShape = volume
if (mc.objectType(volumeShape) == 'transform'):
volumeShape = mc.listRelatives(volume, s=True, ni=True)
if (not volumeShape):
raise Exception((('Volume object "' + mesh) + '" does not exist!!'))
else:
volumeShape = volumeShape[0]
volumeType = mc.objectType(volumeShape)
nurbsToPolyConvert = None
if (volumeType == 'nurbsSurface'):
nurbsToPolyConvert = mc.nurbsToPoly(volumeShape, ch=0, f=1, pt=1, ft=0.01, mel=0.001, d=0.1)
nurbsToPolyShape = mc.listRelatives(nurbsToPolyConvert, s=True, ni=True)
volumeShape = nurbsToPolyShape[0]
volumeFn = glTools.utils.mesh.getMeshFn(volume)
volumeBBox = glTools.utils.base.getMBoundingBox(volume)
pntList = glTools.utils.base.getMPointArray(mesh)
sel = []
point = OpenMaya.MPoint()
normal = OpenMaya.MVector()
for i in range(pntList.length()):
if (not volumeBBox.contains(pntList[i])):
continue
volumeFn.getClosestPointAndNormal(pntList[i], point, normal)
dotVal = (normal * (point - pntList[i]).normal())
if (dotVal > 0.0):
sel.append((((mesh + '.vtx[') + str(i)) + ']'))
if nurbsToPolyConvert:
mc.delete(nurbsToPolyConvert)
return sel
|
def volumeComponentSelection(mesh, volume):
'\n\tBuild component selection from volume.\n\t@param mesh: Geometry to build component selection from.\n\t@type mesh: str\n\t@param volume: Volume shape to build component selection from.\n\t@type volume: str\n\t'
if (not mc.objExists(mesh)):
raise Exception((('Mesh object "' + mesh) + '" does not exist!!'))
if (not mc.objExists(volume)):
raise Exception((('Volume object "' + volume) + '" does not exist!!'))
volumeShape = volume
if (mc.objectType(volumeShape) == 'transform'):
volumeShape = mc.listRelatives(volume, s=True, ni=True)
if (not volumeShape):
raise Exception((('Volume object "' + mesh) + '" does not exist!!'))
else:
volumeShape = volumeShape[0]
volumeType = mc.objectType(volumeShape)
nurbsToPolyConvert = None
if (volumeType == 'nurbsSurface'):
nurbsToPolyConvert = mc.nurbsToPoly(volumeShape, ch=0, f=1, pt=1, ft=0.01, mel=0.001, d=0.1)
nurbsToPolyShape = mc.listRelatives(nurbsToPolyConvert, s=True, ni=True)
volumeShape = nurbsToPolyShape[0]
volumeFn = glTools.utils.mesh.getMeshFn(volume)
volumeBBox = glTools.utils.base.getMBoundingBox(volume)
pntList = glTools.utils.base.getMPointArray(mesh)
sel = []
point = OpenMaya.MPoint()
normal = OpenMaya.MVector()
for i in range(pntList.length()):
if (not volumeBBox.contains(pntList[i])):
continue
volumeFn.getClosestPointAndNormal(pntList[i], point, normal)
dotVal = (normal * (point - pntList[i]).normal())
if (dotVal > 0.0):
sel.append((((mesh + '.vtx[') + str(i)) + ']'))
if nurbsToPolyConvert:
mc.delete(nurbsToPolyConvert)
return sel<|docstring|>Build component selection from volume.
@param mesh: Geometry to build component selection from.
@type mesh: str
@param volume: Volume shape to build component selection from.
@type volume: str<|endoftext|>
|
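The inside/outside test above keeps a vertex when the outward normal at the closest surface point faces away from it (positive dot product). A self-contained sketch using a sphere, where the closest point and normal are analytic; this is an illustration of the test, not the Maya-based implementation:

import numpy as np

def inside_sphere_by_normal(points, center, radius):
    # A point is kept when dot(normal, closest_point - point) > 0,
    # i.e. the outward surface normal points away from it.
    center = np.asarray(center, dtype=float)
    selected = []
    for i, p in enumerate(np.asarray(points, dtype=float)):
        direction = p - center
        norm = np.linalg.norm(direction)
        normal = direction / norm if norm > 0 else np.array([0.0, 0.0, 1.0])
        closest = center + radius * normal      # closest point on the sphere surface
        if np.dot(normal, closest - p) > 0.0:   # positive -> point lies inside
            selected.append(i)
    return selected

pts = [(0.2, 0.0, 0.0), (3.0, 0.0, 0.0)]
print(inside_sphere_by_normal(pts, center=(0, 0, 0), radius=1.0))  # [0]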
d1ee2d3873c182d17455852ad710510cd370e65facc3948d86dde04c005f43df
|
def rand_par(par, cvar):
'This function adds gaussian noise to parameters (means) stored in a dictionary.\n Input\n par: dictionary of ODE parameters which constitute the means\n cvar: coeficient of variation of the distributon that each parameter will be sampled from (1 = 100% of the not noisy value).\n return\n dictionary with parameters sampled from gaussian around parameter means (inputs) or zero, if sampled value is negative\n '
temp = par.copy()
for key in temp.keys():
temp[key] = (par[key] * (1 + (cvar * randn())))
if (temp[key] < 0):
temp[key] = 0
return temp
|
This function adds Gaussian noise to parameters (means) stored in a dictionary.
Input
par: dictionary of ODE parameters which constitute the means
cvar: coefficient of variation of the distribution that each parameter will be sampled from (1 = 100% of the noise-free value).
return
dictionary with parameters sampled from a Gaussian around the parameter means (inputs), or zero if the sampled value is negative
|
sparseodes/rec_to_ode.py
|
rand_par
|
maimanuel/sparseodes
| 0 |
python
|
def rand_par(par, cvar):
'This function adds gaussian noise to parameters (means) stored in a dictionary.\n Input\n par: dictionary of ODE parameters which constitute the means\n cvar: coeficient of variation of the distributon that each parameter will be sampled from (1 = 100% of the not noisy value).\n return\n dictionary with parameters sampled from gaussian around parameter means (inputs) or zero, if sampled value is negative\n '
temp = par.copy()
for key in temp.keys():
temp[key] = (par[key] * (1 + (cvar * randn())))
if (temp[key] < 0):
temp[key] = 0
return temp
|
def rand_par(par, cvar):
'This function adds gaussian noise to parameters (means) stored in a dictionary.\n Input\n par: dictionary of ODE parameters which constitute the means\n cvar: coeficient of variation of the distributon that each parameter will be sampled from (1 = 100% of the not noisy value).\n return\n dictionary with parameters sampled from gaussian around parameter means (inputs) or zero, if sampled value is negative\n '
temp = par.copy()
for key in temp.keys():
temp[key] = (par[key] * (1 + (cvar * randn())))
if (temp[key] < 0):
temp[key] = 0
return temp<|docstring|>This function adds Gaussian noise to parameters (means) stored in a dictionary.
Input
par: dictionary of ODE parameters which constitute the means
cvar: coefficient of variation of the distribution that each parameter will be sampled from (1 = 100% of the noise-free value).
return
dictionary with parameters sampled from a Gaussian around the parameter means (inputs), or zero if the sampled value is negative<|endoftext|>
|
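A short usage sketch of the multiplicative-noise pattern, assuming numpy's randn is what the module imports; the parameter names below are made up for illustration:

import numpy as np
from numpy.random import randn  # assumption: the module imports randn from numpy.random

def rand_par(par, cvar):
    # Same multiplicative Gaussian jitter, clipped at zero, as the function above.
    temp = par.copy()
    for key in temp:
        temp[key] = par[key] * (1 + cvar * randn())
        if temp[key] < 0:
            temp[key] = 0
    return temp

np.random.seed(0)
means = {"alpha": 1.0, "beta": 0.05, "gamma": 0.4}
print(rand_par(means, cvar=0.1))  # each mean jittered by roughly 10%, never negative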
520dbdbc9c087a29b5cc06d3b0d25aa5b94fd42bdf16330ca28f66caac3204dd
|
def traj_solve(N, dt, model_der, mod_par, cvar):
'Solve N trajectories with time delta dt for model given in model_der with parameters mod_par\n and coefficient of variation cvar'
t0 = 0
tend = 100
Nt = round(((tend - t0) / float(dt)))
time = np.linspace(t0, tend, Nt)
traj = np.full((N, len(time), 2), (- 3.0))
for i in range(N):
rlvpar = rand_par(mod_par, cvar)
yinit = (rand(2) * np.array([3, 0]))
traj[(i, :, :)] = odeint(model_der, yinit, time, args=(rlvpar,))
return (traj, time)
|
Solve N trajectories with time delta dt for model given in model_der with parameters mod_par
and coefficient of variation cvar
|
sparseodes/rec_to_ode.py
|
traj_solve
|
maimanuel/sparseodes
| 0 |
python
|
def traj_solve(N, dt, model_der, mod_par, cvar):
'Solve N trajectories with time delta dt for model given in model_der with parameters mod_par\n and coefficient of variation cvar'
t0 = 0
tend = 100
Nt = round(((tend - t0) / float(dt)))
time = np.linspace(t0, tend, Nt)
traj = np.full((N, len(time), 2), (- 3.0))
for i in range(N):
rlvpar = rand_par(mod_par, cvar)
yinit = (rand(2) * np.array([3, 0]))
traj[(i, :, :)] = odeint(model_der, yinit, time, args=(rlvpar,))
return (traj, time)
|
def traj_solve(N, dt, model_der, mod_par, cvar):
'Solve N trajectories with time delta dt for model given in model_der with parameters mod_par\n and coefficient of variation cvar'
t0 = 0
tend = 100
Nt = round(((tend - t0) / float(dt)))
time = np.linspace(t0, tend, Nt)
traj = np.full((N, len(time), 2), (- 3.0))
for i in range(N):
rlvpar = rand_par(mod_par, cvar)
yinit = (rand(2) * np.array([3, 0]))
traj[(i, :, :)] = odeint(model_der, yinit, time, args=(rlvpar,))
return (traj, time)<|docstring|>Solve N trajectories with time delta dt for model given in model_der with parameters mod_par
and coefficient of variation cvar<|endoftext|>
|
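A usage sketch of the trajectory loop, with a hypothetical Lotka-Volterra model_der standing in for the module's own model and scipy's odeint as in the original:

import numpy as np
from scipy.integrate import odeint

def lotka_volterra(y, t, par):
    # Hypothetical two-species model standing in for model_der.
    prey, pred = y
    return [par["a"] * prey - par["b"] * prey * pred,
            par["d"] * prey * pred - par["c"] * pred]

par = {"a": 1.0, "b": 0.4, "c": 1.0, "d": 0.1}
dt, t0, tend = 0.1, 0, 100
time = np.linspace(t0, tend, round((tend - t0) / dt))
traj = np.full((5, len(time), 2), -3.0)           # same preallocation pattern as above
for i in range(5):
    yinit = np.random.rand(2) * np.array([3, 1])  # random initial populations
    traj[i, :, :] = odeint(lotka_volterra, yinit, time, args=(par,))
print(traj.shape)  # (5, 1000, 2)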
04900a364064c6a08e072a6d69aba16c35cb7e84ab3aed0525a63038c0a973a2
|
def _init_gui(self):
'Initialize GUI.'
self.setWindowTitle('Region Growing')
xyz = self._model.get_cross_pos()
self.source_combo = QComboBox()
pointx_label = QLabel('Seed point x')
self.pointx_edit = QLineEdit()
self.pointx_edit.setText(str(xyz[0]))
pointy_label = QLabel('Seed point y')
self.pointy_edit = QLineEdit()
self.pointy_edit.setText(str(xyz[1]))
pointz_label = QLabel('Seed point z')
self.pointz_edit = QLineEdit()
self.pointz_edit.setText(str(xyz[2]))
number_label = QLabel('Number of voxels')
self.number_edit = QLineEdit()
self.number_edit.setText('100')
vol_list = self._model.getItemList()
self.source_combo.addItems(vol_list)
row = self._model.currentIndex().row()
self.source_combo.setCurrentIndex(row)
out_label = QLabel('Output volume name')
self.out_edit = QLineEdit()
grid_layout = QGridLayout()
grid_layout.addWidget(pointx_label, 0, 0)
grid_layout.addWidget(self.pointx_edit, 0, 1)
grid_layout.addWidget(pointy_label, 1, 0)
grid_layout.addWidget(self.pointy_edit, 1, 1)
grid_layout.addWidget(pointz_label, 2, 0)
grid_layout.addWidget(self.pointz_edit, 2, 1)
grid_layout.addWidget(number_label, 3, 0)
grid_layout.addWidget(self.number_edit, 3, 1)
grid_layout.addWidget(out_label, 4, 0)
grid_layout.addWidget(self.out_edit, 4, 1)
self.run_button = QPushButton('Run')
self.cancel_button = QPushButton('Cancel')
hbox_layout = QHBoxLayout()
hbox_layout.addWidget(self.run_button)
hbox_layout.addWidget(self.cancel_button)
vbox_layout = QVBoxLayout()
vbox_layout.addLayout(grid_layout)
vbox_layout.addLayout(hbox_layout)
self.setLayout(vbox_layout)
self._create_output()
|
Initialize GUI.
|
froi/widgets/growdialog.py
|
_init_gui
|
sunshineDrizzle/FreeROI
| 13 |
python
|
def _init_gui(self):
self.setWindowTitle('Region Growing')
xyz = self._model.get_cross_pos()
self.source_combo = QComboBox()
pointx_label = QLabel('Seed point x')
self.pointx_edit = QLineEdit()
self.pointx_edit.setText(str(xyz[0]))
pointy_label = QLabel('Seed point y')
self.pointy_edit = QLineEdit()
self.pointy_edit.setText(str(xyz[1]))
pointz_label = QLabel('Seed point z')
self.pointz_edit = QLineEdit()
self.pointz_edit.setText(str(xyz[2]))
number_label = QLabel('Number of voxels')
self.number_edit = QLineEdit()
self.number_edit.setText('100')
vol_list = self._model.getItemList()
self.source_combo.addItems(vol_list)
row = self._model.currentIndex().row()
self.source_combo.setCurrentIndex(row)
out_label = QLabel('Output volume name')
self.out_edit = QLineEdit()
grid_layout = QGridLayout()
grid_layout.addWidget(pointx_label, 0, 0)
grid_layout.addWidget(self.pointx_edit, 0, 1)
grid_layout.addWidget(pointy_label, 1, 0)
grid_layout.addWidget(self.pointy_edit, 1, 1)
grid_layout.addWidget(pointz_label, 2, 0)
grid_layout.addWidget(self.pointz_edit, 2, 1)
grid_layout.addWidget(number_label, 3, 0)
grid_layout.addWidget(self.number_edit, 3, 1)
grid_layout.addWidget(out_label, 4, 0)
grid_layout.addWidget(self.out_edit, 4, 1)
self.run_button = QPushButton('Run')
self.cancel_button = QPushButton('Cancel')
hbox_layout = QHBoxLayout()
hbox_layout.addWidget(self.run_button)
hbox_layout.addWidget(self.cancel_button)
vbox_layout = QVBoxLayout()
vbox_layout.addLayout(grid_layout)
vbox_layout.addLayout(hbox_layout)
self.setLayout(vbox_layout)
self._create_output()
|
def _init_gui(self):
self.setWindowTitle('Region Growing')
xyz = self._model.get_cross_pos()
self.source_combo = QComboBox()
pointx_label = QLabel('Seed point x')
self.pointx_edit = QLineEdit()
self.pointx_edit.setText(str(xyz[0]))
pointy_label = QLabel('Seed point y')
self.pointy_edit = QLineEdit()
self.pointy_edit.setText(str(xyz[1]))
pointz_label = QLabel('Seed point z')
self.pointz_edit = QLineEdit()
self.pointz_edit.setText(str(xyz[2]))
number_label = QLabel('Number of voxels')
self.number_edit = QLineEdit()
self.number_edit.setText('100')
vol_list = self._model.getItemList()
self.source_combo.addItems(vol_list)
row = self._model.currentIndex().row()
self.source_combo.setCurrentIndex(row)
out_label = QLabel('Output volume name')
self.out_edit = QLineEdit()
grid_layout = QGridLayout()
grid_layout.addWidget(pointx_label, 0, 0)
grid_layout.addWidget(self.pointx_edit, 0, 1)
grid_layout.addWidget(pointy_label, 1, 0)
grid_layout.addWidget(self.pointy_edit, 1, 1)
grid_layout.addWidget(pointz_label, 2, 0)
grid_layout.addWidget(self.pointz_edit, 2, 1)
grid_layout.addWidget(number_label, 3, 0)
grid_layout.addWidget(self.number_edit, 3, 1)
grid_layout.addWidget(out_label, 4, 0)
grid_layout.addWidget(self.out_edit, 4, 1)
self.run_button = QPushButton('Run')
self.cancel_button = QPushButton('Cancel')
hbox_layout = QHBoxLayout()
hbox_layout.addWidget(self.run_button)
hbox_layout.addWidget(self.cancel_button)
vbox_layout = QVBoxLayout()
vbox_layout.addLayout(grid_layout)
vbox_layout.addLayout(hbox_layout)
self.setLayout(vbox_layout)
self._create_output()<|docstring|>Initialize GUI.<|endoftext|>
|
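A stripped-down sketch of the same label/line-edit grid pattern, written against PyQt5 for illustration (the original appears to use a different Qt binding and a model object that is not shown here):

import sys
from PyQt5.QtWidgets import (QApplication, QDialog, QGridLayout, QHBoxLayout,
                             QLabel, QLineEdit, QPushButton, QVBoxLayout)

class SeedDialog(QDialog):
    def __init__(self):
        super().__init__()
        self.setWindowTitle("Region Growing")
        grid = QGridLayout()
        self.edits = {}
        # One label/line-edit pair per row, as in _init_gui.
        for row, (label, default) in enumerate(
                [("Seed point x", "0"), ("Seed point y", "0"),
                 ("Seed point z", "0"), ("Number of voxels", "100")]):
            grid.addWidget(QLabel(label), row, 0)
            edit = QLineEdit()
            edit.setText(default)
            grid.addWidget(edit, row, 1)
            self.edits[label] = edit
        buttons = QHBoxLayout()
        buttons.addWidget(QPushButton("Run"))
        buttons.addWidget(QPushButton("Cancel"))
        layout = QVBoxLayout()
        layout.addLayout(grid)
        layout.addLayout(buttons)
        self.setLayout(layout)

if __name__ == "__main__":
    app = QApplication(sys.argv)
    dialog = SeedDialog()
    dialog.show()
    sys.exit(app.exec_())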
0a51f2559b2d47d999523f1b03c6c73acb1a4800be3ab47bdf106f6f98b2386e
|
def _show_result(self, rg_result):
"\n Add RG's result as tree items\n "
data = np.zeros(self.vol_shape, np.uint8)
label = 1
for r in rg_result:
labeled_vertices = r.get_vertices()
if np.any([data[_] for _ in labeled_vertices]):
QMessageBox.warning(self, 'Warning', "Group{0}'s result has overlap with other groups".format((label - 1)), QMessageBox.Yes)
for v in labeled_vertices:
data[v] = label
label += 1
name = ('rg_' + self.model.data(self.rg_qmodel_idx, Qt.DisplayRole))
if self.model.data(self.rg_qmodel_idx, (Qt.UserRole + 8)):
map_idx = self.model.data(self.rg_qmodel_idx, (Qt.UserRole + 9))
name += ('_' + str(map_idx))
header = copy.deepcopy(self.model._data[0].get_header())
header.set_data_shape(self.vol_shape)
self.model.addItem(data, colormap='blue', name=name, header=header)
|
Add RG's result as tree items
|
froi/widgets/growdialog.py
|
_show_result
|
sunshineDrizzle/FreeROI
| 13 |
python
|
def _show_result(self, rg_result):
"\n \n "
data = np.zeros(self.vol_shape, np.uint8)
label = 1
for r in rg_result:
labeled_vertices = r.get_vertices()
if np.any([data[_] for _ in labeled_vertices]):
QMessageBox.warning(self, 'Warning', "Group{0}'s result has overlap with other groups".format((label - 1)), QMessageBox.Yes)
for v in labeled_vertices:
data[v] = label
label += 1
name = ('rg_' + self.model.data(self.rg_qmodel_idx, Qt.DisplayRole))
if self.model.data(self.rg_qmodel_idx, (Qt.UserRole + 8)):
map_idx = self.model.data(self.rg_qmodel_idx, (Qt.UserRole + 9))
name += ('_' + str(map_idx))
header = copy.deepcopy(self.model._data[0].get_header())
header.set_data_shape(self.vol_shape)
self.model.addItem(data, colormap='blue', name=name, header=header)
|
def _show_result(self, rg_result):
"\n \n "
data = np.zeros(self.vol_shape, np.uint8)
label = 1
for r in rg_result:
labeled_vertices = r.get_vertices()
if np.any([data[_] for _ in labeled_vertices]):
QMessageBox.warning(self, 'Warning', "Group{0}'s result has overlap with other groups".format((label - 1)), QMessageBox.Yes)
for v in labeled_vertices:
data[v] = label
label += 1
name = ('rg_' + self.model.data(self.rg_qmodel_idx, Qt.DisplayRole))
if self.model.data(self.rg_qmodel_idx, (Qt.UserRole + 8)):
map_idx = self.model.data(self.rg_qmodel_idx, (Qt.UserRole + 9))
name += ('_' + str(map_idx))
header = copy.deepcopy(self.model._data[0].get_header())
header.set_data_shape(self.vol_shape)
self.model.addItem(data, colormap='blue', name=name, header=header)<|docstring|>Add RG's result as tree items<|endoftext|>
|
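A standalone numpy sketch of the labelling and overlap check performed above, with the Qt warning replaced by a print; the shape and groups are made-up examples:

import numpy as np

def label_groups(shape, groups):
    # Write label k+1 into every voxel of group k, warning on overlap,
    # mirroring the loop in _show_result (minus the Qt message box).
    data = np.zeros(shape, np.uint8)
    for label, vertices in enumerate(groups, start=1):
        if any(data[v] for v in vertices):
            print(f"Group {label - 1}'s result has overlap with other groups")
        for v in vertices:
            data[v] = label
    return data

groups = [[(0, 0, 0), (0, 0, 1)], [(0, 0, 1), (1, 1, 1)]]  # second group overlaps
labeled = label_groups((2, 2, 2), groups)
print(labeled[0, 0, 1], labeled[1, 1, 1])  # 2 2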
01138d27161a11898b1e4080c663e88175caeb26f1dc126a566917e6b149a479
|
def __init__(self, input_dim: int, output_dim: Optional[int]=None, dropout: float=0.2, activation: Hint[nn.Module]=nn.ReLU, composition: Hint[Composition]=None, qualifier_aggregation: Hint[QualifierAggregation]=None, qualifier_aggregation_kwargs: Optional[Mapping[(str, Any)]]=None, qualifier_composition: Hint[Composition]=None, use_bias: bool=True, message_weighting: Hint[MessageWeighting]=None, message_weighting_kwargs: Optional[Mapping[(str, Any)]]=None, edge_dropout: float=0.0):
'\n Initialize the layer.\n\n :param input_dim:\n The input dimension (entity and relation representations).\n :param output_dim:\n The output dimension. Defaults to the input dimension.\n :param dropout:\n The dropout to apply to the updated entity representations from forward / backward edges (but not for\n self-loops).\n :param activation:\n The activation function to use.\n :param composition:\n The composition function to use for merging entity and relation representations to messages.\n :param qualifier_aggregation:\n The aggregation method to use for aggregation of multiple qualifier pair representations for a single edge.\n :param qualifier_aggregation_kwargs:\n Additional keyword-based arguments for the aggregation method.\n :param qualifier_composition:\n The composition function to use to combine entity and relation representations from a qualifier pair.\n :param use_bias:\n Whether to add a trainable bias.\n :param edge_dropout:\n An additional dropout on the edges (applied by randomly setting edge weights to zero).\n '
super().__init__()
output_dim = (output_dim or input_dim)
self.composition = composition_resolver.make(composition)
self.qualifier_composition = composition_resolver.make(qualifier_composition)
self.qualifier_aggregation = qualifier_aggregation_resolver.make(qualifier_aggregation, pos_kwargs=qualifier_aggregation_kwargs, input_dim=input_dim)
message_weighting_kwargs = dict((message_weighting_kwargs or {}))
if (message_weighting == message_weighting_resolver.normalize_cls(AttentionMessageWeighting)):
message_weighting_kwargs.setdefault('output_dim', output_dim)
self.message_weighting = message_weighting_resolver.make(message_weighting, pos_kwargs=message_weighting_kwargs)
self.activation = activation_resolver.make(activation)
self.dropout = nn.Dropout(dropout)
self.batch_norm = nn.BatchNorm1d(output_dim)
self.edge_dropout = nn.Dropout(edge_dropout)
self.w_loop = get_parameter(input_dim, output_dim)
self.w_in = get_parameter(input_dim, output_dim)
self.w_out = get_parameter(input_dim, output_dim)
self.w_rel = get_parameter(input_dim, output_dim)
self.loop_rel = get_parameter(1, input_dim)
self.bias = (get_parameter(output_dim, initializer_=nn.init.zeros_) if use_bias else None)
|
Initialize the layer.
:param input_dim:
The input dimension (entity and relation representations).
:param output_dim:
The output dimension. Defaults to the input dimension.
:param dropout:
The dropout to apply to the updated entity representations from forward / backward edges (but not for
self-loops).
:param activation:
The activation function to use.
:param composition:
The composition function to use for merging entity and relation representations to messages.
:param qualifier_aggregation:
The aggregation method to use for aggregation of multiple qualifier pair representations for a single edge.
:param qualifier_aggregation_kwargs:
Additional keyword-based arguments for the aggregation method.
:param qualifier_composition:
The composition function to use to combine entity and relation representations from a qualifier pair.
:param use_bias:
Whether to add a trainable bias.
:param edge_dropout:
An additional dropout on the edges (applied by randomly setting edge weights to zero).
|
src/mphrqe/layer/gnn.py
|
__init__
|
DimitrisAlivas/StarQE
| 11 |
python
|
def __init__(self, input_dim: int, output_dim: Optional[int]=None, dropout: float=0.2, activation: Hint[nn.Module]=nn.ReLU, composition: Hint[Composition]=None, qualifier_aggregation: Hint[QualifierAggregation]=None, qualifier_aggregation_kwargs: Optional[Mapping[(str, Any)]]=None, qualifier_composition: Hint[Composition]=None, use_bias: bool=True, message_weighting: Hint[MessageWeighting]=None, message_weighting_kwargs: Optional[Mapping[(str, Any)]]=None, edge_dropout: float=0.0):
'\n Initialize the layer.\n\n :param input_dim:\n The input dimension (entity and relation representations).\n :param output_dim:\n The output dimension. Defaults to the input dimension.\n :param dropout:\n The dropout to apply to the updated entity representations from forward / backward edges (but not for\n self-loops).\n :param activation:\n The activation function to use.\n :param composition:\n The composition function to use for merging entity and relation representations to messages.\n :param qualifier_aggregation:\n The aggregation method to use for aggregation of multiple qualifier pair representations for a single edge.\n :param qualifier_aggregation_kwargs:\n Additional keyword-based arguments for the aggregation method.\n :param qualifier_composition:\n The composition function to use to combine entity and relation representations from a qualifier pair.\n :param use_bias:\n Whether to add a trainable bias.\n :param edge_dropout:\n An additional dropout on the edges (applied by randomly setting edge weights to zero).\n '
super().__init__()
output_dim = (output_dim or input_dim)
self.composition = composition_resolver.make(composition)
self.qualifier_composition = composition_resolver.make(qualifier_composition)
self.qualifier_aggregation = qualifier_aggregation_resolver.make(qualifier_aggregation, pos_kwargs=qualifier_aggregation_kwargs, input_dim=input_dim)
message_weighting_kwargs = dict((message_weighting_kwargs or {}))
if (message_weighting == message_weighting_resolver.normalize_cls(AttentionMessageWeighting)):
message_weighting_kwargs.setdefault('output_dim', output_dim)
self.message_weighting = message_weighting_resolver.make(message_weighting, pos_kwargs=message_weighting_kwargs)
self.activation = activation_resolver.make(activation)
self.dropout = nn.Dropout(dropout)
self.batch_norm = nn.BatchNorm1d(output_dim)
self.edge_dropout = nn.Dropout(edge_dropout)
self.w_loop = get_parameter(input_dim, output_dim)
self.w_in = get_parameter(input_dim, output_dim)
self.w_out = get_parameter(input_dim, output_dim)
self.w_rel = get_parameter(input_dim, output_dim)
self.loop_rel = get_parameter(1, input_dim)
self.bias = (get_parameter(output_dim, initializer_=nn.init.zeros_) if use_bias else None)
|
def __init__(self, input_dim: int, output_dim: Optional[int]=None, dropout: float=0.2, activation: Hint[nn.Module]=nn.ReLU, composition: Hint[Composition]=None, qualifier_aggregation: Hint[QualifierAggregation]=None, qualifier_aggregation_kwargs: Optional[Mapping[(str, Any)]]=None, qualifier_composition: Hint[Composition]=None, use_bias: bool=True, message_weighting: Hint[MessageWeighting]=None, message_weighting_kwargs: Optional[Mapping[(str, Any)]]=None, edge_dropout: float=0.0):
'\n Initialize the layer.\n\n :param input_dim:\n The input dimension (entity and relation representations).\n :param output_dim:\n The output dimension. Defaults to the input dimension.\n :param dropout:\n The dropout to apply to the updated entity representations from forward / backward edges (but not for\n self-loops).\n :param activation:\n The activation function to use.\n :param composition:\n The composition function to use for merging entity and relation representations to messages.\n :param qualifier_aggregation:\n The aggregation method to use for aggregation of multiple qualifier pair representations for a single edge.\n :param qualifier_aggregation_kwargs:\n Additional keyword-based arguments for the aggregation method.\n :param qualifier_composition:\n The composition function to use to combine entity and relation representations from a qualifier pair.\n :param use_bias:\n Whether to add a trainable bias.\n :param edge_dropout:\n An additional dropout on the edges (applied by randomly setting edge weights to zero).\n '
super().__init__()
output_dim = (output_dim or input_dim)
self.composition = composition_resolver.make(composition)
self.qualifier_composition = composition_resolver.make(qualifier_composition)
self.qualifier_aggregation = qualifier_aggregation_resolver.make(qualifier_aggregation, pos_kwargs=qualifier_aggregation_kwargs, input_dim=input_dim)
message_weighting_kwargs = dict((message_weighting_kwargs or {}))
if (message_weighting == message_weighting_resolver.normalize_cls(AttentionMessageWeighting)):
message_weighting_kwargs.setdefault('output_dim', output_dim)
self.message_weighting = message_weighting_resolver.make(message_weighting, pos_kwargs=message_weighting_kwargs)
self.activation = activation_resolver.make(activation)
self.dropout = nn.Dropout(dropout)
self.batch_norm = nn.BatchNorm1d(output_dim)
self.edge_dropout = nn.Dropout(edge_dropout)
self.w_loop = get_parameter(input_dim, output_dim)
self.w_in = get_parameter(input_dim, output_dim)
self.w_out = get_parameter(input_dim, output_dim)
self.w_rel = get_parameter(input_dim, output_dim)
self.loop_rel = get_parameter(1, input_dim)
self.bias = (get_parameter(output_dim, initializer_=nn.init.zeros_) if use_bias else None)<|docstring|>Initialize the layer.
:param input_dim:
The input dimension (entity and relation representations).
:param output_dim:
The output dimension. Defaults to the input dimension.
:param dropout:
The dropout to apply to the updated entity representations from forward / backward edges (but not for
self-loops).
:param activation:
The activation function to use.
:param composition:
The composition function to use for merging entity and relation representations to messages.
:param qualifier_aggregation:
The aggregation method to use for aggregation of multiple qualifier pair representations for a single edge.
:param qualifier_aggregation_kwargs:
Additional keyword-based arguments for the aggregation method.
:param qualifier_composition:
The composition function to use to combine entity and relation representations from a qualifier pair.
:param use_bias:
Whether to add a trainable bias.
:param edge_dropout:
An additional dropout on the edges (applied by randomly setting edge weights to zero).<|endoftext|>
|
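A stripped-down illustration of the parameter bookkeeping in this constructor (separate weights for the self-loop, forward and inverse edges, plus the relation transform and an optional bias), assuming PyTorch; get_parameter here is a hypothetical stand-in for the project's helper, and the resolver machinery is omitted:

import torch
from torch import nn

def get_parameter(*shape, initializer_=nn.init.xavier_normal_):
    # Hypothetical stand-in: an initialized trainable tensor.
    param = nn.Parameter(torch.empty(*shape))
    initializer_(param)
    return param

class TinyLayer(nn.Module):
    def __init__(self, input_dim, output_dim=None, dropout=0.2, use_bias=True):
        super().__init__()
        output_dim = output_dim or input_dim
        self.dropout = nn.Dropout(dropout)
        self.batch_norm = nn.BatchNorm1d(output_dim)
        self.w_loop = get_parameter(input_dim, output_dim)
        self.w_in = get_parameter(input_dim, output_dim)
        self.w_out = get_parameter(input_dim, output_dim)
        self.w_rel = get_parameter(input_dim, output_dim)
        self.loop_rel = get_parameter(1, input_dim)
        self.bias = get_parameter(1, output_dim, initializer_=nn.init.zeros_) if use_bias else None

layer = TinyLayer(input_dim=8, output_dim=16)
print(sum(p.numel() for p in layer.parameters()))  # total trainable entries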
ed3ac2f7036270d5f5e6c1ef1077feeb1fff164d4cf99e198979d6628917a549
|
def propagate(self, x_e: torch.FloatTensor, x_r: FloatTensor, edge_index: LongTensor, edge_type: LongTensor, qualifier_index: torch.LongTensor, weight: nn.Parameter) -> torch.FloatTensor:
'\n The real message passing.\n\n :param x_e: shape: (num_entities, input_dim)\n The entity representations.\n :param x_r: shape: (2 * num_relations, dim)\n The relation representations. This includes the relation representation for inverse relations, but does\n not include the self-loop relation (which is learned independently for each layer).\n :param edge_index: shape: (2, num_edges)\n The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created\n locally.\n :param edge_type: shape: (num_edges,)\n The edge type (=relation ID) for each edge.\n :param qualifier_index: shape: (3, num_qualifier_pairs)\n The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).\n :param weight: shape: (input_dim, output_dim)\n The transformation weight.\n '
(i_qr, i_qe, i_e) = qualifier_index
x_qr = x_r[i_qr]
x_qe = x_e[i_qe]
x_q = self.qualifier_composition(x_qe, x_qr)
x_r = self.qualifier_aggregation(x_q, x_r[edge_type], edge_ids=i_e)
(source, target) = edge_index
m = (self.composition(x_e[source], x_r) @ weight)
(m, message_weight) = self.message_weighting(edge_index=edge_index, message=m, x_e=x_e)
message_weight = self.edge_dropout(message_weight)
m = (m * message_weight.unsqueeze(dim=(- 1)))
m = m.view(m.shape[0], (- 1))
return torch_scatter.scatter_add(src=m, index=target, dim=0, dim_size=x_e.shape[0])
|
The real message passing.
:param x_e: shape: (num_entities, input_dim)
The entity representations.
:param x_r: shape: (2 * num_relations, dim)
The relation representations. This includes the relation representation for inverse relations, but does
not include the self-loop relation (which is learned independently for each layer).
:param edge_index: shape: (2, num_edges)
The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created
locally.
:param edge_type: shape: (num_edges,)
The edge type (=relation ID) for each edge.
:param qualifier_index: shape: (3, num_qualifier_pairs)
The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).
:param weight: shape: (input_dim, output_dim)
The transformation weight.
|
src/mphrqe/layer/gnn.py
|
propagate
|
DimitrisAlivas/StarQE
| 11 |
python
|
def propagate(self, x_e: torch.FloatTensor, x_r: FloatTensor, edge_index: LongTensor, edge_type: LongTensor, qualifier_index: torch.LongTensor, weight: nn.Parameter) -> torch.FloatTensor:
'\n The real message passing.\n\n :param x_e: shape: (num_entities, input_dim)\n The entity representations.\n :param x_r: shape: (2 * num_relations, dim)\n The relation representations. This includes the relation representation for inverse relations, but does\n not include the self-loop relation (which is learned independently for each layer).\n :param edge_index: shape: (2, num_edges)\n The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created\n locally.\n :param edge_type: shape: (num_edges,)\n The edge type (=relation ID) for each edge.\n :param qualifier_index: shape: (3, num_qualifier_pairs)\n The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).\n :param weight: shape: (input_dim, output_dim)\n The transformation weight.\n '
(i_qr, i_qe, i_e) = qualifier_index
x_qr = x_r[i_qr]
x_qe = x_e[i_qe]
x_q = self.qualifier_composition(x_qe, x_qr)
x_r = self.qualifier_aggregation(x_q, x_r[edge_type], edge_ids=i_e)
(source, target) = edge_index
m = (self.composition(x_e[source], x_r) @ weight)
(m, message_weight) = self.message_weighting(edge_index=edge_index, message=m, x_e=x_e)
message_weight = self.edge_dropout(message_weight)
m = (m * message_weight.unsqueeze(dim=(- 1)))
m = m.view(m.shape[0], (- 1))
return torch_scatter.scatter_add(src=m, index=target, dim=0, dim_size=x_e.shape[0])
|
def propagate(self, x_e: torch.FloatTensor, x_r: FloatTensor, edge_index: LongTensor, edge_type: LongTensor, qualifier_index: torch.LongTensor, weight: nn.Parameter) -> torch.FloatTensor:
'\n The real message passing.\n\n :param x_e: shape: (num_entities, input_dim)\n The entity representations.\n :param x_r: shape: (2 * num_relations, dim)\n The relation representations. This includes the relation representation for inverse relations, but does\n not include the self-loop relation (which is learned independently for each layer).\n :param edge_index: shape: (2, num_edges)\n The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created\n locally.\n :param edge_type: shape: (num_edges,)\n The edge type (=relation ID) for each edge.\n :param qualifier_index: shape: (3, num_qualifier_pairs)\n The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).\n :param weight: shape: (input_dim, output_dim)\n The transformation weight.\n '
(i_qr, i_qe, i_e) = qualifier_index
x_qr = x_r[i_qr]
x_qe = x_e[i_qe]
x_q = self.qualifier_composition(x_qe, x_qr)
x_r = self.qualifier_aggregation(x_q, x_r[edge_type], edge_ids=i_e)
(source, target) = edge_index
m = (self.composition(x_e[source], x_r) @ weight)
(m, message_weight) = self.message_weighting(edge_index=edge_index, message=m, x_e=x_e)
message_weight = self.edge_dropout(message_weight)
m = (m * message_weight.unsqueeze(dim=(- 1)))
m = m.view(m.shape[0], (- 1))
return torch_scatter.scatter_add(src=m, index=target, dim=0, dim_size=x_e.shape[0])<|docstring|>The real message passing.
:param x_e: shape: (num_entities, input_dim)
The entity representations.
:param x_r: shape: (2 * num_relations, dim)
The relation representations. This includes the relation representation for inverse relations, but does
not include the self-loop relation (which is learned independently for each layer).
:param edge_index: shape: (2, num_edges)
The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created
locally.
:param edge_type: shape: (num_edges,)
The edge type (=relation ID) for each edge.
:param qualifier_index: shape: (3, num_qualifier_pairs)
The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).
:param weight: shape: (input_dim, output_dim)
The transformation weight.<|endoftext|>
|
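The final aggregation step above sums each weighted message into its target row. A tiny equivalent using plain PyTorch's index_add_ in place of torch_scatter.scatter_add:

import torch

# Three messages flowing to targets 0, 0 and 2 of a 4-node graph.
messages = torch.tensor([[1.0, 0.0],
                         [2.0, 0.0],
                         [0.0, 3.0]])
target = torch.tensor([0, 0, 2])

# Equivalent of torch_scatter.scatter_add(src=m, index=target, dim=0, dim_size=4):
aggregated = torch.zeros(4, 2).index_add_(0, target, messages)
print(aggregated)
# tensor([[3., 0.],
#         [0., 0.],
#         [0., 3.],
#         [0., 0.]])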
dbdcbf3dc3205720e90bad690f200ed65dce99dd4c3f0948dd9884c0ad0f0353
|
def forward(self, x_e: torch.FloatTensor, x_r: FloatTensor, edge_index: torch.LongTensor, edge_type: torch.LongTensor, qualifier_index: torch.LongTensor, entity_mask: Optional[torch.LongTensor]) -> Tuple[(FloatTensor, FloatTensor)]:
'\n Forward pass through the convolution layer.\n\n :param x_e: shape: (num_entities, input_dim)\n The entity representations.\n :param x_r: shape: (2 * num_relations, dim)\n The relation representations. This includes the relation representation for inverse relations, but does\n not include the self-loop relation (which is learned independently for each layer).\n :param edge_index: shape: (2, num_edges)\n The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created\n locally.\n :param edge_type: shape: (num_edges,)\n The edge type (=relation ID) for each edge.\n :param qualifier_index: shape: (3, num_qualifier_pairs)\n The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).\n :param entity_mask: shape (num_entities, )\n If provided, this entities x_e[entity_mask] will not be updated be updated by this message passing layer.\n\n :return:\n The updated entity and relation representations.\n '
assert (edge_type < (x_r.shape[0] // 2)).all()
out = (((1 / 3) * self.composition(x_e, self.loop_rel)) @ self.w_loop)
for (weight, edge_index_, edge_type_) in ((self.w_in, edge_index, edge_type), (self.w_out, edge_index.flip(0), (edge_type + (x_r.shape[0] // 2)))):
out = (out + ((1 / 3) * self.dropout(self.propagate(x_e=x_e, x_r=x_r, edge_index=edge_index_, edge_type=edge_type_, qualifier_index=qualifier_index, weight=weight))))
if (self.bias is not None):
out = (out + self.bias)
out = self.batch_norm(out)
out = self.activation(out)
x_r = (x_r @ self.w_rel)
if (entity_mask is not None):
out[entity_mask] = x_e[entity_mask]
return (out, x_r)
|
Forward pass through the convolution layer.
:param x_e: shape: (num_entities, input_dim)
The entity representations.
:param x_r: shape: (2 * num_relations, dim)
The relation representations. This includes the relation representation for inverse relations, but does
not include the self-loop relation (which is learned independently for each layer).
:param edge_index: shape: (2, num_edges)
The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created
locally.
:param edge_type: shape: (num_edges,)
The edge type (=relation ID) for each edge.
:param qualifier_index: shape: (3, num_qualifier_pairs)
The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).
:param entity_mask: shape (num_entities, )
If provided, the entities x_e[entity_mask] will not be updated by this message passing layer.
:return:
The updated entity and relation representations.
|
src/mphrqe/layer/gnn.py
|
forward
|
DimitrisAlivas/StarQE
| 11 |
python
|
def forward(self, x_e: torch.FloatTensor, x_r: FloatTensor, edge_index: torch.LongTensor, edge_type: torch.LongTensor, qualifier_index: torch.LongTensor, entity_mask: Optional[torch.LongTensor]) -> Tuple[(FloatTensor, FloatTensor)]:
'\n Forward pass through the convolution layer.\n\n :param x_e: shape: (num_entities, input_dim)\n The entity representations.\n :param x_r: shape: (2 * num_relations, dim)\n The relation representations. This includes the relation representation for inverse relations, but does\n not include the self-loop relation (which is learned independently for each layer).\n :param edge_index: shape: (2, num_edges)\n The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created\n locally.\n :param edge_type: shape: (num_edges,)\n The edge type (=relation ID) for each edge.\n :param qualifier_index: shape: (3, num_qualifier_pairs)\n The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).\n :param entity_mask: shape (num_entities, )\n If provided, this entities x_e[entity_mask] will not be updated be updated by this message passing layer.\n\n :return:\n The updated entity and relation representations.\n '
assert (edge_type < (x_r.shape[0] // 2)).all()
out = (((1 / 3) * self.composition(x_e, self.loop_rel)) @ self.w_loop)
for (weight, edge_index_, edge_type_) in ((self.w_in, edge_index, edge_type), (self.w_out, edge_index.flip(0), (edge_type + (x_r.shape[0] // 2)))):
out = (out + ((1 / 3) * self.dropout(self.propagate(x_e=x_e, x_r=x_r, edge_index=edge_index_, edge_type=edge_type_, qualifier_index=qualifier_index, weight=weight))))
if (self.bias is not None):
out = (out + self.bias)
out = self.batch_norm(out)
out = self.activation(out)
x_r = (x_r @ self.w_rel)
if (entity_mask is not None):
out[entity_mask] = x_e[entity_mask]
return (out, x_r)
|
def forward(self, x_e: torch.FloatTensor, x_r: FloatTensor, edge_index: torch.LongTensor, edge_type: torch.LongTensor, qualifier_index: torch.LongTensor, entity_mask: Optional[torch.LongTensor]) -> Tuple[(FloatTensor, FloatTensor)]:
'\n Forward pass through the convolution layer.\n\n :param x_e: shape: (num_entities, input_dim)\n The entity representations.\n :param x_r: shape: (2 * num_relations, dim)\n The relation representations. This includes the relation representation for inverse relations, but does\n not include the self-loop relation (which is learned independently for each layer).\n :param edge_index: shape: (2, num_edges)\n The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created\n locally.\n :param edge_type: shape: (num_edges,)\n The edge type (=relation ID) for each edge.\n :param qualifier_index: shape: (3, num_qualifier_pairs)\n The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).\n :param entity_mask: shape (num_entities, )\n If provided, this entities x_e[entity_mask] will not be updated be updated by this message passing layer.\n\n :return:\n The updated entity and relation representations.\n '
assert (edge_type < (x_r.shape[0] // 2)).all()
out = (((1 / 3) * self.composition(x_e, self.loop_rel)) @ self.w_loop)
for (weight, edge_index_, edge_type_) in ((self.w_in, edge_index, edge_type), (self.w_out, edge_index.flip(0), (edge_type + (x_r.shape[0] // 2)))):
out = (out + ((1 / 3) * self.dropout(self.propagate(x_e=x_e, x_r=x_r, edge_index=edge_index_, edge_type=edge_type_, qualifier_index=qualifier_index, weight=weight))))
if (self.bias is not None):
out = (out + self.bias)
out = self.batch_norm(out)
out = self.activation(out)
x_r = (x_r @ self.w_rel)
if (entity_mask is not None):
out[entity_mask] = x_e[entity_mask]
return (out, x_r)<|docstring|>Forward pass through the convolution layer.
:param x_e: shape: (num_entities, input_dim)
The entity representations.
:param x_r: shape: (2 * num_relations, dim)
The relation representations. This includes the relation representation for inverse relations, but does
not include the self-loop relation (which is learned independently for each layer).
:param edge_index: shape: (2, num_edges)
The edge index, pairs of source/target nodes. This does not include inverse edges, since they are created
locally.
:param edge_type: shape: (num_edges,)
The edge type (=relation ID) for each edge.
:param qualifier_index: shape: (3, num_qualifier_pairs)
The qualifier index, triples of (qualifier-relation-ID, qualifier-entity-ID, edge-ID).
:param entity_mask: shape (num_entities, )
If provided, the entities x_e[entity_mask] will not be updated by this message passing layer.
:return:
The updated entity and relation representations.<|endoftext|>
|
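A small sketch of how the forward pass derives the inverse-edge pass used above: flip source and target, and shift the relation id into the second half of the relation table (x_r holds 2 * num_relations rows):

import torch

num_relations = 3                              # forward relations only
edge_index = torch.tensor([[0, 1],             # sources
                           [1, 2]])            # targets
edge_type = torch.tensor([0, 2])

# Inverse pass: same edges reversed, relation ids offset by num_relations.
inv_edge_index = edge_index.flip(0)
inv_edge_type = edge_type + num_relations
print(inv_edge_index.tolist())  # [[1, 2], [0, 1]]
print(inv_edge_type.tolist())   # [3, 5]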
91d056701c5449876995f9558423b36030529fe1176a3ecc946ee862e6ea720e
|
def dictfetchall(self, cursor):
'Returns all rows from a cursor as a dict'
desc = cursor.description
return [dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall()]
|
Returns all rows from a cursor as a dict
|
core/reports/TitanProgressReport.py
|
dictfetchall
|
jilbertozamorasaa/panda-bigmon-core
| 3 |
python
|
def dictfetchall(self, cursor):
desc = cursor.description
return [dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall()]
|
def dictfetchall(self, cursor):
desc = cursor.description
return [dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall()]<|docstring|>Returns all rows from a cursor as a dict<|endoftext|>
|
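A self-contained demonstration with the standard-library sqlite3 module, whose cursors also expose column names via cursor.description:

import sqlite3

def dictfetchall(cursor):
    # Zip the column names from cursor.description with each fetched row.
    desc = cursor.description
    return [dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall()]

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE jobs (id INTEGER, status TEXT)")
conn.executemany("INSERT INTO jobs VALUES (?, ?)", [(1, "done"), (2, "running")])
cur = conn.execute("SELECT id, status FROM jobs")
print(dictfetchall(cur))
# [{'id': 1, 'status': 'done'}, {'id': 2, 'status': 'running'}]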
a11666599677d6d5e9d0080d8bb4ac9c725303f214f2f3363820bab15804e933
|
@unpack_args
def formulate(index, n2, gama, alphadB, z, P_p, P_s, TFWHM_p, TFWHM_s, spl_losses, betas, lamda_c, WDMS_pars, lamp, lams, num_cores, maxerr, ss, plots, N, nplot, master_index, filesaves, Df_band, fr, fopa):
'------------------propagation paramaters------------------'
dzstep = (z / nplot)
dz_less = 100.0
int_fwm = sim_parameters(n2, 1, alphadB)
int_fwm.general_options(maxerr, ss)
int_fwm.propagation_parameters(N, z, nplot, dz_less)
lamda = (lamp * 1e-09)
'-----------------------------f-----------------------------'
'---------------------Aeff-Qmatrixes-----------------------'
M = Q_matrixes(int_fwm.nm, int_fwm.n2, lamda_c, gama)
'----------------------------------------------------------'
'---------------------Grid&window-----------------------'
(P_p_bef, P_s_bef) = pre_fibre_init_power(WDMS_pars[0][0], WDMS_pars[0][1], lamp, P_p, P_s)
(fv, where, f_centrals) = fv_creator(lamp, lams, lamda_c, int_fwm, betas, M, P_p_bef, P_s_bef, Df_band)
print((fv[0][1] - fv[0][0]))
(p_pos, s_pos, i_pos) = where
sim_wind = sim_window(fv, lamda, f_centrals, lamda_c, int_fwm)
'----------------------------------------------------------'
'---------------------Loss-in-fibres-----------------------'
slice_from_edge = ((sim_wind.fv[(- 1)] - sim_wind.fv[0]) / 100)
loss = Loss(int_fwm, sim_wind, amax=0)
int_fwm.alpha = loss.atten_func_full(fv)
int_fwm.gama = np.array([(((((((- 1j) * n2) * 2) * M) * pi) * (1000000000000.0 * f_c)) / c) for f_c in f_centrals])
int_fwm.gama[0:2] = 0
int_fwm.gama[5:] = 0
'----------------------------------------------------------'
'--------------------Dispersion----------------------------'
Dop = dispersion_operator(betas, lamda_c, int_fwm, sim_wind)
'----------------------------------------------------------'
'---------------------Raman Factors------------------------'
ram = Raman_factors(fr)
ram.set_raman_band(sim_wind)
'----------------------------------------------------------'
'--------------------Noise---------------------------------'
noise_obj = Noise(int_fwm, sim_wind)
'----------------------------------------------------------'
pulse_pos_dict_or = ('after propagation', 'pass WDM2', 'pass WDM1 on port2 (remove pump)', 'add more pump', 'out')
keys = [(('loading_data/green_dot_fopo/pngs/' + str(i)) + str('.png')) for i in range(7)]
D_pic = [plt.imread(i) for i in keys]
'----------------Construct the integrator----------------'
non_integrand = Integrand(int_fwm.gama, sim_wind.tsh, sim_wind.w_tiled, ss, ram, cython_tick=True, timer=False)
'--------------------------------------------------------'
'----------------------Formulate WDMS--------------------'
if (WDMS_pars == 'signal_locked'):
Omega = ((((2 * pi) * c) / (lamp * 1e-09)) - (((2 * pi) * c) / (lams * 1e-09)))
omegai = ((((2 * pi) * c) / (lamp * 1e-09)) + Omega)
lami = ((((1000000000.0 * 2) * pi) * c) / omegai)
WDMS_pars = ([lamp, lams], [lami, lams], [lami, lamp], [lami, lams])
WDM_vec = [WDM(i[0], i[1], sim_wind.fv, c, fopa) for i in WDMS_pars]
pm_fopa = Phase_modulation_FOPA(sim_wind.fv, where)
pm_WDM1 = Phase_modulation_infase_WDM(P_s, where, WDM_vec[0])
'--------------------------------------------------------'
'----------------------Formulate splicers--------------------'
splicers_vec = [Splicer(loss=i) for i in spl_losses]
'------------------------------------------------------------'
(f_p, f_s) = (sim_wind.fv[(where[0][0], where[0][1])], sim_wind.fv[(where[1][0], where[1][1])])
ex = Plotter_saver(plots, filesaves, sim_wind.fv, sim_wind.t)
ro = oscilate(sim_wind, int_fwm, noise_obj, TFWHM_p, TFWHM_s, index, master_index, P_p, P_s, f_p, f_s, p_pos, s_pos, splicers_vec, WDM_vec, Dop, non_integrand, D_pic, pulse_pos_dict_or, plots, ex, pm_fopa, pm_WDM1, fopa)
return None
|
------------------propagation paramaters------------------
|
src/oscillator.py
|
formulate
|
Computational-Nonlinear-Optics-ORC/Single-mode-FOPO
| 3 |
python
|
@unpack_args
def formulate(index, n2, gama, alphadB, z, P_p, P_s, TFWHM_p, TFWHM_s, spl_losses, betas, lamda_c, WDMS_pars, lamp, lams, num_cores, maxerr, ss, plots, N, nplot, master_index, filesaves, Df_band, fr, fopa):
dzstep = (z / nplot)
dz_less = 100.0
int_fwm = sim_parameters(n2, 1, alphadB)
int_fwm.general_options(maxerr, ss)
int_fwm.propagation_parameters(N, z, nplot, dz_less)
lamda = (lamp * 1e-09)
'-----------------------------f-----------------------------'
'---------------------Aeff-Qmatrixes-----------------------'
M = Q_matrixes(int_fwm.nm, int_fwm.n2, lamda_c, gama)
'----------------------------------------------------------'
'---------------------Grid&window-----------------------'
(P_p_bef, P_s_bef) = pre_fibre_init_power(WDMS_pars[0][0], WDMS_pars[0][1], lamp, P_p, P_s)
(fv, where, f_centrals) = fv_creator(lamp, lams, lamda_c, int_fwm, betas, M, P_p_bef, P_s_bef, Df_band)
print((fv[0][1] - fv[0][0]))
(p_pos, s_pos, i_pos) = where
sim_wind = sim_window(fv, lamda, f_centrals, lamda_c, int_fwm)
'----------------------------------------------------------'
'---------------------Loss-in-fibres-----------------------'
slice_from_edge = ((sim_wind.fv[(- 1)] - sim_wind.fv[0]) / 100)
loss = Loss(int_fwm, sim_wind, amax=0)
int_fwm.alpha = loss.atten_func_full(fv)
int_fwm.gama = np.array([(((((((- 1j) * n2) * 2) * M) * pi) * (1000000000000.0 * f_c)) / c) for f_c in f_centrals])
int_fwm.gama[0:2] = 0
int_fwm.gama[5:] = 0
'----------------------------------------------------------'
'--------------------Dispersion----------------------------'
Dop = dispersion_operator(betas, lamda_c, int_fwm, sim_wind)
'----------------------------------------------------------'
'---------------------Raman Factors------------------------'
ram = Raman_factors(fr)
ram.set_raman_band(sim_wind)
'----------------------------------------------------------'
'--------------------Noise---------------------------------'
noise_obj = Noise(int_fwm, sim_wind)
'----------------------------------------------------------'
pulse_pos_dict_or = ('after propagation', 'pass WDM2', 'pass WDM1 on port2 (remove pump)', 'add more pump', 'out')
keys = [(('loading_data/green_dot_fopo/pngs/' + str(i)) + str('.png')) for i in range(7)]
D_pic = [plt.imread(i) for i in keys]
'----------------Construct the integrator----------------'
non_integrand = Integrand(int_fwm.gama, sim_wind.tsh, sim_wind.w_tiled, ss, ram, cython_tick=True, timer=False)
'--------------------------------------------------------'
'----------------------Formulate WDMS--------------------'
if (WDMS_pars == 'signal_locked'):
Omega = ((((2 * pi) * c) / (lamp * 1e-09)) - (((2 * pi) * c) / (lams * 1e-09)))
omegai = ((((2 * pi) * c) / (lamp * 1e-09)) + Omega)
lami = ((((1000000000.0 * 2) * pi) * c) / omegai)
WDMS_pars = ([lamp, lams], [lami, lams], [lami, lamp], [lami, lams])
WDM_vec = [WDM(i[0], i[1], sim_wind.fv, c, fopa) for i in WDMS_pars]
pm_fopa = Phase_modulation_FOPA(sim_wind.fv, where)
pm_WDM1 = Phase_modulation_infase_WDM(P_s, where, WDM_vec[0])
'--------------------------------------------------------'
'----------------------Formulate splicers--------------------'
splicers_vec = [Splicer(loss=i) for i in spl_losses]
'------------------------------------------------------------'
(f_p, f_s) = (sim_wind.fv[(where[0][0], where[0][1])], sim_wind.fv[(where[1][0], where[1][1])])
ex = Plotter_saver(plots, filesaves, sim_wind.fv, sim_wind.t)
ro = oscilate(sim_wind, int_fwm, noise_obj, TFWHM_p, TFWHM_s, index, master_index, P_p, P_s, f_p, f_s, p_pos, s_pos, splicers_vec, WDM_vec, Dop, non_integrand, D_pic, pulse_pos_dict_or, plots, ex, pm_fopa, pm_WDM1, fopa)
return None
|
@unpack_args
def formulate(index, n2, gama, alphadB, z, P_p, P_s, TFWHM_p, TFWHM_s, spl_losses, betas, lamda_c, WDMS_pars, lamp, lams, num_cores, maxerr, ss, plots, N, nplot, master_index, filesaves, Df_band, fr, fopa):
dzstep = (z / nplot)
dz_less = 100.0
int_fwm = sim_parameters(n2, 1, alphadB)
int_fwm.general_options(maxerr, ss)
int_fwm.propagation_parameters(N, z, nplot, dz_less)
lamda = (lamp * 1e-09)
'-----------------------------f-----------------------------'
'---------------------Aeff-Qmatrixes-----------------------'
M = Q_matrixes(int_fwm.nm, int_fwm.n2, lamda_c, gama)
'----------------------------------------------------------'
'---------------------Grid&window-----------------------'
(P_p_bef, P_s_bef) = pre_fibre_init_power(WDMS_pars[0][0], WDMS_pars[0][1], lamp, P_p, P_s)
(fv, where, f_centrals) = fv_creator(lamp, lams, lamda_c, int_fwm, betas, M, P_p_bef, P_s_bef, Df_band)
print((fv[0][1] - fv[0][0]))
(p_pos, s_pos, i_pos) = where
sim_wind = sim_window(fv, lamda, f_centrals, lamda_c, int_fwm)
'----------------------------------------------------------'
'---------------------Loss-in-fibres-----------------------'
slice_from_edge = ((sim_wind.fv[(- 1)] - sim_wind.fv[0]) / 100)
loss = Loss(int_fwm, sim_wind, amax=0)
int_fwm.alpha = loss.atten_func_full(fv)
int_fwm.gama = np.array([(((((((- 1j) * n2) * 2) * M) * pi) * (1000000000000.0 * f_c)) / c) for f_c in f_centrals])
int_fwm.gama[0:2] = 0
int_fwm.gama[5:] = 0
'----------------------------------------------------------'
'--------------------Dispersion----------------------------'
Dop = dispersion_operator(betas, lamda_c, int_fwm, sim_wind)
'----------------------------------------------------------'
'---------------------Raman Factors------------------------'
ram = Raman_factors(fr)
ram.set_raman_band(sim_wind)
'----------------------------------------------------------'
'--------------------Noise---------------------------------'
noise_obj = Noise(int_fwm, sim_wind)
'----------------------------------------------------------'
pulse_pos_dict_or = ('after propagation', 'pass WDM2', 'pass WDM1 on port2 (remove pump)', 'add more pump', 'out')
keys = [(('loading_data/green_dot_fopo/pngs/' + str(i)) + str('.png')) for i in range(7)]
D_pic = [plt.imread(i) for i in keys]
'----------------Construct the integrator----------------'
non_integrand = Integrand(int_fwm.gama, sim_wind.tsh, sim_wind.w_tiled, ss, ram, cython_tick=True, timer=False)
'--------------------------------------------------------'
'----------------------Formulate WDMS--------------------'
if (WDMS_pars == 'signal_locked'):
Omega = ((((2 * pi) * c) / (lamp * 1e-09)) - (((2 * pi) * c) / (lams * 1e-09)))
omegai = ((((2 * pi) * c) / (lamp * 1e-09)) + Omega)
lami = ((((1000000000.0 * 2) * pi) * c) / omegai)
WDMS_pars = ([lamp, lams], [lami, lams], [lami, lamp], [lami, lams])
WDM_vec = [WDM(i[0], i[1], sim_wind.fv, c, fopa) for i in WDMS_pars]
pm_fopa = Phase_modulation_FOPA(sim_wind.fv, where)
pm_WDM1 = Phase_modulation_infase_WDM(P_s, where, WDM_vec[0])
'--------------------------------------------------------'
'----------------------Formulate splicers--------------------'
splicers_vec = [Splicer(loss=i) for i in spl_losses]
'------------------------------------------------------------'
(f_p, f_s) = (sim_wind.fv[(where[0][0], where[0][1])], sim_wind.fv[(where[1][0], where[1][1])])
ex = Plotter_saver(plots, filesaves, sim_wind.fv, sim_wind.t)
ro = oscilate(sim_wind, int_fwm, noise_obj, TFWHM_p, TFWHM_s, index, master_index, P_p, P_s, f_p, f_s, p_pos, s_pos, splicers_vec, WDM_vec, Dop, non_integrand, D_pic, pulse_pos_dict_or, plots, ex, pm_fopa, pm_WDM1, fopa)
return None<|docstring|>------------------propagation paramaters------------------<|endoftext|>
|
b2429ed3b93e616c9fbaa6689a33d1960b3183a634429fb1930ec9b4244c43a6
|
def main():
'-----------------------------Stable parameters----------------------------'
num_cores = arguments_determine(1)
maxerr = 1e-13
ss = 1
Df_band_vec = [5, 5, 10, 20]
fr = 0.18
plots = False
filesaves = True
complete = False
nplot = 1
if (arguments_determine((- 1)) == 0):
fopa = True
else:
fopa = False
if ('mpi' in sys.argv):
method = 'mpi'
elif ('joblib' in sys.argv):
method = 'joblib'
else:
method = 'single'
'--------------------------------------------------------------------------'
stable_dic = {'num_cores': num_cores, 'maxerr': maxerr, 'ss': ss, 'plots': plots, 'nplot': nplot, 'filesaves': filesaves, 'fr': fr, 'fopa': fopa}
'------------------------Can be variable parameters------------------------'
n2 = 2.5e-20
gama = 0.01
alphadB = 0
z = 18
wave_idx = 0
power_area_idx = 0
N = np.array([i for i in range(2, 13)])
P_p_vec = [[my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 4.4, 0.1), my_arange(4.5, 5, 0.05), my_arange(5.1, 8.1, 0.1), my_arange(8.2, 12, 0.1)]]
Df_band = Df_band_vec[power_area_idx]
P_p = P_p_vec[wave_idx][power_area_idx]
P_p = [6]
P_s = 0
TFWHM_p = 0
TFWHM_s = 0
spl_losses = [0, 0, 1.4]
betas = (np.array([0, 0, 0, 0.06756, (- 0.0001002), 3.671e-07]) * 0.001)
lamda_c = 1.05185e-06
WDMS_pars = ([1048.0, 1204.16], [927.7, 1204.16])
lamp_vec = [1046, 1047, 1048, 1049, 1050]
lamp = [lamp_vec[wave_idx]]
lams = ['lock' for i in range(len(lamp))]
lamp = lamp_vec[wave_idx]
lams = 'lock'
var_dic = {'n2': n2, 'gama': gama, 'alphadB': alphadB, 'z': z, 'P_p': P_p, 'P_s': P_s, 'TFWHM_p': TFWHM_p, 'TFWHM_s': TFWHM_s, 'spl_losses': spl_losses, 'betas': betas, 'lamda_c': lamda_c, 'WDMS_pars': WDMS_pars, 'lamp': lamp, 'lams': lams, 'N': N, 'Df_band': Df_band}
'--------------------------------------------------------------------------'
outside_var_key = 'P_p'
inside_var_key = 'N'
inside_var = var_dic[inside_var_key]
outside_var = var_dic[outside_var_key]
del var_dic[outside_var_key]
del var_dic[inside_var_key]
'----------------------------Simulation------------------------------------'
D_ins = [{'index': i, inside_var_key: insvar} for (i, insvar) in enumerate(inside_var)]
large_dic = {**stable_dic, **var_dic}
if (len(inside_var) < num_cores):
num_cores = len(inside_var)
profiler_bool = arguments_determine(0)
for (kk, variable) in enumerate(outside_var):
create_file_structure(kk)
_temps = create_destroy(inside_var, str(kk))
_temps.prepare_folder()
large_dic['lams'] = lams[kk]
large_dic['master_index'] = kk
large_dic[outside_var_key] = variable
if profiler_bool:
for i in range(len(D_ins)):
formulate(**{**D_ins[i], **large_dic})
elif (method == 'mpi'):
iterables = ({**D_ins[i], **large_dic} for i in range(len(D_ins)))
with MPIPoolExecutor() as executor:
A = executor.map(formulate, iterables)
else:
A = Parallel(n_jobs=num_cores)((delayed(formulate)(**{**D_ins[i], **large_dic}) for i in range(len(D_ins))))
_temps.cleanup_folder()
print('\x07')
return None
|
-----------------------------Stable parameters----------------------------
|
src/oscillator.py
|
main
|
Computational-Nonlinear-Optics-ORC/Single-mode-FOPO
| 3 |
python
|
def main():
num_cores = arguments_determine(1)
maxerr = 1e-13
ss = 1
Df_band_vec = [5, 5, 10, 20]
fr = 0.18
plots = False
filesaves = True
complete = False
nplot = 1
if (arguments_determine((- 1)) == 0):
fopa = True
else:
fopa = False
if ('mpi' in sys.argv):
method = 'mpi'
elif ('joblib' in sys.argv):
method = 'joblib'
else:
method = 'single'
'--------------------------------------------------------------------------'
stable_dic = {'num_cores': num_cores, 'maxerr': maxerr, 'ss': ss, 'plots': plots, 'nplot': nplot, 'filesaves': filesaves, 'fr': fr, 'fopa': fopa}
'------------------------Can be variable parameters------------------------'
n2 = 2.5e-20
gama = 0.01
alphadB = 0
z = 18
wave_idx = 0
power_area_idx = 0
N = np.array([i for i in range(2, 13)])
P_p_vec = [[my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 4.4, 0.1), my_arange(4.5, 5, 0.05), my_arange(5.1, 8.1, 0.1), my_arange(8.2, 12, 0.1)]]
Df_band = Df_band_vec[power_area_idx]
P_p = P_p_vec[wave_idx][power_area_idx]
P_p = [6]
P_s = 0
TFWHM_p = 0
TFWHM_s = 0
spl_losses = [0, 0, 1.4]
betas = (np.array([0, 0, 0, 0.06756, (- 0.0001002), 3.671e-07]) * 0.001)
lamda_c = 1.05185e-06
WDMS_pars = ([1048.0, 1204.16], [927.7, 1204.16])
lamp_vec = [1046, 1047, 1048, 1049, 1050]
lamp = [lamp_vec[wave_idx]]
lams = ['lock' for i in range(len(lamp))]
lamp = lamp_vec[wave_idx]
lams = 'lock'
var_dic = {'n2': n2, 'gama': gama, 'alphadB': alphadB, 'z': z, 'P_p': P_p, 'P_s': P_s, 'TFWHM_p': TFWHM_p, 'TFWHM_s': TFWHM_s, 'spl_losses': spl_losses, 'betas': betas, 'lamda_c': lamda_c, 'WDMS_pars': WDMS_pars, 'lamp': lamp, 'lams': lams, 'N': N, 'Df_band': Df_band}
'--------------------------------------------------------------------------'
outside_var_key = 'P_p'
inside_var_key = 'N'
inside_var = var_dic[inside_var_key]
outside_var = var_dic[outside_var_key]
del var_dic[outside_var_key]
del var_dic[inside_var_key]
'----------------------------Simulation------------------------------------'
D_ins = [{'index': i, inside_var_key: insvar} for (i, insvar) in enumerate(inside_var)]
large_dic = {**stable_dic, **var_dic}
if (len(inside_var) < num_cores):
num_cores = len(inside_var)
profiler_bool = arguments_determine(0)
for (kk, variable) in enumerate(outside_var):
create_file_structure(kk)
_temps = create_destroy(inside_var, str(kk))
_temps.prepare_folder()
large_dic['lams'] = lams[kk]
large_dic['master_index'] = kk
large_dic[outside_var_key] = variable
if profiler_bool:
for i in range(len(D_ins)):
formulate(**{**D_ins[i], **large_dic})
elif (method == 'mpi'):
iterables = ({**D_ins[i], **large_dic} for i in range(len(D_ins)))
with MPIPoolExecutor() as executor:
A = executor.map(formulate, iterables)
else:
A = Parallel(n_jobs=num_cores)((delayed(formulate)(**{**D_ins[i], **large_dic}) for i in range(len(D_ins))))
_temps.cleanup_folder()
print('\x07')
return None
|
def main():
num_cores = arguments_determine(1)
maxerr = 1e-13
ss = 1
Df_band_vec = [5, 5, 10, 20]
fr = 0.18
plots = False
filesaves = True
complete = False
nplot = 1
if (arguments_determine((- 1)) == 0):
fopa = True
else:
fopa = False
if ('mpi' in sys.argv):
method = 'mpi'
elif ('joblib' in sys.argv):
method = 'joblib'
else:
method = 'single'
'--------------------------------------------------------------------------'
stable_dic = {'num_cores': num_cores, 'maxerr': maxerr, 'ss': ss, 'plots': plots, 'nplot': nplot, 'filesaves': filesaves, 'fr': fr, 'fopa': fopa}
'------------------------Can be variable parameters------------------------'
n2 = 2.5e-20
gama = 0.01
alphadB = 0
z = 18
wave_idx = 0
power_area_idx = 0
N = np.array([i for i in range(2, 13)])
P_p_vec = [[my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 3.9, 0.1), my_arange(4, 4.5, 0.05), my_arange(4.6, 8.1, 0.1), my_arange(8.2, 12, 0.1)], [my_arange(3.5, 4.4, 0.1), my_arange(4.5, 5, 0.05), my_arange(5.1, 8.1, 0.1), my_arange(8.2, 12, 0.1)]]
Df_band = Df_band_vec[power_area_idx]
P_p = P_p_vec[wave_idx][power_area_idx]
P_p = [6]
P_s = 0
TFWHM_p = 0
TFWHM_s = 0
spl_losses = [0, 0, 1.4]
betas = (np.array([0, 0, 0, 0.06756, (- 0.0001002), 3.671e-07]) * 0.001)
lamda_c = 1.05185e-06
WDMS_pars = ([1048.0, 1204.16], [927.7, 1204.16])
lamp_vec = [1046, 1047, 1048, 1049, 1050]
lamp = [lamp_vec[wave_idx]]
lams = ['lock' for i in range(len(lamp))]
lamp = lamp_vec[wave_idx]
lams = 'lock'
var_dic = {'n2': n2, 'gama': gama, 'alphadB': alphadB, 'z': z, 'P_p': P_p, 'P_s': P_s, 'TFWHM_p': TFWHM_p, 'TFWHM_s': TFWHM_s, 'spl_losses': spl_losses, 'betas': betas, 'lamda_c': lamda_c, 'WDMS_pars': WDMS_pars, 'lamp': lamp, 'lams': lams, 'N': N, 'Df_band': Df_band}
'--------------------------------------------------------------------------'
outside_var_key = 'P_p'
inside_var_key = 'N'
inside_var = var_dic[inside_var_key]
outside_var = var_dic[outside_var_key]
del var_dic[outside_var_key]
del var_dic[inside_var_key]
'----------------------------Simulation------------------------------------'
D_ins = [{'index': i, inside_var_key: insvar} for (i, insvar) in enumerate(inside_var)]
large_dic = {**stable_dic, **var_dic}
if (len(inside_var) < num_cores):
num_cores = len(inside_var)
profiler_bool = arguments_determine(0)
for (kk, variable) in enumerate(outside_var):
create_file_structure(kk)
_temps = create_destroy(inside_var, str(kk))
_temps.prepare_folder()
large_dic['lams'] = lams[kk]
large_dic['master_index'] = kk
large_dic[outside_var_key] = variable
if profiler_bool:
for i in range(len(D_ins)):
formulate(**{**D_ins[i], **large_dic})
elif (method == 'mpi'):
iterables = ({**D_ins[i], **large_dic} for i in range(len(D_ins)))
with MPIPoolExecutor() as executor:
A = executor.map(formulate, iterables)
else:
A = Parallel(n_jobs=num_cores)((delayed(formulate)(**{**D_ins[i], **large_dic}) for i in range(len(D_ins))))
_temps.cleanup_folder()
print('\x07')
return None<|docstring|>-----------------------------Stable parameters----------------------------<|endoftext|>
|
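The main() record above sweeps an outer variable (P_p) against an inner one (N) by merging a per-task dict into a shared dict and handing each merged kwargs set to formulate via joblib or mpi4py. A stdlib-only sketch of that dispatch pattern follows, with a toy worker standing in for formulate and illustrative parameter values; it runs sequentially, where Parallel or MPIPoolExecutor would replace the list comprehension:

def worker(index, N, P_p, maxerr):
    # Stand-in for formulate(): just echo the parameter combination it received.
    return (index, N, P_p, maxerr)

stable_dic = {'maxerr': 1e-13}   # shared by every task
inside_var = [2, 4, 8]           # "inside" sweep (N)
outside_var = [4.0, 6.0]         # "outside" sweep (P_p)

for P_p in outside_var:
    per_task = [{'index': i, 'N': n} for i, n in enumerate(inside_var)]
    common = {**stable_dic, 'P_p': P_p}
    results = [worker(**{**d, **common}) for d in per_task]
    print(results)
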
8a7b539bb932ad2bf2c48e3e22a3a1695db3afba72f2d400fc4e107ae6144b81
|
def copy(ps_file_path: str, rat_file_path: str, user_domain: str, username: str, password: str, target: str) -> Tuple[(CommandLine, Callable[([str], None)])]:
'\n Net use will mount a network share on this host\n\n Args:\n ps_file_path: The path to the psexec binary\n rat_file_path: The path to the ratremote computer\n username: The username remote share\n user_domain: The (Windows) domain of the user account\n password: (Optional) The password to be used\n target: The target host to run the file on\n Returns:\n The CommandLine and a parser\n '
args = [ps_file_path, '-accepteula', '-u', ((user_domain + '\\') + username), '-p', password, '-d', '-cv', rat_file_path, ('\\\\' + target)]
return (CommandLine(args), parsers.psexec.copy)
|
Net use will mount a network share on this host
Args:
ps_file_path: The path to the psexec binary
rat_file_path: The path to the RAT on the remote computer
username: The username for the remote share
user_domain: The (Windows) domain of the user account
password: (Optional) The password to be used
target: The target host to run the file on
Returns:
The CommandLine and a parser
|
caldera/app/commands/psexec.py
|
copy
|
m4l1c3/caldera
| 3 |
python
|
def copy(ps_file_path: str, rat_file_path: str, user_domain: str, username: str, password: str, target: str) -> Tuple[(CommandLine, Callable[([str], None)])]:
'\n Net use will mount a network share on this host\n\n Args:\n ps_file_path: The path to the psexec binary\n rat_file_path: The path to the ratremote computer\n username: The username remote share\n user_domain: The (Windows) domain of the user account\n password: (Optional) The password to be used\n target: The target host to run the file on\n Returns:\n The CommandLine and a parser\n '
args = [ps_file_path, '-accepteula', '-u', ((user_domain + '\\') + username), '-p', password, '-d', '-cv', rat_file_path, ('\\\\' + target)]
return (CommandLine(args), parsers.psexec.copy)
|
def copy(ps_file_path: str, rat_file_path: str, user_domain: str, username: str, password: str, target: str) -> Tuple[(CommandLine, Callable[([str], None)])]:
'\n Net use will mount a network share on this host\n\n Args:\n ps_file_path: The path to the psexec binary\n rat_file_path: The path to the ratremote computer\n username: The username remote share\n user_domain: The (Windows) domain of the user account\n password: (Optional) The password to be used\n target: The target host to run the file on\n Returns:\n The CommandLine and a parser\n '
args = [ps_file_path, '-accepteula', '-u', ((user_domain + '\\') + username), '-p', password, '-d', '-cv', rat_file_path, ('\\\\' + target)]
return (CommandLine(args), parsers.psexec.copy)<|docstring|>Net use will mount a network share on this host
Args:
ps_file_path: The path to the psexec binary
rat_file_path: The path to the ratremote computer
username: The username remote share
user_domain: The (Windows) domain of the user account
password: (Optional) The password to be used
target: The target host to run the file on
Returns:
The CommandLine and a parser<|endoftext|>
|
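The copy() record above only assembles an argument vector; the sketch below shows how such a vector renders as a single command string. The paths, domain, user, password, and host are placeholders, and subprocess.list2cmdline is used purely to illustrate Windows-style quoting of the list:

import subprocess

ps_file_path = r'C:\tools\PsExec.exe'      # placeholder values throughout
rat_file_path = r'C:\staging\payload.exe'
args = [ps_file_path, '-accepteula', '-u', 'CORP' + '\\' + 'alice', '-p', 'placeholder',
        '-d', '-cv', rat_file_path, '\\\\' + 'HOST01']
print(subprocess.list2cmdline(args))
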
357a11adb50c38ebfc54870f2bc400ca0c43088da06eef9256be8ca87034d5e0
|
def __call__(self, audiodata):
'\n Args:\n audiodata: numpy ndarray of audio data with shape (N,).\n\n Returns:\n numpy ndarray X with shape (N, M). For each step in (0...N-1), the\n array X[step, :] contains the M log(1 + magnitude) components where\n M is equal to (int(winlen * sample_rate) / 2 + 1).\n '
D = librosa.stft(audiodata, n_fft=self._n_fft, hop_length=self._hop_length, win_length=self._n_fft, window='hamming')
(mag, _) = librosa.magphase(D)
return np.log1p(mag).T
|
Args:
audiodata: numpy ndarray of audio data with shape (N,).
Returns:
numpy ndarray X with shape (N, M). For each step in (0...N-1), the
array X[step, :] contains the M log(1 + magnitude) components where
M is equal to (int(winlen * sample_rate) / 2 + 1).
|
others/edge/speech_recognition/pytorch/src/deepspeech/data/preprocess.py
|
__call__
|
nv-eric-hw/inference
| 49 |
python
|
def __call__(self, audiodata):
'\n Args:\n audiodata: numpy ndarray of audio data with shape (N,).\n\n Returns:\n numpy ndarray X with shape (N, M). For each step in (0...N-1), the\n array X[step, :] contains the M log(1 + magnitude) components where\n M is equal to (int(winlen * sample_rate) / 2 + 1).\n '
D = librosa.stft(audiodata, n_fft=self._n_fft, hop_length=self._hop_length, win_length=self._n_fft, window='hamming')
(mag, _) = librosa.magphase(D)
return np.log1p(mag).T
|
def __call__(self, audiodata):
'\n Args:\n audiodata: numpy ndarray of audio data with shape (N,).\n\n Returns:\n numpy ndarray X with shape (N, M). For each step in (0...N-1), the\n array X[step, :] contains the M log(1 + magnitude) components where\n M is equal to (int(winlen * sample_rate) / 2 + 1).\n '
D = librosa.stft(audiodata, n_fft=self._n_fft, hop_length=self._hop_length, win_length=self._n_fft, window='hamming')
(mag, _) = librosa.magphase(D)
return np.log1p(mag).T<|docstring|>Args:
audiodata: numpy ndarray of audio data with shape (N,).
Returns:
numpy ndarray X with shape (N, M). For each step in (0...N-1), the
array X[step, :] contains the M log(1 + magnitude) components where
M is equal to (int(winlen * sample_rate) / 2 + 1).<|endoftext|>
|
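An end-to-end sketch of the same log(1 + magnitude) front end on a synthetic tone. It assumes librosa is installed and reproduces plausible window/hop settings directly instead of instantiating the class, since self._n_fft and self._hop_length are set elsewhere in the preprocess module:

import numpy as np
import librosa

sample_rate = 16000
n_fft = int(0.02 * sample_rate)        # assume a 20 ms analysis window
hop_length = int(0.01 * sample_rate)   # assume a 10 ms hop
t = np.linspace(0, 1, sample_rate, endpoint=False)
audiodata = (0.1 * np.sin(2 * np.pi * 440.0 * t)).astype(np.float32)

D = librosa.stft(audiodata, n_fft=n_fft, hop_length=hop_length,
                 win_length=n_fft, window='hamming')
mag, _ = librosa.magphase(D)
X = np.log1p(mag).T
print(X.shape)   # (steps, n_fft // 2 + 1)
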
ce0ebd19695fdd80d7bb584b8ada329e65dbc9fd7cf4c360804dea7b7f618c2c
|
def __call__(self, signal):
'\n Args:\n signal: numpy ndarray with shape (steps, features).\n\n Returns:\n numpy ndarray with shape:\n (steps, features * (n_context + 1 + n_context))\n '
(steps, features) = signal.shape
padding = np.zeros((self.n_context, features), dtype=signal.dtype)
signal = np.concatenate((padding, signal, padding))
window_size = ((self.n_context + 1) + self.n_context)
strided_signal = np.lib.stride_tricks.as_strided(signal, (steps, window_size, features), (signal.strides[0], signal.strides[0], signal.strides[1]), writeable=False)
return strided_signal.reshape(steps, (- 1)).copy()
|
Args:
signal: numpy ndarray with shape (steps, features).
Returns:
numpy ndarray with shape:
(steps, features * (n_context + 1 + n_context))
|
others/edge/speech_recognition/pytorch/src/deepspeech/data/preprocess.py
|
__call__
|
nv-eric-hw/inference
| 49 |
python
|
def __call__(self, signal):
'\n Args:\n signal: numpy ndarray with shape (steps, features).\n\n Returns:\n numpy ndarray with shape:\n (steps, features * (n_context + 1 + n_context))\n '
(steps, features) = signal.shape
padding = np.zeros((self.n_context, features), dtype=signal.dtype)
signal = np.concatenate((padding, signal, padding))
window_size = ((self.n_context + 1) + self.n_context)
strided_signal = np.lib.stride_tricks.as_strided(signal, (steps, window_size, features), (signal.strides[0], signal.strides[0], signal.strides[1]), writeable=False)
return strided_signal.reshape(steps, (- 1)).copy()
|
def __call__(self, signal):
'\n Args:\n signal: numpy ndarray with shape (steps, features).\n\n Returns:\n numpy ndarray with shape:\n (steps, features * (n_context + 1 + n_context))\n '
(steps, features) = signal.shape
padding = np.zeros((self.n_context, features), dtype=signal.dtype)
signal = np.concatenate((padding, signal, padding))
window_size = ((self.n_context + 1) + self.n_context)
strided_signal = np.lib.stride_tricks.as_strided(signal, (steps, window_size, features), (signal.strides[0], signal.strides[0], signal.strides[1]), writeable=False)
return strided_signal.reshape(steps, (- 1)).copy()<|docstring|>Args:
signal: numpy ndarray with shape (steps, features).
Returns:
numpy ndarray with shape:
(steps, features * (n_context + 1 + n_context))<|endoftext|>
|
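A standalone numpy sketch of the same zero-padded context-window trick, with toy shapes so the stride arithmetic is easy to check (n_context and the input values are arbitrary):

import numpy as np

n_context = 2
signal = np.arange(12, dtype=np.float32).reshape(4, 3)   # (steps=4, features=3)
steps, features = signal.shape

padding = np.zeros((n_context, features), dtype=signal.dtype)
padded = np.concatenate((padding, signal, padding))
window_size = n_context + 1 + n_context
strided = np.lib.stride_tricks.as_strided(
    padded,
    (steps, window_size, features),
    (padded.strides[0], padded.strides[0], padded.strides[1]),
    writeable=False)
out = strided.reshape(steps, -1).copy()
print(out.shape)   # (4, 15) == (steps, features * (n_context + 1 + n_context))
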
53a4863bd275689c13b7f6847de1607407f81ef83047f378a1d41922f8e8daa8
|
def __call__(self, tensor):
'\n Args:\n tensor: numpy ndarray\n '
return ((tensor - tensor.mean()) / tensor.std())
|
Args:
tensor: numpy ndarray
|
others/edge/speech_recognition/pytorch/src/deepspeech/data/preprocess.py
|
__call__
|
nv-eric-hw/inference
| 49 |
python
|
def __call__(self, tensor):
'\n Args:\n tensor: numpy ndarray\n '
return ((tensor - tensor.mean()) / tensor.std())
|
def __call__(self, tensor):
'\n Args:\n tensor: numpy ndarray\n '
return ((tensor - tensor.mean()) / tensor.std())<|docstring|>Args:
tensor: numpy ndarray<|endoftext|>
|
bb8f60c68a27d356250cdf9c7a89de1954cc5aaf2437dabb1d05337e8aeb7c2f
|
def AllocateNextFabricIndex(self):
' Allocate the next un-used fabric index.\n '
nextFabricIndex = 1
while (nextFabricIndex in FabricAdmin.activeFabricIndexList):
nextFabricIndex = (nextFabricIndex + 1)
return nextFabricIndex
|
Allocate the next un-used fabric index.
|
src/controller/python/chip/FabricAdmin.py
|
AllocateNextFabricIndex
|
adamb-q/connectedhomeip
| 4 |
python
|
def AllocateNextFabricIndex(self):
' \n '
nextFabricIndex = 1
while (nextFabricIndex in FabricAdmin.activeFabricIndexList):
nextFabricIndex = (nextFabricIndex + 1)
return nextFabricIndex
|
def AllocateNextFabricIndex(self):
' \n '
nextFabricIndex = 1
while (nextFabricIndex in FabricAdmin.activeFabricIndexList):
nextFabricIndex = (nextFabricIndex + 1)
return nextFabricIndex<|docstring|>Allocate the next un-used fabric index.<|endoftext|>
|
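The allocator above is simply "smallest positive integer not already in use"; a minimal standalone version of that scan, with an illustrative set of active indices:

def allocate_next(active):
    # Linear scan for the smallest positive integer missing from `active`.
    candidate = 1
    while candidate in active:
        candidate += 1
    return candidate

print(allocate_next({1, 2, 4}))   # 3
print(allocate_next(set()))       # 1
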
9877440ad89d85df2aceb8cfc1fcd7c09b0c7627895ba1c73fad96ed32152c1f
|
def AllocateNextFabricId(self):
' Allocate the next un-used fabric ID.\n '
nextFabricId = 1
while (nextFabricId in FabricAdmin.activeFabricIdList):
nextFabricId = (nextFabricId + 1)
return nextFabricId
|
Allocate the next un-used fabric ID.
|
src/controller/python/chip/FabricAdmin.py
|
AllocateNextFabricId
|
adamb-q/connectedhomeip
| 4 |
python
|
def AllocateNextFabricId(self):
' \n '
nextFabricId = 1
while (nextFabricId in FabricAdmin.activeFabricIdList):
nextFabricId = (nextFabricId + 1)
return nextFabricId
|
def AllocateNextFabricId(self):
' \n '
nextFabricId = 1
while (nextFabricId in FabricAdmin.activeFabricIdList):
nextFabricId = (nextFabricId + 1)
return nextFabricId<|docstring|>Allocate the next un-used fabric ID.<|endoftext|>
|
fdf9c9085833f72e32dec67679bd0b059c805aec399f4181577c38e6a0f46f7a
|
def __init__(self, rcac: bytes=None, icac: bytes=None, fabricIndex: int=None, fabricId: int=None):
' Creates a valid FabricAdmin object with valid RCAC/ICAC, and registers itself as an OperationalCredentialsDelegate\n for other parts of the system (notably, DeviceController) to vend NOCs.\n\n rcac, icac: Specify the RCAC and ICAC to be used with this fabric (not-supported). If not specified, an RCAC and ICAC will\n be automatically generated.\n\n fabricIndex: Local fabric index to be associated with this fabric. If omitted, one will be automatically assigned.\n fabricId: Local fabric ID to be associated with this fabric. If omitted, one will be automatically assigned.\n '
if ((rcac is not None) or (icac is not None)):
raise ValueError('Providing valid rcac/icac values is not supported right now!')
if (fabricId is None):
self._fabricId = self.AllocateNextFabricId()
else:
if (fabricId in FabricAdmin.activeFabricIdList):
raise ValueError(f'FabricId {fabricId} is already being managed by an existing FabricAdmin object!')
self._fabricId = fabricId
if (fabricIndex is None):
self._fabricIndex = self.AllocateNextFabricIndex()
else:
if (fabricIndex in FabricAdmin.activeFabricIndexList):
raise ValueError(f'FabricIndex {fabricIndex} is already being managed by an existing FabricAdmin object!')
self._fabricIndex = fabricIndex
FabricAdmin.activeFabricIdList.add(self._fabricId)
FabricAdmin.activeFabricIndexList.add(self._fabricIndex)
print(f'New FabricAdmin: FabricId: {self._fabricId}({self._fabricIndex})')
self._handle.pychip_OpCreds_InitializeDelegate.restype = c_void_p
self.closure = builtins.chipStack.Call((lambda : self._handle.pychip_OpCreds_InitializeDelegate(ctypes.py_object(self), ctypes.c_uint32(self._fabricIndex))))
if (self.closure is None):
raise ValueError('Encountered error initializing OpCreds adapter')
try:
adminList = builtins.chipStack.GetStorageManager().GetReplKey('fabricAdmins')
except KeyError:
adminList = {str(self._fabricIndex): {'fabricId': self._fabricId}}
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
adminList[str(self._fabricIndex)] = {'fabricId': self._fabricId}
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
self._isActive = True
self.nextControllerId = 1
FabricAdmin.activeAdmins.add(self)
|
Creates a valid FabricAdmin object with valid RCAC/ICAC, and registers itself as an OperationalCredentialsDelegate
for other parts of the system (notably, DeviceController) to vend NOCs.
rcac, icac: Specify the RCAC and ICAC to be used with this fabric (not-supported). If not specified, an RCAC and ICAC will
be automatically generated.
fabricIndex: Local fabric index to be associated with this fabric. If omitted, one will be automatically assigned.
fabricId: Local fabric ID to be associated with this fabric. If omitted, one will be automatically assigned.
|
src/controller/python/chip/FabricAdmin.py
|
__init__
|
adamb-q/connectedhomeip
| 4 |
python
|
def __init__(self, rcac: bytes=None, icac: bytes=None, fabricIndex: int=None, fabricId: int=None):
' Creates a valid FabricAdmin object with valid RCAC/ICAC, and registers itself as an OperationalCredentialsDelegate\n for other parts of the system (notably, DeviceController) to vend NOCs.\n\n rcac, icac: Specify the RCAC and ICAC to be used with this fabric (not-supported). If not specified, an RCAC and ICAC will\n be automatically generated.\n\n fabricIndex: Local fabric index to be associated with this fabric. If omitted, one will be automatically assigned.\n fabricId: Local fabric ID to be associated with this fabric. If omitted, one will be automatically assigned.\n '
if ((rcac is not None) or (icac is not None)):
raise ValueError('Providing valid rcac/icac values is not supported right now!')
if (fabricId is None):
self._fabricId = self.AllocateNextFabricId()
else:
if (fabricId in FabricAdmin.activeFabricIdList):
raise ValueError(f'FabricId {fabricId} is already being managed by an existing FabricAdmin object!')
self._fabricId = fabricId
if (fabricIndex is None):
self._fabricIndex = self.AllocateNextFabricIndex()
else:
if (fabricIndex in FabricAdmin.activeFabricIndexList):
raise ValueError(f'FabricIndex {fabricIndex} is already being managed by an existing FabricAdmin object!')
self._fabricIndex = fabricIndex
FabricAdmin.activeFabricIdList.add(self._fabricId)
FabricAdmin.activeFabricIndexList.add(self._fabricIndex)
print(f'New FabricAdmin: FabricId: {self._fabricId}({self._fabricIndex})')
self._handle.pychip_OpCreds_InitializeDelegate.restype = c_void_p
self.closure = builtins.chipStack.Call((lambda : self._handle.pychip_OpCreds_InitializeDelegate(ctypes.py_object(self), ctypes.c_uint32(self._fabricIndex))))
if (self.closure is None):
raise ValueError('Encountered error initializing OpCreds adapter')
try:
adminList = builtins.chipStack.GetStorageManager().GetReplKey('fabricAdmins')
except KeyError:
adminList = {str(self._fabricIndex): {'fabricId': self._fabricId}}
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
adminList[str(self._fabricIndex)] = {'fabricId': self._fabricId}
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
self._isActive = True
self.nextControllerId = 1
FabricAdmin.activeAdmins.add(self)
|
def __init__(self, rcac: bytes=None, icac: bytes=None, fabricIndex: int=None, fabricId: int=None):
' Creates a valid FabricAdmin object with valid RCAC/ICAC, and registers itself as an OperationalCredentialsDelegate\n for other parts of the system (notably, DeviceController) to vend NOCs.\n\n rcac, icac: Specify the RCAC and ICAC to be used with this fabric (not-supported). If not specified, an RCAC and ICAC will\n be automatically generated.\n\n fabricIndex: Local fabric index to be associated with this fabric. If omitted, one will be automatically assigned.\n fabricId: Local fabric ID to be associated with this fabric. If omitted, one will be automatically assigned.\n '
if ((rcac is not None) or (icac is not None)):
raise ValueError('Providing valid rcac/icac values is not supported right now!')
if (fabricId is None):
self._fabricId = self.AllocateNextFabricId()
else:
if (fabricId in FabricAdmin.activeFabricIdList):
raise ValueError(f'FabricId {fabricId} is already being managed by an existing FabricAdmin object!')
self._fabricId = fabricId
if (fabricIndex is None):
self._fabricIndex = self.AllocateNextFabricIndex()
else:
if (fabricIndex in FabricAdmin.activeFabricIndexList):
raise ValueError(f'FabricIndex {fabricIndex} is already being managed by an existing FabricAdmin object!')
self._fabricIndex = fabricIndex
FabricAdmin.activeFabricIdList.add(self._fabricId)
FabricAdmin.activeFabricIndexList.add(self._fabricIndex)
print(f'New FabricAdmin: FabricId: {self._fabricId}({self._fabricIndex})')
self._handle.pychip_OpCreds_InitializeDelegate.restype = c_void_p
self.closure = builtins.chipStack.Call((lambda : self._handle.pychip_OpCreds_InitializeDelegate(ctypes.py_object(self), ctypes.c_uint32(self._fabricIndex))))
if (self.closure is None):
raise ValueError('Encountered error initializing OpCreds adapter')
try:
adminList = builtins.chipStack.GetStorageManager().GetReplKey('fabricAdmins')
except KeyError:
adminList = {str(self._fabricIndex): {'fabricId': self._fabricId}}
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
adminList[str(self._fabricIndex)] = {'fabricId': self._fabricId}
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
self._isActive = True
self.nextControllerId = 1
FabricAdmin.activeAdmins.add(self)<|docstring|>Creates a valid FabricAdmin object with valid RCAC/ICAC, and registers itself as an OperationalCredentialsDelegate
for other parts of the system (notably, DeviceController) to vend NOCs.
rcac, icac: Specify the RCAC and ICAC to be used with this fabric (not-supported). If not specified, an RCAC and ICAC will
be automatically generated.
fabricIndex: Local fabric index to be associated with this fabric. If omitted, one will be automatically assigned.
fabricId: Local fabric ID to be associated with this fabric. If omitted, one will be automatically assigned.<|endoftext|>
|
58b435e998f44fd6708da673710d1234b305581defc5382b437768eb7bd25e0e
|
def NewController(self, nodeId: int=None, paaTrustStorePath: str='', useTestCommissioner: bool=False):
' Vend a new controller on this fabric seeded with the right fabric details.\n '
if (not self._isActive):
raise RuntimeError(f'FabricAdmin object was previously shutdown and is no longer valid!')
if (nodeId is None):
nodeId = self.nextControllerId
self.nextControllerId = (self.nextControllerId + 1)
print(f'Allocating new controller with FabricId: {self._fabricId}({self._fabricIndex}), NodeId: {nodeId}')
controller = ChipDeviceCtrl.ChipDeviceController(self.closure, self._fabricId, self._fabricIndex, nodeId, paaTrustStorePath, useTestCommissioner)
return controller
|
Vend a new controller on this fabric seeded with the right fabric details.
|
src/controller/python/chip/FabricAdmin.py
|
NewController
|
adamb-q/connectedhomeip
| 4 |
python
|
def NewController(self, nodeId: int=None, paaTrustStorePath: str='', useTestCommissioner: bool=False):
' \n '
if (not self._isActive):
raise RuntimeError(f'FabricAdmin object was previously shutdown and is no longer valid!')
if (nodeId is None):
nodeId = self.nextControllerId
self.nextControllerId = (self.nextControllerId + 1)
print(f'Allocating new controller with FabricId: {self._fabricId}({self._fabricIndex}), NodeId: {nodeId}')
controller = ChipDeviceCtrl.ChipDeviceController(self.closure, self._fabricId, self._fabricIndex, nodeId, paaTrustStorePath, useTestCommissioner)
return controller
|
def NewController(self, nodeId: int=None, paaTrustStorePath: str='', useTestCommissioner: bool=False):
' \n '
if (not self._isActive):
raise RuntimeError(f'FabricAdmin object was previously shutdown and is no longer valid!')
if (nodeId is None):
nodeId = self.nextControllerId
self.nextControllerId = (self.nextControllerId + 1)
print(f'Allocating new controller with FabricId: {self._fabricId}({self._fabricIndex}), NodeId: {nodeId}')
controller = ChipDeviceCtrl.ChipDeviceController(self.closure, self._fabricId, self._fabricIndex, nodeId, paaTrustStorePath, useTestCommissioner)
return controller<|docstring|>Vend a new controller on this fabric seeded with the right fabric details.<|endoftext|>
|
8c58f7bd3a76213356f866f7b2a63092d591c65830307b4ce826c1671794a9cf
|
def ShutdownAll():
' Shuts down all active fabrics, but without deleting them from storage.\n '
activeAdmins = copy.copy(FabricAdmin.activeAdmins)
for admin in activeAdmins:
admin.Shutdown(False)
FabricAdmin.activeAdmins.clear()
|
Shuts down all active fabrics, but without deleting them from storage.
|
src/controller/python/chip/FabricAdmin.py
|
ShutdownAll
|
adamb-q/connectedhomeip
| 4 |
python
|
def ShutdownAll():
' \n '
activeAdmins = copy.copy(FabricAdmin.activeAdmins)
for admin in activeAdmins:
admin.Shutdown(False)
FabricAdmin.activeAdmins.clear()
|
def ShutdownAll():
' \n '
activeAdmins = copy.copy(FabricAdmin.activeAdmins)
for admin in activeAdmins:
admin.Shutdown(False)
FabricAdmin.activeAdmins.clear()<|docstring|>Shuts down all active fabrics, but without deleting them from storage.<|endoftext|>
|
e41ba3a9af60d536288dbefd08173d30f974afceae2c96deac233c3e45c8bfd9
|
def Shutdown(self, deleteFromStorage: bool=True):
" Shutdown this fabric and free up its resources. This is important since relying\n solely on the destructor will not guarantee relishining of C++-side resources.\n\n deleteFromStorage: Whether to delete this fabric's details from persistent storage.\n "
if self._isActive:
builtins.chipStack.Call((lambda : self._handle.pychip_OpCreds_FreeDelegate(ctypes.c_void_p(self.closure))))
FabricAdmin.activeFabricIdList.remove(self._fabricId)
FabricAdmin.activeFabricIndexList.remove(self._fabricIndex)
if deleteFromStorage:
adminList = builtins.chipStack.GetStorageManager().GetReplKey('fabricAdmins')
del adminList[str(self._fabricIndex)]
if (len(adminList) == 0):
adminList = None
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
FabricAdmin.activeAdmins.remove(self)
self._isActive = False
|
Shutdown this fabric and free up its resources. This is important since relying
solely on the destructor will not guarantee relinquishing of C++-side resources.
deleteFromStorage: Whether to delete this fabric's details from persistent storage.
|
src/controller/python/chip/FabricAdmin.py
|
Shutdown
|
adamb-q/connectedhomeip
| 4 |
python
|
def Shutdown(self, deleteFromStorage: bool=True):
" Shutdown this fabric and free up its resources. This is important since relying\n solely on the destructor will not guarantee relishining of C++-side resources.\n\n deleteFromStorage: Whether to delete this fabric's details from persistent storage.\n "
if self._isActive:
builtins.chipStack.Call((lambda : self._handle.pychip_OpCreds_FreeDelegate(ctypes.c_void_p(self.closure))))
FabricAdmin.activeFabricIdList.remove(self._fabricId)
FabricAdmin.activeFabricIndexList.remove(self._fabricIndex)
if deleteFromStorage:
adminList = builtins.chipStack.GetStorageManager().GetReplKey('fabricAdmins')
del adminList[str(self._fabricIndex)]
if (len(adminList) == 0):
adminList = None
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
FabricAdmin.activeAdmins.remove(self)
self._isActive = False
|
def Shutdown(self, deleteFromStorage: bool=True):
" Shutdown this fabric and free up its resources. This is important since relying\n solely on the destructor will not guarantee relishining of C++-side resources.\n\n deleteFromStorage: Whether to delete this fabric's details from persistent storage.\n "
if self._isActive:
builtins.chipStack.Call((lambda : self._handle.pychip_OpCreds_FreeDelegate(ctypes.c_void_p(self.closure))))
FabricAdmin.activeFabricIdList.remove(self._fabricId)
FabricAdmin.activeFabricIndexList.remove(self._fabricIndex)
if deleteFromStorage:
adminList = builtins.chipStack.GetStorageManager().GetReplKey('fabricAdmins')
del adminList[str(self._fabricIndex)]
if (len(adminList) == 0):
adminList = None
builtins.chipStack.GetStorageManager().SetReplKey('fabricAdmins', adminList)
FabricAdmin.activeAdmins.remove(self)
self._isActive = False<|docstring|>Shutdown this fabric and free up its resources. This is important since relying
solely on the destructor will not guarantee relishining of C++-side resources.
deleteFromStorage: Whether to delete this fabric's details from persistent storage.<|endoftext|>
|
bad3c6cee652fa2aac746a704bc2929bc3ddd971bca0c3b66247b7578d5b41a5
|
def find_apple_crash_report_referenced_images(binary_images, threads):
'Given some binary images from an apple crash report and a thread\n list this returns a list of image UUIDs to load.\n '
image_map = {}
for image in binary_images:
image_map[image['image_addr']] = image['uuid']
to_load = set()
for thread in threads:
if ('backtrace' not in thread):
continue
for frame in thread['backtrace']['contents']:
img_uuid = image_map.get(frame['object_addr'])
if (img_uuid is not None):
to_load.add(img_uuid)
return list(to_load)
|
Given some binary images from an apple crash report and a thread
list this returns a list of image UUIDs to load.
|
src/sentry/lang/native/utils.py
|
find_apple_crash_report_referenced_images
|
mitsuhiko/sentry
| 4 |
python
|
def find_apple_crash_report_referenced_images(binary_images, threads):
'Given some binary images from an apple crash report and a thread\n list this returns a list of image UUIDs to load.\n '
image_map = {}
for image in binary_images:
image_map[image['image_addr']] = image['uuid']
to_load = set()
for thread in threads:
if ('backtrace' not in thread):
continue
for frame in thread['backtrace']['contents']:
img_uuid = image_map.get(frame['object_addr'])
if (img_uuid is not None):
to_load.add(img_uuid)
return list(to_load)
|
def find_apple_crash_report_referenced_images(binary_images, threads):
'Given some binary images from an apple crash report and a thread\n list this returns a list of image UUIDs to load.\n '
image_map = {}
for image in binary_images:
image_map[image['image_addr']] = image['uuid']
to_load = set()
for thread in threads:
if ('backtrace' not in thread):
continue
for frame in thread['backtrace']['contents']:
img_uuid = image_map.get(frame['object_addr'])
if (img_uuid is not None):
to_load.add(img_uuid)
return list(to_load)<|docstring|>Given some binary images from an apple crash report and a thread
list this returns a list of image UUIDs to load.<|endoftext|>
|
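A tiny self-contained run of the lookup logic above, using made-up addresses and UUIDs, to show which image UUIDs end up in the returned list; the function is repeated in condensed form so the snippet runs on its own:

def referenced_images(binary_images, threads):
    image_map = {img['image_addr']: img['uuid'] for img in binary_images}
    to_load = set()
    for thread in threads:
        if 'backtrace' not in thread:
            continue
        for frame in thread['backtrace']['contents']:
            uuid = image_map.get(frame['object_addr'])
            if uuid is not None:
                to_load.add(uuid)
    return list(to_load)

binary_images = [{'image_addr': '0x1000', 'uuid': 'aaaa-1111'},
                 {'image_addr': '0x2000', 'uuid': 'bbbb-2222'}]
threads = [{'backtrace': {'contents': [{'object_addr': '0x1000'},
                                       {'object_addr': '0x9999'}]}},
           {'crashed': True}]   # no 'backtrace' key, so it is skipped
print(referenced_images(binary_images, threads))   # ['aaaa-1111']
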
8f1166ed5b2a07a9b4632a5bd81479be8e7ea1f2855f9871cf1bab04c33e5cf9
|
def find_all_stacktraces(data):
'Given a data dictionary from an event this returns all\n relevant stacktraces in a list.\n '
rv = []
exc_container = data.get('sentry.interfaces.Exception')
if exc_container:
for exc in exc_container['values']:
stacktrace = exc.get('stacktrace')
if stacktrace:
rv.append(stacktrace)
stacktrace = data.get('sentry.interfaces.Stacktrace')
if stacktrace:
rv.append(stacktrace)
threads = data.get('threads')
if threads:
for thread in threads['values']:
stacktrace = thread.get('stacktrace')
if stacktrace:
rv.append(stacktrace)
return rv
|
Given a data dictionary from an event this returns all
relevant stacktraces in a list.
|
src/sentry/lang/native/utils.py
|
find_all_stacktraces
|
mitsuhiko/sentry
| 4 |
python
|
def find_all_stacktraces(data):
'Given a data dictionary from an event this returns all\n relevant stacktraces in a list.\n '
rv = []
exc_container = data.get('sentry.interfaces.Exception')
if exc_container:
for exc in exc_container['values']:
stacktrace = exc.get('stacktrace')
if stacktrace:
rv.append(stacktrace)
stacktrace = data.get('sentry.interfaces.Stacktrace')
if stacktrace:
rv.append(stacktrace)
threads = data.get('threads')
if threads:
for thread in threads['values']:
stacktrace = thread.get('stacktrace')
if stacktrace:
rv.append(stacktrace)
return rv
|
def find_all_stacktraces(data):
'Given a data dictionary from an event this returns all\n relevant stacktraces in a list.\n '
rv = []
exc_container = data.get('sentry.interfaces.Exception')
if exc_container:
for exc in exc_container['values']:
stacktrace = exc.get('stacktrace')
if stacktrace:
rv.append(stacktrace)
stacktrace = data.get('sentry.interfaces.Stacktrace')
if stacktrace:
rv.append(stacktrace)
threads = data.get('threads')
if threads:
for thread in threads['values']:
stacktrace = thread.get('stacktrace')
if stacktrace:
rv.append(stacktrace)
return rv<|docstring|>Given a data dictionary from an event this returns all
relevant stacktraces in a list.<|endoftext|>
|
5fdf2a306f7337a4543b03c9b30d55b545c60f2136c8a2c897236365fd49170a
|
def shquote(s):
'Return a shell-escaped version of the string *s*.'
if (not s):
return "''"
if (_find_unsafe(s) is None):
return s
return (("'" + s.replace("'", '\'"\'"\'')) + "'")
|
Return a shell-escaped version of the string *s*.
|
docker/contrail/contrail-database/my_init.py
|
shquote
|
kklimonda/kolla
| 0 |
python
|
def shquote(s):
if (not s):
        return "''"
if (_find_unsafe(s) is None):
return s
return (("'" + s.replace("'", '\'"\'"\)) + "'")
|
def shquote(s):
if (not s):
        return "''"
if (_find_unsafe(s) is None):
return s
return (("'" + s.replace("'", '\'"\'"\)) + "'")<|docstring|>Return a shell-escaped version of the string *s*.<|endoftext|>
|
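A quick check of the escaping behaviour next to the stdlib equivalent shlex.quote. The record only shows the function, so _find_unsafe is re-created here the way CPython's shlex module defines it; the sample strings are arbitrary:

import re
import shlex

_find_unsafe = re.compile(r'[^\w@%+=:,./-]', re.ASCII).search

def shquote(s):
    if not s:
        return "''"
    if _find_unsafe(s) is None:
        return s
    return "'" + s.replace("'", "'\"'\"'") + "'"

for s in ['plain.txt', "it's here", 'a b; echo hi']:
    print(shquote(s), '|', shlex.quote(s))
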
857c4dfdec10c78d019943ce5419a0d7e3b89c52ea743559e651bb42f513e83f
|
def onedrive_clean(fn):
' Removes characters unsafe for OneDrive from string '
for unsupported_char in UNSUPPORTED_CHARACTERS:
new_path = fn.translate({ord(unsupported_char): None})
return new_path
|
Removes characters unsafe for OneDrive from string
|
main.py
|
onedrive_clean
|
urbanblight/google2onedrive
| 0 |
python
|
def onedrive_clean(fn):
' '
for unsupported_char in UNSUPPORTED_CHARACTERS:
new_path = fn.translate({ord(unsupported_char): None})
return new_path
|
def onedrive_clean(fn):
' '
for unsupported_char in UNSUPPORTED_CHARACTERS:
new_path = fn.translate({ord(unsupported_char): None})
return new_path<|docstring|>Removes characters unsafe for OneDrive from string<|endoftext|>
|
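str.translate with a {codepoint: None} map deletes characters. Note that the loop in the record re-translates the original fn on every pass, so only the last character in UNSUPPORTED_CHARACTERS is actually removed; the sketch below, with a stand-in character set since the real constant lives elsewhere in the repository, builds one table and strips them all at once:

UNSUPPORTED_CHARACTERS = ['?', '*', ':']   # stand-in for the repository's constant

def clean_all(fn):
    # One translation table covering every unsupported character.
    table = {ord(ch): None for ch in UNSUPPORTED_CHARACTERS}
    return fn.translate(table)

print(clean_all('report: draft*?.txt'))   # 'report draft.txt'
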
d030b29938b3ba35d411618aa828f223d894a8b5943f67d4bb7483b80728ad20
|
def onedrive_safe(fn):
' Returns false for file names that unsafe for OneDrive '
for s in UNSUPPORTED_EXTENSIONS:
if (s in fn):
return False
return True
|
Returns false for file names that are unsafe for OneDrive
|
main.py
|
onedrive_safe
|
urbanblight/google2onedrive
| 0 |
python
|
def onedrive_safe(fn):
' '
for s in UNSUPPORTED_EXTENSIONS:
if (s in fn):
return False
return True
|
def onedrive_safe(fn):
' '
for s in UNSUPPORTED_EXTENSIONS:
if (s in fn):
return False
return True<|docstring|>Returns false for file names that unsafe for OneDrive<|endoftext|>
|
f06720190945c26396860e2a6e03fd18c952635b3448200f995028ce25597298
|
def loop_dir(local_path, onedrive_path, midstring=''):
' Recurses through dirs and copies '
for fn in os.listdir(local_path):
if onedrive_safe(fn):
src = os.path.join(local_path, fn)
if (not os.path.isdir(src)):
dst = ((onedrive_path + onedrive_clean(fn)) if (midstring == '') else (((onedrive_path + midstring) + '/') + onedrive_clean(fn)))
logger.info('Copying to: {}'.format(dst))
os.makedirs(os.path.dirname(dst), exist_ok=True)
shutil.copy(src, dst)
else:
logger.debug('Recursing directory: {}'.format({src}))
loop_dir(src, onedrive_path, (fn if (midstring == '') else ((midstring + '/') + fn)))
else:
logger.debug('skipping GoogleDrive file: {}'.format(fn))
|
Recurses through dirs and copies
|
main.py
|
loop_dir
|
urbanblight/google2onedrive
| 0 |
python
|
def loop_dir(local_path, onedrive_path, midstring=''):
' '
for fn in os.listdir(local_path):
if onedrive_safe(fn):
src = os.path.join(local_path, fn)
if (not os.path.isdir(src)):
                dst = ((onedrive_path + onedrive_clean(fn)) if (midstring == '') else (((onedrive_path + midstring) + '/') + onedrive_clean(fn)))
logger.info('Copying to: {}'.format(dst))
os.makedirs(os.path.dirname(dst), exist_ok=True)
shutil.copy(src, dst)
else:
logger.debug('Recursing directory: {}'.format({src}))
                loop_dir(src, onedrive_path, (fn if (midstring == '') else ((midstring + '/') + fn)))
else:
logger.debug('skipping GoogleDrive file: {}'.format(fn))
|
def loop_dir(local_path, onedrive_path, midstring=''):
' '
for fn in os.listdir(local_path):
if onedrive_safe(fn):
src = os.path.join(local_path, fn)
if (not os.path.isdir(src)):
                dst = ((onedrive_path + onedrive_clean(fn)) if (midstring == '') else (((onedrive_path + midstring) + '/') + onedrive_clean(fn)))
logger.info('Copying to: {}'.format(dst))
os.makedirs(os.path.dirname(dst), exist_ok=True)
shutil.copy(src, dst)
else:
logger.debug('Recursing directory: {}'.format({src}))
                loop_dir(src, onedrive_path, (fn if (midstring == '') else ((midstring + '/') + fn)))
else:
logger.debug('skipping GoogleDrive file: {}'.format(fn))<|docstring|>Recurses through dirs and copies<|endoftext|>
|
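The recursion in loop_dir mirrors what os.walk provides out of the box; a hedged stdlib sketch of the same copy flow (filter unsafe names, clean the destination name, make directories, copy), with placeholder paths and the record's onedrive_safe/onedrive_clean passed in as callables:

import os
import shutil

def copy_tree(local_path, onedrive_path, is_safe, clean):
    # Walk the source tree, skip unsafe names, and copy each file to a cleaned destination path.
    for root, _dirs, files in os.walk(local_path):
        rel = os.path.relpath(root, local_path)
        for fn in files:
            if not is_safe(fn):
                continue
            dst_dir = onedrive_path if rel == '.' else os.path.join(onedrive_path, rel)
            os.makedirs(dst_dir, exist_ok=True)
            shutil.copy(os.path.join(root, fn), os.path.join(dst_dir, clean(fn)))

# copy_tree('/tmp/gdrive_export', '/tmp/onedrive_upload', onedrive_safe, onedrive_clean)
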
de53bef66abad9393fb17218dec81204f07913fbb4777660c12863f0be79b687
|
def test_do_change_is_admin(self) -> None:
'\n Ensures change_is_admin raises an AssertionError when invalid permissions\n are provided to it.\n '
user_profile = self.example_user('hamlet')
do_change_is_admin(user_profile, True)
do_change_is_admin(user_profile, True, permission='administer')
with self.assertRaises(AssertionError):
do_change_is_admin(user_profile, True, permission='totally-not-valid-perm')
|
Ensures change_is_admin raises an AssertionError when invalid permissions
are provided to it.
|
zerver/tests/test_users.py
|
test_do_change_is_admin
|
stableapple/zulip
| 1 |
python
|
def test_do_change_is_admin(self) -> None:
'\n Ensures change_is_admin raises an AssertionError when invalid permissions\n are provided to it.\n '
user_profile = self.example_user('hamlet')
do_change_is_admin(user_profile, True)
do_change_is_admin(user_profile, True, permission='administer')
with self.assertRaises(AssertionError):
do_change_is_admin(user_profile, True, permission='totally-not-valid-perm')
|
def test_do_change_is_admin(self) -> None:
'\n Ensures change_is_admin raises an AssertionError when invalid permissions\n are provided to it.\n '
user_profile = self.example_user('hamlet')
do_change_is_admin(user_profile, True)
do_change_is_admin(user_profile, True, permission='administer')
with self.assertRaises(AssertionError):
do_change_is_admin(user_profile, True, permission='totally-not-valid-perm')<|docstring|>Ensures change_is_admin raises an AssertionError when invalid permissions
are provided to it.<|endoftext|>
|
7e35c87caed6514c0ef6df54748af419c90e1347845775110f5442c98d85f029
|
def test_cache_behavior(self) -> None:
'Tests whether fetching a user object the normal way, with\n `get_user`, makes 1 cache query and 1 database query.\n '
realm = get_realm('zulip')
email = self.example_email('hamlet')
with queries_captured() as queries:
with simulated_empty_cache() as cache_queries:
user_profile = get_user(email, realm)
self.assert_length(queries, 1)
self.assert_length(cache_queries, 1)
self.assertEqual(user_profile.email, email)
|
Tests whether fetching a user object the normal way, with
`get_user`, makes 1 cache query and 1 database query.
|
zerver/tests/test_users.py
|
test_cache_behavior
|
stableapple/zulip
| 1 |
python
|
def test_cache_behavior(self) -> None:
'Tests whether fetching a user object the normal way, with\n `get_user`, makes 1 cache query and 1 database query.\n '
realm = get_realm('zulip')
email = self.example_email('hamlet')
with queries_captured() as queries:
with simulated_empty_cache() as cache_queries:
user_profile = get_user(email, realm)
self.assert_length(queries, 1)
self.assert_length(cache_queries, 1)
self.assertEqual(user_profile.email, email)
|
def test_cache_behavior(self) -> None:
'Tests whether fetching a user object the normal way, with\n `get_user`, makes 1 cache query and 1 database query.\n '
realm = get_realm('zulip')
email = self.example_email('hamlet')
with queries_captured() as queries:
with simulated_empty_cache() as cache_queries:
user_profile = get_user(email, realm)
self.assert_length(queries, 1)
self.assert_length(cache_queries, 1)
self.assertEqual(user_profile.email, email)<|docstring|>Tests whether fetching a user object the normal way, with
`get_user`, makes 1 cache query and 1 database query.<|endoftext|>
|
42ad6b50020403976c0d4515108b98c90e335b261283323d0321497bb9cb1b6a
|
def test_api_get_empty_profile(self) -> None:
'\n Ensure GET /users/me returns a max message id and returns successfully\n '
json = self.common_get_profile('othello')
self.assertEqual(json['pointer'], (- 1))
|
Ensure GET /users/me returns a max message id and returns successfully
|
zerver/tests/test_users.py
|
test_api_get_empty_profile
|
stableapple/zulip
| 1 |
python
|
def test_api_get_empty_profile(self) -> None:
'\n \n '
json = self.common_get_profile('othello')
self.assertEqual(json['pointer'], (- 1))
|
def test_api_get_empty_profile(self) -> None:
'\n \n '
json = self.common_get_profile('othello')
self.assertEqual(json['pointer'], (- 1))<|docstring|>Ensure GET /users/me returns a max message id and returns successfully<|endoftext|>
|
f542679d2a4af55c9e56d979b8e45a3ba21a6b5a321923227dd4df4d0816b54b
|
def test_profile_with_pointer(self) -> None:
'\n Ensure GET /users/me returns a proper pointer id after the pointer is updated\n '
id1 = self.send_stream_message(self.example_email('othello'), 'Verona')
id2 = self.send_stream_message(self.example_email('othello'), 'Verona')
json = self.common_get_profile('hamlet')
self.common_update_pointer(self.example_email('hamlet'), id2)
json = self.common_get_profile('hamlet')
self.assertEqual(json['pointer'], id2)
self.common_update_pointer(self.example_email('hamlet'), id1)
json = self.common_get_profile('hamlet')
self.assertEqual(json['pointer'], id2)
result = self.client_post('/json/users/me/pointer', {'pointer': 99999999})
self.assert_json_error(result, 'Invalid message ID')
|
Ensure GET /users/me returns a proper pointer id after the pointer is updated
|
zerver/tests/test_users.py
|
test_profile_with_pointer
|
stableapple/zulip
| 1 |
python
|
def test_profile_with_pointer(self) -> None:
'\n \n '
id1 = self.send_stream_message(self.example_email('othello'), 'Verona')
id2 = self.send_stream_message(self.example_email('othello'), 'Verona')
json = self.common_get_profile('hamlet')
self.common_update_pointer(self.example_email('hamlet'), id2)
json = self.common_get_profile('hamlet')
self.assertEqual(json['pointer'], id2)
self.common_update_pointer(self.example_email('hamlet'), id1)
json = self.common_get_profile('hamlet')
self.assertEqual(json['pointer'], id2)
result = self.client_post('/json/users/me/pointer', {'pointer': 99999999})
self.assert_json_error(result, 'Invalid message ID')
|
def test_profile_with_pointer(self) -> None:
'\n \n '
id1 = self.send_stream_message(self.example_email('othello'), 'Verona')
id2 = self.send_stream_message(self.example_email('othello'), 'Verona')
json = self.common_get_profile('hamlet')
self.common_update_pointer(self.example_email('hamlet'), id2)
json = self.common_get_profile('hamlet')
self.assertEqual(json['pointer'], id2)
self.common_update_pointer(self.example_email('hamlet'), id1)
json = self.common_get_profile('hamlet')
self.assertEqual(json['pointer'], id2)
result = self.client_post('/json/users/me/pointer', {'pointer': 99999999})
self.assert_json_error(result, 'Invalid message ID')<|docstring|>Ensure GET /users/me returns a proper pointer id after the pointer is updated<|endoftext|>
|
bda52d9c4ca655b6978d5871c0d73a57544043fd795a4a620f8b0bd95388d7ce
|
@property
def all_observation_spaces_equal(self) -> bool:
'Check if observation spaces equal.\n\n # Returns\n\n True if all Tasks that can be sampled by this sampler have the\n same observation space. Otherwise False.\n '
return True
|
Check if observation spaces are equal.
# Returns
True if all Tasks that can be sampled by this sampler have the
same observation space. Otherwise False.
|
allenact_plugins/manipulathor_plugin/manipulathor_task_samplers.py
|
all_observation_spaces_equal
|
brandontrabucco/allenact
| 187 |
python
|
@property
def all_observation_spaces_equal(self) -> bool:
'Check if observation spaces equal.\n\n # Returns\n\n True if all Tasks that can be sampled by this sampler have the\n same observation space. Otherwise False.\n '
return True
|
@property
def all_observation_spaces_equal(self) -> bool:
'Check if observation spaces equal.\n\n # Returns\n\n True if all Tasks that can be sampled by this sampler have the\n same observation space. Otherwise False.\n '
return True<|docstring|>Check if observation spaces equal.
# Returns
True if all Tasks that can be sampled by this sampler have the
same observation space. Otherwise False.<|endoftext|>
|
bd38d19b167661dd17cd04217b0bfda4be0017700dba97efaf89aa7efbda83e2
|
@property
def length(self) -> Union[(int, float)]:
"Length.\n\n # Returns\n\n Number of total tasks remaining that can be sampled. Can be float('inf').\n "
return ((self.total_unique - self.sampler_index) if (self.sampler_mode != 'train') else (float('inf') if (self.max_tasks is None) else self.max_tasks))
|
Length.
# Returns
Number of total tasks remaining that can be sampled. Can be float('inf').
|
allenact_plugins/manipulathor_plugin/manipulathor_task_samplers.py
|
length
|
brandontrabucco/allenact
| 187 |
python
|
@property
def length(self) -> Union[(int, float)]:
"Length.\n\n # Returns\n\n Number of total tasks remaining that can be sampled. Can be float('inf').\n "
return ((self.total_unique - self.sampler_index) if (self.sampler_mode != 'train') else (float('inf') if (self.max_tasks is None) else self.max_tasks))
|
@property
def length(self) -> Union[(int, float)]:
"Length.\n\n # Returns\n\n Number of total tasks remaining that can be sampled. Can be float('inf').\n "
return ((self.total_unique - self.sampler_index) if (self.sampler_mode != 'train') else (float('inf') if (self.max_tasks is None) else self.max_tasks))<|docstring|>Length.
# Returns
Number of total tasks remaining that can be sampled. Can be float('inf').<|endoftext|>
|
273528f57283ebd9b796e77777b6c0165485c00e5a5700f6ed51f745c9cf8d53
|
def fetch_parquet_dts():
'Computes dates for which Parquet files need to be created\n '
ret = []
pfxDT = datetime(2020, 1, 1)
utcNow = datetime.utcnow()
dts = []
while ((pfxDT + timedelta(days=1, hours=8, minutes=10)) < utcNow):
dts.append(pfxDT)
pfxDT += timedelta(days=1)
with DBConn() as conn:
exD = dbtables.PqDates.select_existing_pqdates(conn)
for dt in dts:
if (dt.date() not in exD):
ret.append(dt)
return ret
|
Computes dates for which Parquet files need to be created
|
airtasks/spk_writeparquets.py
|
fetch_parquet_dts
|
alxga/awstest
| 0 |
python
|
def fetch_parquet_dts():
'\n '
ret = []
pfxDT = datetime(2020, 1, 1)
utcNow = datetime.utcnow()
dts = []
while ((pfxDT + timedelta(days=1, hours=8, minutes=10)) < utcNow):
dts.append(pfxDT)
pfxDT += timedelta(days=1)
with DBConn() as conn:
exD = dbtables.PqDates.select_existing_pqdates(conn)
for dt in dts:
if (dt.date() not in exD):
ret.append(dt)
return ret
|
def fetch_parquet_dts():
'\n '
ret = []
pfxDT = datetime(2020, 1, 1)
utcNow = datetime.utcnow()
dts = []
while ((pfxDT + timedelta(days=1, hours=8, minutes=10)) < utcNow):
dts.append(pfxDT)
pfxDT += timedelta(days=1)
with DBConn() as conn:
exD = dbtables.PqDates.select_existing_pqdates(conn)
for dt in dts:
if (dt.date() not in exD):
ret.append(dt)
return ret<|docstring|>Computes dates for which Parquet files need to be created<|endoftext|>
|
c1d05ce0abb359054ad35ca8e3c757efbabc72d8b68d21d3956df57a1a169a76
|
def fetch_keys_for_date(dt):
'Retrieves Protobuf files S3 keys for a Parquet date\n\n Args:\n dt: target Parquet file date\n '
with DBConn() as conn:
dt1 = datetime(dt.year, dt.month, dt.day, 8)
dt2 = (dt1 + timedelta(days=1))
return dbtables.VehPosPb.select_protobuf_keys_between_dates(conn, dt1, dt2)
|
Retrieves Protobuf files S3 keys for a Parquet date
Args:
dt: target Parquet file date
|
airtasks/spk_writeparquets.py
|
fetch_keys_for_date
|
alxga/awstest
| 0 |
python
|
def fetch_keys_for_date(dt):
'Retrieves Protobuf files S3 keys for a Parquet date\n\n Args:\n dt: target Parquet file date\n '
with DBConn() as conn:
dt1 = datetime(dt.year, dt.month, dt.day, 8)
dt2 = (dt1 + timedelta(days=1))
return dbtables.VehPosPb.select_protobuf_keys_between_dates(conn, dt1, dt2)
|
def fetch_keys_for_date(dt):
'Retrieves Protobuf files S3 keys for a Parquet date\n\n Args:\n dt: target Parquet file date\n '
with DBConn() as conn:
dt1 = datetime(dt.year, dt.month, dt.day, 8)
dt2 = (dt1 + timedelta(days=1))
return dbtables.VehPosPb.select_protobuf_keys_between_dates(conn, dt1, dt2)<|docstring|>Retrieves Protobuf files S3 keys for a Parquet date
Args:
dt: target Parquet file date<|endoftext|>
|
7cef10a41a57c25a8cf90532ea7b8d3b80993c83ee9c1e0ae0e876475d7c5d87
|
def run(spark):
'Updates Parquet files in S3 and the PqDate table\n\n Args:\n spark: Spark Session object\n '
log = utils.get_logger()
with DBConnCommonQueries() as conn:
dbtables.create_if_not_exists(conn, dbtables.PqDates)
targetDates = fetch_parquet_dts()
for targetDate in targetDates:
keys = fetch_keys_for_date(targetDate)
log.info('Got %d keys of %s', len(keys), str(targetDate))
if (len(keys) > 0):
rddKeys = spark.sparkContext.parallelize(keys).map((lambda x: (x, x))).partitionBy(Settings.NumPartitions).map((lambda x: x[0]))
rddVP = rddKeys.flatMap(dbtables.VehPos.build_df_tuples_from_pb).map((lambda tpl: ((tpl[1], tpl[3]), tpl))).reduceByKey((lambda x, y: x)).map((lambda x: x[1]))
schema = StructType([StructField('RouteId', StringType(), True), StructField('DT', TimestampType(), False), StructField('VehicleId', StringType(), False), StructField('TripId', StringType(), False), StructField('Lat', DoubleType(), False), StructField('Lon', DoubleType(), False), StructField('Status', IntegerType(), True), StructField('StopSeq', IntegerType(), True), StructField('StopId', StringType(), True)])
dfVP = spark.createDataFrame(rddVP, schema)
log.info('Created dataframe for %d keys of %s', len(keys), str(targetDate))
pqKey = targetDate.strftime('%Y%m%d')
pqKey = '/'.join(['parquet', ('VP-' + pqKey)])
pqKey = ('s3a://alxga-insde/%s' % pqKey)
dfVP.write.format('parquet').mode('overwrite').save(pqKey)
log.info('Written to Parquet %d keys of %s', len(keys), str(targetDate))
numRecs = dfVP.count()
else:
numRecs = 0
with DBConn() as conn:
dbtables.PqDates.insert_values(conn, targetDate, len(keys), numRecs)
conn.commit()
|
Updates Parquet files in S3 and the PqDate table
Args:
spark: Spark Session object
|
airtasks/spk_writeparquets.py
|
run
|
alxga/awstest
| 0 |
python
|
def run(spark):
'Updates Parquet files in S3 and the PqDate table\n\n Args:\n spark: Spark Session object\n '
log = utils.get_logger()
with DBConnCommonQueries() as conn:
dbtables.create_if_not_exists(conn, dbtables.PqDates)
targetDates = fetch_parquet_dts()
for targetDate in targetDates:
keys = fetch_keys_for_date(targetDate)
log.info('Got %d keys of %s', len(keys), str(targetDate))
if (len(keys) > 0):
rddKeys = spark.sparkContext.parallelize(keys).map((lambda x: (x, x))).partitionBy(Settings.NumPartitions).map((lambda x: x[0]))
rddVP = rddKeys.flatMap(dbtables.VehPos.build_df_tuples_from_pb).map((lambda tpl: ((tpl[1], tpl[3]), tpl))).reduceByKey((lambda x, y: x)).map((lambda x: x[1]))
schema = StructType([StructField('RouteId', StringType(), True), StructField('DT', TimestampType(), False), StructField('VehicleId', StringType(), False), StructField('TripId', StringType(), False), StructField('Lat', DoubleType(), False), StructField('Lon', DoubleType(), False), StructField('Status', IntegerType(), True), StructField('StopSeq', IntegerType(), True), StructField('StopId', StringType(), True)])
dfVP = spark.createDataFrame(rddVP, schema)
log.info('Created dataframe for %d keys of %s', len(keys), str(targetDate))
pqKey = targetDate.strftime('%Y%m%d')
pqKey = '/'.join(['parquet', ('VP-' + pqKey)])
pqKey = ('s3a://alxga-insde/%s' % pqKey)
dfVP.write.format('parquet').mode('overwrite').save(pqKey)
log.info('Written to Parquet %d keys of %s', len(keys), str(targetDate))
numRecs = dfVP.count()
else:
numRecs = 0
with DBConn() as conn:
dbtables.PqDates.insert_values(conn, targetDate, len(keys), numRecs)
conn.commit()
|
def run(spark):
'Updates Parquet files in S3 and the PqDate table\n\n Args:\n spark: Spark Session object\n '
log = utils.get_logger()
with DBConnCommonQueries() as conn:
dbtables.create_if_not_exists(conn, dbtables.PqDates)
targetDates = fetch_parquet_dts()
for targetDate in targetDates:
keys = fetch_keys_for_date(targetDate)
log.info('Got %d keys of %s', len(keys), str(targetDate))
if (len(keys) > 0):
rddKeys = spark.sparkContext.parallelize(keys).map((lambda x: (x, x))).partitionBy(Settings.NumPartitions).map((lambda x: x[0]))
rddVP = rddKeys.flatMap(dbtables.VehPos.build_df_tuples_from_pb).map((lambda tpl: ((tpl[1], tpl[3]), tpl))).reduceByKey((lambda x, y: x)).map((lambda x: x[1]))
schema = StructType([StructField('RouteId', StringType(), True), StructField('DT', TimestampType(), False), StructField('VehicleId', StringType(), False), StructField('TripId', StringType(), False), StructField('Lat', DoubleType(), False), StructField('Lon', DoubleType(), False), StructField('Status', IntegerType(), True), StructField('StopSeq', IntegerType(), True), StructField('StopId', StringType(), True)])
dfVP = spark.createDataFrame(rddVP, schema)
log.info('Created dataframe for %d keys of %s', len(keys), str(targetDate))
pqKey = targetDate.strftime('%Y%m%d')
pqKey = '/'.join(['parquet', ('VP-' + pqKey)])
pqKey = ('s3a://alxga-insde/%s' % pqKey)
dfVP.write.format('parquet').mode('overwrite').save(pqKey)
log.info('Written to Parquet %d keys of %s', len(keys), str(targetDate))
numRecs = dfVP.count()
else:
numRecs = 0
with DBConn() as conn:
dbtables.PqDates.insert_values(conn, targetDate, len(keys), numRecs)
conn.commit()<|docstring|>Updates Parquet files in S3 and the PqDate table
Args:
spark: Spark Session object<|endoftext|>
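A minimal, hypothetical driver sketch for the run() record above: only the run(spark) signature comes from the record; the app name and entry-point wiring are assumptions.
from pyspark.sql import SparkSession

if __name__ == '__main__':
    # Assumed app name; run() is the function defined in airtasks/spk_writeparquets.py
    spark = SparkSession.builder.appName('spk_writeparquets').getOrCreate()
    run(spark)
    spark.stop()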
|
ddaf39228c112b501128d0ff277f0fea5f0d22bf06ba48e54155863829e877f9
|
@admin.route('/admin')
@login_required
def tables() -> str:
'Main blog'
tbls = db.metadata.tables.keys()
return render_template('account/admin.html', tables=tbls, title='Admin')
|
Main blog
|
routes/admin.py
|
tables
|
barretobrock/bobrock.dev
| 0 |
python
|
@admin.route('/admin')
@login_required
def tables() -> str:
tbls = db.metadata.tables.keys()
return render_template('account/admin.html', tables=tbls, title='Admin')
|
@admin.route('/admin')
@login_required
def tables() -> str:
tbls = db.metadata.tables.keys()
return render_template('account/admin.html', tables=tbls, title='Admin')<|docstring|>Main blog<|endoftext|>
|
e3ae645847f97b82fcca03fa436d5b8a781b9350d21e5a8e8be8667c2a6777c9
|
def test_write_sensitivity_file(tmp_path):
'\n Write sensitivity file containing default sensitivities and ensure\n that sensitivities in files match the original ones.\n '
nedts_ref = DEFAULT_SENSITIVITIES
sensitivity_file = (tmp_path / 'sensitivities.txt')
write_sensitivity_file(sensors.GMI, sensitivity_file, nedts=nedts_ref)
nedts = np.loadtxt(sensitivity_file)
assert np.all(np.isclose(nedts_ref, nedts))
|
Write sensitivity file containing default sensitivities and ensure
that sensitivities in files match the original ones.
|
test/test_legacy.py
|
test_write_sensitivity_file
|
simonpf/gprof_nn
| 1 |
python
|
def test_write_sensitivity_file(tmp_path):
'\n Write sensitivity file containing default sensitivities and ensure\n that sensitivities in files match the original ones.\n '
nedts_ref = DEFAULT_SENSITIVITIES
sensitivity_file = (tmp_path / 'sensitivities.txt')
write_sensitivity_file(sensors.GMI, sensitivity_file, nedts=nedts_ref)
nedts = np.loadtxt(sensitivity_file)
assert np.all(np.isclose(nedts_ref, nedts))
|
def test_write_sensitivity_file(tmp_path):
'\n Write sensitivity file containing default sensitivities and ensure\n that sensitivities in files match the original ones.\n '
nedts_ref = DEFAULT_SENSITIVITIES
sensitivity_file = (tmp_path / 'sensitivities.txt')
write_sensitivity_file(sensors.GMI, sensitivity_file, nedts=nedts_ref)
nedts = np.loadtxt(sensitivity_file)
assert np.all(np.isclose(nedts_ref, nedts))<|docstring|>Write sensitivity file containing default sensitivities and ensure
that sensitivities in files match the original ones.<|endoftext|>
|
d026b4c5b14495882d4ec81e419743094e941b876d38d51e8d2299c349461c28
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_training_data():
'\n Test running the legacy GPROF algorithm on training data.\n '
path = Path(__file__).parent
input_file = ((DATA_PATH / 'gmi') / 'gprof_nn_gmi_era5.nc')
results = run_gprof_training_data(sensors.GMI, 'ERA5', input_file, 'STANDARD', False)
assert ('surface_precip' in results.variables)
assert ('surface_precip_true' in results.variables)
|
Test running the legacy GPROF algorithm on training data.
|
test/test_legacy.py
|
test_run_gprof_training_data
|
simonpf/gprof_nn
| 1 |
python
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_training_data():
'\n \n '
path = Path(__file__).parent
input_file = ((DATA_PATH / 'gmi') / 'gprof_nn_gmi_era5.nc')
results = run_gprof_training_data(sensors.GMI, 'ERA5', input_file, 'STANDARD', False)
assert ('surface_precip' in results.variables)
assert ('surface_precip_true' in results.variables)
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_training_data():
'\n \n '
path = Path(__file__).parent
input_file = ((DATA_PATH / 'gmi') / 'gprof_nn_gmi_era5.nc')
results = run_gprof_training_data(sensors.GMI, 'ERA5', input_file, 'STANDARD', False)
assert ('surface_precip' in results.variables)
assert ('surface_precip_true' in results.variables)<|docstring|>Test running the legacy GPROF algorithm on training data.<|endoftext|>
|
d5d4efc7685671149f5581b996b5efb3d78aa02bddfb30df7c90d665852063fc
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_training_data_preserve_structure():
'\n Test running the legacy GPROF algorithm on training data while\n preserving the spatial structure.\n '
input_file = ((DATA_PATH / 'gmi') / 'gprof_nn_gmi_era5.nc')
results = run_gprof_training_data(sensors.GMI, 'ERA5', input_file, 'STANDARD', False, preserve_structure=True)
assert ('surface_precip' in results.variables)
assert ('surface_precip_true' in results.variables)
|
Test running the legacy GPROF algorithm on training data while
preserving the spatial structure.
|
test/test_legacy.py
|
test_run_gprof_training_data_preserve_structure
|
simonpf/gprof_nn
| 1 |
python
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_training_data_preserve_structure():
'\n Test running the legacy GPROF algorithm on training data while\n preserving the spatial structure.\n '
input_file = ((DATA_PATH / 'gmi') / 'gprof_nn_gmi_era5.nc')
results = run_gprof_training_data(sensors.GMI, 'ERA5', input_file, 'STANDARD', False, preserve_structure=True)
assert ('surface_precip' in results.variables)
assert ('surface_precip_true' in results.variables)
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_training_data_preserve_structure():
'\n Test running the legacy GPROF algorithm on training data while\n preserving the spatial structure.\n '
input_file = ((DATA_PATH / 'gmi') / 'gprof_nn_gmi_era5.nc')
results = run_gprof_training_data(sensors.GMI, 'ERA5', input_file, 'STANDARD', False, preserve_structure=True)
assert ('surface_precip' in results.variables)
assert ('surface_precip_true' in results.variables)<|docstring|>Test running the legacy GPROF algorithm on training data while
preserving the spatial structure.<|endoftext|>
|
c3bf5ae2bd05ad351a87d63b86642c5a3024f35e919c33d1ad2920c92f8b058d
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_standard():
'\n Test running legacy GPROF on a preprocessor input file.\n '
input_file = (((DATA_PATH / 'gmi') / 'pp') / 'GMIERA5_190101_027510.pp')
results = run_gprof_standard(sensors.GMI, 'ERA5', input_file, 'STANDARD', False)
assert ('surface_precip' in results.variables)
|
Test running legacy GPROF on a preprocessor input file.
|
test/test_legacy.py
|
test_run_gprof_standard
|
simonpf/gprof_nn
| 1 |
python
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_standard():
'\n \n '
input_file = (((DATA_PATH / 'gmi') / 'pp') / 'GMIERA5_190101_027510.pp')
results = run_gprof_standard(sensors.GMI, 'ERA5', input_file, 'STANDARD', False)
assert ('surface_precip' in results.variables)
|
@pytest.mark.skipif((not HAS_GPROF), reason='GPROF executable missing.')
def test_run_gprof_standard():
'\n \n '
input_file = (((DATA_PATH / 'gmi') / 'pp') / 'GMIERA5_190101_027510.pp')
results = run_gprof_standard(sensors.GMI, 'ERA5', input_file, 'STANDARD', False)
assert ('surface_precip' in results.variables)<|docstring|>Test running legacy GPROF on a preprocessor input file.<|endoftext|>
|
1bede5cb009e13743ad29f357b02e86eaaf3295594b9e6f0620d837441e34cea
|
def get(self, title):
' This function return a book searched by the title\n\n Args:\n title (str): Title of the book.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.title == title))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
This function returns the book matching the given title
Args:
title (str): Title of the book.
|
import-tool/database.py
|
get
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def get(self, title):
' This function return a book searched by the title\n\n Args:\n title (str): Title of the book.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.title == title))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
def get(self, title):
' This function return a book searched by the title\n\n Args:\n title (str): Title of the book.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.title == title))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()<|docstring|>This function return a book searched by the title
Args:
title (str): Title of the book.<|endoftext|>
|
08e3e9bd7009d71758a30b551c4be4932b1aa7b595e97f8752bba063b7a7aefa
|
def put(self, author_id, title, description, language):
' This function save a new book record in the database\n\n Args:\n title (str): Title of the book.\n description (str): Small description of the book.\n language (str): Language is written the book.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(author_id=author_id, title=title, description=description, language=language)
connection.execute(query)
|
This function saves a new book record in the database
Args:
title (str): Title of the book.
description (str): Small description of the book.
language (str): Language the book is written in.
|
import-tool/database.py
|
put
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def put(self, author_id, title, description, language):
' This function save a new book record in the database\n\n Args:\n title (str): Title of the book.\n description (str): Small description of the book.\n language (str): Language is written the book.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(author_id=author_id, title=title, description=description, language=language)
connection.execute(query)
|
def put(self, author_id, title, description, language):
' This function save a new book record in the database\n\n Args:\n title (str): Title of the book.\n description (str): Small description of the book.\n language (str): Language is written the book.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(author_id=author_id, title=title, description=description, language=language)
connection.execute(query)<|docstring|>This function save a new book record in the database
Args:
title (str): Title of the book.
description (str): Small description of the book.
language (str): Language is written the book.<|endoftext|>
|
8573696ffa2b3f8b4a51207f17102be0fd22f86042de9d914b17fe8aefb4783d
|
def get(self, name):
' This function search for a Author by a given name\n\n Args:\n name (str): Name of the author.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.name == name))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
This function searches for an Author by a given name
Args:
name (str): Name of the author.
|
import-tool/database.py
|
get
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def get(self, name):
' This function search for a Author by a given name\n\n Args:\n name (str): Name of the author.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.name == name))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
def get(self, name):
' This function search for a Author by a given name\n\n Args:\n name (str): Name of the author.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.name == name))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()<|docstring|>This function search for a Author by a given name
Args:
name (str): Name of the author.<|endoftext|>
|
f4533844e4ffcdd82c3d2b1ad4d49c3bf551c64d539bc68fa35b4e8ab9a3413f
|
def put(self, name):
' This function save a new author record in the database\n\n Args:\n name (str): Name of the author.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(name=name)
result = connection.execute(query)
return result.lastrowid
|
This function saves a new author record in the database
Args:
name (str): Name of the author.
|
import-tool/database.py
|
put
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def put(self, name):
' This function save a new author record in the database\n\n Args:\n name (str): Name of the author.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(name=name)
result = connection.execute(query)
return result.lastrowid
|
def put(self, name):
' This function save a new author record in the database\n\n Args:\n name (str): Name of the author.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(name=name)
result = connection.execute(query)
return result.lastrowid<|docstring|>This function save a new author record in the database
Args:
name (str): Name of the author.<|endoftext|>
|
b28cdbbdcaa495697cf6f964989b543cdf02f889bc603ca962a87e7afde959d3
|
def get(self, name):
' This function search for a Subject by a given name\n\n Args:\n name (str): Name of the subject.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.name == name))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
This function searches for a Subject by a given name
Args:
name (str): Name of the subject.
|
import-tool/database.py
|
get
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def get(self, name):
' This function search for a Subject by a given name\n\n Args:\n name (str): Name of the subject.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.name == name))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
def get(self, name):
' This function search for a Subject by a given name\n\n Args:\n name (str): Name of the subject.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where((table.c.name == name))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()<|docstring|>This function search for a Subject by a given name
Args:
name (str): Name of the subject.<|endoftext|>
|
6c3d0a000011f0f58e0eb8a0c4b8d1abe6543697c6737be1b324d0d815b33f09
|
def put(self, name):
' This function save a new subject record in the database\n\n Args:\n name (str): Name of the subject.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(name=name)
connection.execute(query)
|
This function saves a new subject record in the database
Args:
name (str): Name of the subject.
|
import-tool/database.py
|
put
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def put(self, name):
' This function save a new subject record in the database\n\n Args:\n name (str): Name of the subject.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(name=name)
connection.execute(query)
|
def put(self, name):
' This function save a new subject record in the database\n\n Args:\n name (str): Name of the subject.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(name=name)
connection.execute(query)<|docstring|>This function save a new subject record in the database
Args:
name (str): Name of the subject.<|endoftext|>
|
94b047872cdc08cd6223f1bf55fab0d1c545c620689008ad58513da1c9c1d7fe
|
def get(self, book_id, subject_id):
' This function search for a Book Subject relationship by the two keys\n\n Args:\n book_id (int): Book id.\n subject_id (int): Subject id.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where(((table.c.book_id == book_id) and (table.c.subject_id == subject_id)))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
This function searches for a Book Subject relationship by its two keys
Args:
book_id (int): Book id.
subject_id (int): Subject id.
|
import-tool/database.py
|
get
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def get(self, book_id, subject_id):
' This function search for a Book Subject relationship by the two keys\n\n Args:\n book_id (int): Book id.\n subject_id (int): Subject id.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where(((table.c.book_id == book_id) and (table.c.subject_id == subject_id)))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()
|
def get(self, book_id, subject_id):
' This function search for a Book Subject relationship by the two keys\n\n Args:\n book_id (int): Book id.\n subject_id (int): Subject id.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = db.select([table]).where(((table.c.book_id == book_id) and (table.c.subject_id == subject_id)))
ResultProxy = connection.execute(query)
return ResultProxy.fetchone()<|docstring|>This function search for a Book Subject relationship by the two keys
Args:
book_id (int): Book id.
subject_id (int): Subject id.<|endoftext|>
|
9302426e6549cae72790ca3570bb35b8d176ac593e51b86ddba7e5803474fbef
|
def put(self, book_id, subject_id):
' This function saves a new book subject record in the database, making a relationship between a book and a subject\n\n Args:\n book_id (int): Book id.\n subject_id (int): Subject id.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(book_id=book_id, subject_id=subject_id)
connection.execute(query)
|
This function saves a new book subject record in the database, making a relationship between a book and a subject
Args:
book_id (int): Book id.
subject_id (int): Subject id.
|
import-tool/database.py
|
put
|
andygarcia86/python-read-opf-files
| 1 |
python
|
def put(self, book_id, subject_id):
' This function saves a new book subject record in the database, making a relationship between a book and a subject\n\n Args:\n book_id (int): Book id.\n subject_id (int): Subject id.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(book_id=book_id, subject_id=subject_id)
connection.execute(query)
|
def put(self, book_id, subject_id):
' This function saves a new book subject record in the database, making a relationship between a book and a subject\n\n Args:\n book_id (int): Book id.\n subject_id (int): Subject id.\n '
engine = db.create_engine(self.uri)
connection = engine.connect()
metadata = db.MetaData()
table = db.Table(self.__table_name, metadata, autoload=True, autoload_with=engine)
query = table.insert().values(book_id=book_id, subject_id=subject_id)
connection.execute(query)<|docstring|>This function saves a new book subject record in the database, making a relationship between a book and a subject
Args:
book_id (int): Book id.
subject_id (int): Subject id.<|endoftext|>
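A hypothetical usage sketch tying the get()/put() records above together; the wrapper class names, constructors, and row indexing are assumptions, only the method signatures come from import-tool/database.py.
authors = AuthorTable('sqlite:///books.db')   # assumed class name and DB URI
books = BookTable('sqlite:///books.db')       # assumed class name

row = authors.get('Jane Austen')
# put() on the author table returns lastrowid, so reuse it when the author is new;
# row[0] is assumed to be the primary-key column of the fetched row
author_id = row[0] if row is not None else authors.put('Jane Austen')
books.put(author_id, 'Emma', 'A novel of manners', 'en')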
|
e018b7799baabf7b7d08284b7d70f6868dcb9bf88294d69fc673d2ceb63b869b
|
def listen(self, key=None, backlog=128):
'Create and start listening on socket.\n\n Call before forking worker processes.\n\n Raises Exception if this has already been called.\n '
info = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM)[0]
try:
self.socket = eventlet.listen(info[(- 1)], family=info[0], backlog=backlog)
except EnvironmentError:
LOG.error(_LE('Could not bind to %(host)s:%(port)s'), {'host': self.host, 'port': self.port})
raise
LOG.info(_LI('Starting %(arg0)s on %(host)s:%(port)s'), {'arg0': sys.argv[0], 'host': self.host, 'port': self.port})
|
Create and start listening on socket.
Call before forking worker processes.
Raises Exception if this has already been called.
|
sidserver/common/environment/eventlet_server.py
|
listen
|
UTSA-ICS/sidserver
| 0 |
python
|
def listen(self, key=None, backlog=128):
'Create and start listening on socket.\n\n Call before forking worker processes.\n\n Raises Exception if this has already been called.\n '
info = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM)[0]
try:
self.socket = eventlet.listen(info[(- 1)], family=info[0], backlog=backlog)
except EnvironmentError:
LOG.error(_LE('Could not bind to %(host)s:%(port)s'), {'host': self.host, 'port': self.port})
raise
LOG.info(_LI('Starting %(arg0)s on %(host)s:%(port)s'), {'arg0': sys.argv[0], 'host': self.host, 'port': self.port})
|
def listen(self, key=None, backlog=128):
'Create and start listening on socket.\n\n Call before forking worker processes.\n\n Raises Exception if this has already been called.\n '
info = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM)[0]
try:
self.socket = eventlet.listen(info[(- 1)], family=info[0], backlog=backlog)
except EnvironmentError:
LOG.error(_LE('Could not bind to %(host)s:%(port)s'), {'host': self.host, 'port': self.port})
raise
LOG.info(_LI('Starting %(arg0)s on %(host)s:%(port)s'), {'arg0': sys.argv[0], 'host': self.host, 'port': self.port})<|docstring|>Create and start listening on socket.
Call before forking worker processes.
Raises Exception if this has already been called.<|endoftext|>
|
a8b76a2a49a822f10eeb2bcadacbdc3fb5204e84b8ef8d0f5c1e3ff405ae8d8b
|
def start(self, key=None, backlog=128):
'Run a WSGI server with the given application.'
if (self.socket is None):
self.listen(key=key, backlog=backlog)
dup_socket = self.socket.dup()
if key:
self.socket_info[key] = self.socket.getsockname()
if self.do_ssl:
if self.cert_required:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
dup_socket = eventlet.wrap_ssl(dup_socket, certfile=self.certfile, keyfile=self.keyfile, server_side=True, cert_reqs=cert_reqs, ca_certs=self.ca_certs)
if self.keepalive:
dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
if (self.keepidle is not None):
dup_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, self.keepidle)
self.greenthread = self.pool.spawn(self._run, self.application, dup_socket)
|
Run a WSGI server with the given application.
|
sidserver/common/environment/eventlet_server.py
|
start
|
UTSA-ICS/sidserver
| 0 |
python
|
def start(self, key=None, backlog=128):
if (self.socket is None):
self.listen(key=key, backlog=backlog)
dup_socket = self.socket.dup()
if key:
self.socket_info[key] = self.socket.getsockname()
if self.do_ssl:
if self.cert_required:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
dup_socket = eventlet.wrap_ssl(dup_socket, certfile=self.certfile, keyfile=self.keyfile, server_side=True, cert_reqs=cert_reqs, ca_certs=self.ca_certs)
if self.keepalive:
dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
if (self.keepidle is not None):
dup_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, self.keepidle)
self.greenthread = self.pool.spawn(self._run, self.application, dup_socket)
|
def start(self, key=None, backlog=128):
if (self.socket is None):
self.listen(key=key, backlog=backlog)
dup_socket = self.socket.dup()
if key:
self.socket_info[key] = self.socket.getsockname()
if self.do_ssl:
if self.cert_required:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
dup_socket = eventlet.wrap_ssl(dup_socket, certfile=self.certfile, keyfile=self.keyfile, server_side=True, cert_reqs=cert_reqs, ca_certs=self.ca_certs)
if self.keepalive:
dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
if (self.keepidle is not None):
dup_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, self.keepidle)
self.greenthread = self.pool.spawn(self._run, self.application, dup_socket)<|docstring|>Run a WSGI server with the given application.<|endoftext|>
|
01ebe88ad051a04b2cbc8889236106f2e347addbe8b39a2614f0e1716398b24d
|
def wait(self):
'Wait until all servers have completed running.'
try:
self.pool.waitall()
except KeyboardInterrupt:
pass
except greenlet.GreenletExit:
pass
|
Wait until all servers have completed running.
|
sidserver/common/environment/eventlet_server.py
|
wait
|
UTSA-ICS/sidserver
| 0 |
python
|
def wait(self):
try:
self.pool.waitall()
except KeyboardInterrupt:
pass
except greenlet.GreenletExit:
pass
|
def wait(self):
try:
self.pool.waitall()
except KeyboardInterrupt:
pass
except greenlet.GreenletExit:
pass<|docstring|>Wait until all servers have completed running.<|endoftext|>
|
fd375b9ed5c9d86e112137a097ba3bbbd56a5a607c8fd706038fa43a1d89213a
|
def reset(self):
'Required by the service interface.\n\n The service interface is used by the launcher when receiving a\n SIGHUP. The service interface is defined in\n sidserver.openstack.common.service.Service.\n\n Keystone does not need to do anything here.\n '
pass
|
Required by the service interface.
The service interface is used by the launcher when receiving a
SIGHUP. The service interface is defined in
sidserver.openstack.common.service.Service.
Keystone does not need to do anything here.
|
sidserver/common/environment/eventlet_server.py
|
reset
|
UTSA-ICS/sidserver
| 0 |
python
|
def reset(self):
'Required by the service interface.\n\n The service interface is used by the launcher when receiving a\n SIGHUP. The service interface is defined in\n sidserver.openstack.common.service.Service.\n\n Keystone does not need to do anything here.\n '
pass
|
def reset(self):
'Required by the service interface.\n\n The service interface is used by the launcher when receiving a\n SIGHUP. The service interface is defined in\n sidserver.openstack.common.service.Service.\n\n Keystone does not need to do anything here.\n '
pass<|docstring|>Required by the service interface.
The service interface is used by the launcher when receiving a
SIGHUP. The service interface is defined in
sidserver.openstack.common.service.Service.
Keystone does not need to do anything here.<|endoftext|>
|
15428b677f33ee698b6851c41c9c6bca1498088492d41644256a631b4285d6f5
|
def _run(self, application, socket):
'Start a WSGI server with a new green thread pool.'
logger = log.getLogger('eventlet.wsgi.server')
socket_timeout = (CONF.eventlet_server.client_socket_timeout or None)
try:
eventlet.wsgi.server(socket, application, log=EventletFilteringLogger(logger), debug=False, keepalive=CONF.eventlet_server.wsgi_keep_alive, socket_timeout=socket_timeout)
except greenlet.GreenletExit:
pass
except Exception:
LOG.exception(_LE('Server error'))
raise
|
Start a WSGI server with a new green thread pool.
|
sidserver/common/environment/eventlet_server.py
|
_run
|
UTSA-ICS/sidserver
| 0 |
python
|
def _run(self, application, socket):
logger = log.getLogger('eventlet.wsgi.server')
socket_timeout = (CONF.eventlet_server.client_socket_timeout or None)
try:
eventlet.wsgi.server(socket, application, log=EventletFilteringLogger(logger), debug=False, keepalive=CONF.eventlet_server.wsgi_keep_alive, socket_timeout=socket_timeout)
except greenlet.GreenletExit:
pass
except Exception:
LOG.exception(_LE('Server error'))
raise
|
def _run(self, application, socket):
logger = log.getLogger('eventlet.wsgi.server')
socket_timeout = (CONF.eventlet_server.client_socket_timeout or None)
try:
eventlet.wsgi.server(socket, application, log=EventletFilteringLogger(logger), debug=False, keepalive=CONF.eventlet_server.wsgi_keep_alive, socket_timeout=socket_timeout)
except greenlet.GreenletExit:
pass
except Exception:
LOG.exception(_LE('Server error'))
raise<|docstring|>Start a WSGI server with a new green thread pool.<|endoftext|>
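A hypothetical wiring sketch for the server records above; the Server constructor and the 'application' WSGI callable are assumptions, while the listen()/start()/wait() ordering follows the docstrings: bind before forking workers, then spawn the WSGI green thread, then block.
server = Server(application, host='0.0.0.0', port=5000)  # assumed constructor and WSGI app
server.listen(backlog=128)  # bind the socket before forking worker processes
server.start()              # wrap in SSL if configured and spawn _run()
server.wait()               # block until the green thread pool drains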
|
260c4b9a754a75e57b6dce3a71cb037844387a6004c55782ba4560d939a4f3a5
|
def _check_settings(self):
' Check we have all the required settings defined. '
if (not self.FTP_HOST):
raise StorageError(('%s storage requires DBBACKUP_FTP_HOST to be defined in settings.' % self.name))
|
Check we have all the required settings defined.
|
dbbackup/storage/ftp_storage.py
|
_check_settings
|
UbuntuEvangelist/django-dbbackup
| 0 |
python
|
def _check_settings(self):
' '
if (not self.FTP_HOST):
raise StorageError(('%s storage requires DBBACKUP_FTP_HOST to be defined in settings.' % self.name))
|
def _check_settings(self):
' '
if (not self.FTP_HOST):
raise StorageError(('%s storage requires DBBACKUP_FTP_HOST to be defined in settings.' % self.name))<|docstring|>Check we have all the required settings defined.<|endoftext|>
|
ec0299799adfc3eca3fad73797efaba78826c68794c58271741d2c5f09de717d
|
def delete_file(self, filepath):
' Delete the specified filepath. '
self.ftp.delete(filepath)
|
Delete the specified filepath.
|
dbbackup/storage/ftp_storage.py
|
delete_file
|
UbuntuEvangelist/django-dbbackup
| 0 |
python
|
def delete_file(self, filepath):
' '
self.ftp.delete(filepath)
|
def delete_file(self, filepath):
' '
self.ftp.delete(filepath)<|docstring|>Delete the specified filepath.<|endoftext|>
|
fda3e75a8f869b7b13ea9ef7b3ca92d213d7cfaa4a11b84db098675d0719bcd0
|
def list_directory(self, raw=False):
' List all stored backups for the specified. '
return sorted(self.ftp.nlst(self.FTP_PATH))
|
List all stored backups under the configured FTP path.
|
dbbackup/storage/ftp_storage.py
|
list_directory
|
UbuntuEvangelist/django-dbbackup
| 0 |
python
|
def list_directory(self, raw=False):
' '
return sorted(self.ftp.nlst(self.FTP_PATH))
|
def list_directory(self, raw=False):
' '
return sorted(self.ftp.nlst(self.FTP_PATH))<|docstring|>List all stored backups for the specified.<|endoftext|>
|
4eb9882aaa0603fd246ed0290efa353b1809ed844bbecaa276cce56b170be8bd
|
def write_file(self, filehandle, filename):
' Write the specified file. '
filehandle.seek(0)
backuppath = os.path.join(self.FTP_PATH, filename)
self.ftp.storbinary(('STOR ' + backuppath), filehandle)
|
Write the specified file.
|
dbbackup/storage/ftp_storage.py
|
write_file
|
UbuntuEvangelist/django-dbbackup
| 0 |
python
|
def write_file(self, filehandle, filename):
' '
filehandle.seek(0)
backuppath = os.path.join(self.FTP_PATH, filename)
self.ftp.storbinary(('STOR ' + backuppath), filehandle)
|
def write_file(self, filehandle, filename):
' '
filehandle.seek(0)
backuppath = os.path.join(self.FTP_PATH, filename)
self.ftp.storbinary(('STOR ' + backuppath), filehandle)<|docstring|>Write the specified file.<|endoftext|>
|
9db39f3099ddc6ae68c1f9d3a7e67bef884de415460c01cb20350224832deac9
|
def read_file(self, filepath):
" Read the specified file and return it's handle. "
outputfile = tempfile.SpooledTemporaryFile(max_size=dbbackup_settings.TMP_FILE_MAX_SIZE, dir=dbbackup_settings.TMP_DIR)
self.ftp.retrbinary(('RETR ' + filepath), outputfile.write)
return outputfile
|
Read the specified file and return its handle.
|
dbbackup/storage/ftp_storage.py
|
read_file
|
UbuntuEvangelist/django-dbbackup
| 0 |
python
|
def read_file(self, filepath):
" "
outputfile = tempfile.SpooledTemporaryFile(max_size=dbbackup_settings.TMP_FILE_MAX_SIZE, dir=dbbackup_settings.TMP_DIR)
self.ftp.retrbinary(('RETR ' + filepath), outputfile.write)
return outputfile
|
def read_file(self, filepath):
" "
outputfile = tempfile.SpooledTemporaryFile(max_size=dbbackup_settings.TMP_FILE_MAX_SIZE, dir=dbbackup_settings.TMP_DIR)
self.ftp.retrbinary(('RETR ' + filepath), outputfile.write)
return outputfile<|docstring|>Read the specified file and return it's handle.<|endoftext|>
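A plain-ftplib sketch of the round trip the storage methods above perform (STOR, NLST, RETR, DELE); host, credentials and paths are placeholders, not values from the project.
import io
from ftplib import FTP

ftp = FTP('ftp.example.com')
ftp.login('user', 'password')
payload = io.BytesIO(b'backup-bytes')
ftp.storbinary('STOR /backups/db.dump', payload)          # what write_file() does
print(sorted(ftp.nlst('/backups')))                       # what list_directory() does
restored = io.BytesIO()
ftp.retrbinary('RETR /backups/db.dump', restored.write)   # what read_file() does
ftp.delete('/backups/db.dump')                            # what delete_file() does
ftp.quit()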
|
f37116f11d6ceaf4cf14eddc107c575559e9a6038a44b306e4b3f2bb9ad70502
|
@distributed_trace
def list(self, resource_group_name: str, network_manager_name: str, top: Optional[int]=None, skip_token: Optional[str]=None, **kwargs: Any) -> AsyncIterable['_models.SecurityConfigurationListResult']:
'Lists all the network manager security user configurations in a network manager, in a paginated\n format.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param top: An optional query parameter which specifies the maximum number of records to be\n returned by the server.\n :type top: int\n :param skip_token: SkipToken is only used if a previous operation returned a partial result. If\n a previous response contains a nextLink element, the value of the nextLink element will include\n a skipToken parameter that specifies a starting point to use for subsequent calls.\n :type skip_token: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either SecurityConfigurationListResult or the result of\n cls(response)\n :rtype:\n ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfigurationListResult]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if (not next_link):
request = build_list_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, top=top, skip_token=skip_token, template_url=self.list.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, top=top, skip_token=skip_token, template_url=next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = 'GET'
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), AsyncList(list_of_elem))
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
|
Lists all the network manager security user configurations in a network manager, in a paginated
format.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param top: An optional query parameter which specifies the maximum number of records to be
returned by the server.
:type top: int
:param skip_token: SkipToken is only used if a previous operation returned a partial result. If
a previous response contains a nextLink element, the value of the nextLink element will include
a skipToken parameter that specifies a starting point to use for subsequent calls.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecurityConfigurationListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
|
src/network-manager/azext_network_manager/vendored_sdks/aio/operations/_security_user_configurations_operations.py
|
list
|
hsrivast/azure-cli-extensions
| 1 |
python
|
@distributed_trace
def list(self, resource_group_name: str, network_manager_name: str, top: Optional[int]=None, skip_token: Optional[str]=None, **kwargs: Any) -> AsyncIterable['_models.SecurityConfigurationListResult']:
'Lists all the network manager security user configurations in a network manager, in a paginated\n format.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param top: An optional query parameter which specifies the maximum number of records to be\n returned by the server.\n :type top: int\n :param skip_token: SkipToken is only used if a previous operation returned a partial result. If\n a previous response contains a nextLink element, the value of the nextLink element will include\n a skipToken parameter that specifies a starting point to use for subsequent calls.\n :type skip_token: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either SecurityConfigurationListResult or the result of\n cls(response)\n :rtype:\n ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfigurationListResult]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if (not next_link):
request = build_list_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, top=top, skip_token=skip_token, template_url=self.list.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, top=top, skip_token=skip_token, template_url=next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = 'GET'
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), AsyncList(list_of_elem))
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
|
@distributed_trace
def list(self, resource_group_name: str, network_manager_name: str, top: Optional[int]=None, skip_token: Optional[str]=None, **kwargs: Any) -> AsyncIterable['_models.SecurityConfigurationListResult']:
'Lists all the network manager security user configurations in a network manager, in a paginated\n format.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param top: An optional query parameter which specifies the maximum number of records to be\n returned by the server.\n :type top: int\n :param skip_token: SkipToken is only used if a previous operation returned a partial result. If\n a previous response contains a nextLink element, the value of the nextLink element will include\n a skipToken parameter that specifies a starting point to use for subsequent calls.\n :type skip_token: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either SecurityConfigurationListResult or the result of\n cls(response)\n :rtype:\n ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfigurationListResult]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if (not next_link):
request = build_list_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, top=top, skip_token=skip_token, template_url=self.list.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, top=top, skip_token=skip_token, template_url=next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = 'GET'
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), AsyncList(list_of_elem))
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)<|docstring|>Lists all the network manager security user configurations in a network manager, in a paginated
format.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param top: An optional query parameter which specifies the maximum number of records to be
returned by the server.
:type top: int
:param skip_token: SkipToken is only used if a previous operation returned a partial result. If
a previous response contains a nextLink element, the value of the nextLink element will include
a skipToken parameter that specifies a starting point to use for subsequent calls.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecurityConfigurationListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError<|endoftext|>
|
f028357ecc246c0f72d67370f4ea3e35d31e8a23f35fde596d5b1660cddff7aa
|
@distributed_trace_async
async def get(self, resource_group_name: str, network_manager_name: str, configuration_name: str, **kwargs: Any) -> '_models.SecurityConfiguration':
'Retrieves a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: SecurityConfiguration, or the result of cls(response)\n :rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, template_url=self.get.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
|
Retrieves a network manager security user configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param configuration_name: The name of the network manager Security Configuration.
:type configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
|
src/network-manager/azext_network_manager/vendored_sdks/aio/operations/_security_user_configurations_operations.py
|
get
|
hsrivast/azure-cli-extensions
| 1 |
python
|
@distributed_trace_async
async def get(self, resource_group_name: str, network_manager_name: str, configuration_name: str, **kwargs: Any) -> '_models.SecurityConfiguration':
'Retrieves a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: SecurityConfiguration, or the result of cls(response)\n :rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, template_url=self.get.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
|
@distributed_trace_async
async def get(self, resource_group_name: str, network_manager_name: str, configuration_name: str, **kwargs: Any) -> '_models.SecurityConfiguration':
'Retrieves a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: SecurityConfiguration, or the result of cls(response)\n :rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, template_url=self.get.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized<|docstring|>Retrieves a network manager security user configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param configuration_name: The name of the network manager Security Configuration.
:type configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration
:raises: ~azure.core.exceptions.HttpResponseError<|endoftext|>
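A minimal usage sketch for the async get operation above. It assumes the vendored async NetworkManagementClient exposes this operations group as security_user_configurations and that azure-identity supplies the credential; the import path, attribute name, and resource names below are placeholders, not confirmed by the source.

import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.network.v2021_05_01_preview.aio import NetworkManagementClient  # assumed import path

async def fetch_configuration():
    async with DefaultAzureCredential() as credential:
        async with NetworkManagementClient(credential, subscription_id='<subscription-id>') as client:
            # Calls the get operation defined above; resource names are placeholders
            config = await client.security_user_configurations.get(
                resource_group_name='my-rg',
                network_manager_name='my-network-manager',
                configuration_name='my-security-config',
            )
            print(config.name)

asyncio.run(fetch_configuration())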
|
ab76dbd03dff344ccc8d2ccda344783f792d9e520b5b0dc330cb6768999b3eaf
|
@distributed_trace_async
async def create_or_update(self, resource_group_name: str, network_manager_name: str, configuration_name: str, security_user_configuration: '_models.SecurityConfiguration', **kwargs: Any) -> '_models.SecurityConfiguration':
'Creates or updates a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :param security_user_configuration: The security user configuration to create or update.\n :type security_user_configuration:\n ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: SecurityConfiguration, or the result of cls(response)\n :rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', 'application/json')
_json = self._serialize.body(security_user_configuration, 'SecurityConfiguration')
request = build_create_or_update_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, content_type=content_type, json=_json, template_url=self.create_or_update.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200, 201]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if (response.status_code == 200):
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if (response.status_code == 201):
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
|
Creates or updates a network manager security user configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param configuration_name: The name of the network manager Security Configuration.
:type configuration_name: str
:param security_user_configuration: The security user configuration to create or update.
:type security_user_configuration:
~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
|
src/network-manager/azext_network_manager/vendored_sdks/aio/operations/_security_user_configurations_operations.py
|
create_or_update
|
hsrivast/azure-cli-extensions
| 1 |
python
|
@distributed_trace_async
async def create_or_update(self, resource_group_name: str, network_manager_name: str, configuration_name: str, security_user_configuration: '_models.SecurityConfiguration', **kwargs: Any) -> '_models.SecurityConfiguration':
'Creates or updates a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :param security_user_configuration: The security user configuration to create or update.\n :type security_user_configuration:\n ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: SecurityConfiguration, or the result of cls(response)\n :rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', 'application/json')
_json = self._serialize.body(security_user_configuration, 'SecurityConfiguration')
request = build_create_or_update_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, content_type=content_type, json=_json, template_url=self.create_or_update.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200, 201]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if (response.status_code == 200):
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if (response.status_code == 201):
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
|
@distributed_trace_async
async def create_or_update(self, resource_group_name: str, network_manager_name: str, configuration_name: str, security_user_configuration: '_models.SecurityConfiguration', **kwargs: Any) -> '_models.SecurityConfiguration':
'Creates or updates a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :param security_user_configuration: The security user configuration to create or update.\n :type security_user_configuration:\n ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: SecurityConfiguration, or the result of cls(response)\n :rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', 'application/json')
_json = self._serialize.body(security_user_configuration, 'SecurityConfiguration')
request = build_create_or_update_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, content_type=content_type, json=_json, template_url=self.create_or_update.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200, 201]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if (response.status_code == 200):
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if (response.status_code == 201):
deserialized = self._deserialize('SecurityConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized<|docstring|>Creates or updates a network manager security user configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param configuration_name: The name of the network manager Security Configuration.
:type configuration_name: str
:param security_user_configuration: The security user configuration to create or update.
:type security_user_configuration:
~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_05_01_preview.models.SecurityConfiguration
:raises: ~azure.core.exceptions.HttpResponseError<|endoftext|>
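A companion sketch for create_or_update, reusing a client built as in the previous example. The SecurityConfiguration fields shown (display_name, description) are assumptions based on the models namespace referenced in the docstring and may differ from the actual model.

from azure.mgmt.network.v2021_05_01_preview.models import SecurityConfiguration  # assumed import path

async def upsert_configuration(client):
    # Build the request body; field names are assumptions, adjust to the real model
    payload = SecurityConfiguration(
        display_name='my-security-config',
        description='example security user configuration',
    )
    return await client.security_user_configurations.create_or_update(
        resource_group_name='my-rg',
        network_manager_name='my-network-manager',
        configuration_name='my-security-config',
        security_user_configuration=payload,
    )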
|
709810d41d45a4b5fd0b2f927248f3ce9b30e91eac33dbfa04c1332189b2a981
|
@distributed_trace_async
async def delete(self, resource_group_name: str, network_manager_name: str, configuration_name: str, force: Optional[bool]=None, recursive: Optional[bool]=None, **kwargs: Any) -> None:
'Deletes a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :param force: Deletes the resource even if it is part of a deployed configuration. If the\n configuration has been deployed, the service will do a cleanup deployment in the background,\n prior to the delete.\n :type force: bool\n :param recursive: Deletes the resource recursively. When present in a security configuration\n delete, all rule collections and rules within the configuration will be deleted. When present\n in a rule collection delete, all rules within the collection will be deleted.\n :type recursive: bool\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: None, or the result of cls(response)\n :rtype: None\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, force=force, recursive=recursive, template_url=self.delete.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200, 204]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
|
Deletes a network manager security user configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param configuration_name: The name of the network manager Security Configuration.
:type configuration_name: str
:param force: Deletes the resource even if it is part of a deployed configuration. If the
configuration has been deployed, the service will do a cleanup deployment in the background,
prior to the delete.
:type force: bool
:param recursive: Deletes the resource recursively. When present in a security configuration
delete, all rule collections and rules within the configuration will be deleted. When present
in a rule collection delete, all rules within the collection will be deleted.
:type recursive: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
|
src/network-manager/azext_network_manager/vendored_sdks/aio/operations/_security_user_configurations_operations.py
|
delete
|
hsrivast/azure-cli-extensions
| 1 |
python
|
@distributed_trace_async
async def delete(self, resource_group_name: str, network_manager_name: str, configuration_name: str, force: Optional[bool]=None, recursive: Optional[bool]=None, **kwargs: Any) -> None:
'Deletes a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :param force: Deletes the resource even if it is part of a deployed configuration. If the\n configuration has been deployed, the service will do a cleanup deployment in the background,\n prior to the delete.\n :type force: bool\n :param recursive: Deletes the resource recursively. When present in a security configuration\n delete, all rule collections and rules within the configuration will be deleted. When present\n in a rule collection delete, all rules within the collection will be deleted.\n :type recursive: bool\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: None, or the result of cls(response)\n :rtype: None\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, force=force, recursive=recursive, template_url=self.delete.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200, 204]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
|
@distributed_trace_async
async def delete(self, resource_group_name: str, network_manager_name: str, configuration_name: str, force: Optional[bool]=None, recursive: Optional[bool]=None, **kwargs: Any) -> None:
'Deletes a network manager security user configuration.\n\n :param resource_group_name: The name of the resource group.\n :type resource_group_name: str\n :param network_manager_name: The name of the network manager.\n :type network_manager_name: str\n :param configuration_name: The name of the network manager Security Configuration.\n :type configuration_name: str\n :param force: Deletes the resource even if it is part of a deployed configuration. If the\n configuration has been deployed, the service will do a cleanup deployment in the background,\n prior to the delete.\n :type force: bool\n :param recursive: Deletes the resource recursively. When present in a security configuration\n delete, all rule collections and rules within the configuration will be deleted. When present\n in a rule collection delete, all rules within the collection will be deleted.\n :type recursive: bool\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: None, or the result of cls(response)\n :rtype: None\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, network_manager_name=network_manager_name, configuration_name=configuration_name, force=force, recursive=recursive, template_url=self.delete.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200, 204]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})<|docstring|>Deletes a network manager security user configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_manager_name: The name of the network manager.
:type network_manager_name: str
:param configuration_name: The name of the network manager Security Configuration.
:type configuration_name: str
:param force: Deletes the resource even if it is part of a deployed configuration. If the
configuration has been deployed, the service will do a cleanup deployment in the background,
prior to the delete.
:type force: bool
:param recursive: Deletes the resource recursively. When present in a security configuration
delete, all rule collections and rules within the configuration will be deleted. When present
in a rule collection delete, all rules within the collection will be deleted.
:type recursive: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError<|endoftext|>
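A short sketch of the delete operation with its optional cleanup flags, again assuming a client built as in the earlier sketches; force and recursive simply map to the query parameters described in the docstring above.

async def remove_configuration(client):
    await client.security_user_configurations.delete(
        resource_group_name='my-rg',
        network_manager_name='my-network-manager',
        configuration_name='my-security-config',
        force=True,       # clean up a deployed configuration before deleting
        recursive=True,   # also remove contained rule collections and rules
    )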
|
2a4285a771ac3cbcdbcff163a773157b72f5bc4f9c47814059c484a31d502612
|
def feature_template_df(no_of_rxns_thres):
'Creates a template dataframe\n containing all the feature columns'
feature_data_columns = []
for n in range(no_of_rxns_thres):
smarts = (('Rxn' + str((n + 1))) + '_SMARTS')
rxn_delta_g = (('Rxn' + str((n + 1))) + '_DeltaG')
rxn_rule_score = (('Rxn' + str((n + 1))) + '_Rule_Score')
feature_data_columns.extend([smarts, rxn_delta_g, rxn_rule_score])
feature_data_columns.extend(['Pathway_Delta_G', 'Pathway_Flux', 'Pathway_Score', 'Round1'])
feature_data = pd_DataFrame(columns=feature_data_columns, index=None)
return feature_data
|
Creates a template dataframe
containing all the feature columns
|
rptools/rpscore/rpScore.py
|
feature_template_df
|
brsynth/rpTools
| 4 |
python
|
def feature_template_df(no_of_rxns_thres):
'Creates a template dataframe\n containing all the feature columns'
feature_data_columns = []
for n in range(no_of_rxns_thres):
smarts = (('Rxn' + str((n + 1))) + '_SMARTS')
rxn_delta_g = (('Rxn' + str((n + 1))) + '_DeltaG')
rxn_rule_score = (('Rxn' + str((n + 1))) + '_Rule_Score')
feature_data_columns.extend([smarts, rxn_delta_g, rxn_rule_score])
feature_data_columns.extend(['Pathway_Delta_G', 'Pathway_Flux', 'Pathway_Score', 'Round1'])
feature_data = pd_DataFrame(columns=feature_data_columns, index=None)
return feature_data
|
def feature_template_df(no_of_rxns_thres):
'Creates a template dataframe\n containing all the feature columns'
feature_data_columns = []
for n in range(no_of_rxns_thres):
smarts = (('Rxn' + str((n + 1))) + '_SMARTS')
rxn_delta_g = (('Rxn' + str((n + 1))) + '_DeltaG')
rxn_rule_score = (('Rxn' + str((n + 1))) + '_Rule_Score')
feature_data_columns.extend([smarts, rxn_delta_g, rxn_rule_score])
feature_data_columns.extend(['Pathway_Delta_G', 'Pathway_Flux', 'Pathway_Score', 'Round1'])
feature_data = pd_DataFrame(columns=feature_data_columns, index=None)
return feature_data<|docstring|>Creates a template dataframe
containing all the feature columns<|endoftext|>
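A minimal sketch of the template this function produces, assuming feature_template_df is importable from the rpScore module (which aliases pandas.DataFrame as pd_DataFrame); the column list in the comment follows directly from the code above.

template = feature_template_df(2)
print(list(template.columns))
# ['Rxn1_SMARTS', 'Rxn1_DeltaG', 'Rxn1_Rule_Score',
#  'Rxn2_SMARTS', 'Rxn2_DeltaG', 'Rxn2_Rule_Score',
#  'Pathway_Delta_G', 'Pathway_Flux', 'Pathway_Score', 'Round1']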
|
f9759a72c22247ad9db18771b7f36232bc35453e665d8f2ac5224b403723b628
|
def loop(i, temp, data):
'Returns the indices of all the reactions\n for a pathway in the dataset'
temp_list = []
break_index = None
flag = True
j = 1
for index in range(i, len(data)):
if ((temp == data.loc[(index, 'Pathway Name')]) and (data.loc[(index, 'Reaction')] == ('RP' + str(j)))):
j = (j + 1)
temp_list.append(index)
if ((index + 1) == len(data)):
flag = False
else:
break_index = index
break
return (temp_list, break_index, flag)
|
Returns the indices of all the reactions
for a pathway in the dataset
|
rptools/rpscore/rpScore.py
|
loop
|
brsynth/rpTools
| 4 |
python
|
def loop(i, temp, data):
'Returns the indices of all the reactions\n for a pathway in the dataset'
temp_list = []
break_index = None
flag = True
j = 1
for index in range(i, len(data)):
if ((temp == data.loc[(index, 'Pathway Name')]) and (data.loc[(index, 'Reaction')] == ('RP' + str(j)))):
j = (j + 1)
temp_list.append(index)
if ((index + 1) == len(data)):
flag = False
else:
break_index = index
break
return (temp_list, break_index, flag)
|
def loop(i, temp, data):
'Returns the indices of all the reactions\n for a pathway in the dataset'
temp_list = []
break_index = None
flag = True
j = 1
for index in range(i, len(data)):
if ((temp == data.loc[(index, 'Pathway Name')]) and (data.loc[(index, 'Reaction')] == ('RP' + str(j)))):
j = (j + 1)
temp_list.append(index)
if ((index + 1) == len(data)):
flag = False
else:
break_index = index
break
return (temp_list, break_index, flag)<|docstring|>Returns the indices of all the reactions
for a pathway in the dataset<|endoftext|>
|
fbcaebf9771e32f55f492abcf2441d02d8f98657de66770b07283c9717e4e30b
|
def pathways_index_list(data):
'Returns the indices of all the reactions\n for each of the pathways in dataset'
pathways_index_list = []
i = 0
flag = True
while flag:
temp = data.loc[(i, 'Pathway Name')]
(temp_list, i, flag) = loop(i, temp, data)
pathways_index_list.append(temp_list)
return pathways_index_list
|
Returns the indices of all the reactions
for each of the pathways in dataset
|
rptools/rpscore/rpScore.py
|
pathways_index_list
|
brsynth/rpTools
| 4 |
python
|
def pathways_index_list(data):
'Returns the indices of all the reactions\n for each of the pathways in dataset'
pathways_index_list = []
i = 0
flag = True
while flag:
temp = data.loc[(i, 'Pathway Name')]
(temp_list, i, flag) = loop(i, temp, data)
pathways_index_list.append(temp_list)
return pathways_index_list
|
def pathways_index_list(data):
'Returns the indices of all the reactions\n for each of the pathways in dataset'
pathways_index_list = []
i = 0
flag = True
while flag:
temp = data.loc[(i, 'Pathway Name')]
(temp_list, i, flag) = loop(i, temp, data)
pathways_index_list.append(temp_list)
return pathways_index_list<|docstring|>Returns the indices of all the reactions
for each of the pathways in the dataset<|endoftext|>
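A small sketch of pathways_index_list (which drives the loop helper above) on a toy reaction table; the column names match the ones the two functions read, and the expected grouping is shown in the comment. It assumes both functions are importable from the rpScore module.

import pandas as pd

data = pd.DataFrame({
    'Pathway Name': ['P1', 'P1', 'P2'],
    'Reaction':     ['RP1', 'RP2', 'RP1'],
})
print(pathways_index_list(data))  # [[0, 1], [2]]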
|
f44cd1513abc7e87df0503e850073a778e78915ff8573b7b07d3c502725050bd
|
def transform_into_pathway_features(data, scores, flag, no_of_rxns_thres):
'Generates the dataframe containing\n all the features.\n data and scores ate the 2 inputs files\n The reactions are represented in SMILES'
df = feature_template_df(no_of_rxns_thres)
pathways_list = pathways_index_list(data)
drop_list = []
print('Transforming into pathway features...')
for (count_p, rxn_list) in tqdm(enumerate(pathways_list)):
if (len(rxn_list) > 10):
drop_list.append(count_p)
continue
for (n, index) in enumerate(rxn_list):
smarts = (('Rxn' + str((n + 1))) + '_SMARTS')
rxn_delta_g = (('Rxn' + str((n + 1))) + '_DeltaG')
rxn_rule_score = (('Rxn' + str((n + 1))) + '_Rule_Score')
df.loc[(count_p, smarts)] = data.loc[(index, 'Reaction Rule')]
df.loc[(count_p, rxn_delta_g)] = data.loc[(index, 'Normalised dfG_prime_m')]
df.loc[(count_p, rxn_rule_score)] = data.loc[(index, 'Rule Score')]
df.loc[(count_p, 'Pathway_Delta_G')] = scores.loc[(count_p, 'dfG_prime_m')]
df.loc[(count_p, 'Pathway_Flux')] = float(scores.loc[(count_p, 'FBA Flux')].split(';')[1])
df.loc[(count_p, 'Pathway_Score')] = scores.loc[(count_p, 'Global Score')]
df.loc[(count_p, 'Lit')] = scores.loc[(count_p, 'Lit')]
df.loc[(count_p, 'Round1')] = scores.loc[(count_p, 'Round1')]
df = df.drop(drop_list)
df = df.fillna(0)
if flag:
df = df[(~ (df.Round1 < 0))]
df['Round1'][(df['Round1'] > 0)] = 1
df['Round1_OR'] = df['Round1']
df = shuffle(df, random_state=42).reset_index(drop=True)
for row in range(len(df)):
if (df.loc[(row, 'Lit')] == 1):
df.loc[(row, 'Round1_OR')] = 1
else:
df['Round1_OR'] = df['Round1']
return df
|
Generates the dataframe containing
all the features.
data and scores are the 2 input files
The reactions are represented in SMILES
|
rptools/rpscore/rpScore.py
|
transform_into_pathway_features
|
brsynth/rpTools
| 4 |
python
|
def transform_into_pathway_features(data, scores, flag, no_of_rxns_thres):
'Generates the dataframe containing\n all the features.\n data and scores ate the 2 inputs files\n The reactions are represented in SMILES'
df = feature_template_df(no_of_rxns_thres)
pathways_list = pathways_index_list(data)
drop_list = []
print('Transforming into pathway features...')
for (count_p, rxn_list) in tqdm(enumerate(pathways_list)):
if (len(rxn_list) > 10):
drop_list.append(count_p)
continue
for (n, index) in enumerate(rxn_list):
smarts = (('Rxn' + str((n + 1))) + '_SMARTS')
rxn_delta_g = (('Rxn' + str((n + 1))) + '_DeltaG')
rxn_rule_score = (('Rxn' + str((n + 1))) + '_Rule_Score')
df.loc[(count_p, smarts)] = data.loc[(index, 'Reaction Rule')]
df.loc[(count_p, rxn_delta_g)] = data.loc[(index, 'Normalised dfG_prime_m')]
df.loc[(count_p, rxn_rule_score)] = data.loc[(index, 'Rule Score')]
df.loc[(count_p, 'Pathway_Delta_G')] = scores.loc[(count_p, 'dfG_prime_m')]
df.loc[(count_p, 'Pathway_Flux')] = float(scores.loc[(count_p, 'FBA Flux')].split(';')[1])
df.loc[(count_p, 'Pathway_Score')] = scores.loc[(count_p, 'Global Score')]
df.loc[(count_p, 'Lit')] = scores.loc[(count_p, 'Lit')]
df.loc[(count_p, 'Round1')] = scores.loc[(count_p, 'Round1')]
df = df.drop(drop_list)
df = df.fillna(0)
if flag:
df = df[(~ (df.Round1 < 0))]
df['Round1'][(df['Round1'] > 0)] = 1
df['Round1_OR'] = df['Round1']
df = shuffle(df, random_state=42).reset_index(drop=True)
for row in range(len(df)):
if (df.loc[(row, 'Lit')] == 1):
df.loc[(row, 'Round1_OR')] = 1
else:
df['Round1_OR'] = df['Round1']
return df
|
def transform_into_pathway_features(data, scores, flag, no_of_rxns_thres):
'Generates the dataframe containing\n all the features.\n data and scores ate the 2 inputs files\n The reactions are represented in SMILES'
df = feature_template_df(no_of_rxns_thres)
pathways_list = pathways_index_list(data)
drop_list = []
print('Transforming into pathway features...')
for (count_p, rxn_list) in tqdm(enumerate(pathways_list)):
if (len(rxn_list) > 10):
drop_list.append(count_p)
continue
for (n, index) in enumerate(rxn_list):
smarts = (('Rxn' + str((n + 1))) + '_SMARTS')
rxn_delta_g = (('Rxn' + str((n + 1))) + '_DeltaG')
rxn_rule_score = (('Rxn' + str((n + 1))) + '_Rule_Score')
df.loc[(count_p, smarts)] = data.loc[(index, 'Reaction Rule')]
df.loc[(count_p, rxn_delta_g)] = data.loc[(index, 'Normalised dfG_prime_m')]
df.loc[(count_p, rxn_rule_score)] = data.loc[(index, 'Rule Score')]
df.loc[(count_p, 'Pathway_Delta_G')] = scores.loc[(count_p, 'dfG_prime_m')]
df.loc[(count_p, 'Pathway_Flux')] = float(scores.loc[(count_p, 'FBA Flux')].split(';')[1])
df.loc[(count_p, 'Pathway_Score')] = scores.loc[(count_p, 'Global Score')]
df.loc[(count_p, 'Lit')] = scores.loc[(count_p, 'Lit')]
df.loc[(count_p, 'Round1')] = scores.loc[(count_p, 'Round1')]
df = df.drop(drop_list)
df = df.fillna(0)
if flag:
df = df[(~ (df.Round1 < 0))]
df['Round1'][(df['Round1'] > 0)] = 1
df['Round1_OR'] = df['Round1']
df = shuffle(df, random_state=42).reset_index(drop=True)
for row in range(len(df)):
if (df.loc[(row, 'Lit')] == 1):
df.loc[(row, 'Round1_OR')] = 1
else:
df['Round1_OR'] = df['Round1']
return df<|docstring|>Generates the dataframe containing
all the features.
data and scores are the 2 input files
The reactions are represented in SMILES<|endoftext|>
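A hedged sketch of the expected inputs: data holds one row per reaction and scores one row per pathway, in matching pathway order. The column names are taken from the code above, the values are made up, and flag=False keeps the label column as-is without shuffling. It assumes the function (with its module-level imports such as tqdm) is in scope.

import pandas as pd

data = pd.DataFrame({
    'Pathway Name':           ['P1', 'P1'],
    'Reaction':               ['RP1', 'RP2'],
    'Reaction Rule':          ['CCO>>CC=O', 'CC=O>>CC(=O)O'],
    'Normalised dfG_prime_m': [-0.5, -0.3],
    'Rule Score':             [0.9, 0.8],
})
scores = pd.DataFrame({
    'dfG_prime_m':  [-12.4],
    'FBA Flux':     ['biomass;0.85'],
    'Global Score': [0.72],
    'Lit':          [0],
    'Round1':       [1],
})
features = transform_into_pathway_features(data, scores, flag=False, no_of_rxns_thres=10)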
|
5e783be91aa23557e1639a57c543dfe9465ad9cd719c61446e3331813827e717
|
def features_encoding(df, flag):
'Creates a HDF5 file containing\n all the features\n Rnx features are encoded in fingerprints'
no_of_rxns = 10
fp_len = 4096
rxn_len = (fp_len + 2)
pathway_len = 3
y_len = 1
if (flag == 'train'):
sys_exit('Encoding feature for training data not available file data_train.h5 must be present in models folder')
print('Encodining features for the Test set......')
f = h5py_File(NamedTemporaryFile(delete=True), 'w')
number = (((rxn_len * no_of_rxns) + pathway_len) + y_len)
dset = f.create_dataset('data', (0, number), dtype='i2', maxshape=(None, number), compression='gzip')
for row in tqdm(range(len(df))):
pathway_rxns = np.array([]).reshape(0, (rxn_len * no_of_rxns))
rxns_list = []
for rxn_no_ in range(no_of_rxns):
rxn_smiles_index = (rxn_no_ * 3)
rxn_dg_index = (((rxn_no_ + 1) * 3) - 2)
rxn_rule_score_index = (((rxn_no_ + 1) * 3) - 1)
if (str(df.iloc[(row, rxn_smiles_index)]) != '0'):
rxn_smiles = df.iloc[(row, rxn_smiles_index)]
rxn_smiles_list = rxn_smiles.split('>>')
if (len(rxn_smiles_list) == 2):
sub_smiles = rxn_smiles_list[0]
sub_m = Chem.MolFromSmiles(sub_smiles)
sub_fp = AllChem.GetMorganFingerprintAsBitVect(sub_m, 2, nBits=2048)
sub_arr = np.array([])
DataStructs.ConvertToNumpyArray(sub_fp, sub_arr)
sub_fp = sub_arr.reshape(1, (- 1))
pro_smiles = rxn_smiles_list[1]
pro_m = Chem.MolFromSmiles(pro_smiles)
pro_fp = AllChem.GetMorganFingerprintAsBitVect(pro_m, 2, nBits=2048)
pro_arr = np.zeros((1,))
DataStructs.ConvertToNumpyArray(pro_fp, pro_arr)
pro_fp = pro_arr.reshape(1, (- 1))
rxn_fp = np.concatenate([sub_fp, pro_fp]).reshape(1, (- 1))
elif (len(rxn_smiles_list) < 2):
pro_smiles = rxn_smiles_list[0]
pro_m = Chem.MolFromSmiles(pro_smiles)
pro_fp = AllChem.GetMorganFingerprintAsBitVect(pro_m, 2, nBits=fp_len)
pro_arr = np.zeros((1,))
DataStructs.ConvertToNumpyArray(pro_fp, pro_arr)
rxn_fp = pro_arr.reshape(1, (- 1))
else:
print('There is a problem with the number of components in the reaction')
else:
rxn_fp = np.zeros(fp_len).reshape(1, (- 1))
rxn_dg = df.iloc[(row, rxn_dg_index)].reshape(1, (- 1))
rxn_rule_score = df.iloc[(row, rxn_rule_score_index)].reshape(1, (- 1))
rxns_list.extend([rxn_fp, rxn_dg, rxn_rule_score])
pathway_rxns = np.concatenate(rxns_list, axis=1).reshape(1, (- 1))
pathway_dg = df.loc[(row, 'Pathway_Delta_G')].reshape(1, (- 1))
pathway_flux = df.loc[(row, 'Pathway_Flux')].reshape(1, (- 1))
pathway_score = df.loc[(row, 'Pathway_Score')].reshape(1, (- 1))
pathway_y = df.loc[(row, 'Round1_OR')].reshape(1, (- 1))
feature = np.concatenate((pathway_rxns, pathway_dg, pathway_flux, pathway_score, pathway_y), axis=1)
dset.resize((dset.shape[0] + feature.shape[0]), axis=0)
dset[(- feature.shape[0]):] = feature
return dset
|
Creates an HDF5 file containing
all the features
Rxn features are encoded in fingerprints
|
rptools/rpscore/rpScore.py
|
features_encoding
|
brsynth/rpTools
| 4 |
python
|
def features_encoding(df, flag):
'Creates a HDF5 file containing\n all the features\n Rnx features are encoded in fingerprints'
no_of_rxns = 10
fp_len = 4096
rxn_len = (fp_len + 2)
pathway_len = 3
y_len = 1
if (flag == 'train'):
sys_exit('Encoding feature for training data not available file data_train.h5 must be present in models folder')
print('Encodining features for the Test set......')
f = h5py_File(NamedTemporaryFile(delete=True), 'w')
number = (((rxn_len * no_of_rxns) + pathway_len) + y_len)
dset = f.create_dataset('data', (0, number), dtype='i2', maxshape=(None, number), compression='gzip')
for row in tqdm(range(len(df))):
pathway_rxns = np.array([]).reshape(0, (rxn_len * no_of_rxns))
rxns_list = []
for rxn_no_ in range(no_of_rxns):
rxn_smiles_index = (rxn_no_ * 3)
rxn_dg_index = (((rxn_no_ + 1) * 3) - 2)
rxn_rule_score_index = (((rxn_no_ + 1) * 3) - 1)
if (str(df.iloc[(row, rxn_smiles_index)]) != '0'):
rxn_smiles = df.iloc[(row, rxn_smiles_index)]
rxn_smiles_list = rxn_smiles.split('>>')
if (len(rxn_smiles_list) == 2):
sub_smiles = rxn_smiles_list[0]
sub_m = Chem.MolFromSmiles(sub_smiles)
sub_fp = AllChem.GetMorganFingerprintAsBitVect(sub_m, 2, nBits=2048)
sub_arr = np.array([])
DataStructs.ConvertToNumpyArray(sub_fp, sub_arr)
sub_fp = sub_arr.reshape(1, (- 1))
pro_smiles = rxn_smiles_list[1]
pro_m = Chem.MolFromSmiles(pro_smiles)
pro_fp = AllChem.GetMorganFingerprintAsBitVect(pro_m, 2, nBits=2048)
pro_arr = np.zeros((1,))
DataStructs.ConvertToNumpyArray(pro_fp, pro_arr)
pro_fp = pro_arr.reshape(1, (- 1))
rxn_fp = np.concatenate([sub_fp, pro_fp]).reshape(1, (- 1))
elif (len(rxn_smiles_list) < 2):
pro_smiles = rxn_smiles_list[0]
pro_m = Chem.MolFromSmiles(pro_smiles)
pro_fp = AllChem.GetMorganFingerprintAsBitVect(pro_m, 2, nBits=fp_len)
pro_arr = np.zeros((1,))
DataStructs.ConvertToNumpyArray(pro_fp, pro_arr)
rxn_fp = pro_arr.reshape(1, (- 1))
else:
print('There is a problem with the number of components in the reaction')
else:
rxn_fp = np.zeros(fp_len).reshape(1, (- 1))
rxn_dg = df.iloc[(row, rxn_dg_index)].reshape(1, (- 1))
rxn_rule_score = df.iloc[(row, rxn_rule_score_index)].reshape(1, (- 1))
rxns_list.extend([rxn_fp, rxn_dg, rxn_rule_score])
pathway_rxns = np.concatenate(rxns_list, axis=1).reshape(1, (- 1))
pathway_dg = df.loc[(row, 'Pathway_Delta_G')].reshape(1, (- 1))
pathway_flux = df.loc[(row, 'Pathway_Flux')].reshape(1, (- 1))
pathway_score = df.loc[(row, 'Pathway_Score')].reshape(1, (- 1))
pathway_y = df.loc[(row, 'Round1_OR')].reshape(1, (- 1))
feature = np.concatenate((pathway_rxns, pathway_dg, pathway_flux, pathway_score, pathway_y), axis=1)
dset.resize((dset.shape[0] + feature.shape[0]), axis=0)
dset[(- feature.shape[0]):] = feature
return dset
|
def features_encoding(df, flag):
'Creates a HDF5 file containing\n all the features\n Rnx features are encoded in fingerprints'
no_of_rxns = 10
fp_len = 4096
rxn_len = (fp_len + 2)
pathway_len = 3
y_len = 1
if (flag == 'train'):
sys_exit('Encoding feature for training data not available file data_train.h5 must be present in models folder')
print('Encodining features for the Test set......')
f = h5py_File(NamedTemporaryFile(delete=True), 'w')
number = (((rxn_len * no_of_rxns) + pathway_len) + y_len)
dset = f.create_dataset('data', (0, number), dtype='i2', maxshape=(None, number), compression='gzip')
for row in tqdm(range(len(df))):
pathway_rxns = np.array([]).reshape(0, (rxn_len * no_of_rxns))
rxns_list = []
for rxn_no_ in range(no_of_rxns):
rxn_smiles_index = (rxn_no_ * 3)
rxn_dg_index = (((rxn_no_ + 1) * 3) - 2)
rxn_rule_score_index = (((rxn_no_ + 1) * 3) - 1)
if (str(df.iloc[(row, rxn_smiles_index)]) != '0'):
rxn_smiles = df.iloc[(row, rxn_smiles_index)]
rxn_smiles_list = rxn_smiles.split('>>')
if (len(rxn_smiles_list) == 2):
sub_smiles = rxn_smiles_list[0]
sub_m = Chem.MolFromSmiles(sub_smiles)
sub_fp = AllChem.GetMorganFingerprintAsBitVect(sub_m, 2, nBits=2048)
sub_arr = np.array([])
DataStructs.ConvertToNumpyArray(sub_fp, sub_arr)
sub_fp = sub_arr.reshape(1, (- 1))
pro_smiles = rxn_smiles_list[1]
pro_m = Chem.MolFromSmiles(pro_smiles)
pro_fp = AllChem.GetMorganFingerprintAsBitVect(pro_m, 2, nBits=2048)
pro_arr = np.zeros((1,))
DataStructs.ConvertToNumpyArray(pro_fp, pro_arr)
pro_fp = pro_arr.reshape(1, (- 1))
rxn_fp = np.concatenate([sub_fp, pro_fp]).reshape(1, (- 1))
elif (len(rxn_smiles_list) < 2):
pro_smiles = rxn_smiles_list[0]
pro_m = Chem.MolFromSmiles(pro_smiles)
pro_fp = AllChem.GetMorganFingerprintAsBitVect(pro_m, 2, nBits=fp_len)
pro_arr = np.zeros((1,))
DataStructs.ConvertToNumpyArray(pro_fp, pro_arr)
rxn_fp = pro_arr.reshape(1, (- 1))
else:
print('There is a problem with the number of components in the reaction')
else:
rxn_fp = np.zeros(fp_len).reshape(1, (- 1))
rxn_dg = df.iloc[(row, rxn_dg_index)].reshape(1, (- 1))
rxn_rule_score = df.iloc[(row, rxn_rule_score_index)].reshape(1, (- 1))
rxns_list.extend([rxn_fp, rxn_dg, rxn_rule_score])
pathway_rxns = np.concatenate(rxns_list, axis=1).reshape(1, (- 1))
pathway_dg = df.loc[(row, 'Pathway_Delta_G')].reshape(1, (- 1))
pathway_flux = df.loc[(row, 'Pathway_Flux')].reshape(1, (- 1))
pathway_score = df.loc[(row, 'Pathway_Score')].reshape(1, (- 1))
pathway_y = df.loc[(row, 'Round1_OR')].reshape(1, (- 1))
feature = np.concatenate((pathway_rxns, pathway_dg, pathway_flux, pathway_score, pathway_y), axis=1)
dset.resize((dset.shape[0] + feature.shape[0]), axis=0)
dset[(- feature.shape[0]):] = feature
    return dset<|docstring|>Creates an HDF5 file containing
all the features
Rxn features are encoded in fingerprints<|endoftext|>
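A brief usage sketch continuing from the previous example: passing anything other than 'train' selects the prediction branch, and the expected dataset width follows from the constants in the code (10 * (4096 + 2) + 3 + 1 = 40984 columns). It assumes RDKit, h5py and numpy are installed as in the source module, and that the pathway frame was produced by the full rpScore pipeline.

dset = features_encoding(features, 'test')
print(dset.shape)  # expected: (n_pathways, 40984)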
|
62461e38a6ff41a7e5f41d950ec670cddb95ae3166eecd309a5492fa05aed3e5
|
def transform_to_matrix(dset, model_file):
"'Transforms the prediction dataset into\n an appropriate matrix which is suitable for\n XGBoost"
    X_test = dset[:, :(- 1)]
    Y_test = dset[:, (- 1)]
if (not os_path.exists(model_file)):
sys_exit(f'{model_file} not found')
else:
trained_model = pickle_load(open(model_file, 'rb'))
dset_matrix = DMatrix(X_test, label=Y_test)
trained_model_score = trained_model.predict(dset_matrix)
        trained_model_score_1 = trained_model_score[:, 1].reshape((- 1), 1)
X_test = np.concatenate((X_test, trained_model_score_1), axis=1)
dset_matrix = DMatrix(X_test)
return dset_matrix
|
Transforms the prediction dataset into
an appropriate matrix which is suitable for
XGBoost
|
rptools/rpscore/rpScore.py
|
transform_to_matrix
|
brsynth/rpTools
| 4 |
python
|
def transform_to_matrix(dset, model_file):
"'Transforms the prediction dataset into\n an appropriate matrix which is suitable for\n XGBoost"
    X_test = dset[:, :(- 1)]
    Y_test = dset[:, (- 1)]
if (not os_path.exists(model_file)):
sys_exit(f'{model_file} not found')
else:
trained_model = pickle_load(open(model_file, 'rb'))
dset_matrix = DMatrix(X_test, label=Y_test)
trained_model_score = trained_model.predict(dset_matrix)
        trained_model_score_1 = trained_model_score[:, 1].reshape((- 1), 1)
X_test = np.concatenate((X_test, trained_model_score_1), axis=1)
dset_matrix = DMatrix(X_test)
return dset_matrix
|
def transform_to_matrix(dset, model_file):
"'Transforms the prediction dataset into\n an appropriate matrix which is suitable for\n XGBoost"
    X_test = dset[:, :(- 1)]
    Y_test = dset[:, (- 1)]
if (not os_path.exists(model_file)):
sys_exit(f'{model_file} not found')
else:
trained_model = pickle_load(open(model_file, 'rb'))
dset_matrix = DMatrix(X_test, label=Y_test)
trained_model_score = trained_model.predict(dset_matrix)
        trained_model_score_1 = trained_model_score[:, 1].reshape((- 1), 1)
X_test = np.concatenate((X_test, trained_model_score_1), axis=1)
dset_matrix = DMatrix(X_test)
    return dset_matrix<|docstring|>Transforms the prediction dataset into
an appropriate matrix which is suitable for
XGBoost<|endoftext|>
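A short sketch chaining the encoded features into the XGBoost DMatrix; the pickled first-round model path is a placeholder. Note the design: the first model's positive-class probability is appended as an extra feature column, so the returned matrix is meant to feed a second, stacked classifier.

dmatrix = transform_to_matrix(dset, 'models/model_first_round.pkl')  # path is a placeholder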
|
a76cded193f75f48b51490bd006db2026fa8affdff13f8c49cb6e5983205c33f
|
def log_entrance_exit(func):
'\n Decorator to log the entrance and exit of method calls.\n\n Args:\n func: The function to be wrapped.\n\n Returns:\n Wrapped function.\n\n '
@functools.wraps(func)
def wrapper(*args, **kwargs):
logger.info('Entering %s', func.__name__)
t0 = timer()
try:
f_result = func(*args, **kwargs)
except Exception:
logger.exception('Unhandled exception occurred.')
raise
logger.info('Exiting %s [%s]', func.__name__, '{0:.4f}s'.format((timer() - t0)))
return f_result
return wrapper
|
Decorator to log the entrance and exit of method calls.
Args:
func: The function to be wrapped.
Returns:
Wrapped function.
|
helios/utilities/logging_utils.py
|
log_entrance_exit
|
harris-helios/helios-sdk-python
| 3 |
python
|
def log_entrance_exit(func):
'\n Decorator to log the entrance and exit of method calls.\n\n Args:\n func: The function to be wrapped.\n\n Returns:\n Wrapped function.\n\n '
@functools.wraps(func)
def wrapper(*args, **kwargs):
logger.info('Entering %s', func.__name__)
t0 = timer()
try:
f_result = func(*args, **kwargs)
except Exception:
logger.exception('Unhandled exception occurred.')
raise
logger.info('Exiting %s [%s]', func.__name__, '{0:.4f}s'.format((timer() - t0)))
return f_result
return wrapper
|
def log_entrance_exit(func):
'\n Decorator to log the entrance and exit of method calls.\n\n Args:\n func: The function to be wrapped.\n\n Returns:\n Wrapped function.\n\n '
@functools.wraps(func)
def wrapper(*args, **kwargs):
logger.info('Entering %s', func.__name__)
t0 = timer()
try:
f_result = func(*args, **kwargs)
except Exception:
logger.exception('Unhandled exception occurred.')
raise
logger.info('Exiting %s [%s]', func.__name__, '{0:.4f}s'.format((timer() - t0)))
return f_result
return wrapper<|docstring|>Decorator to log the entrance and exit of method calls.
Args:
func: The function to be wrapped.
Returns:
Wrapped function.<|endoftext|>
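A minimal usage sketch of the decorator, assuming it is importable from the module listed above and that logging is configured so the INFO messages are emitted.

import logging
from helios.utilities.logging_utils import log_entrance_exit

logging.basicConfig(level=logging.INFO)

@log_entrance_exit
def add(a, b):
    return a + b

add(2, 3)
# logs something like: Entering add
#                      Exiting add [0.0000s]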
|
f14f5150508151543cadf42963327329ef83e73e44fce59731859e5e069e4606
|
@property
def BgpEvpnJoinSynchIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76.BgpEvpnJoinSynchIgmp): An instance of the BgpEvpnJoinSynchIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76 import BgpEvpnJoinSynchIgmp
if (self._properties.get('BgpEvpnJoinSynchIgmp', None) is not None):
return self._properties.get('BgpEvpnJoinSynchIgmp')
else:
return BgpEvpnJoinSynchIgmp(self)._select()
|
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76.BgpEvpnJoinSynchIgmp): An instance of the BgpEvpnJoinSynchIgmp class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BgpEvpnJoinSynchIgmp
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BgpEvpnJoinSynchIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76.BgpEvpnJoinSynchIgmp): An instance of the BgpEvpnJoinSynchIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76 import BgpEvpnJoinSynchIgmp
if (self._properties.get('BgpEvpnJoinSynchIgmp', None) is not None):
return self._properties.get('BgpEvpnJoinSynchIgmp')
else:
return BgpEvpnJoinSynchIgmp(self)._select()
|
@property
def BgpEvpnJoinSynchIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76.BgpEvpnJoinSynchIgmp): An instance of the BgpEvpnJoinSynchIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76 import BgpEvpnJoinSynchIgmp
if (self._properties.get('BgpEvpnJoinSynchIgmp', None) is not None):
return self._properties.get('BgpEvpnJoinSynchIgmp')
else:
return BgpEvpnJoinSynchIgmp(self)._select()<|docstring|>Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchigmp_f89f38fca85b1442229391afe5b95e76.BgpEvpnJoinSynchIgmp): An instance of the BgpEvpnJoinSynchIgmp class
Raises
------
- ServerError: The server has encountered an uncategorized error condition<|endoftext|>
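A hedged access sketch: broadcast_domain is assumed to be a BroadcastDomainV4 node already retrieved through an ixnetwork_restpy session, since the traversal to reach it is not shown here. The property either returns the cached child or selects it from the server, as the code above shows; the same pattern applies to the three sibling properties in the records that follow.

join_synch_igmp = broadcast_domain.BgpEvpnJoinSynchIgmp  # triggers a _select() on first access
print(join_synch_igmp.href)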
|
05746c732b6cde40884317f59fd23ff7bbeee6bfe9a7235050aad0aa680dd6b6
|
@property
def BgpEvpnJoinSynchMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d.BgpEvpnJoinSynchMld): An instance of the BgpEvpnJoinSynchMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d import BgpEvpnJoinSynchMld
if (self._properties.get('BgpEvpnJoinSynchMld', None) is not None):
return self._properties.get('BgpEvpnJoinSynchMld')
else:
return BgpEvpnJoinSynchMld(self)._select()
|
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d.BgpEvpnJoinSynchMld): An instance of the BgpEvpnJoinSynchMld class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BgpEvpnJoinSynchMld
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BgpEvpnJoinSynchMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d.BgpEvpnJoinSynchMld): An instance of the BgpEvpnJoinSynchMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d import BgpEvpnJoinSynchMld
if (self._properties.get('BgpEvpnJoinSynchMld', None) is not None):
return self._properties.get('BgpEvpnJoinSynchMld')
else:
return BgpEvpnJoinSynchMld(self)._select()
|
@property
def BgpEvpnJoinSynchMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d.BgpEvpnJoinSynchMld): An instance of the BgpEvpnJoinSynchMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d import BgpEvpnJoinSynchMld
if (self._properties.get('BgpEvpnJoinSynchMld', None) is not None):
return self._properties.get('BgpEvpnJoinSynchMld')
else:
return BgpEvpnJoinSynchMld(self)._select()<|docstring|>Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnjoinsynchmld_4f5a831aa8e923cbdbff69a4f078837d.BgpEvpnJoinSynchMld): An instance of the BgpEvpnJoinSynchMld class
Raises
------
- ServerError: The server has encountered an uncategorized error condition<|endoftext|>
|
d43ec4f2fffd99f8e6ff1503f5d5c89a34f66e0487f9d813378c9a335855b14b
|
@property
def BgpEvpnLeaveSynchIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e.BgpEvpnLeaveSynchIgmp): An instance of the BgpEvpnLeaveSynchIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e import BgpEvpnLeaveSynchIgmp
if (self._properties.get('BgpEvpnLeaveSynchIgmp', None) is not None):
return self._properties.get('BgpEvpnLeaveSynchIgmp')
else:
return BgpEvpnLeaveSynchIgmp(self)._select()
|
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e.BgpEvpnLeaveSynchIgmp): An instance of the BgpEvpnLeaveSynchIgmp class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BgpEvpnLeaveSynchIgmp
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BgpEvpnLeaveSynchIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e.BgpEvpnLeaveSynchIgmp): An instance of the BgpEvpnLeaveSynchIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e import BgpEvpnLeaveSynchIgmp
if (self._properties.get('BgpEvpnLeaveSynchIgmp', None) is not None):
return self._properties.get('BgpEvpnLeaveSynchIgmp')
else:
return BgpEvpnLeaveSynchIgmp(self)._select()
|
@property
def BgpEvpnLeaveSynchIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e.BgpEvpnLeaveSynchIgmp): An instance of the BgpEvpnLeaveSynchIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e import BgpEvpnLeaveSynchIgmp
if (self._properties.get('BgpEvpnLeaveSynchIgmp', None) is not None):
return self._properties.get('BgpEvpnLeaveSynchIgmp')
else:
return BgpEvpnLeaveSynchIgmp(self)._select()<|docstring|>Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchigmp_411f258090ec14c0d716cabc5159977e.BgpEvpnLeaveSynchIgmp): An instance of the BgpEvpnLeaveSynchIgmp class
Raises
------
- ServerError: The server has encountered an uncategorized error condition<|endoftext|>
|
62bc6b0a7e41d09f163ddf52c00324b931db761bcabdf22007a022e23c0d1459
|
@property
def BgpEvpnLeaveSynchMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a.BgpEvpnLeaveSynchMld): An instance of the BgpEvpnLeaveSynchMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a import BgpEvpnLeaveSynchMld
if (self._properties.get('BgpEvpnLeaveSynchMld', None) is not None):
return self._properties.get('BgpEvpnLeaveSynchMld')
else:
return BgpEvpnLeaveSynchMld(self)._select()
|
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a.BgpEvpnLeaveSynchMld): An instance of the BgpEvpnLeaveSynchMld class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BgpEvpnLeaveSynchMld
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BgpEvpnLeaveSynchMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a.BgpEvpnLeaveSynchMld): An instance of the BgpEvpnLeaveSynchMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a import BgpEvpnLeaveSynchMld
if (self._properties.get('BgpEvpnLeaveSynchMld', None) is not None):
return self._properties.get('BgpEvpnLeaveSynchMld')
else:
return BgpEvpnLeaveSynchMld(self)._select()
|
@property
def BgpEvpnLeaveSynchMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a.BgpEvpnLeaveSynchMld): An instance of the BgpEvpnLeaveSynchMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a import BgpEvpnLeaveSynchMld
if (self._properties.get('BgpEvpnLeaveSynchMld', None) is not None):
return self._properties.get('BgpEvpnLeaveSynchMld')
else:
return BgpEvpnLeaveSynchMld(self)._select()<|docstring|>Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnleavesynchmld_226fbb8fe75f87a6460aecae872f059a.BgpEvpnLeaveSynchMld): An instance of the BgpEvpnLeaveSynchMld class
Raises
------
- ServerError: The server has encountered an uncategorized error condition<|endoftext|>
|
d0e73b24f58316f44f530687fb953e2521d3f2954fb6055f168d659793c7c456
|
@property
def BgpEvpnSmetIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb.BgpEvpnSmetIgmp): An instance of the BgpEvpnSmetIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb import BgpEvpnSmetIgmp
if (self._properties.get('BgpEvpnSmetIgmp', None) is not None):
return self._properties.get('BgpEvpnSmetIgmp')
else:
return BgpEvpnSmetIgmp(self)._select()
|
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb.BgpEvpnSmetIgmp): An instance of the BgpEvpnSmetIgmp class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BgpEvpnSmetIgmp
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BgpEvpnSmetIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb.BgpEvpnSmetIgmp): An instance of the BgpEvpnSmetIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb import BgpEvpnSmetIgmp
if (self._properties.get('BgpEvpnSmetIgmp', None) is not None):
return self._properties.get('BgpEvpnSmetIgmp')
else:
return BgpEvpnSmetIgmp(self)._select()
|
@property
def BgpEvpnSmetIgmp(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb.BgpEvpnSmetIgmp): An instance of the BgpEvpnSmetIgmp class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb import BgpEvpnSmetIgmp
if (self._properties.get('BgpEvpnSmetIgmp', None) is not None):
return self._properties.get('BgpEvpnSmetIgmp')
else:
return BgpEvpnSmetIgmp(self)._select()<|docstring|>Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetigmp_68fa5fa63ce581945025c1253038bccb.BgpEvpnSmetIgmp): An instance of the BgpEvpnSmetIgmp class
Raises
------
- ServerError: The server has encountered an uncategorized error condition<|endoftext|>
|
f757abc68ee1b2bea661e37e0fc13e76e89c41dff215d1b64d3808924b968d41
|
@property
def BgpEvpnSmetMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a.BgpEvpnSmetMld): An instance of the BgpEvpnSmetMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a import BgpEvpnSmetMld
if (self._properties.get('BgpEvpnSmetMld', None) is not None):
return self._properties.get('BgpEvpnSmetMld')
else:
return BgpEvpnSmetMld(self)._select()
|
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a.BgpEvpnSmetMld): An instance of the BgpEvpnSmetMld class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BgpEvpnSmetMld
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BgpEvpnSmetMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a.BgpEvpnSmetMld): An instance of the BgpEvpnSmetMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a import BgpEvpnSmetMld
if (self._properties.get('BgpEvpnSmetMld', None) is not None):
return self._properties.get('BgpEvpnSmetMld')
else:
return BgpEvpnSmetMld(self)._select()
|
@property
def BgpEvpnSmetMld(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a.BgpEvpnSmetMld): An instance of the BgpEvpnSmetMld class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a import BgpEvpnSmetMld
if (self._properties.get('BgpEvpnSmetMld', None) is not None):
return self._properties.get('BgpEvpnSmetMld')
else:
return BgpEvpnSmetMld(self)._select()<|docstring|>Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.bgpevpnsmetmld_8d81cf97f583ad4547c03c6110c5168a.BgpEvpnSmetMld): An instance of the BgpEvpnSmetMld class
Raises
------
- ServerError: The server has encountered an uncategorized error condition<|endoftext|>
|
1bbef29c3b6d51d665284224202ab8559f679173129132becb1a91f30b75a1f3
|
@property
def PnTLVList(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57.PnTLVList): An instance of the PnTLVList class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57 import PnTLVList
if (self._properties.get('PnTLVList', None) is not None):
return self._properties.get('PnTLVList')
else:
return PnTLVList(self)
|
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57.PnTLVList): An instance of the PnTLVList class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
PnTLVList
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def PnTLVList(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57.PnTLVList): An instance of the PnTLVList class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57 import PnTLVList
if (self._properties.get('PnTLVList', None) is not None):
return self._properties.get('PnTLVList')
else:
return PnTLVList(self)
|
@property
def PnTLVList(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57.PnTLVList): An instance of the PnTLVList class\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57 import PnTLVList
if (self._properties.get('PnTLVList', None) is not None):
return self._properties.get('PnTLVList')
else:
return PnTLVList(self)<|docstring|>Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.pntlvlist_f29efa99695d122f75b5efd68698cd57.PnTLVList): An instance of the PnTLVList class
Raises
------
- ServerError: The server has encountered an uncategorized error condition<|endoftext|>
|
d85ced5ab7c85250a140f0b4979ab964269eba02450abae5203a254082cda050
|
@property
def Active(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): Activate/Deactivate Configuration.\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))
|
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Activate/Deactivate Configuration.
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
Active
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def Active(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): Activate/Deactivate Configuration.\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))
|
@property
def Active(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): Activate/Deactivate Configuration.\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))<|docstring|>Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Activate/Deactivate Configuration.<|endoftext|>
|
948509925c0d2d8b2999abf271c815ec2dfc487cd3a5cccd07dc78ad61b285b7
|
@property
def AdRouteLabel(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AD Route Label\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AdRouteLabel']))
|
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): AD Route Label
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
AdRouteLabel
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def AdRouteLabel(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AD Route Label\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AdRouteLabel']))
|
@property
def AdRouteLabel(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AD Route Label\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AdRouteLabel']))<|docstring|>Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): AD Route Label<|endoftext|>
|
00f4e6f5c40e77357230924b21b9c31264b210da6daa62c22bfee9565a16201f
|
@property
def AsNumber2Bytes(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AS 2-Bytes\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AsNumber2Bytes']))
|
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): AS 2-Bytes
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
AsNumber2Bytes
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def AsNumber2Bytes(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AS 2-Bytes\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AsNumber2Bytes']))
|
@property
def AsNumber2Bytes(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AS 2-Bytes\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AsNumber2Bytes']))<|docstring|>Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): AS 2-Bytes<|endoftext|>
|
bc890691b0d77455ef58d7eea42a0cc019dcc5c8f4809408303c7b6a31a1a797
|
@property
def AsNumber4Bytes(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AS 4-Bytes\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AsNumber4Bytes']))
|
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): AS 4-Bytes
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
AsNumber4Bytes
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def AsNumber4Bytes(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AS 4-Bytes\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AsNumber4Bytes']))
|
@property
def AsNumber4Bytes(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): AS 4-Bytes\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AsNumber4Bytes']))<|docstring|>Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): AS 4-Bytes<|endoftext|>
|
f303ae99832c8ebf588b375be279679fb68d85cacc6b99d4e16394b92b1be4ff
|
@property
def BVlanId(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN ID\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BVlanId']))
|
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN ID
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BVlanId
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BVlanId(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN ID\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BVlanId']))
|
@property
def BVlanId(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN ID\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BVlanId']))<|docstring|>Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN ID<|endoftext|>
|
c0161e16efe7391776cc06b96ec427e39cf7ab1bec9ebb9011e9779618bd08f0
|
@property
def BVlanPriority(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN Priority\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BVlanPriority']))
|
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN Priority
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/broadcastdomainv4_ada8e5062c0947bde8a3de0fc7b9d534.py
|
BVlanPriority
|
OpenIxia/ixnetwork_restpy
| 20 |
python
|
@property
def BVlanPriority(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN Priority\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BVlanPriority']))
|
@property
def BVlanPriority(self):
'\n Returns\n -------\n - obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN Priority\n '
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BVlanPriority']))<|docstring|>Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): B VLAN Priority<|endoftext|>
|