body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
7b3b156b65ea9e2a096ef8fbc33275f8a80977cfa0323b7b098f1525ea56e03e
@property def title(self): 'Obtain the title of the Relat' return self.story.title
Obtain the title of the Relat
relaty/relat.py
title
PaPablo/relaty
0
python
@property def title(self): return self.story.title
@property def title(self): return self.story.title<|docstring|>Obtain the title of the Relat<|endoftext|>
61f008f0e7e284c8897781d197f3610923a240a5470b807e96622b2271d15cb7
@property def screens(self): 'Access the initial screens of the Relat' return self.story.screens
Access the initial screens of the Relat
relaty/relat.py
screens
PaPablo/relaty
0
python
@property def screens(self): return self.story.screens
@property def screens(self): return self.story.screens<|docstring|>Access the initial screens of the Relat<|endoftext|>
9e3e796b0edd68765413f3b4bda387a238fe4773a9e064e4da969ea32dd2fb0d
@property def options(self): 'Access the root-level options of the Relat' return self.story.options
Access the root-level options of the Relat
relaty/relat.py
options
PaPablo/relaty
0
python
@property def options(self): return self.story.options
@property def options(self): return self.story.options<|docstring|>Access the root-level options of the Relat<|endoftext|>
831909f851d1b315835f0d6b7953774bf7d4aabdd82dc5a290f6558f8638b595
def add_screen(self, screen: str): 'Adds a screen at the end of the initial screens' self.story.add_screen(screen)
Adds a screen at the end of the initial screens
relaty/relat.py
add_screen
PaPablo/relaty
0
python
def add_screen(self, screen: str): self.story.add_screen(screen)
def add_screen(self, screen: str): self.story.add_screen(screen)<|docstring|>Adds a screen at the end of the initial screens<|endoftext|>
a4b2c7f0a5ae118945db0c1eaa0a4f74a1c88611ccec59ce96a3ac41fdf9e673
def add_option(self, option: Story): 'Adds and option to the root-level options of the Relat' self.story.add_option(option)
Adds and option to the root-level options of the Relat
relaty/relat.py
add_option
PaPablo/relaty
0
python
def add_option(self, option: Story): self.story.add_option(option)
def add_option(self, option: Story): self.story.add_option(option)<|docstring|>Adds and option to the root-level options of the Relat<|endoftext|>
6c1ad624ff806bdd60f3246a9f02e8daa071a3f7f95dcb2d2cf4dcac1399c64d
def navigate(self, path): 'Returns the story at the path' return self.story.navigate(path)
Returns the story at the path
relaty/relat.py
navigate
PaPablo/relaty
0
python
def navigate(self, path): return self.story.navigate(path)
def navigate(self, path): return self.story.navigate(path)<|docstring|>Returns the story at the path<|endoftext|>
c641290b1e028a60b37075fbc8aa6d09947b7d941ac474b87a701d4d1b71d8c8
def play(self): 'Play the story in cli mode\n\n When this method is invoked, the story can be played in the terminal.\n It shows every screen and ask for confirmation to continue.\n\n Then displays each options and asks the user to choose one\n\n ' self.story.play()
Play the story in cli mode When this method is invoked, the story can be played in the terminal. It shows every screen and ask for confirmation to continue. Then displays each options and asks the user to choose one
relaty/relat.py
play
PaPablo/relaty
0
python
def play(self): 'Play the story in cli mode\n\n When this method is invoked, the story can be played in the terminal.\n It shows every screen and ask for confirmation to continue.\n\n Then displays each options and asks the user to choose one\n\n ' self.story.play()
def play(self): 'Play the story in cli mode\n\n When this method is invoked, the story can be played in the terminal.\n It shows every screen and ask for confirmation to continue.\n\n Then displays each options and asks the user to choose one\n\n ' self.story.play()<|docstring|>Play the story in cli mode When this method is invoked, the story can be played in the terminal. It shows every screen and ask for confirmation to continue. Then displays each options and asks the user to choose one<|endoftext|>
ece596ba1c7b99b0c62774c28793226bf9151b81a97a15a90852681e2c91fbd4
@abstractmethod def resolve_target(self, node_task, target, results_dir, node_paths): 'Resolve a NodePackage target.'
Resolve a NodePackage target.
contrib/node/src/python/pants/contrib/node/subsystems/resolvers/node_resolver_base.py
resolve_target
mosesn/pants
0
python
@abstractmethod def resolve_target(self, node_task, target, results_dir, node_paths):
@abstractmethod def resolve_target(self, node_task, target, results_dir, node_paths): <|docstring|>Resolve a NodePackage target.<|endoftext|>
94eef45eea416e61e589e1a1dbb7a38d7ca8f4e7bf723a5b87ed435feaca4a73
def _copy_sources(self, target, results_dir): 'Copy sources from a target to a results directory.\n\n :param NodePackage target: A subclass of NodePackage\n :param string results_dir: The results directory\n ' buildroot = get_buildroot() source_relative_to = target.address.spec_path for source in target.sources_relative_to_buildroot(): dest = os.path.join(results_dir, os.path.relpath(source, source_relative_to)) safe_mkdir(os.path.dirname(dest)) shutil.copyfile(os.path.join(buildroot, source), dest)
Copy sources from a target to a results directory. :param NodePackage target: A subclass of NodePackage :param string results_dir: The results directory
contrib/node/src/python/pants/contrib/node/subsystems/resolvers/node_resolver_base.py
_copy_sources
mosesn/pants
0
python
def _copy_sources(self, target, results_dir): 'Copy sources from a target to a results directory.\n\n :param NodePackage target: A subclass of NodePackage\n :param string results_dir: The results directory\n ' buildroot = get_buildroot() source_relative_to = target.address.spec_path for source in target.sources_relative_to_buildroot(): dest = os.path.join(results_dir, os.path.relpath(source, source_relative_to)) safe_mkdir(os.path.dirname(dest)) shutil.copyfile(os.path.join(buildroot, source), dest)
def _copy_sources(self, target, results_dir): 'Copy sources from a target to a results directory.\n\n :param NodePackage target: A subclass of NodePackage\n :param string results_dir: The results directory\n ' buildroot = get_buildroot() source_relative_to = target.address.spec_path for source in target.sources_relative_to_buildroot(): dest = os.path.join(results_dir, os.path.relpath(source, source_relative_to)) safe_mkdir(os.path.dirname(dest)) shutil.copyfile(os.path.join(buildroot, source), dest)<|docstring|>Copy sources from a target to a results directory. :param NodePackage target: A subclass of NodePackage :param string results_dir: The results directory<|endoftext|>
f84b4d7ad5d34be62d157fe854bb5b6d7242298c11f1d5c54ab873267d257096
def mean_square_error(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate MSE loss\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n MSE of given predictions\n ' raise NotImplementedError()
Calculate MSE loss Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values Returns ------- MSE of given predictions
IMLearn/metrics/loss_functions.py
mean_square_error
tehilaogen/IML.HUJI
2
python
def mean_square_error(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate MSE loss\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n MSE of given predictions\n ' raise NotImplementedError()
def mean_square_error(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate MSE loss\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n MSE of given predictions\n ' raise NotImplementedError()<|docstring|>Calculate MSE loss Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values Returns ------- MSE of given predictions<|endoftext|>
a23e1e56b2c8d5b2977464e228867ad24e498a70272bf0ad10d84a7cd2a27919
def misclassification_error(y_true: np.ndarray, y_pred: np.ndarray, normalize: bool=True) -> float: '\n Calculate misclassification loss\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n normalize: bool, default = True\n Normalize by number of samples or not\n\n Returns\n -------\n Misclassification of given predictions\n ' raise NotImplementedError()
Calculate misclassification loss Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values normalize: bool, default = True Normalize by number of samples or not Returns ------- Misclassification of given predictions
IMLearn/metrics/loss_functions.py
misclassification_error
tehilaogen/IML.HUJI
2
python
def misclassification_error(y_true: np.ndarray, y_pred: np.ndarray, normalize: bool=True) -> float: '\n Calculate misclassification loss\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n normalize: bool, default = True\n Normalize by number of samples or not\n\n Returns\n -------\n Misclassification of given predictions\n ' raise NotImplementedError()
def misclassification_error(y_true: np.ndarray, y_pred: np.ndarray, normalize: bool=True) -> float: '\n Calculate misclassification loss\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n normalize: bool, default = True\n Normalize by number of samples or not\n\n Returns\n -------\n Misclassification of given predictions\n ' raise NotImplementedError()<|docstring|>Calculate misclassification loss Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values normalize: bool, default = True Normalize by number of samples or not Returns ------- Misclassification of given predictions<|endoftext|>
2d96a9c4f48ee4f5db257f711deb452779af720c26a6f2748fcc0fea13b7b69b
def accuracy(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate accuracy of given predictions\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n Accuracy of given predictions\n ' raise NotImplementedError()
Calculate accuracy of given predictions Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values Returns ------- Accuracy of given predictions
IMLearn/metrics/loss_functions.py
accuracy
tehilaogen/IML.HUJI
2
python
def accuracy(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate accuracy of given predictions\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n Accuracy of given predictions\n ' raise NotImplementedError()
def accuracy(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate accuracy of given predictions\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n Accuracy of given predictions\n ' raise NotImplementedError()<|docstring|>Calculate accuracy of given predictions Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values Returns ------- Accuracy of given predictions<|endoftext|>
dd104679bf0fff585019c379fe84f5f2fd7b3841c9d44ce818fe2765d194e0d9
def cross_entropy(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate the cross entropy of given predictions\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n Cross entropy of given predictions\n ' raise NotImplementedError()
Calculate the cross entropy of given predictions Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values Returns ------- Cross entropy of given predictions
IMLearn/metrics/loss_functions.py
cross_entropy
tehilaogen/IML.HUJI
2
python
def cross_entropy(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate the cross entropy of given predictions\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n Cross entropy of given predictions\n ' raise NotImplementedError()
def cross_entropy(y_true: np.ndarray, y_pred: np.ndarray) -> float: '\n Calculate the cross entropy of given predictions\n\n Parameters\n ----------\n y_true: ndarray of shape (n_samples, )\n True response values\n y_pred: ndarray of shape (n_samples, )\n Predicted response values\n\n Returns\n -------\n Cross entropy of given predictions\n ' raise NotImplementedError()<|docstring|>Calculate the cross entropy of given predictions Parameters ---------- y_true: ndarray of shape (n_samples, ) True response values y_pred: ndarray of shape (n_samples, ) Predicted response values Returns ------- Cross entropy of given predictions<|endoftext|>
a44d3704b26bfb796b3e0faba587844a7e20b2c329e57a822d7754fa58a32059
def test_old_files_redirect(self): "pre-2.0 'files/' prefixed links are properly redirected" nbdir = self.notebook_dir.name base = self.base_url() os.mkdir(pjoin(nbdir, 'files')) os.makedirs(pjoin(nbdir, 'sub', 'files')) for prefix in ('', 'sub'): with open(pjoin(nbdir, prefix, 'files', 'f1.txt'), 'w') as f: f.write((prefix + '/files/f1')) with open(pjoin(nbdir, prefix, 'files', 'f2.txt'), 'w') as f: f.write((prefix + '/files/f2')) with open(pjoin(nbdir, prefix, 'f2.txt'), 'w') as f: f.write((prefix + '/f2')) with open(pjoin(nbdir, prefix, 'f3.txt'), 'w') as f: f.write((prefix + '/f3')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f1.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/files/f1')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f2.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/files/f2')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f3.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/f3'))
pre-2.0 'files/' prefixed links are properly redirected
IPython/html/tests/test_files.py
test_old_files_redirect
mineo/ipython
2
python
def test_old_files_redirect(self): nbdir = self.notebook_dir.name base = self.base_url() os.mkdir(pjoin(nbdir, 'files')) os.makedirs(pjoin(nbdir, 'sub', 'files')) for prefix in (, 'sub'): with open(pjoin(nbdir, prefix, 'files', 'f1.txt'), 'w') as f: f.write((prefix + '/files/f1')) with open(pjoin(nbdir, prefix, 'files', 'f2.txt'), 'w') as f: f.write((prefix + '/files/f2')) with open(pjoin(nbdir, prefix, 'f2.txt'), 'w') as f: f.write((prefix + '/f2')) with open(pjoin(nbdir, prefix, 'f3.txt'), 'w') as f: f.write((prefix + '/f3')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f1.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/files/f1')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f2.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/files/f2')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f3.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/f3'))
def test_old_files_redirect(self): nbdir = self.notebook_dir.name base = self.base_url() os.mkdir(pjoin(nbdir, 'files')) os.makedirs(pjoin(nbdir, 'sub', 'files')) for prefix in (, 'sub'): with open(pjoin(nbdir, prefix, 'files', 'f1.txt'), 'w') as f: f.write((prefix + '/files/f1')) with open(pjoin(nbdir, prefix, 'files', 'f2.txt'), 'w') as f: f.write((prefix + '/files/f2')) with open(pjoin(nbdir, prefix, 'f2.txt'), 'w') as f: f.write((prefix + '/f2')) with open(pjoin(nbdir, prefix, 'f3.txt'), 'w') as f: f.write((prefix + '/f3')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f1.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/files/f1')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f2.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/files/f2')) url = url_path_join(base, 'notebooks', prefix, 'files', 'f3.txt') r = requests.get(url) self.assertEqual(r.status_code, 200) self.assertEqual(r.text, (prefix + '/f3'))<|docstring|>pre-2.0 'files/' prefixed links are properly redirected<|endoftext|>
af60f25473b318acb147040ea2e3ea6931670f7e6662c6cd0f5bd482df2d395a
def distance(point1, point2): '\n Calculate distance between two points\n\n Parameters\n ---------\n point1 : array_like\n point2 : array_like\n\n Return:\n --------\n float\n The distance between point1 and point2.\n ' point1 = np.asarray(point1) point2 = np.asarray(point2) return np.linalg.norm((point1 - point2))
Calculate distance between two points Parameters --------- point1 : array_like point2 : array_like Return: -------- float The distance between point1 and point2.
molpy/util.py
distance
yonghui-cc/molpy
0
python
def distance(point1, point2): '\n Calculate distance between two points\n\n Parameters\n ---------\n point1 : array_like\n point2 : array_like\n\n Return:\n --------\n float\n The distance between point1 and point2.\n ' point1 = np.asarray(point1) point2 = np.asarray(point2) return np.linalg.norm((point1 - point2))
def distance(point1, point2): '\n Calculate distance between two points\n\n Parameters\n ---------\n point1 : array_like\n point2 : array_like\n\n Return:\n --------\n float\n The distance between point1 and point2.\n ' point1 = np.asarray(point1) point2 = np.asarray(point2) return np.linalg.norm((point1 - point2))<|docstring|>Calculate distance between two points Parameters --------- point1 : array_like point2 : array_like Return: -------- float The distance between point1 and point2.<|endoftext|>
0cdd53378489b4b40209194800ca3e37a19bb23364b5dddd59e19cd6b5fab125
def read_xyz(filename): '\n Read the molecule\n \n Parameters\n ----------\n filename : str\n name of the molecule.xyz\n \n Return\n ------\n symbols :\n coords :\n ' with open(filename, 'r') as handle: data = handle.readlines() data = data[2:] data = [x.split() for x in data] symbols = [x[0] for x in data] xyz = [] for line in data: xyz.append([float(line[1]), float(line[2]), float(line[3])]) return {'symbols': np.array(symbols), 'geometry': xyz}
Read the molecule Parameters ---------- filename : str name of the molecule.xyz Return ------ symbols : coords :
molpy/util.py
read_xyz
yonghui-cc/molpy
0
python
def read_xyz(filename): '\n Read the molecule\n \n Parameters\n ----------\n filename : str\n name of the molecule.xyz\n \n Return\n ------\n symbols :\n coords :\n ' with open(filename, 'r') as handle: data = handle.readlines() data = data[2:] data = [x.split() for x in data] symbols = [x[0] for x in data] xyz = [] for line in data: xyz.append([float(line[1]), float(line[2]), float(line[3])]) return {'symbols': np.array(symbols), 'geometry': xyz}
def read_xyz(filename): '\n Read the molecule\n \n Parameters\n ----------\n filename : str\n name of the molecule.xyz\n \n Return\n ------\n symbols :\n coords :\n ' with open(filename, 'r') as handle: data = handle.readlines() data = data[2:] data = [x.split() for x in data] symbols = [x[0] for x in data] xyz = [] for line in data: xyz.append([float(line[1]), float(line[2]), float(line[3])]) return {'symbols': np.array(symbols), 'geometry': xyz}<|docstring|>Read the molecule Parameters ---------- filename : str name of the molecule.xyz Return ------ symbols : coords :<|endoftext|>
def4b486df7c3d23d57753243c346548ca8babf54c2ad0b9a4600700dd921a7e
def check_already_mounted(mountpoint): 'Check that the mount device is mounted on the specific mount point.\n\n :param devpath: The path of mount deivce.\n :param mountpoint: The path of mount point.\n :rtype: bool\n ' mounts = Mounter().read_mounts() for m in mounts: if (mountpoint == m.mountpoint): return True return False
Check that the mount device is mounted on the specific mount point. :param devpath: The path of mount deivce. :param mountpoint: The path of mount point. :rtype: bool
zun/common/mount.py
check_already_mounted
hualingson/zun
83
python
def check_already_mounted(mountpoint): 'Check that the mount device is mounted on the specific mount point.\n\n :param devpath: The path of mount deivce.\n :param mountpoint: The path of mount point.\n :rtype: bool\n ' mounts = Mounter().read_mounts() for m in mounts: if (mountpoint == m.mountpoint): return True return False
def check_already_mounted(mountpoint): 'Check that the mount device is mounted on the specific mount point.\n\n :param devpath: The path of mount deivce.\n :param mountpoint: The path of mount point.\n :rtype: bool\n ' mounts = Mounter().read_mounts() for m in mounts: if (mountpoint == m.mountpoint): return True return False<|docstring|>Check that the mount device is mounted on the specific mount point. :param devpath: The path of mount deivce. :param mountpoint: The path of mount point. :rtype: bool<|endoftext|>
27c904860568762a86f9f462f0d1850e947315114e7115029e8fd8e83fbe2654
def do_mount(devpath, mountpoint, fstype): 'Execute device mount operation.\n\n :param devpath: The path of mount device.\n :param mountpoint: The path of mount point.\n :param fstype: The file system type.\n ' if check_already_mounted(mountpoint): return mounter = Mounter() try: mounter.mount(devpath, mountpoint, fstype) except exception.MountException: try: mounter.make_filesystem(devpath, fstype) mounter.mount(devpath, mountpoint, fstype) except exception.ZunException as e: with excutils.save_and_reraise_exception(): LOG.error(e.message)
Execute device mount operation. :param devpath: The path of mount device. :param mountpoint: The path of mount point. :param fstype: The file system type.
zun/common/mount.py
do_mount
hualingson/zun
83
python
def do_mount(devpath, mountpoint, fstype): 'Execute device mount operation.\n\n :param devpath: The path of mount device.\n :param mountpoint: The path of mount point.\n :param fstype: The file system type.\n ' if check_already_mounted(mountpoint): return mounter = Mounter() try: mounter.mount(devpath, mountpoint, fstype) except exception.MountException: try: mounter.make_filesystem(devpath, fstype) mounter.mount(devpath, mountpoint, fstype) except exception.ZunException as e: with excutils.save_and_reraise_exception(): LOG.error(e.message)
def do_mount(devpath, mountpoint, fstype): 'Execute device mount operation.\n\n :param devpath: The path of mount device.\n :param mountpoint: The path of mount point.\n :param fstype: The file system type.\n ' if check_already_mounted(mountpoint): return mounter = Mounter() try: mounter.mount(devpath, mountpoint, fstype) except exception.MountException: try: mounter.make_filesystem(devpath, fstype) mounter.mount(devpath, mountpoint, fstype) except exception.ZunException as e: with excutils.save_and_reraise_exception(): LOG.error(e.message)<|docstring|>Execute device mount operation. :param devpath: The path of mount device. :param mountpoint: The path of mount point. :param fstype: The file system type.<|endoftext|>
72780002e500d1b95526faec74054058b0c76626d5246559973080424e7fcaab
def read_mounts(self, filter_device=None, filter_fstype=None): 'Read all mounted filesystems.\n\n Read all mounted filesystems except filtered option.\n\n :param filter_device: Filter for device, the result will not contain\n the mounts whose device argument in it.\n :param filter_fstype: Filter for mount point.\n :return: All mounts.\n ' if (filter_device is None): filter_device = () if (filter_fstype is None): filter_fstype = () try: (out, err) = utils.execute('cat', PROC_MOUNTS_PATH, check_exit_code=0) except exception.CommandError: msg = _('Failed to read mounts.') raise exception.FileNotFound(msg) lines = out.split('\n') mounts = [] for line in lines: if (not line): continue tokens = line.split() if (len(tokens) < 4): continue if ((tokens[0] in filter_device) or (tokens[1] in filter_fstype)): continue mounts.append(MountInfo(device=tokens[0], mountpoint=tokens[1], fstype=tokens[2], opts=tokens[3])) return mounts
Read all mounted filesystems. Read all mounted filesystems except filtered option. :param filter_device: Filter for device, the result will not contain the mounts whose device argument in it. :param filter_fstype: Filter for mount point. :return: All mounts.
zun/common/mount.py
read_mounts
hualingson/zun
83
python
def read_mounts(self, filter_device=None, filter_fstype=None): 'Read all mounted filesystems.\n\n Read all mounted filesystems except filtered option.\n\n :param filter_device: Filter for device, the result will not contain\n the mounts whose device argument in it.\n :param filter_fstype: Filter for mount point.\n :return: All mounts.\n ' if (filter_device is None): filter_device = () if (filter_fstype is None): filter_fstype = () try: (out, err) = utils.execute('cat', PROC_MOUNTS_PATH, check_exit_code=0) except exception.CommandError: msg = _('Failed to read mounts.') raise exception.FileNotFound(msg) lines = out.split('\n') mounts = [] for line in lines: if (not line): continue tokens = line.split() if (len(tokens) < 4): continue if ((tokens[0] in filter_device) or (tokens[1] in filter_fstype)): continue mounts.append(MountInfo(device=tokens[0], mountpoint=tokens[1], fstype=tokens[2], opts=tokens[3])) return mounts
def read_mounts(self, filter_device=None, filter_fstype=None): 'Read all mounted filesystems.\n\n Read all mounted filesystems except filtered option.\n\n :param filter_device: Filter for device, the result will not contain\n the mounts whose device argument in it.\n :param filter_fstype: Filter for mount point.\n :return: All mounts.\n ' if (filter_device is None): filter_device = () if (filter_fstype is None): filter_fstype = () try: (out, err) = utils.execute('cat', PROC_MOUNTS_PATH, check_exit_code=0) except exception.CommandError: msg = _('Failed to read mounts.') raise exception.FileNotFound(msg) lines = out.split('\n') mounts = [] for line in lines: if (not line): continue tokens = line.split() if (len(tokens) < 4): continue if ((tokens[0] in filter_device) or (tokens[1] in filter_fstype)): continue mounts.append(MountInfo(device=tokens[0], mountpoint=tokens[1], fstype=tokens[2], opts=tokens[3])) return mounts<|docstring|>Read all mounted filesystems. Read all mounted filesystems except filtered option. :param filter_device: Filter for device, the result will not contain the mounts whose device argument in it. :param filter_fstype: Filter for mount point. :return: All mounts.<|endoftext|>
0e133461a19dd005a67d8a9cdbfa3e1f12d33c4dfe74f2c52597abd525c3da6f
def get_mps_by_device(self, devpath): 'Get all mountpoints that device mounted on.\n\n :param devpath: The path of mount device.\n :return: All mountpoints.\n ' mps = [] mounts = self.read_mounts() for m in mounts: if (devpath == m.device): mps.append(m.mountpoint) return mps
Get all mountpoints that device mounted on. :param devpath: The path of mount device. :return: All mountpoints.
zun/common/mount.py
get_mps_by_device
hualingson/zun
83
python
def get_mps_by_device(self, devpath): 'Get all mountpoints that device mounted on.\n\n :param devpath: The path of mount device.\n :return: All mountpoints.\n ' mps = [] mounts = self.read_mounts() for m in mounts: if (devpath == m.device): mps.append(m.mountpoint) return mps
def get_mps_by_device(self, devpath): 'Get all mountpoints that device mounted on.\n\n :param devpath: The path of mount device.\n :return: All mountpoints.\n ' mps = [] mounts = self.read_mounts() for m in mounts: if (devpath == m.device): mps.append(m.mountpoint) return mps<|docstring|>Get all mountpoints that device mounted on. :param devpath: The path of mount device. :return: All mountpoints.<|endoftext|>
63d70a89e3fabfe06aa3cd2d39f3e8f7babc1fc4675238ca91978303cbfa27bb
def establecer_destino_archivo_ubicacion(instance, filename): '\n Establece la ruta de destino para el archivo de ubicación cargado a la instancia.\n ' ruta_archivos_ubicacion = 'app_reservas/ubicaciones/aulas/' extension_archivo = (filename.split('.')[(- 1)] if ('.' in filename) else '') nombre_archivo = '{0!s}.{1!s}'.format(slugify(str(instance)), extension_archivo) return os.path.join(ruta_archivos_ubicacion, nombre_archivo)
Establece la ruta de destino para el archivo de ubicación cargado a la instancia.
app_reservas/models/aula.py
establecer_destino_archivo_ubicacion
fedegallar/reservas
1
python
def establecer_destino_archivo_ubicacion(instance, filename): '\n \n ' ruta_archivos_ubicacion = 'app_reservas/ubicaciones/aulas/' extension_archivo = (filename.split('.')[(- 1)] if ('.' in filename) else ) nombre_archivo = '{0!s}.{1!s}'.format(slugify(str(instance)), extension_archivo) return os.path.join(ruta_archivos_ubicacion, nombre_archivo)
def establecer_destino_archivo_ubicacion(instance, filename): '\n \n ' ruta_archivos_ubicacion = 'app_reservas/ubicaciones/aulas/' extension_archivo = (filename.split('.')[(- 1)] if ('.' in filename) else ) nombre_archivo = '{0!s}.{1!s}'.format(slugify(str(instance)), extension_archivo) return os.path.join(ruta_archivos_ubicacion, nombre_archivo)<|docstring|>Establece la ruta de destino para el archivo de ubicación cargado a la instancia.<|endoftext|>
e038ffa3155b2d52e37fad36ab5e48535889c9cd1c6910939fb60be651e4608a
def __str__(self): '\n Representación de la instancia.\n ' return '{0!s} - {1!s}'.format(self.get_nombre_corto(), self.nivel)
Representación de la instancia.
app_reservas/models/aula.py
__str__
fedegallar/reservas
1
python
def __str__(self): '\n \n ' return '{0!s} - {1!s}'.format(self.get_nombre_corto(), self.nivel)
def __str__(self): '\n \n ' return '{0!s} - {1!s}'.format(self.get_nombre_corto(), self.nivel)<|docstring|>Representación de la instancia.<|endoftext|>
89b26bb84642638e4f6cca58e1c5430bcb13dd70a35a9e61642bd48cbd341ecf
def get_nombre_corto(self): '\n Retorna el nombre corto de la instancia.\n ' nombre_corto = (self.nombre or 'Aula {0:d}'.format(self.numero)) return nombre_corto
Retorna el nombre corto de la instancia.
app_reservas/models/aula.py
get_nombre_corto
fedegallar/reservas
1
python
def get_nombre_corto(self): '\n \n ' nombre_corto = (self.nombre or 'Aula {0:d}'.format(self.numero)) return nombre_corto
def get_nombre_corto(self): '\n \n ' nombre_corto = (self.nombre or 'Aula {0:d}'.format(self.numero)) return nombre_corto<|docstring|>Retorna el nombre corto de la instancia.<|endoftext|>
1c31b7a82570f37344bb71c4f58764ca3581ebf00ddf61c6311bea96c7a649c2
def get_identificador_url(self): '\n Retorna el identificador utilizado para acceder a la URL de detalle de\n la instancia.\n ' return str(self.id)
Retorna el identificador utilizado para acceder a la URL de detalle de la instancia.
app_reservas/models/aula.py
get_identificador_url
fedegallar/reservas
1
python
def get_identificador_url(self): '\n Retorna el identificador utilizado para acceder a la URL de detalle de\n la instancia.\n ' return str(self.id)
def get_identificador_url(self): '\n Retorna el identificador utilizado para acceder a la URL de detalle de\n la instancia.\n ' return str(self.id)<|docstring|>Retorna el identificador utilizado para acceder a la URL de detalle de la instancia.<|endoftext|>
011b2dd5617deb83ac8c3f9b81463149a25f2a9f2b41c324d07a04160dac4161
@exp_factory.register_config_factory('panoptic_maskrcnn_resnetfpn_coco') def panoptic_maskrcnn_resnetfpn_coco() -> cfg.ExperimentConfig: 'COCO panoptic segmentation with Panoptic Mask R-CNN.' train_batch_size = 64 eval_batch_size = 8 steps_per_epoch = (_COCO_TRAIN_EXAMPLES // train_batch_size) validation_steps = (_COCO_VAL_EXAMPLES // eval_batch_size) config = cfg.ExperimentConfig(runtime=cfg.RuntimeConfig(mixed_precision_dtype='bfloat16'), task=PanopticMaskRCNNTask(init_checkpoint='gs://cloud-tpu-checkpoints/vision-2.0/resnet50_imagenet/ckpt-28080', init_checkpoint_modules=['backbone'], model=PanopticMaskRCNN(num_classes=91, input_size=[1024, 1024, 3], segmentation_model=SEGMENTATION_MODEL(num_classes=91, head=SEGMENTATION_HEAD(level=3))), losses=Losses(l2_weight_decay=4e-05), train_data=DataConfig(input_path=os.path.join(_COCO_INPUT_PATH_BASE, 'train*'), is_training=True, global_batch_size=train_batch_size, parser=Parser(aug_rand_hflip=True, aug_scale_min=0.8, aug_scale_max=1.25)), validation_data=DataConfig(input_path=os.path.join(_COCO_INPUT_PATH_BASE, 'val*'), is_training=False, global_batch_size=eval_batch_size, drop_remainder=False), annotation_file=os.path.join(_COCO_INPUT_PATH_BASE, 'instances_val2017.json')), trainer=cfg.TrainerConfig(train_steps=22500, validation_steps=validation_steps, validation_interval=steps_per_epoch, steps_per_loop=steps_per_epoch, summary_interval=steps_per_epoch, checkpoint_interval=steps_per_epoch, optimizer_config=optimization.OptimizationConfig({'optimizer': {'type': 'sgd', 'sgd': {'momentum': 0.9}}, 'learning_rate': {'type': 'stepwise', 'stepwise': {'boundaries': [15000, 20000], 'values': [0.12, 0.012, 0.0012]}}, 'warmup': {'type': 'linear', 'linear': {'warmup_steps': 500, 'warmup_learning_rate': 0.0067}}})), restrictions=['task.train_data.is_training != None', 'task.validation_data.is_training != None']) return config
COCO panoptic segmentation with Panoptic Mask R-CNN.
official/vision/beta/projects/panoptic_maskrcnn/configs/panoptic_maskrcnn.py
panoptic_maskrcnn_resnetfpn_coco
kisna-aryan/models
3
python
@exp_factory.register_config_factory('panoptic_maskrcnn_resnetfpn_coco') def panoptic_maskrcnn_resnetfpn_coco() -> cfg.ExperimentConfig: train_batch_size = 64 eval_batch_size = 8 steps_per_epoch = (_COCO_TRAIN_EXAMPLES // train_batch_size) validation_steps = (_COCO_VAL_EXAMPLES // eval_batch_size) config = cfg.ExperimentConfig(runtime=cfg.RuntimeConfig(mixed_precision_dtype='bfloat16'), task=PanopticMaskRCNNTask(init_checkpoint='gs://cloud-tpu-checkpoints/vision-2.0/resnet50_imagenet/ckpt-28080', init_checkpoint_modules=['backbone'], model=PanopticMaskRCNN(num_classes=91, input_size=[1024, 1024, 3], segmentation_model=SEGMENTATION_MODEL(num_classes=91, head=SEGMENTATION_HEAD(level=3))), losses=Losses(l2_weight_decay=4e-05), train_data=DataConfig(input_path=os.path.join(_COCO_INPUT_PATH_BASE, 'train*'), is_training=True, global_batch_size=train_batch_size, parser=Parser(aug_rand_hflip=True, aug_scale_min=0.8, aug_scale_max=1.25)), validation_data=DataConfig(input_path=os.path.join(_COCO_INPUT_PATH_BASE, 'val*'), is_training=False, global_batch_size=eval_batch_size, drop_remainder=False), annotation_file=os.path.join(_COCO_INPUT_PATH_BASE, 'instances_val2017.json')), trainer=cfg.TrainerConfig(train_steps=22500, validation_steps=validation_steps, validation_interval=steps_per_epoch, steps_per_loop=steps_per_epoch, summary_interval=steps_per_epoch, checkpoint_interval=steps_per_epoch, optimizer_config=optimization.OptimizationConfig({'optimizer': {'type': 'sgd', 'sgd': {'momentum': 0.9}}, 'learning_rate': {'type': 'stepwise', 'stepwise': {'boundaries': [15000, 20000], 'values': [0.12, 0.012, 0.0012]}}, 'warmup': {'type': 'linear', 'linear': {'warmup_steps': 500, 'warmup_learning_rate': 0.0067}}})), restrictions=['task.train_data.is_training != None', 'task.validation_data.is_training != None']) return config
@exp_factory.register_config_factory('panoptic_maskrcnn_resnetfpn_coco') def panoptic_maskrcnn_resnetfpn_coco() -> cfg.ExperimentConfig: train_batch_size = 64 eval_batch_size = 8 steps_per_epoch = (_COCO_TRAIN_EXAMPLES // train_batch_size) validation_steps = (_COCO_VAL_EXAMPLES // eval_batch_size) config = cfg.ExperimentConfig(runtime=cfg.RuntimeConfig(mixed_precision_dtype='bfloat16'), task=PanopticMaskRCNNTask(init_checkpoint='gs://cloud-tpu-checkpoints/vision-2.0/resnet50_imagenet/ckpt-28080', init_checkpoint_modules=['backbone'], model=PanopticMaskRCNN(num_classes=91, input_size=[1024, 1024, 3], segmentation_model=SEGMENTATION_MODEL(num_classes=91, head=SEGMENTATION_HEAD(level=3))), losses=Losses(l2_weight_decay=4e-05), train_data=DataConfig(input_path=os.path.join(_COCO_INPUT_PATH_BASE, 'train*'), is_training=True, global_batch_size=train_batch_size, parser=Parser(aug_rand_hflip=True, aug_scale_min=0.8, aug_scale_max=1.25)), validation_data=DataConfig(input_path=os.path.join(_COCO_INPUT_PATH_BASE, 'val*'), is_training=False, global_batch_size=eval_batch_size, drop_remainder=False), annotation_file=os.path.join(_COCO_INPUT_PATH_BASE, 'instances_val2017.json')), trainer=cfg.TrainerConfig(train_steps=22500, validation_steps=validation_steps, validation_interval=steps_per_epoch, steps_per_loop=steps_per_epoch, summary_interval=steps_per_epoch, checkpoint_interval=steps_per_epoch, optimizer_config=optimization.OptimizationConfig({'optimizer': {'type': 'sgd', 'sgd': {'momentum': 0.9}}, 'learning_rate': {'type': 'stepwise', 'stepwise': {'boundaries': [15000, 20000], 'values': [0.12, 0.012, 0.0012]}}, 'warmup': {'type': 'linear', 'linear': {'warmup_steps': 500, 'warmup_learning_rate': 0.0067}}})), restrictions=['task.train_data.is_training != None', 'task.validation_data.is_training != None']) return config<|docstring|>COCO panoptic segmentation with Panoptic Mask R-CNN.<|endoftext|>
989bbf46ca19bed3b3ff5ed423eaaf2aa94442bd68e66286470da59909591fd3
def store_initial_buy(self, coin_pair, buy_order_uuid): "\n Used to place an initial trade in the database\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n :param buy_order_uuid: The buy order's UUID\n :type buy_order_uuid: str\n " if (coin_pair in self.trades['trackedCoinPairs']): return logger.warning('Trying to buy on the {} market which is already tracked.'.format(coin_pair)) new_buy_object = {'coinPair': coin_pair, 'quantity': 0, 'buy': {'orderUuid': buy_order_uuid}} self.trades['trackedCoinPairs'].append(coin_pair) self.trades['trades'].append(new_buy_object) write_json_to_file(self.trades_file_string, self.trades)
Used to place an initial trade in the database :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str :param buy_order_uuid: The buy order's UUID :type buy_order_uuid: str
src/database.py
store_initial_buy
CryptoRye/Crypto-Trading-Bot
301
python
def store_initial_buy(self, coin_pair, buy_order_uuid): "\n Used to place an initial trade in the database\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n :param buy_order_uuid: The buy order's UUID\n :type buy_order_uuid: str\n " if (coin_pair in self.trades['trackedCoinPairs']): return logger.warning('Trying to buy on the {} market which is already tracked.'.format(coin_pair)) new_buy_object = {'coinPair': coin_pair, 'quantity': 0, 'buy': {'orderUuid': buy_order_uuid}} self.trades['trackedCoinPairs'].append(coin_pair) self.trades['trades'].append(new_buy_object) write_json_to_file(self.trades_file_string, self.trades)
def store_initial_buy(self, coin_pair, buy_order_uuid): "\n Used to place an initial trade in the database\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n :param buy_order_uuid: The buy order's UUID\n :type buy_order_uuid: str\n " if (coin_pair in self.trades['trackedCoinPairs']): return logger.warning('Trying to buy on the {} market which is already tracked.'.format(coin_pair)) new_buy_object = {'coinPair': coin_pair, 'quantity': 0, 'buy': {'orderUuid': buy_order_uuid}} self.trades['trackedCoinPairs'].append(coin_pair) self.trades['trades'].append(new_buy_object) write_json_to_file(self.trades_file_string, self.trades)<|docstring|>Used to place an initial trade in the database :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str :param buy_order_uuid: The buy order's UUID :type buy_order_uuid: str<|endoftext|>
903758e4bd0424c92feaf725c1c4096d9c533db2bacc72349b271a94be5bad2e
def store_buy(self, bittrex_order, stats): '\n Used to place a buy trade in the database\n\n :param bittrex_order: Bittrex buy order object\n :type bittrex_order: dict\n :param stats: The buy stats to store\n :type stats: dict\n ' if (bittrex_order['Exchange'] not in self.trades['trackedCoinPairs']): return logger.warning('Trying to buy on the {} market without an initial buy object.'.format(bittrex_order['Exchange'])) order = self.convert_bittrex_order_object(bittrex_order, stats) trade = self.get_open_trade(bittrex_order['Exchange']) trade['quantity'] = round((bittrex_order['Quantity'] - bittrex_order['QuantityRemaining']), 8) trade['buy'] = order write_json_to_file(self.trades_file_string, self.trades)
Used to place a buy trade in the database :param bittrex_order: Bittrex buy order object :type bittrex_order: dict :param stats: The buy stats to store :type stats: dict
src/database.py
store_buy
CryptoRye/Crypto-Trading-Bot
301
python
def store_buy(self, bittrex_order, stats): '\n Used to place a buy trade in the database\n\n :param bittrex_order: Bittrex buy order object\n :type bittrex_order: dict\n :param stats: The buy stats to store\n :type stats: dict\n ' if (bittrex_order['Exchange'] not in self.trades['trackedCoinPairs']): return logger.warning('Trying to buy on the {} market without an initial buy object.'.format(bittrex_order['Exchange'])) order = self.convert_bittrex_order_object(bittrex_order, stats) trade = self.get_open_trade(bittrex_order['Exchange']) trade['quantity'] = round((bittrex_order['Quantity'] - bittrex_order['QuantityRemaining']), 8) trade['buy'] = order write_json_to_file(self.trades_file_string, self.trades)
def store_buy(self, bittrex_order, stats): '\n Used to place a buy trade in the database\n\n :param bittrex_order: Bittrex buy order object\n :type bittrex_order: dict\n :param stats: The buy stats to store\n :type stats: dict\n ' if (bittrex_order['Exchange'] not in self.trades['trackedCoinPairs']): return logger.warning('Trying to buy on the {} market without an initial buy object.'.format(bittrex_order['Exchange'])) order = self.convert_bittrex_order_object(bittrex_order, stats) trade = self.get_open_trade(bittrex_order['Exchange']) trade['quantity'] = round((bittrex_order['Quantity'] - bittrex_order['QuantityRemaining']), 8) trade['buy'] = order write_json_to_file(self.trades_file_string, self.trades)<|docstring|>Used to place a buy trade in the database :param bittrex_order: Bittrex buy order object :type bittrex_order: dict :param stats: The buy stats to store :type stats: dict<|endoftext|>
80d4a3e997d75162b19ea8cf770aab11a18322628bdc0925ae1a3e48b879f854
def store_sell(self, bittrex_order, stats): '\n Used to place a sell trade in the database\n\n :param bittrex_order: Bittrex sell order object\n :type bittrex_order: dict\n :param stats: The sell stats to store\n :type stats: dict\n ' if (bittrex_order['Exchange'] not in self.trades['trackedCoinPairs']): return logger.warning('Trying to sell on the {} market which is not tracked.'.format(bittrex_order['Exchange'])) order = self.convert_bittrex_order_object(bittrex_order, stats) trade = self.get_open_trade(bittrex_order['Exchange']) trade['sell'] = order self.trades['trackedCoinPairs'].remove(bittrex_order['Exchange']) write_json_to_file(self.trades_file_string, self.trades)
Used to place a sell trade in the database :param bittrex_order: Bittrex sell order object :type bittrex_order: dict :param stats: The sell stats to store :type stats: dict
src/database.py
store_sell
CryptoRye/Crypto-Trading-Bot
301
python
def store_sell(self, bittrex_order, stats): '\n Used to place a sell trade in the database\n\n :param bittrex_order: Bittrex sell order object\n :type bittrex_order: dict\n :param stats: The sell stats to store\n :type stats: dict\n ' if (bittrex_order['Exchange'] not in self.trades['trackedCoinPairs']): return logger.warning('Trying to sell on the {} market which is not tracked.'.format(bittrex_order['Exchange'])) order = self.convert_bittrex_order_object(bittrex_order, stats) trade = self.get_open_trade(bittrex_order['Exchange']) trade['sell'] = order self.trades['trackedCoinPairs'].remove(bittrex_order['Exchange']) write_json_to_file(self.trades_file_string, self.trades)
def store_sell(self, bittrex_order, stats): '\n Used to place a sell trade in the database\n\n :param bittrex_order: Bittrex sell order object\n :type bittrex_order: dict\n :param stats: The sell stats to store\n :type stats: dict\n ' if (bittrex_order['Exchange'] not in self.trades['trackedCoinPairs']): return logger.warning('Trying to sell on the {} market which is not tracked.'.format(bittrex_order['Exchange'])) order = self.convert_bittrex_order_object(bittrex_order, stats) trade = self.get_open_trade(bittrex_order['Exchange']) trade['sell'] = order self.trades['trackedCoinPairs'].remove(bittrex_order['Exchange']) write_json_to_file(self.trades_file_string, self.trades)<|docstring|>Used to place a sell trade in the database :param bittrex_order: Bittrex sell order object :type bittrex_order: dict :param stats: The sell stats to store :type stats: dict<|endoftext|>
1a64e2c53f3877270a9027513abafc90543ab0768e5d22274ec5fd47fc0e7618
def pause_buy(self, coin_pair): '\n Used to pause buy tracking on the coin pair\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n ' self.app_data['coinPairs'].remove(coin_pair) write_json_to_file(self.app_data_file_string, self.app_data)
Used to pause buy tracking on the coin pair :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str
src/database.py
pause_buy
CryptoRye/Crypto-Trading-Bot
301
python
def pause_buy(self, coin_pair): '\n Used to pause buy tracking on the coin pair\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n ' self.app_data['coinPairs'].remove(coin_pair) write_json_to_file(self.app_data_file_string, self.app_data)
def pause_buy(self, coin_pair): '\n Used to pause buy tracking on the coin pair\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n ' self.app_data['coinPairs'].remove(coin_pair) write_json_to_file(self.app_data_file_string, self.app_data)<|docstring|>Used to pause buy tracking on the coin pair :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str<|endoftext|>
fc9c8af9c9adf658f4209ec3c6a15e4f57b09a693f7df164b89cd39980581db0
def pause_sell(self, coin_pair): '\n Used to pause sell tracking on the coin pair and set the sell pause time\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n ' if (coin_pair in self.app_data['pausedTrackedCoinPairs']): return self.app_data['pausedTrackedCoinPairs'].append(coin_pair) if (self.app_data['pauseTime']['sell'] is None): self.app_data['pauseTime']['sell'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)
Used to pause sell tracking on the coin pair and set the sell pause time :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str
src/database.py
pause_sell
CryptoRye/Crypto-Trading-Bot
301
python
def pause_sell(self, coin_pair): '\n Used to pause sell tracking on the coin pair and set the sell pause time\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n ' if (coin_pair in self.app_data['pausedTrackedCoinPairs']): return self.app_data['pausedTrackedCoinPairs'].append(coin_pair) if (self.app_data['pauseTime']['sell'] is None): self.app_data['pauseTime']['sell'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)
def pause_sell(self, coin_pair): '\n Used to pause sell tracking on the coin pair and set the sell pause time\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n ' if (coin_pair in self.app_data['pausedTrackedCoinPairs']): return self.app_data['pausedTrackedCoinPairs'].append(coin_pair) if (self.app_data['pauseTime']['sell'] is None): self.app_data['pauseTime']['sell'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)<|docstring|>Used to pause sell tracking on the coin pair and set the sell pause time :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str<|endoftext|>
5bf64e57241d2cda0e97690e6e8ffa652b368c9fb71bfc27e094bda83870e182
def store_coin_pairs(self, btc_coin_pairs): '\n Used to store the latest Bittrex available markets and update the buy pause time\n\n :param btc_coin_pairs: String list of market pairs\n :type btc_coin_pairs: list\n ' self.app_data['coinPairs'] = btc_coin_pairs self.app_data['pauseTime']['buy'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)
Used to store the latest Bittrex available markets and update the buy pause time :param btc_coin_pairs: String list of market pairs :type btc_coin_pairs: list
src/database.py
store_coin_pairs
CryptoRye/Crypto-Trading-Bot
301
python
def store_coin_pairs(self, btc_coin_pairs): '\n Used to store the latest Bittrex available markets and update the buy pause time\n\n :param btc_coin_pairs: String list of market pairs\n :type btc_coin_pairs: list\n ' self.app_data['coinPairs'] = btc_coin_pairs self.app_data['pauseTime']['buy'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)
def store_coin_pairs(self, btc_coin_pairs): '\n Used to store the latest Bittrex available markets and update the buy pause time\n\n :param btc_coin_pairs: String list of market pairs\n :type btc_coin_pairs: list\n ' self.app_data['coinPairs'] = btc_coin_pairs self.app_data['pauseTime']['buy'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)<|docstring|>Used to store the latest Bittrex available markets and update the buy pause time :param btc_coin_pairs: String list of market pairs :type btc_coin_pairs: list<|endoftext|>
0a9e3684894f70a4b6c9dab4476216fe4438af329edd8829b01d8ae4dd84c218
def resume_sells(self): '\n Used to resume all paused sells and reset the sell pause time\n ' if (len(self.app_data['pausedTrackedCoinPairs']) < 1): return self.app_data['pausedTrackedCoinPairs'] = [] self.app_data['pauseTime']['sell'] = None write_json_to_file(self.app_data_file_string, self.app_data)
Used to resume all paused sells and reset the sell pause time
src/database.py
resume_sells
CryptoRye/Crypto-Trading-Bot
301
python
def resume_sells(self): '\n \n ' if (len(self.app_data['pausedTrackedCoinPairs']) < 1): return self.app_data['pausedTrackedCoinPairs'] = [] self.app_data['pauseTime']['sell'] = None write_json_to_file(self.app_data_file_string, self.app_data)
def resume_sells(self): '\n \n ' if (len(self.app_data['pausedTrackedCoinPairs']) < 1): return self.app_data['pausedTrackedCoinPairs'] = [] self.app_data['pauseTime']['sell'] = None write_json_to_file(self.app_data_file_string, self.app_data)<|docstring|>Used to resume all paused sells and reset the sell pause time<|endoftext|>
5a1dc90a743a83db235a2cc50718a8266d531c8fde7815d6d6fb608cb683a93f
def reset_balance_notifier(self, current_balance=None): "\n Used to reset the balance notifier pause time\n\n :param current_balance: The current total balance's BTC value\n :type current_balance: float\n " if (current_balance is not None): self.app_data['previousBalance'] = current_balance self.app_data['pauseTime']['balance'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)
Used to reset the balance notifier pause time :param current_balance: The current total balance's BTC value :type current_balance: float
src/database.py
reset_balance_notifier
CryptoRye/Crypto-Trading-Bot
301
python
def reset_balance_notifier(self, current_balance=None): "\n Used to reset the balance notifier pause time\n\n :param current_balance: The current total balance's BTC value\n :type current_balance: float\n " if (current_balance is not None): self.app_data['previousBalance'] = current_balance self.app_data['pauseTime']['balance'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)
def reset_balance_notifier(self, current_balance=None): "\n Used to reset the balance notifier pause time\n\n :param current_balance: The current total balance's BTC value\n :type current_balance: float\n " if (current_balance is not None): self.app_data['previousBalance'] = current_balance self.app_data['pauseTime']['balance'] = time.time() write_json_to_file(self.app_data_file_string, self.app_data)<|docstring|>Used to reset the balance notifier pause time :param current_balance: The current total balance's BTC value :type current_balance: float<|endoftext|>
40efb7a20e8fd48eea70594f5ef8b058dc9c890f97c21160732098bad3a76f02
def check_resume(self, pause_time, pause_type): "\n Used to check if the pause type can be un-paused\n\n :param pause_time: The amount of minutes tracking should be paused\n :type pause_time: int\n :param pause_type: The pause type to check (one of: 'buy', 'sell', 'balance)\n :type pause_type: str\n " if (self.app_data['pauseTime'][pause_type] is None): if (pause_type == 'balance'): self.reset_balance_notifier() return True return False return ((time.time() - self.app_data['pauseTime'][pause_type]) >= (pause_time * 60))
Used to check if the pause type can be un-paused :param pause_time: The amount of minutes tracking should be paused :type pause_time: int :param pause_type: The pause type to check (one of: 'buy', 'sell', 'balance) :type pause_type: str
src/database.py
check_resume
CryptoRye/Crypto-Trading-Bot
301
python
def check_resume(self, pause_time, pause_type): "\n Used to check if the pause type can be un-paused\n\n :param pause_time: The amount of minutes tracking should be paused\n :type pause_time: int\n :param pause_type: The pause type to check (one of: 'buy', 'sell', 'balance)\n :type pause_type: str\n " if (self.app_data['pauseTime'][pause_type] is None): if (pause_type == 'balance'): self.reset_balance_notifier() return True return False return ((time.time() - self.app_data['pauseTime'][pause_type]) >= (pause_time * 60))
def check_resume(self, pause_time, pause_type): "\n Used to check if the pause type can be un-paused\n\n :param pause_time: The amount of minutes tracking should be paused\n :type pause_time: int\n :param pause_type: The pause type to check (one of: 'buy', 'sell', 'balance)\n :type pause_type: str\n " if (self.app_data['pauseTime'][pause_type] is None): if (pause_type == 'balance'): self.reset_balance_notifier() return True return False return ((time.time() - self.app_data['pauseTime'][pause_type]) >= (pause_time * 60))<|docstring|>Used to check if the pause type can be un-paused :param pause_time: The amount of minutes tracking should be paused :type pause_time: int :param pause_type: The pause type to check (one of: 'buy', 'sell', 'balance) :type pause_type: str<|endoftext|>
485773d1b64e84241c62c3ee0eb2c001a94a01f8fc200ac0c6ecafbaaece5d34
def get_open_trade(self, coin_pair): "\n Used to get the coin pair's unsold trade in the database\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n\n :return: The open trade object\n :rtype: dict\n " trade_index = py_.find_index(self.trades['trades'], (lambda trade: ((trade['coinPair'] == coin_pair) and ('sell' not in trade)))) if (trade_index == (- 1)): logger.error('Could not find open trade for {} coin pair'.format(coin_pair)) return None return self.trades['trades'][trade_index]
Used to get the coin pair's unsold trade in the database :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str :return: The open trade object :rtype: dict
src/database.py
get_open_trade
CryptoRye/Crypto-Trading-Bot
301
python
def get_open_trade(self, coin_pair): "\n Used to get the coin pair's unsold trade in the database\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n\n :return: The open trade object\n :rtype: dict\n " trade_index = py_.find_index(self.trades['trades'], (lambda trade: ((trade['coinPair'] == coin_pair) and ('sell' not in trade)))) if (trade_index == (- 1)): logger.error('Could not find open trade for {} coin pair'.format(coin_pair)) return None return self.trades['trades'][trade_index]
def get_open_trade(self, coin_pair): "\n Used to get the coin pair's unsold trade in the database\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n\n :return: The open trade object\n :rtype: dict\n " trade_index = py_.find_index(self.trades['trades'], (lambda trade: ((trade['coinPair'] == coin_pair) and ('sell' not in trade)))) if (trade_index == (- 1)): logger.error('Could not find open trade for {} coin pair'.format(coin_pair)) return None return self.trades['trades'][trade_index]<|docstring|>Used to get the coin pair's unsold trade in the database :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str :return: The open trade object :rtype: dict<|endoftext|>
50e4bf0f1117435ec0691233ebc1a1c1af4faad8c26e64789c985100a053b492
def get_profit_margin(self, coin_pair, current_price, trade=None): '\n Used to get the profit margin for a coin pair"s trade\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n :param current_price: Market"s current price\n :type current_price: float\n :param trade: The trade to calculate the profit margin on\n Not required. If not passed in the function will go find it\n :type trade: dict\n\n :return: Profit margin\n :rtype: float\n ' if (trade is None): trade = self.get_open_trade(coin_pair) buy_btc_quantity = round((trade['buy']['price'] / (1 - bittrex_trade_commission)), 8) sell_btc_quantity = round(((trade['quantity'] * current_price) * (1 - bittrex_trade_commission)), 8) profit_margin = ((100 * (sell_btc_quantity - buy_btc_quantity)) / buy_btc_quantity) return profit_margin
Used to get the profit margin for a coin pair"s trade :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str :param current_price: Market"s current price :type current_price: float :param trade: The trade to calculate the profit margin on Not required. If not passed in the function will go find it :type trade: dict :return: Profit margin :rtype: float
src/database.py
get_profit_margin
CryptoRye/Crypto-Trading-Bot
301
python
def get_profit_margin(self, coin_pair, current_price, trade=None): '\n Used to get the profit margin for a coin pair"s trade\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n :param current_price: Market"s current price\n :type current_price: float\n :param trade: The trade to calculate the profit margin on\n Not required. If not passed in the function will go find it\n :type trade: dict\n\n :return: Profit margin\n :rtype: float\n ' if (trade is None): trade = self.get_open_trade(coin_pair) buy_btc_quantity = round((trade['buy']['price'] / (1 - bittrex_trade_commission)), 8) sell_btc_quantity = round(((trade['quantity'] * current_price) * (1 - bittrex_trade_commission)), 8) profit_margin = ((100 * (sell_btc_quantity - buy_btc_quantity)) / buy_btc_quantity) return profit_margin
def get_profit_margin(self, coin_pair, current_price, trade=None): '\n Used to get the profit margin for a coin pair"s trade\n\n :param coin_pair: String literal for the market (ex: BTC-LTC)\n :type coin_pair: str\n :param current_price: Market"s current price\n :type current_price: float\n :param trade: The trade to calculate the profit margin on\n Not required. If not passed in the function will go find it\n :type trade: dict\n\n :return: Profit margin\n :rtype: float\n ' if (trade is None): trade = self.get_open_trade(coin_pair) buy_btc_quantity = round((trade['buy']['price'] / (1 - bittrex_trade_commission)), 8) sell_btc_quantity = round(((trade['quantity'] * current_price) * (1 - bittrex_trade_commission)), 8) profit_margin = ((100 * (sell_btc_quantity - buy_btc_quantity)) / buy_btc_quantity) return profit_margin<|docstring|>Used to get the profit margin for a coin pair"s trade :param coin_pair: String literal for the market (ex: BTC-LTC) :type coin_pair: str :param current_price: Market"s current price :type current_price: float :param trade: The trade to calculate the profit margin on Not required. If not passed in the function will go find it :type trade: dict :return: Profit margin :rtype: float<|endoftext|>
e72644979290ae1fc00a5f64fc36eb9415dc301dc5853d6a78818810ae6dcd20
def get_previous_total_balance(self): '\n Used to get the previous total balance\n\n :return: Previous total balance\n :rtype: float\n ' if (('previousBalance' not in self.app_data) or (self.app_data['previousBalance'] == 0)): return None return self.app_data['previousBalance']
Used to get the previous total balance :return: Previous total balance :rtype: float
src/database.py
get_previous_total_balance
CryptoRye/Crypto-Trading-Bot
301
python
def get_previous_total_balance(self): '\n Used to get the previous total balance\n\n :return: Previous total balance\n :rtype: float\n ' if (('previousBalance' not in self.app_data) or (self.app_data['previousBalance'] == 0)): return None return self.app_data['previousBalance']
def get_previous_total_balance(self): '\n Used to get the previous total balance\n\n :return: Previous total balance\n :rtype: float\n ' if (('previousBalance' not in self.app_data) or (self.app_data['previousBalance'] == 0)): return None return self.app_data['previousBalance']<|docstring|>Used to get the previous total balance :return: Previous total balance :rtype: float<|endoftext|>
004166a47a543804b705baa25a26267344a796f6d2efd8fe279a1e5350085567
@staticmethod def convert_bittrex_order_object(bittrex_order, stats=None): '\n Used to convert a Bittrex order object to a database buy object\n and add stats to it of they are provided.\n\n :param bittrex_order: Bittrex buy order object\n :type bittrex_order: dict\n :param stats: The buy stats to store\n :type stats: dict\n ' database_order = {'orderUuid': bittrex_order['OrderUuid'], 'dateOpened': bittrex_order['Opened'], 'dateClosed': bittrex_order['Closed'], 'price': bittrex_order['Price'], 'unitPrice': bittrex_order['PricePerUnit'], 'commissionPaid': bittrex_order['CommissionPaid']} if (stats is not None): database_order['stats'] = stats return database_order
Used to convert a Bittrex order object to a database buy object and add stats to it of they are provided. :param bittrex_order: Bittrex buy order object :type bittrex_order: dict :param stats: The buy stats to store :type stats: dict
src/database.py
convert_bittrex_order_object
CryptoRye/Crypto-Trading-Bot
301
python
@staticmethod def convert_bittrex_order_object(bittrex_order, stats=None): '\n Used to convert a Bittrex order object to a database buy object\n and add stats to it of they are provided.\n\n :param bittrex_order: Bittrex buy order object\n :type bittrex_order: dict\n :param stats: The buy stats to store\n :type stats: dict\n ' database_order = {'orderUuid': bittrex_order['OrderUuid'], 'dateOpened': bittrex_order['Opened'], 'dateClosed': bittrex_order['Closed'], 'price': bittrex_order['Price'], 'unitPrice': bittrex_order['PricePerUnit'], 'commissionPaid': bittrex_order['CommissionPaid']} if (stats is not None): database_order['stats'] = stats return database_order
@staticmethod def convert_bittrex_order_object(bittrex_order, stats=None): '\n Used to convert a Bittrex order object to a database buy object\n and add stats to it of they are provided.\n\n :param bittrex_order: Bittrex buy order object\n :type bittrex_order: dict\n :param stats: The buy stats to store\n :type stats: dict\n ' database_order = {'orderUuid': bittrex_order['OrderUuid'], 'dateOpened': bittrex_order['Opened'], 'dateClosed': bittrex_order['Closed'], 'price': bittrex_order['Price'], 'unitPrice': bittrex_order['PricePerUnit'], 'commissionPaid': bittrex_order['CommissionPaid']} if (stats is not None): database_order['stats'] = stats return database_order<|docstring|>Used to convert a Bittrex order object to a database buy object and add stats to it of they are provided. :param bittrex_order: Bittrex buy order object :type bittrex_order: dict :param stats: The buy stats to store :type stats: dict<|endoftext|>
dfb8c2941339752acaeaa072aeceffcc295712c55b7a68ca2d9310509ad1a23c
def get_objective(self, coeff=1): '\n Objective function of the Photovoltaic.\n\n Return the objective function of the photovoltaic weighted\n with `coeff`. Depending on `self.force_renewables` leave objective\n function empty or build quadratic objective function to minimize\n discrepancy between available power and produced power.\n\n Parameters\n ----------\n coeff : float, optional\n Coefficient for the objective function.\n\n Returns\n -------\n ExpressionBase :\n Objective function.\n ' m = self.model s = pyomo.sum_product(m.p_el_vars, m.p_el_vars) s += ((- 2) * pyomo.sum_product(self.p_el_supply[self.op_slice], m.p_el_vars)) return (coeff * s)
Objective function of the Photovoltaic. Return the objective function of the photovoltaic weighted with `coeff`. Depending on `self.force_renewables` leave objective function empty or build quadratic objective function to minimize discrepancy between available power and produced power. Parameters ---------- coeff : float, optional Coefficient for the objective function. Returns ------- ExpressionBase : Objective function.
src/pycity_scheduling/classes/photovoltaic.py
get_objective
ElsevierSoftwareX/SOFTX-D-20-00087
4
python
def get_objective(self, coeff=1): '\n Objective function of the Photovoltaic.\n\n Return the objective function of the photovoltaic weighted\n with `coeff`. Depending on `self.force_renewables` leave objective\n function empty or build quadratic objective function to minimize\n discrepancy between available power and produced power.\n\n Parameters\n ----------\n coeff : float, optional\n Coefficient for the objective function.\n\n Returns\n -------\n ExpressionBase :\n Objective function.\n ' m = self.model s = pyomo.sum_product(m.p_el_vars, m.p_el_vars) s += ((- 2) * pyomo.sum_product(self.p_el_supply[self.op_slice], m.p_el_vars)) return (coeff * s)
def get_objective(self, coeff=1): '\n Objective function of the Photovoltaic.\n\n Return the objective function of the photovoltaic weighted\n with `coeff`. Depending on `self.force_renewables` leave objective\n function empty or build quadratic objective function to minimize\n discrepancy between available power and produced power.\n\n Parameters\n ----------\n coeff : float, optional\n Coefficient for the objective function.\n\n Returns\n -------\n ExpressionBase :\n Objective function.\n ' m = self.model s = pyomo.sum_product(m.p_el_vars, m.p_el_vars) s += ((- 2) * pyomo.sum_product(self.p_el_supply[self.op_slice], m.p_el_vars)) return (coeff * s)<|docstring|>Objective function of the Photovoltaic. Return the objective function of the photovoltaic weighted with `coeff`. Depending on `self.force_renewables` leave objective function empty or build quadratic objective function to minimize discrepancy between available power and produced power. Parameters ---------- coeff : float, optional Coefficient for the objective function. Returns ------- ExpressionBase : Objective function.<|endoftext|>
4409a517bf63efae1ed77ffee6be90da4727a89545fa0e0063e7918769d1b9c0
def commit_file(self, file_name, content): '\n Commits a file specified by the file name and its content. If the file \n already exists, its content will be overwritten.\n\n The changes will be committed, thereby, a new SHA will be generated.\n \n :return: Itself (the branch)\n ' self._files.set_file_content(file_name, content) self._commit() return self
Commits a file specified by the file name and its content. If the file already exists, its content will be overwritten. The changes will be committed, thereby, a new SHA will be generated. :return: Itself (the branch)
src/cicd_sim/git/branch.py
commit_file
Software-Natives-OSS/cicd_sim
0
python
def commit_file(self, file_name, content): '\n Commits a file specified by the file name and its content. If the file \n already exists, its content will be overwritten.\n\n The changes will be committed, thereby, a new SHA will be generated.\n \n :return: Itself (the branch)\n ' self._files.set_file_content(file_name, content) self._commit() return self
def commit_file(self, file_name, content): '\n Commits a file specified by the file name and its content. If the file \n already exists, its content will be overwritten.\n\n The changes will be committed, thereby, a new SHA will be generated.\n \n :return: Itself (the branch)\n ' self._files.set_file_content(file_name, content) self._commit() return self<|docstring|>Commits a file specified by the file name and its content. If the file already exists, its content will be overwritten. The changes will be committed, thereby, a new SHA will be generated. :return: Itself (the branch)<|endoftext|>
752997ced248a84e6e9adb9f8d76961558884b2f1c94b2a1aeb05cbf3cde93ba
def set_version(self, version): "Set the project version\n\n :version: A string denoting a semver.org version. E.g. '1.2.3-pre.0+20200313'\n \n Technically, this function commits a file named 'VERSION' as this CICD\n simulator expects an according file to determine the project version.\n " self.commit_file('VERSION', version)
Set the project version :version: A string denoting a semver.org version. E.g. '1.2.3-pre.0+20200313' Technically, this function commits a file named 'VERSION' as this CICD simulator expects an according file to determine the project version.
src/cicd_sim/git/branch.py
set_version
Software-Natives-OSS/cicd_sim
0
python
def set_version(self, version): "Set the project version\n\n :version: A string denoting a semver.org version. E.g. '1.2.3-pre.0+20200313'\n \n Technically, this function commits a file named 'VERSION' as this CICD\n simulator expects an according file to determine the project version.\n " self.commit_file('VERSION', version)
def set_version(self, version): "Set the project version\n\n :version: A string denoting a semver.org version. E.g. '1.2.3-pre.0+20200313'\n \n Technically, this function commits a file named 'VERSION' as this CICD\n simulator expects an according file to determine the project version.\n " self.commit_file('VERSION', version)<|docstring|>Set the project version :version: A string denoting a semver.org version. E.g. '1.2.3-pre.0+20200313' Technically, this function commits a file named 'VERSION' as this CICD simulator expects an according file to determine the project version.<|endoftext|>
134a17aeb54535959183d7d614740558988f98c40a2ab161fc78019915fb2f29
def set_requires(self, requires): "Set the project requires (AKA dependencies)\n\n :requires: A string composed of the required package and the required \n semver version or version range delimited by a slash ('/'). \n E.g. 'lib/1.2.4', 'libA/1.x' or even 'lib/>1.0.0.0-0 <2.0.0'\n \n Technically, this function commits a file named 'REQUIRES' as this CICD\n simulator expects an according file to determine the projects \n requirements.\n " self.commit_file('REQUIRES', requires)
Set the project requires (AKA dependencies) :requires: A string composed of the required package and the required semver version or version range delimited by a slash ('/'). E.g. 'lib/1.2.4', 'libA/1.x' or even 'lib/>1.0.0.0-0 <2.0.0' Technically, this function commits a file named 'REQUIRES' as this CICD simulator expects an according file to determine the projects requirements.
src/cicd_sim/git/branch.py
set_requires
Software-Natives-OSS/cicd_sim
0
python
def set_requires(self, requires): "Set the project requires (AKA dependencies)\n\n :requires: A string composed of the required package and the required \n semver version or version range delimited by a slash ('/'). \n E.g. 'lib/1.2.4', 'libA/1.x' or even 'lib/>1.0.0.0-0 <2.0.0'\n \n Technically, this function commits a file named 'REQUIRES' as this CICD\n simulator expects an according file to determine the projects \n requirements.\n " self.commit_file('REQUIRES', requires)
def set_requires(self, requires): "Set the project requires (AKA dependencies)\n\n :requires: A string composed of the required package and the required \n semver version or version range delimited by a slash ('/'). \n E.g. 'lib/1.2.4', 'libA/1.x' or even 'lib/>1.0.0.0-0 <2.0.0'\n \n Technically, this function commits a file named 'REQUIRES' as this CICD\n simulator expects an according file to determine the projects \n requirements.\n " self.commit_file('REQUIRES', requires)<|docstring|>Set the project requires (AKA dependencies) :requires: A string composed of the required package and the required semver version or version range delimited by a slash ('/'). E.g. 'lib/1.2.4', 'libA/1.x' or even 'lib/>1.0.0.0-0 <2.0.0' Technically, this function commits a file named 'REQUIRES' as this CICD simulator expects an according file to determine the projects requirements.<|endoftext|>
13b21a67cbfc4eeeb9a689d2f2b566c094695ca12a2853c23a96bcce57d34e24
def __init__(self): 'Capability - a model defined in Swagger' self.discriminator = None
Capability - a model defined in Swagger
vtpl_api/models/capability.py
__init__
vtpl1/videonetics_api
0
python
def __init__(self): self.discriminator = None
def __init__(self): self.discriminator = None<|docstring|>Capability - a model defined in Swagger<|endoftext|>
7d0220f1bd8b5050af2a8f4e507bde8aa569c4e58583a1e463192c83aeaaf671
def to_dict(self): 'Returns the model properties as a dict' result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(Capability, dict): for (key, value) in self.items(): result[key] = value return result
Returns the model properties as a dict
vtpl_api/models/capability.py
to_dict
vtpl1/videonetics_api
0
python
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(Capability, dict): for (key, value) in self.items(): result[key] = value return result
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(Capability, dict): for (key, value) in self.items(): result[key] = value return result<|docstring|>Returns the model properties as a dict<|endoftext|>
cbb19eaa2fc8a113d9e32f924ef280a7e97563f8915f94f65dab438997af2e99
def to_str(self): 'Returns the string representation of the model' return pprint.pformat(self.to_dict())
Returns the string representation of the model
vtpl_api/models/capability.py
to_str
vtpl1/videonetics_api
0
python
def to_str(self): return pprint.pformat(self.to_dict())
def to_str(self): return pprint.pformat(self.to_dict())<|docstring|>Returns the string representation of the model<|endoftext|>
772243a2c2b3261a9b954d07aaf295e3c1242a579a495e2d6a5679c677861703
def __repr__(self): 'For `print` and `pprint`' return self.to_str()
For `print` and `pprint`
vtpl_api/models/capability.py
__repr__
vtpl1/videonetics_api
0
python
def __repr__(self): return self.to_str()
def __repr__(self): return self.to_str()<|docstring|>For `print` and `pprint`<|endoftext|>
2a58b1f67871e4f170f1ca8cfdf3701648ca338cc86d4032fe908c2e9db7cacb
def __eq__(self, other): 'Returns true if both objects are equal' if (not isinstance(other, Capability)): return False return (self.__dict__ == other.__dict__)
Returns true if both objects are equal
vtpl_api/models/capability.py
__eq__
vtpl1/videonetics_api
0
python
def __eq__(self, other): if (not isinstance(other, Capability)): return False return (self.__dict__ == other.__dict__)
def __eq__(self, other): if (not isinstance(other, Capability)): return False return (self.__dict__ == other.__dict__)<|docstring|>Returns true if both objects are equal<|endoftext|>
43dc6740163eb9fc1161d09cb2208a64c7ad0cc8d9c8637ac3264522d3ec7e42
def __ne__(self, other): 'Returns true if both objects are not equal' return (not (self == other))
Returns true if both objects are not equal
vtpl_api/models/capability.py
__ne__
vtpl1/videonetics_api
0
python
def __ne__(self, other): return (not (self == other))
def __ne__(self, other): return (not (self == other))<|docstring|>Returns true if both objects are not equal<|endoftext|>
909bf60d6b8c4295f5bf849bcf5e78546f338fe4fa1af75ad961cea576959557
def test_run_with_save_version_as_run_id(self, mocker, dummy_context, dummy_dataframe, caplog): 'Test that the default behaviour, with run_id set to None,\n creates a journal record with the run_id the same as save_version.\n ' save_version = '2020-01-01T00.00.00.000Z' mocked_get_save_version = mocker.patch.object(dummy_context, '_get_save_version', return_value=save_version) dummy_context.catalog.save('cars', dummy_dataframe) dummy_context.run(load_versions={'boats': save_version}) mocked_get_save_version.assert_called_once_with() log_msg = next((record.getMessage() for record in caplog.records if (record.name == 'kedro.journal'))) assert (json.loads(log_msg)['run_id'] == save_version)
Test that the default behaviour, with run_id set to None, creates a journal record with the run_id the same as save_version.
tests/framework/context/test_context.py
test_run_with_save_version_as_run_id
juan-carlos-calvo/kedro
4,923
python
def test_run_with_save_version_as_run_id(self, mocker, dummy_context, dummy_dataframe, caplog): 'Test that the default behaviour, with run_id set to None,\n creates a journal record with the run_id the same as save_version.\n ' save_version = '2020-01-01T00.00.00.000Z' mocked_get_save_version = mocker.patch.object(dummy_context, '_get_save_version', return_value=save_version) dummy_context.catalog.save('cars', dummy_dataframe) dummy_context.run(load_versions={'boats': save_version}) mocked_get_save_version.assert_called_once_with() log_msg = next((record.getMessage() for record in caplog.records if (record.name == 'kedro.journal'))) assert (json.loads(log_msg)['run_id'] == save_version)
def test_run_with_save_version_as_run_id(self, mocker, dummy_context, dummy_dataframe, caplog): 'Test that the default behaviour, with run_id set to None,\n creates a journal record with the run_id the same as save_version.\n ' save_version = '2020-01-01T00.00.00.000Z' mocked_get_save_version = mocker.patch.object(dummy_context, '_get_save_version', return_value=save_version) dummy_context.catalog.save('cars', dummy_dataframe) dummy_context.run(load_versions={'boats': save_version}) mocked_get_save_version.assert_called_once_with() log_msg = next((record.getMessage() for record in caplog.records if (record.name == 'kedro.journal'))) assert (json.loads(log_msg)['run_id'] == save_version)<|docstring|>Test that the default behaviour, with run_id set to None, creates a journal record with the run_id the same as save_version.<|endoftext|>
cd05a349769a26a012100fc223198819acfe9a7c9e112e2ec048e201f9cecb94
def pad_omni_image(image, pad_size, image_dims=None): '\n Pad an omni-directional image with the correct image wrapping at the edges.\n\n :param image: Image to perform the padding on *[batch_shape,h,w,d]*\n :type image: array\n :param pad_size: Number of pixels to pad.\n :type pad_size: int\n :param image_dims: Image dimensions. Inferred from Inputs if None.\n :type image_dims: sequence of ints, optional\n :return: New padded omni-directional image *[batch_shape,h+ps,w+ps,d]*\n ' if (image_dims is None): image_dims = image.shape[(- 3):(- 1)] top_left = image[(..., 0:pad_size, int((image_dims[1] / 2)):, :)] top_right = image[(..., 0:pad_size, 0:int((image_dims[1] / 2)), :)] top_border = _ivy.flip(_ivy.concatenate((top_left, top_right), (- 2)), (- 3)) bottom_left = image[(..., (- pad_size):, int((image_dims[1] / 2)):, :)] bottom_right = image[(..., (- pad_size):, 0:int((image_dims[1] / 2)), :)] bottom_border = _ivy.flip(_ivy.concatenate((bottom_left, bottom_right), (- 2)), (- 3)) image_expanded = _ivy.concatenate((top_border, image, bottom_border), (- 3)) left_border = image_expanded[(..., (- pad_size):, :)] right_border = image_expanded[(..., 0:pad_size, :)] return _ivy.concatenate((left_border, image_expanded, right_border), (- 2))
Pad an omni-directional image with the correct image wrapping at the edges. :param image: Image to perform the padding on *[batch_shape,h,w,d]* :type image: array :param pad_size: Number of pixels to pad. :type pad_size: int :param image_dims: Image dimensions. Inferred from Inputs if None. :type image_dims: sequence of ints, optional :return: New padded omni-directional image *[batch_shape,h+ps,w+ps,d]*
ivy_vision/padding.py
pad_omni_image
ivy-dl/vision
35
python
def pad_omni_image(image, pad_size, image_dims=None): '\n Pad an omni-directional image with the correct image wrapping at the edges.\n\n :param image: Image to perform the padding on *[batch_shape,h,w,d]*\n :type image: array\n :param pad_size: Number of pixels to pad.\n :type pad_size: int\n :param image_dims: Image dimensions. Inferred from Inputs if None.\n :type image_dims: sequence of ints, optional\n :return: New padded omni-directional image *[batch_shape,h+ps,w+ps,d]*\n ' if (image_dims is None): image_dims = image.shape[(- 3):(- 1)] top_left = image[(..., 0:pad_size, int((image_dims[1] / 2)):, :)] top_right = image[(..., 0:pad_size, 0:int((image_dims[1] / 2)), :)] top_border = _ivy.flip(_ivy.concatenate((top_left, top_right), (- 2)), (- 3)) bottom_left = image[(..., (- pad_size):, int((image_dims[1] / 2)):, :)] bottom_right = image[(..., (- pad_size):, 0:int((image_dims[1] / 2)), :)] bottom_border = _ivy.flip(_ivy.concatenate((bottom_left, bottom_right), (- 2)), (- 3)) image_expanded = _ivy.concatenate((top_border, image, bottom_border), (- 3)) left_border = image_expanded[(..., (- pad_size):, :)] right_border = image_expanded[(..., 0:pad_size, :)] return _ivy.concatenate((left_border, image_expanded, right_border), (- 2))
def pad_omni_image(image, pad_size, image_dims=None): '\n Pad an omni-directional image with the correct image wrapping at the edges.\n\n :param image: Image to perform the padding on *[batch_shape,h,w,d]*\n :type image: array\n :param pad_size: Number of pixels to pad.\n :type pad_size: int\n :param image_dims: Image dimensions. Inferred from Inputs if None.\n :type image_dims: sequence of ints, optional\n :return: New padded omni-directional image *[batch_shape,h+ps,w+ps,d]*\n ' if (image_dims is None): image_dims = image.shape[(- 3):(- 1)] top_left = image[(..., 0:pad_size, int((image_dims[1] / 2)):, :)] top_right = image[(..., 0:pad_size, 0:int((image_dims[1] / 2)), :)] top_border = _ivy.flip(_ivy.concatenate((top_left, top_right), (- 2)), (- 3)) bottom_left = image[(..., (- pad_size):, int((image_dims[1] / 2)):, :)] bottom_right = image[(..., (- pad_size):, 0:int((image_dims[1] / 2)), :)] bottom_border = _ivy.flip(_ivy.concatenate((bottom_left, bottom_right), (- 2)), (- 3)) image_expanded = _ivy.concatenate((top_border, image, bottom_border), (- 3)) left_border = image_expanded[(..., (- pad_size):, :)] right_border = image_expanded[(..., 0:pad_size, :)] return _ivy.concatenate((left_border, image_expanded, right_border), (- 2))<|docstring|>Pad an omni-directional image with the correct image wrapping at the edges. :param image: Image to perform the padding on *[batch_shape,h,w,d]* :type image: array :param pad_size: Number of pixels to pad. :type pad_size: int :param image_dims: Image dimensions. Inferred from Inputs if None. :type image_dims: sequence of ints, optional :return: New padded omni-directional image *[batch_shape,h+ps,w+ps,d]*<|endoftext|>
c160ad1c266e099415edd7f0d3c5d94053e7f71c853734de8142ea639b52af94
def clones(module, N): 'Produce N identical layers.' return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
Produce N identical layers.
Model_Transformer/train_utils.py
clones
AnubhavGupta3377/Text-Classification-Models
481
python
def clones(module, N): return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
def clones(module, N): return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])<|docstring|>Produce N identical layers.<|endoftext|>
ded32b216ffa5b61c215a606ee2b3cc1ba4bee532555b3430df520d520590fc6
@staticmethod def taking_int_input_from_user(msg_to_user): '\n [summary]\n Taking an int input from the user (checks if legal input -> num: int)\n\n Arguments:\n msg_to_user {[str]} -- [message to print to the user]\n ' is_correct = False while (not is_correct): try: num_from_user = int(input(msg_to_user)) if (num_from_user <= 0): raise ValueError is_correct = True except: print('Please enter a LEGAL number (an integer).') is_correct = False return num_from_user
[summary] Taking an int input from the user (checks if legal input -> num: int) Arguments: msg_to_user {[str]} -- [message to print to the user]
BlackJackGame/BlackJackGame.py
taking_int_input_from_user
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def taking_int_input_from_user(msg_to_user): '\n [summary]\n Taking an int input from the user (checks if legal input -> num: int)\n\n Arguments:\n msg_to_user {[str]} -- [message to print to the user]\n ' is_correct = False while (not is_correct): try: num_from_user = int(input(msg_to_user)) if (num_from_user <= 0): raise ValueError is_correct = True except: print('Please enter a LEGAL number (an integer).') is_correct = False return num_from_user
@staticmethod def taking_int_input_from_user(msg_to_user): '\n [summary]\n Taking an int input from the user (checks if legal input -> num: int)\n\n Arguments:\n msg_to_user {[str]} -- [message to print to the user]\n ' is_correct = False while (not is_correct): try: num_from_user = int(input(msg_to_user)) if (num_from_user <= 0): raise ValueError is_correct = True except: print('Please enter a LEGAL number (an integer).') is_correct = False return num_from_user<|docstring|>[summary] Taking an int input from the user (checks if legal input -> num: int) Arguments: msg_to_user {[str]} -- [message to print to the user]<|endoftext|>
d44e214cf43e21dfd42296a5a32ae6641af15e75cc8e441cfdde6f60083223a8
@staticmethod def take_bet_from_player(player): "\n [summary]\n Taking an input (int) from player. The input is the player's bet (with chips)\n\n Arguments:\n player {[Player]} -- [a reference to the player object, to check if the input is legal (if the player has enough chips)]\n " is_correct = False while (not is_correct): try: chips_to_bet = Game.taking_int_input_from_user('Please enter your bet (chips [integer]), considering your current chips balance: ') if (chips_to_bet > player.chips.total_chips): raise ValueError else: player.chips.make_bet(chips_to_bet) is_correct = True except: print(f"You don't have enought chips, your have: ({player.chips.count_total_chips()}). Please enter a legal number of chips.") is_correct = False return chips_to_bet
[summary] Taking an input (int) from player. The input is the player's bet (with chips) Arguments: player {[Player]} -- [a reference to the player object, to check if the input is legal (if the player has enough chips)]
BlackJackGame/BlackJackGame.py
take_bet_from_player
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def take_bet_from_player(player): "\n [summary]\n Taking an input (int) from player. The input is the player's bet (with chips)\n\n Arguments:\n player {[Player]} -- [a reference to the player object, to check if the input is legal (if the player has enough chips)]\n " is_correct = False while (not is_correct): try: chips_to_bet = Game.taking_int_input_from_user('Please enter your bet (chips [integer]), considering your current chips balance: ') if (chips_to_bet > player.chips.total_chips): raise ValueError else: player.chips.make_bet(chips_to_bet) is_correct = True except: print(f"You don't have enought chips, your have: ({player.chips.count_total_chips()}). Please enter a legal number of chips.") is_correct = False return chips_to_bet
@staticmethod def take_bet_from_player(player): "\n [summary]\n Taking an input (int) from player. The input is the player's bet (with chips)\n\n Arguments:\n player {[Player]} -- [a reference to the player object, to check if the input is legal (if the player has enough chips)]\n " is_correct = False while (not is_correct): try: chips_to_bet = Game.taking_int_input_from_user('Please enter your bet (chips [integer]), considering your current chips balance: ') if (chips_to_bet > player.chips.total_chips): raise ValueError else: player.chips.make_bet(chips_to_bet) is_correct = True except: print(f"You don't have enought chips, your have: ({player.chips.count_total_chips()}). Please enter a legal number of chips.") is_correct = False return chips_to_bet<|docstring|>[summary] Taking an input (int) from player. The input is the player's bet (with chips) Arguments: player {[Player]} -- [a reference to the player object, to check if the input is legal (if the player has enough chips)]<|endoftext|>
ff1bf5b21705f43998530eed38566febecd30f831fcacf64af952d6eb070d369
@staticmethod def player_hit(player, dealer): "\n [summary]\n Hitting (adding) new card to the player's hand from the dealer's deck\n\n Arguments:\n player {[Player]} -- [a reference to a player object, to add the card to his hand]\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck]\n " player.hand.add_new_card(dealer) player.hand.adjust_for_ace()
[summary] Hitting (adding) new card to the player's hand from the dealer's deck Arguments: player {[Player]} -- [a reference to a player object, to add the card to his hand] dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck]
BlackJackGame/BlackJackGame.py
player_hit
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def player_hit(player, dealer): "\n [summary]\n Hitting (adding) new card to the player's hand from the dealer's deck\n\n Arguments:\n player {[Player]} -- [a reference to a player object, to add the card to his hand]\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck]\n " player.hand.add_new_card(dealer) player.hand.adjust_for_ace()
@staticmethod def player_hit(player, dealer): "\n [summary]\n Hitting (adding) new card to the player's hand from the dealer's deck\n\n Arguments:\n player {[Player]} -- [a reference to a player object, to add the card to his hand]\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck]\n " player.hand.add_new_card(dealer) player.hand.adjust_for_ace()<|docstring|>[summary] Hitting (adding) new card to the player's hand from the dealer's deck Arguments: player {[Player]} -- [a reference to a player object, to add the card to his hand] dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck]<|endoftext|>
254ae775eb2c85da216c9c36e2ea25a9d408cd780e87bbc67d81214120f4cec9
@staticmethod def dealer_hit(dealer): "\n [summary]\n Hitting (adding) new card to the dealer's hand from its deck\n\n Arguments:\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to its hand from its deck]\n " dealer.hand.add_new_card(dealer) dealer.hand.adjust_for_ace()
[summary] Hitting (adding) new card to the dealer's hand from its deck Arguments: dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to its hand from its deck]
BlackJackGame/BlackJackGame.py
dealer_hit
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def dealer_hit(dealer): "\n [summary]\n Hitting (adding) new card to the dealer's hand from its deck\n\n Arguments:\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to its hand from its deck]\n " dealer.hand.add_new_card(dealer) dealer.hand.adjust_for_ace()
@staticmethod def dealer_hit(dealer): "\n [summary]\n Hitting (adding) new card to the dealer's hand from its deck\n\n Arguments:\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to its hand from its deck]\n " dealer.hand.add_new_card(dealer) dealer.hand.adjust_for_ace()<|docstring|>[summary] Hitting (adding) new card to the dealer's hand from its deck Arguments: dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to its hand from its deck]<|endoftext|>
c15a82612a7137167a7c8748d82d3219b1531119cb5eed97e700ca8dbbe29d17
@staticmethod def player_hit_or_stand(player, dealer): '\n [summary]\n Ask the player to choose between Hit or Stand (Hit -> add new card to his hand, Stand -> stand with his cards)\n If the player choose to Hit -> using player_hit() method with the references of the player and the dealer\n\n Arguments:\n player {[Player]} -- [a reference to a player object, to add the card to his hand - if the player choose to hit]\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck if the player choose to hit]\n ' print('Please enter your next move: ') is_correct = False while (not is_correct): try: answer_from_user = input("Do you want to Hit or Stand? Enter 'H'/'h' to Hit, or 'S'/'s' to Stand: ") if ((answer_from_user == 'H') or 'h' or 'S' or 's'): if ((answer_from_user == 'H') or 'h'): print(f'{player} Hits!') player_hit(player, dealer) is_correct = True elif ((answer_from_user == 'S') or 's'): print(f'''{player} Stands. The Dealer's Turn.''') Game.is_player_turn = False is_correct = True else: raise ValueError except: print('Wrong input, please try again.') is_correct = False
[summary] Ask the player to choose between Hit or Stand (Hit -> add new card to his hand, Stand -> stand with his cards) If the player choose to Hit -> using player_hit() method with the references of the player and the dealer Arguments: player {[Player]} -- [a reference to a player object, to add the card to his hand - if the player choose to hit] dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck if the player choose to hit]
BlackJackGame/BlackJackGame.py
player_hit_or_stand
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def player_hit_or_stand(player, dealer): '\n [summary]\n Ask the player to choose between Hit or Stand (Hit -> add new card to his hand, Stand -> stand with his cards)\n If the player choose to Hit -> using player_hit() method with the references of the player and the dealer\n\n Arguments:\n player {[Player]} -- [a reference to a player object, to add the card to his hand - if the player choose to hit]\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck if the player choose to hit]\n ' print('Please enter your next move: ') is_correct = False while (not is_correct): try: answer_from_user = input("Do you want to Hit or Stand? Enter 'H'/'h' to Hit, or 'S'/'s' to Stand: ") if ((answer_from_user == 'H') or 'h' or 'S' or 's'): if ((answer_from_user == 'H') or 'h'): print(f'{player} Hits!') player_hit(player, dealer) is_correct = True elif ((answer_from_user == 'S') or 's'): print(f'{player} Stands. The Dealer's Turn.') Game.is_player_turn = False is_correct = True else: raise ValueError except: print('Wrong input, please try again.') is_correct = False
@staticmethod def player_hit_or_stand(player, dealer): '\n [summary]\n Ask the player to choose between Hit or Stand (Hit -> add new card to his hand, Stand -> stand with his cards)\n If the player choose to Hit -> using player_hit() method with the references of the player and the dealer\n\n Arguments:\n player {[Player]} -- [a reference to a player object, to add the card to his hand - if the player choose to hit]\n dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck if the player choose to hit]\n ' print('Please enter your next move: ') is_correct = False while (not is_correct): try: answer_from_user = input("Do you want to Hit or Stand? Enter 'H'/'h' to Hit, or 'S'/'s' to Stand: ") if ((answer_from_user == 'H') or 'h' or 'S' or 's'): if ((answer_from_user == 'H') or 'h'): print(f'{player} Hits!') player_hit(player, dealer) is_correct = True elif ((answer_from_user == 'S') or 's'): print(f'{player} Stands. The Dealer's Turn.') Game.is_player_turn = False is_correct = True else: raise ValueError except: print('Wrong input, please try again.') is_correct = False<|docstring|>[summary] Ask the player to choose between Hit or Stand (Hit -> add new card to his hand, Stand -> stand with his cards) If the player choose to Hit -> using player_hit() method with the references of the player and the dealer Arguments: player {[Player]} -- [a reference to a player object, to add the card to his hand - if the player choose to hit] dealer {[ComputerDealer]} -- [a reference to a dealer object, to add the card to the player from his deck if the player choose to hit]<|endoftext|>
3658dea471c192ee45f8b7c3b7329bef4d2116014623c6e642e8dd465aa64259
@staticmethod def player_bust(player): '\n [summary]\n The player busted -> have to decrease his chips that was in the current bet (because of the lose)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n ' print(f'{player} BUST!') player.chips.losing_bet()
[summary] The player busted -> have to decrease his chips that was in the current bet (because of the lose) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]
BlackJackGame/BlackJackGame.py
player_bust
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def player_bust(player): '\n [summary]\n The player busted -> have to decrease his chips that was in the current bet (because of the lose)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n ' print(f'{player} BUST!') player.chips.losing_bet()
@staticmethod def player_bust(player): '\n [summary]\n The player busted -> have to decrease his chips that was in the current bet (because of the lose)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n ' print(f'{player} BUST!') player.chips.losing_bet()<|docstring|>[summary] The player busted -> have to decrease his chips that was in the current bet (because of the lose) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]<|endoftext|>
7e30cdcaed3e44503e4142e321c5aae58f5646a0a626edc681f4603a5fd77ae1
@staticmethod def player_wins(player): '\n [summary]\n The player wins -> have to increase his chips that was in the current bet (because of the win)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n ' print(f'{player} WINS!') player.chips.winning_bet()
[summary] The player wins -> have to increase his chips that was in the current bet (because of the win) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]
BlackJackGame/BlackJackGame.py
player_wins
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def player_wins(player): '\n [summary]\n The player wins -> have to increase his chips that was in the current bet (because of the win)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n ' print(f'{player} WINS!') player.chips.winning_bet()
@staticmethod def player_wins(player): '\n [summary]\n The player wins -> have to increase his chips that was in the current bet (because of the win)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n ' print(f'{player} WINS!') player.chips.winning_bet()<|docstring|>[summary] The player wins -> have to increase his chips that was in the current bet (because of the win) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]<|endoftext|>
c51da68ec4999cb2e13559df5a4079a15b596c0a8e4cdcf74a2c1e51686d0e8c
@staticmethod def dealer_bust(player, dealer): "\n [summary]\n The dealer bust so the player wins -> have to increase the player's chips that was in the current bet (because of the lose of the dealer)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n " print(f'''{player} WINS! {dealer} BUSTED!''') player.chips.winning_bet()
[summary] The dealer bust so the player wins -> have to increase the player's chips that was in the current bet (because of the lose of the dealer) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it] dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]
BlackJackGame/BlackJackGame.py
dealer_bust
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def dealer_bust(player, dealer): "\n [summary]\n The dealer bust so the player wins -> have to increase the player's chips that was in the current bet (because of the lose of the dealer)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n " print(f'{player} WINS! {dealer} BUSTED!') player.chips.winning_bet()
@staticmethod def dealer_bust(player, dealer): "\n [summary]\n The dealer bust so the player wins -> have to increase the player's chips that was in the current bet (because of the lose of the dealer)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n " print(f'{player} WINS! {dealer} BUSTED!') player.chips.winning_bet()<|docstring|>[summary] The dealer bust so the player wins -> have to increase the player's chips that was in the current bet (because of the lose of the dealer) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it] dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]<|endoftext|>
af6891c743e836141d52f0eb41278661c77cc855794ee21210f4c4ad7518f4f1
@staticmethod def dealer_wins(player, dealer): "\n [summary]\n The dealer wins so the player loses -> have to decrease the player's chips that was in the current bet (because of the win of the dealer)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n " print(f'{dealer} WINS!') player.chips.losing_bet()
[summary] The dealer wins so the player loses -> have to decrease the player's chips that was in the current bet (because of the win of the dealer) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it] dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]
BlackJackGame/BlackJackGame.py
dealer_wins
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def dealer_wins(player, dealer): "\n [summary]\n The dealer wins so the player loses -> have to decrease the player's chips that was in the current bet (because of the win of the dealer)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n " print(f'{dealer} WINS!') player.chips.losing_bet()
@staticmethod def dealer_wins(player, dealer): "\n [summary]\n The dealer wins so the player loses -> have to decrease the player's chips that was in the current bet (because of the win of the dealer)\n\n Arguments:\n player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n " print(f'{dealer} WINS!') player.chips.losing_bet()<|docstring|>[summary] The dealer wins so the player loses -> have to decrease the player's chips that was in the current bet (because of the win of the dealer) Arguments: player {[Player]} -- [a reference to the player (using its name) and to the chips object inside it] dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]<|endoftext|>
2aa5c1485ada29951a20315351768c003ea11c2d6d7af3d98f7f19f6bc8286f6
@staticmethod def push(player, dealer): '\n [summary]\n The player and the dealer tie -> push\n\n Arguments:\n player {[Player]} -- [a reference to the player to use its name]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n ' print(f'{player} and {dealer} tie! PUSH!')
[summary] The player and the dealer tie -> push Arguments: player {[Player]} -- [a reference to the player to use its name] dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]
BlackJackGame/BlackJackGame.py
push
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def push(player, dealer): '\n [summary]\n The player and the dealer tie -> push\n\n Arguments:\n player {[Player]} -- [a reference to the player to use its name]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n ' print(f'{player} and {dealer} tie! PUSH!')
@staticmethod def push(player, dealer): '\n [summary]\n The player and the dealer tie -> push\n\n Arguments:\n player {[Player]} -- [a reference to the player to use its name]\n dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]\n ' print(f'{player} and {dealer} tie! PUSH!')<|docstring|>[summary] The player and the dealer tie -> push Arguments: player {[Player]} -- [a reference to the player to use its name] dealer {[ComputerDealer]} -- [a reference to the dealer to use its name]<|endoftext|>
be8212d3074d0d182cf49979052a2223259f71a33666877f2cea438af2a78937
@staticmethod
def ask_for_new_round():
    """
    Ask the player whether to play another round with the chips he
    currently holds, and set Game.game_state accordingly.
    Invalid input re-prompts until a valid choice is entered.
    """
    print('Please enter your choice: ')
    is_correct = False
    while not is_correct:
        answer_from_user = input("Do you want to play another round (with the current chips you already have)? Enter 'Y'/'y' for 'Yes', or 'N'/'n' for 'No': ")
        # BUG FIX: the original tested `(answer == 'Y') or 'y'`, which is
        # always truthy, so ANY input (including invalid ones) started a
        # new round.  Use membership tests instead.
        if answer_from_user in ('Y', 'y'):
            print('New round! \nGet ready!')
            Game.game_state = True
            is_correct = True
        elif answer_from_user in ('N', 'n'):
            print('Thanks for playing!')
            Game.game_state = False
            is_correct = True
        else:
            print('Wrong input, please try again.')
[summary] Asking the player to have a new round (with the same state as before [after the change in its chips at the end of the previous round])
BlackJackGame/BlackJackGame.py
ask_for_new_round
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def ask_for_new_round(): '\n [summary]\n Asking the player to have a new round (with the same state as before [after the change in its chips at the end of the previous round])\n ' print('Please enter your choice: ') is_correct = False while (not is_correct): try: answer_from_user = input("Do you want to play another round (with the current chips you already have)? Enter 'Y'/'y' for 'Yes', or 'N'/'n' for 'No': ") if ((answer_from_user == 'Y') or 'y' or 'N' or 'n'): if ((answer_from_user == 'Y') or 'y'): print('New round! \nGet ready!') Game.game_state = True is_correct = True elif ((answer_from_user == 'N') or 'n'): print('Thanks for playing!') Game.game_state = False is_correct = True else: raise ValueError except: print('Wrong input, please try again.') is_correct = False
@staticmethod def ask_for_new_round(): '\n [summary]\n Asking the player to have a new round (with the same state as before [after the change in its chips at the end of the previous round])\n ' print('Please enter your choice: ') is_correct = False while (not is_correct): try: answer_from_user = input("Do you want to play another round (with the current chips you already have)? Enter 'Y'/'y' for 'Yes', or 'N'/'n' for 'No': ") if ((answer_from_user == 'Y') or 'y' or 'N' or 'n'): if ((answer_from_user == 'Y') or 'y'): print('New round! \nGet ready!') Game.game_state = True is_correct = True elif ((answer_from_user == 'N') or 'n'): print('Thanks for playing!') Game.game_state = False is_correct = True else: raise ValueError except: print('Wrong input, please try again.') is_correct = False<|docstring|>[summary] Asking the player to have a new round (with the same state as before [after the change in its chips at the end of the previous round])<|endoftext|>
e24db54b10d44aeb525e4090e3d507dc92963f083a3be236ba27837a1d085f71
@staticmethod
def ask_to_restart_the_whole_game():
    """
    Ask the player whether to restart the whole game; if yes, set
    Game.game_state and re-enter Game.run().
    Invalid input re-prompts until a valid choice is entered.
    """
    print('Please enter your choice: ')
    is_correct = False
    while not is_correct:
        answer_from_user = input("Would you like to restart the whole game? Enter 'Y'/'y' for 'Yes', or 'N'/'n' for 'No': ")
        # BUG FIX: the original tested `(answer == 'Y') or 'y'`, which is
        # always truthy, so ANY input (including invalid ones) restarted
        # the game.  Use membership tests instead.
        if answer_from_user in ('Y', 'y'):
            Game.game_state = True
            is_correct = True
            # NOTE(review): restarting via recursion into run() grows the
            # call stack with each restart; kept for compatibility with the
            # original flow.
            Game.run()
        elif answer_from_user in ('N', 'n'):
            Game.game_state = False
            is_correct = True
        else:
            print('Wrong input, please try again.')
[summary] Asking the player to restart the whole game (running Game.run() method again)
BlackJackGame/BlackJackGame.py
ask_to_restart_the_whole_game
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def ask_to_restart_the_whole_game(): '\n [summary]\n Asking the player to restart the whole game (running Game.run() method again)\n ' print('Please enter your choice: ') is_correct = False while (not is_correct): try: answer_from_user = input("Would you like to restart the whole game? Enter 'Y'/'y' for 'Yes', or 'N'/'n' for 'No': ") if ((answer_from_user == 'Y') or 'y' or 'N' or 'n'): if ((answer_from_user == 'Y') or 'y'): Game.game_state = True is_correct = True Game.run() elif ((answer_from_user == 'N') or 'n'): Game.game_state = False is_correct = True else: raise ValueError except: print('Wrong input, please try again.') is_correct = False
@staticmethod def ask_to_restart_the_whole_game(): '\n [summary]\n Asking the player to restart the whole game (running Game.run() method again)\n ' print('Please enter your choice: ') is_correct = False while (not is_correct): try: answer_from_user = input("Would you like to restart the whole game? Enter 'Y'/'y' for 'Yes', or 'N'/'n' for 'No': ") if ((answer_from_user == 'Y') or 'y' or 'N' or 'n'): if ((answer_from_user == 'Y') or 'y'): Game.game_state = True is_correct = True Game.run() elif ((answer_from_user == 'N') or 'n'): Game.game_state = False is_correct = True else: raise ValueError except: print('Wrong input, please try again.') is_correct = False<|docstring|>[summary] Asking the player to restart the whole game (running Game.run() method again)<|endoftext|>
3d8cca51dad3daa86fde598b1f724dbbe00d41719b74f676d0abccf213d7fd0a
@staticmethod
def run():
    """
    The main flow of the game (static method -> no Game() instance needed).

    Sets up the player and the dealer, then loops over betting rounds
    until the player quits or runs out of chips, finally offering a
    full restart.
    """
    print('This is a Console UI BlackJack Game.')
    print('\n')
    print("You are going to play against a Computer - 'The Dealer'.")
    print('What is you name?')
    name_of_player = str(input('Please enter you name: '))
    print(f'Hello {name_of_player}!')
    starting_chips = Game.taking_int_input_from_user('With how many chips do you want to start the game? ')
    print('\n')

    player = BlackJackGameBuilders.PlayerBuilder.build_player(name_of_player, starting_chips)
    dealer = BlackJackGameBuilders.ComputerDealerBuilder.build_dealer()
    dealer.deck.shuffle()

    print(f'Finally.....GAME IS ON!\nGood Luck {player}!')

    while Game.game_state:
        Game.is_player_turn = True
        print('\n' * 3)

        # BUG FIX: hands were dealt once, BEFORE the round loop, so every
        # "new round" replayed exactly the same cards.  Deal fresh two-card
        # hands to both sides at the start of each round instead.
        player.set_new_hand()
        player.current_hand.add_new_card(dealer)
        player.current_hand.add_new_card(dealer)
        dealer.set_new_hand()
        dealer.current_hand.add_new_card(dealer)
        dealer.current_hand.add_new_card(dealer)

        current_player_bet = Game.take_bet_from_player(player)
        print(f'Your bet is: {current_player_bet}.')
        print('\n')
        print(f'{player} has: ')
        print(player.show_hand())
        print('\n')
        print(f'{dealer} has: ')
        print(dealer.show_dealer_one_card())

        # Player's turn: hit until stand or bust (dealer shows one card).
        while Game.is_player_turn:
            Game.player_hit_or_stand(player, dealer)
            print(f'{player} has: ')
            print(player.show_hand())
            print('\n')
            print(f'{dealer} has: ')
            print(dealer.show_dealer_one_card())
            if player.current_hand.value_of_cards_in_hand > 21:
                Game.player_bust(player)
                break

        if player.current_hand.value_of_cards_in_hand <= 21:
            # Dealer's turn: draw until reaching at least 17, then reveal.
            while dealer.current_hand.value_of_cards_in_hand < 17:
                Game.dealer_hit(dealer)
            print(f'{player} has: ')
            print(player.show_hand())
            print('\n')
            print(f'{dealer} has: ')
            print(dealer.show_hand())

            if dealer.current_hand.value_of_cards_in_hand > 21:
                Game.dealer_bust(player, dealer)
            elif dealer.current_hand.value_of_cards_in_hand > player.current_hand.value_of_cards_in_hand:
                Game.dealer_wins(player, dealer)
            elif dealer.current_hand.value_of_cards_in_hand < player.current_hand.value_of_cards_in_hand:
                Game.player_wins(player)
            else:
                Game.push(player, dealer)

        print('\n')
        # BUG FIX: the original printed `player.chips.count_total_chips`
        # although the loop condition below reads `player.chips.total_chips`;
        # use the attribute the game actually checks (TODO confirm against
        # the Chips class).
        print(f"{player}'s total chips: {player.chips.total_chips}")
        if player.chips.total_chips > 0:
            Game.ask_for_new_round()
        else:
            Game.game_state = False

    Game.ask_to_restart_the_whole_game()
[summary] The main flow method of the game (Static class method -> No need Game() instance)
BlackJackGame/BlackJackGame.py
run
NatanMeirov/PythonMiniProjects
0
python
@staticmethod def run(): '\n [summary]\n The main flow method of the game (Static class method -> No need Game() instance)\n ' print('This is a Console UI BlackJack Game.') print('\n') print("You are going to play against a Computer - 'The Dealer'.") print('What is you name?') name_of_player = str(input('Please enter you name: ')) print(f'Hello {name_of_player}!') starting_chips = Game.taking_int_input_from_user('With how many chips do you want to start the game? ') print('\n') player = BlackJackGameBuilders.PlayerBuilder.build_player(name_of_player, starting_chips) dealer = BlackJackGameBuilders.ComputerDealerBuilder.build_dealer() dealer.deck.shuffle() player.set_new_hand() player.current_hand.add_new_card(dealer) player.current_hand.add_new_card(dealer) dealer.set_new_hand() dealer.current_hand.add_new_card(dealer) dealer.current_hand.add_new_card(dealer) print(f'Finally.....GAME IS ON! Good Luck {player}!') while Game.game_state: Game.is_player_turn = True print(('\n' * 3)) current_player_bet = Game.take_bet_from_player(player) print(f'Your bet is: {current_player_bet}.') print('\n') print(f'{player} has: ') print(player.show_hand()) print('\n') print(f'{dealer} has: ') print(dealer.show_dealer_one_card()) while Game.is_player_turn: Game.player_hit_or_stand(player, dealer) print(f'{player} has: ') print(player.show_hand()) print('\n') print(f'{dealer} has: ') print(dealer.show_dealer_one_card()) if (player.current_hand.value_of_cards_in_hand > 21): Game.player_bust(player) break if (player.current_hand.value_of_cards_in_hand <= 21): while (dealer.current_hand.value_of_cards_in_hand < 17): Game.dealer_hit(dealer) print(f'{player} has: ') print(player.show_hand()) print('\n') print(f'{dealer} has: ') print(dealer.show_hand()) if (dealer.current_hand.value_of_cards_in_hand > 21): Game.dealer_bust(player, dealer) elif (dealer.current_hand.value_of_cards_in_hand > player.current_hand.value_of_cards_in_hand): Game.dealer_wins(player, dealer) elif 
(dealer.current_hand.value_of_cards_in_hand < player.current_hand.value_of_cards_in_hand): Game.player_wins(player) elif (dealer.current_hand.value_of_cards_in_hand == player.current_hand.value_of_cards_in_hand): Game.push(player, dealer) print('\n') print(f' {player}'s {player.chips.count_total_chips}') if (player.chips.total_chips > 0): Game.ask_for_new_round() else: Game.game_state = False Game.ask_to_restart_the_whole_game()
@staticmethod def run(): '\n [summary]\n The main flow method of the game (Static class method -> No need Game() instance)\n ' print('This is a Console UI BlackJack Game.') print('\n') print("You are going to play against a Computer - 'The Dealer'.") print('What is you name?') name_of_player = str(input('Please enter you name: ')) print(f'Hello {name_of_player}!') starting_chips = Game.taking_int_input_from_user('With how many chips do you want to start the game? ') print('\n') player = BlackJackGameBuilders.PlayerBuilder.build_player(name_of_player, starting_chips) dealer = BlackJackGameBuilders.ComputerDealerBuilder.build_dealer() dealer.deck.shuffle() player.set_new_hand() player.current_hand.add_new_card(dealer) player.current_hand.add_new_card(dealer) dealer.set_new_hand() dealer.current_hand.add_new_card(dealer) dealer.current_hand.add_new_card(dealer) print(f'Finally.....GAME IS ON! Good Luck {player}!') while Game.game_state: Game.is_player_turn = True print(('\n' * 3)) current_player_bet = Game.take_bet_from_player(player) print(f'Your bet is: {current_player_bet}.') print('\n') print(f'{player} has: ') print(player.show_hand()) print('\n') print(f'{dealer} has: ') print(dealer.show_dealer_one_card()) while Game.is_player_turn: Game.player_hit_or_stand(player, dealer) print(f'{player} has: ') print(player.show_hand()) print('\n') print(f'{dealer} has: ') print(dealer.show_dealer_one_card()) if (player.current_hand.value_of_cards_in_hand > 21): Game.player_bust(player) break if (player.current_hand.value_of_cards_in_hand <= 21): while (dealer.current_hand.value_of_cards_in_hand < 17): Game.dealer_hit(dealer) print(f'{player} has: ') print(player.show_hand()) print('\n') print(f'{dealer} has: ') print(dealer.show_hand()) if (dealer.current_hand.value_of_cards_in_hand > 21): Game.dealer_bust(player, dealer) elif (dealer.current_hand.value_of_cards_in_hand > player.current_hand.value_of_cards_in_hand): Game.dealer_wins(player, dealer) elif 
(dealer.current_hand.value_of_cards_in_hand < player.current_hand.value_of_cards_in_hand): Game.player_wins(player) elif (dealer.current_hand.value_of_cards_in_hand == player.current_hand.value_of_cards_in_hand): Game.push(player, dealer) print('\n') print(f' {player}'s {player.chips.count_total_chips}') if (player.chips.total_chips > 0): Game.ask_for_new_round() else: Game.game_state = False Game.ask_to_restart_the_whole_game()<|docstring|>[summary] The main flow method of the game (Static class method -> No need Game() instance)<|endoftext|>
cb3003656826937e67faf1d69a2326d699572f016195ab6d6a27a5138ddbd1e6
def response_content(self, url, request):
    ''' Fake HTTP responses for use with HTTMock in tests.

        Dispatches on (host, path, query) of the requested URL and returns a
        canned httmock response; raises NotImplementedError for any URL this
        mock does not know about.
    '''
    (scheme, host, path, _, query, _) = urlparse(url.geturl())
    tests_dirname = dirname(__file__)
    if (host == 'fake-cwd.local'):
        # Serve any file relative to the tests directory, guessing its MIME type.
        with open((tests_dirname + path), 'rb') as file:
            (type, _) = mimetypes.guess_type(file.name)
            return httmock.response(200, file.read(), headers={'Content-Type': type})
    elif ((host, path) == ('www.ci.berkeley.ca.us', '/uploadedFiles/IT/GIS/Parcels.zip')):
        # Berkeley parcels: serve a local excerpt zip as a binary download.
        with open(join(tests_dirname, 'data', 'us-ca-berkeley-excerpt.zip'), 'rb') as file:
            return httmock.response(200, file.read(), headers={'Content-Type': 'application/octet-stream'})
    elif ((host, path) == ('data.sfgov.org', '/download/kvej-w5kb/ZIPPED%20SHAPEFILE')):
        # San Francisco: first a redirect hop to the real file host.
        return httmock.response(302, '', headers={'Location': 'http://apps.sfgov.org/datafiles/view.php?file=sfgis/eas_addresses_with_units.zip'})
    elif ((host, path, query) == ('apps.sfgov.org', '/datafiles/view.php', 'file=sfgis/eas_addresses_with_units.zip')):
        # ...then the redirect target serves a local excerpt as an attachment.
        with open(join(tests_dirname, 'data', 'us-ca-san_francisco-excerpt.zip'), 'rb') as file:
            return httmock.response(200, file.read(), headers={'Content-Type': 'application/download', 'Content-Disposition': 'attachment; filename=eas_addresses_with_units.zip;'})
    elif ((host, path, query) == ('dcatlas.dcgis.dc.gov', '/catalog/download.asp', 'downloadID=2182&downloadTYPE=ESRI')):
        # DC atlas: fixed-size fake zip payload (content irrelevant to the test).
        return httmock.response(200, (b'FAKE' * 99), headers={'Content-Type': 'application/x-zip-compressed'})
    elif ((host, path, query) == ('data.northcowichan.ca', '/DataBrowser/DownloadCsv', 'container=mncowichan&entitySet=PropertyReport&filter=NOFILTER')):
        # North Cowichan: fake CSV served as an attachment download.
        return httmock.response(200, (b'FAKE,FAKE\n' * 99), headers={'Content-Type': 'text/csv', 'Content-Disposition': 'attachment; filename=PropertyReport.csv'})
    # Fail loudly on unexpected URLs so tests never hit the network silently.
    raise NotImplementedError(url.geturl())
Fake HTTP responses for use with HTTMock in tests.
openaddr/tests/cache.py
response_content
geobrando/machine
101
python
def response_content(self, url, request): ' \n ' (scheme, host, path, _, query, _) = urlparse(url.geturl()) tests_dirname = dirname(__file__) if (host == 'fake-cwd.local'): with open((tests_dirname + path), 'rb') as file: (type, _) = mimetypes.guess_type(file.name) return httmock.response(200, file.read(), headers={'Content-Type': type}) elif ((host, path) == ('www.ci.berkeley.ca.us', '/uploadedFiles/IT/GIS/Parcels.zip')): with open(join(tests_dirname, 'data', 'us-ca-berkeley-excerpt.zip'), 'rb') as file: return httmock.response(200, file.read(), headers={'Content-Type': 'application/octet-stream'}) elif ((host, path) == ('data.sfgov.org', '/download/kvej-w5kb/ZIPPED%20SHAPEFILE')): return httmock.response(302, , headers={'Location': 'http://apps.sfgov.org/datafiles/view.php?file=sfgis/eas_addresses_with_units.zip'}) elif ((host, path, query) == ('apps.sfgov.org', '/datafiles/view.php', 'file=sfgis/eas_addresses_with_units.zip')): with open(join(tests_dirname, 'data', 'us-ca-san_francisco-excerpt.zip'), 'rb') as file: return httmock.response(200, file.read(), headers={'Content-Type': 'application/download', 'Content-Disposition': 'attachment; filename=eas_addresses_with_units.zip;'}) elif ((host, path, query) == ('dcatlas.dcgis.dc.gov', '/catalog/download.asp', 'downloadID=2182&downloadTYPE=ESRI')): return httmock.response(200, (b'FAKE' * 99), headers={'Content-Type': 'application/x-zip-compressed'}) elif ((host, path, query) == ('data.northcowichan.ca', '/DataBrowser/DownloadCsv', 'container=mncowichan&entitySet=PropertyReport&filter=NOFILTER')): return httmock.response(200, (b'FAKE,FAKE\n' * 99), headers={'Content-Type': 'text/csv', 'Content-Disposition': 'attachment; filename=PropertyReport.csv'}) raise NotImplementedError(url.geturl())
def response_content(self, url, request): ' \n ' (scheme, host, path, _, query, _) = urlparse(url.geturl()) tests_dirname = dirname(__file__) if (host == 'fake-cwd.local'): with open((tests_dirname + path), 'rb') as file: (type, _) = mimetypes.guess_type(file.name) return httmock.response(200, file.read(), headers={'Content-Type': type}) elif ((host, path) == ('www.ci.berkeley.ca.us', '/uploadedFiles/IT/GIS/Parcels.zip')): with open(join(tests_dirname, 'data', 'us-ca-berkeley-excerpt.zip'), 'rb') as file: return httmock.response(200, file.read(), headers={'Content-Type': 'application/octet-stream'}) elif ((host, path) == ('data.sfgov.org', '/download/kvej-w5kb/ZIPPED%20SHAPEFILE')): return httmock.response(302, , headers={'Location': 'http://apps.sfgov.org/datafiles/view.php?file=sfgis/eas_addresses_with_units.zip'}) elif ((host, path, query) == ('apps.sfgov.org', '/datafiles/view.php', 'file=sfgis/eas_addresses_with_units.zip')): with open(join(tests_dirname, 'data', 'us-ca-san_francisco-excerpt.zip'), 'rb') as file: return httmock.response(200, file.read(), headers={'Content-Type': 'application/download', 'Content-Disposition': 'attachment; filename=eas_addresses_with_units.zip;'}) elif ((host, path, query) == ('dcatlas.dcgis.dc.gov', '/catalog/download.asp', 'downloadID=2182&downloadTYPE=ESRI')): return httmock.response(200, (b'FAKE' * 99), headers={'Content-Type': 'application/x-zip-compressed'}) elif ((host, path, query) == ('data.northcowichan.ca', '/DataBrowser/DownloadCsv', 'container=mncowichan&entitySet=PropertyReport&filter=NOFILTER')): return httmock.response(200, (b'FAKE,FAKE\n' * 99), headers={'Content-Type': 'text/csv', 'Content-Disposition': 'attachment; filename=PropertyReport.csv'}) raise NotImplementedError(url.geturl())<|docstring|>Fake HTTP responses for use with HTTMock in tests.<|endoftext|>
4b770d385a0af55f1020caa0ea6cd131c16804d90b2a6e12e436f6b14291bf50
def setUp(self):
    ''' Prepare a clean temporary directory, and work there.
    '''
    scratch = tempfile.mkdtemp(prefix='testCache-')
    self.workdir = scratch
Prepare a clean temporary directory, and work there.
openaddr/tests/cache.py
setUp
geobrando/machine
101
python
def setUp(self): ' \n ' self.workdir = tempfile.mkdtemp(prefix='testCache-')
def setUp(self): ' \n ' self.workdir = tempfile.mkdtemp(prefix='testCache-')<|docstring|>Prepare a clean temporary directory, and work there.<|endoftext|>
cf820a4c99a948104dd66adba35425bdb5294bbc95a761897910664c4c283909
def test_download_with_conform(self):
    ''' ESRI Caching Will Request With The Minimum Fields Required '''
    cases = (
        (None, None),
        (['a', 'b', 'c'], {'type': 'csv', 'street': ['a', 'b'], 'number': 'c'}),
        (['a'], {'type': 'csv',
                 'street': {'function': 'regexp', 'field': 'a'},
                 'number': {'function': 'regexp', 'field': 'a'}}),
    )
    task = EsriRestDownloadTask('us-fl-palmbeach')
    for (expected, conform) in cases:
        self.assertEqual(expected, task.field_names_to_request(conform))
ESRI Caching Will Request With The Minimum Fields Required
openaddr/tests/cache.py
test_download_with_conform
geobrando/machine
101
python
def test_download_with_conform(self): ' ' conforms = ((None, None), (['a', 'b', 'c'], {'type': 'csv', 'street': ['a', 'b'], 'number': 'c'}), (['a'], {'type': 'csv', 'street': {'function': 'regexp', 'field': 'a'}, 'number': {'function': 'regexp', 'field': 'a'}})) task = EsriRestDownloadTask('us-fl-palmbeach') for (expected, conform) in conforms: actual = task.field_names_to_request(conform) self.assertEqual(expected, actual)
def test_download_with_conform(self): ' ' conforms = ((None, None), (['a', 'b', 'c'], {'type': 'csv', 'street': ['a', 'b'], 'number': 'c'}), (['a'], {'type': 'csv', 'street': {'function': 'regexp', 'field': 'a'}, 'number': {'function': 'regexp', 'field': 'a'}})) task = EsriRestDownloadTask('us-fl-palmbeach') for (expected, conform) in conforms: actual = task.field_names_to_request(conform) self.assertEqual(expected, actual)<|docstring|>ESRI Caching Will Request With The Minimum Fields Required<|endoftext|>
5dd38d5cef8dbb880c7a94fd2da5c194db7e467e169a3db5424366b1e1516dea
def test_download_handles_no_count(self): ' ESRI Caching Will Handle A Server Without returnCountOnly Support ' task = EsriRestDownloadTask('us-fl-palmbeach') with patch('esridump.EsriDumper.get_metadata') as metadata_patch: metadata_patch.return_value = {'fields': []} with patch('esridump.EsriDumper.get_feature_count') as feature_patch: feature_patch.side_effect = EsriDownloadError("Server doesn't support returnCountOnly") with self.assertRaises(EsriDownloadError) as e: task.download(['http://example.com/'], self.workdir) self.assertEqual(e.message, 'Could not find object ID field name for deduplication')
ESRI Caching Will Handle A Server Without returnCountOnly Support
openaddr/tests/cache.py
test_download_handles_no_count
geobrando/machine
101
python
def test_download_handles_no_count(self): ' ' task = EsriRestDownloadTask('us-fl-palmbeach') with patch('esridump.EsriDumper.get_metadata') as metadata_patch: metadata_patch.return_value = {'fields': []} with patch('esridump.EsriDumper.get_feature_count') as feature_patch: feature_patch.side_effect = EsriDownloadError("Server doesn't support returnCountOnly") with self.assertRaises(EsriDownloadError) as e: task.download(['http://example.com/'], self.workdir) self.assertEqual(e.message, 'Could not find object ID field name for deduplication')
def test_download_handles_no_count(self): ' ' task = EsriRestDownloadTask('us-fl-palmbeach') with patch('esridump.EsriDumper.get_metadata') as metadata_patch: metadata_patch.return_value = {'fields': []} with patch('esridump.EsriDumper.get_feature_count') as feature_patch: feature_patch.side_effect = EsriDownloadError("Server doesn't support returnCountOnly") with self.assertRaises(EsriDownloadError) as e: task.download(['http://example.com/'], self.workdir) self.assertEqual(e.message, 'Could not find object ID field name for deduplication')<|docstring|>ESRI Caching Will Handle A Server Without returnCountOnly Support<|endoftext|>
754634a45f03f82829820a48e33acea283dce00969db0d5d5ad0f6117babfa61
@override_settings(USE_I18N=True, MIDDLEWARE=['django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware']) def test_translation(self): '\n An invalid request is rejected with a localized error message.\n ' response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403) self.assertContains(response, 'CSRF verification failed. Request aborted.', status_code=403) with self.settings(LANGUAGE_CODE='nl'), override('en-us'): response = self.client.post('/') self.assertContains(response, 'Verboden', status_code=403) self.assertContains(response, 'CSRF-verificatie mislukt. Verzoek afgebroken.', status_code=403)
An invalid request is rejected with a localized error message.
tests/view_tests/tests/test_csrf.py
test_translation
jonashaag/django-1.1-python-3.7
5,079
python
@override_settings(USE_I18N=True, MIDDLEWARE=['django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware']) def test_translation(self): '\n \n ' response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403) self.assertContains(response, 'CSRF verification failed. Request aborted.', status_code=403) with self.settings(LANGUAGE_CODE='nl'), override('en-us'): response = self.client.post('/') self.assertContains(response, 'Verboden', status_code=403) self.assertContains(response, 'CSRF-verificatie mislukt. Verzoek afgebroken.', status_code=403)
@override_settings(USE_I18N=True, MIDDLEWARE=['django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware']) def test_translation(self): '\n \n ' response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403) self.assertContains(response, 'CSRF verification failed. Request aborted.', status_code=403) with self.settings(LANGUAGE_CODE='nl'), override('en-us'): response = self.client.post('/') self.assertContains(response, 'Verboden', status_code=403) self.assertContains(response, 'CSRF-verificatie mislukt. Verzoek afgebroken.', status_code=403)<|docstring|>An invalid request is rejected with a localized error message.<|endoftext|>
7f74ec487eb0cc6479581abb3cfb7460bde400ffab73736450c07665552b6f08
@ignore_warnings(category=RemovedInDjango20Warning) @override_settings(USE_I18N=True, MIDDLEWARE=None, MIDDLEWARE_CLASSES=['django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware']) def test_translation_middleware_classes(self): '\n An invalid request is rejected with a localized error message.\n ' response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403) self.assertContains(response, 'CSRF verification failed. Request aborted.', status_code=403) with self.settings(LANGUAGE_CODE='nl'), override('en-us'): response = self.client.post('/') self.assertContains(response, 'Verboden', status_code=403) self.assertContains(response, 'CSRF-verificatie mislukt. Verzoek afgebroken.', status_code=403)
An invalid request is rejected with a localized error message.
tests/view_tests/tests/test_csrf.py
test_translation_middleware_classes
jonashaag/django-1.1-python-3.7
5,079
python
@ignore_warnings(category=RemovedInDjango20Warning) @override_settings(USE_I18N=True, MIDDLEWARE=None, MIDDLEWARE_CLASSES=['django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware']) def test_translation_middleware_classes(self): '\n \n ' response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403) self.assertContains(response, 'CSRF verification failed. Request aborted.', status_code=403) with self.settings(LANGUAGE_CODE='nl'), override('en-us'): response = self.client.post('/') self.assertContains(response, 'Verboden', status_code=403) self.assertContains(response, 'CSRF-verificatie mislukt. Verzoek afgebroken.', status_code=403)
@ignore_warnings(category=RemovedInDjango20Warning) @override_settings(USE_I18N=True, MIDDLEWARE=None, MIDDLEWARE_CLASSES=['django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware']) def test_translation_middleware_classes(self): '\n \n ' response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403) self.assertContains(response, 'CSRF verification failed. Request aborted.', status_code=403) with self.settings(LANGUAGE_CODE='nl'), override('en-us'): response = self.client.post('/') self.assertContains(response, 'Verboden', status_code=403) self.assertContains(response, 'CSRF-verificatie mislukt. Verzoek afgebroken.', status_code=403)<|docstring|>An invalid request is rejected with a localized error message.<|endoftext|>
7a7e18871b8bab77475ae3512181bf49c8b848b73c89a62e80fcdc77b7b0fb06
@override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_no_referer(self): '\n Referer header is strictly checked for POST over HTTPS. Trigger the\n exception by sending an incorrect referer.\n ' response = self.client.post('/', HTTP_X_FORWARDED_PROTO='https') self.assertContains(response, 'You are seeing this message because this HTTPS site requires a &#39;Referer header&#39; to be sent by your Web browser, but none was sent.', status_code=403)
Referer header is strictly checked for POST over HTTPS. Trigger the exception by sending an incorrect referer.
tests/view_tests/tests/test_csrf.py
test_no_referer
jonashaag/django-1.1-python-3.7
5,079
python
@override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_no_referer(self): '\n Referer header is strictly checked for POST over HTTPS. Trigger the\n exception by sending an incorrect referer.\n ' response = self.client.post('/', HTTP_X_FORWARDED_PROTO='https') self.assertContains(response, 'You are seeing this message because this HTTPS site requires a &#39;Referer header&#39; to be sent by your Web browser, but none was sent.', status_code=403)
@override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_no_referer(self): '\n Referer header is strictly checked for POST over HTTPS. Trigger the\n exception by sending an incorrect referer.\n ' response = self.client.post('/', HTTP_X_FORWARDED_PROTO='https') self.assertContains(response, 'You are seeing this message because this HTTPS site requires a &#39;Referer header&#39; to be sent by your Web browser, but none was sent.', status_code=403)<|docstring|>Referer header is strictly checked for POST over HTTPS. Trigger the exception by sending an incorrect referer.<|endoftext|>
ca0a0d62502ece27abf05271d3373cded400df606689ff158ee6e93c110b437d
def test_no_cookies(self): '\n The CSRF cookie is checked for POST. Failure to send this cookie should\n provide a nice error message.\n ' response = self.client.post('/') self.assertContains(response, 'You are seeing this message because this site requires a CSRF cookie when submitting forms. This cookie is required for security reasons, to ensure that your browser is not being hijacked by third parties.', status_code=403)
The CSRF cookie is checked for POST. Failure to send this cookie should provide a nice error message.
tests/view_tests/tests/test_csrf.py
test_no_cookies
jonashaag/django-1.1-python-3.7
5,079
python
def test_no_cookies(self): '\n The CSRF cookie is checked for POST. Failure to send this cookie should\n provide a nice error message.\n ' response = self.client.post('/') self.assertContains(response, 'You are seeing this message because this site requires a CSRF cookie when submitting forms. This cookie is required for security reasons, to ensure that your browser is not being hijacked by third parties.', status_code=403)
def test_no_cookies(self): '\n The CSRF cookie is checked for POST. Failure to send this cookie should\n provide a nice error message.\n ' response = self.client.post('/') self.assertContains(response, 'You are seeing this message because this site requires a CSRF cookie when submitting forms. This cookie is required for security reasons, to ensure that your browser is not being hijacked by third parties.', status_code=403)<|docstring|>The CSRF cookie is checked for POST. Failure to send this cookie should provide a nice error message.<|endoftext|>
95985cc0381098dcf3c35d248e5fb602615a9d0e705670a175c769e6a887ef0e
@override_settings(TEMPLATES=[]) def test_no_django_template_engine(self): "\n The CSRF view doesn't depend on the TEMPLATES configuration (#24388).\n " response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403)
The CSRF view doesn't depend on the TEMPLATES configuration (#24388).
tests/view_tests/tests/test_csrf.py
test_no_django_template_engine
jonashaag/django-1.1-python-3.7
5,079
python
@override_settings(TEMPLATES=[]) def test_no_django_template_engine(self): "\n \n " response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403)
@override_settings(TEMPLATES=[]) def test_no_django_template_engine(self): "\n \n " response = self.client.post('/') self.assertContains(response, 'Forbidden', status_code=403)<|docstring|>The CSRF view doesn't depend on the TEMPLATES configuration (#24388).<|endoftext|>
7d605b2a247e58fad6479d6f8a707bb3d60faffe937d76357f82a0c4a898d0d9
@override_settings(TEMPLATES=[{'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': {'loaders': [('django.template.loaders.locmem.Loader', {CSRF_FAILURE_TEMPLATE_NAME: 'Test template for CSRF failure'})]}}]) def test_custom_template(self): '\n A custom CSRF_FAILURE_TEMPLATE_NAME is used.\n ' response = self.client.post('/') self.assertContains(response, 'Test template for CSRF failure', status_code=403)
A custom CSRF_FAILURE_TEMPLATE_NAME is used.
tests/view_tests/tests/test_csrf.py
test_custom_template
jonashaag/django-1.1-python-3.7
5,079
python
@override_settings(TEMPLATES=[{'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': {'loaders': [('django.template.loaders.locmem.Loader', {CSRF_FAILURE_TEMPLATE_NAME: 'Test template for CSRF failure'})]}}]) def test_custom_template(self): '\n \n ' response = self.client.post('/') self.assertContains(response, 'Test template for CSRF failure', status_code=403)
@override_settings(TEMPLATES=[{'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': {'loaders': [('django.template.loaders.locmem.Loader', {CSRF_FAILURE_TEMPLATE_NAME: 'Test template for CSRF failure'})]}}]) def test_custom_template(self): '\n \n ' response = self.client.post('/') self.assertContains(response, 'Test template for CSRF failure', status_code=403)<|docstring|>A custom CSRF_FAILURE_TEMPLATE_NAME is used.<|endoftext|>
18778544a8cf6d22b2257172499452785df23230c01bd9f66250465565c7b17c
def test_custom_template_does_not_exist(self): '\n An exception is raised if a nonexistent template is supplied.\n ' factory = RequestFactory() request = factory.post('/') with self.assertRaises(TemplateDoesNotExist): csrf_failure(request, template_name='nonexistent.html')
An exception is raised if a nonexistent template is supplied.
tests/view_tests/tests/test_csrf.py
test_custom_template_does_not_exist
jonashaag/django-1.1-python-3.7
5,079
python
def test_custom_template_does_not_exist(self): '\n \n ' factory = RequestFactory() request = factory.post('/') with self.assertRaises(TemplateDoesNotExist): csrf_failure(request, template_name='nonexistent.html')
def test_custom_template_does_not_exist(self): '\n \n ' factory = RequestFactory() request = factory.post('/') with self.assertRaises(TemplateDoesNotExist): csrf_failure(request, template_name='nonexistent.html')<|docstring|>An exception is raised if a nonexistent template is supplied.<|endoftext|>
4e82464926c0ee651e1826f5d2d0480788ca26cb50cba26db00b1da768e32d6a
def parse_type(t): 'Returns the Ibis datatype from source type.' t = t.lower() if (t in _impala_to_ibis_type): return _impala_to_ibis_type[t] elif (('varchar' in t) or ('char' in t)): return 'string' elif ('decimal' in t): result = dt.dtype(t) if result: return t else: return ValueError(t) elif (('struct' in t) or ('array' in t) or ('map' in t)): return t.replace('int', 'int32') else: raise Exception(t)
Returns the Ibis datatype from source type.
third_party/ibis/ibis_impala/api.py
parse_type
ajw0100/professional-services-data-validator
167
python
def parse_type(t): t = t.lower() if (t in _impala_to_ibis_type): return _impala_to_ibis_type[t] elif (('varchar' in t) or ('char' in t)): return 'string' elif ('decimal' in t): result = dt.dtype(t) if result: return t else: return ValueError(t) elif (('struct' in t) or ('array' in t) or ('map' in t)): return t.replace('int', 'int32') else: raise Exception(t)
def parse_type(t): t = t.lower() if (t in _impala_to_ibis_type): return _impala_to_ibis_type[t] elif (('varchar' in t) or ('char' in t)): return 'string' elif ('decimal' in t): result = dt.dtype(t) if result: return t else: return ValueError(t) elif (('struct' in t) or ('array' in t) or ('map' in t)): return t.replace('int', 'int32') else: raise Exception(t)<|docstring|>Returns the Ibis datatype from source type.<|endoftext|>
8a403d3e8144965847bfc7a2c5e116cecd5cc88f5bee3ccb37c9a8a9623e63f5
def ISM_filter(dict_freq, threshold): '\n collapse low frequency ISMs into "OTHER" per location\n Parameters\n ----------\n dict_freq: dictionary\n ISM frequency of a location of interest\n threshold: float\n ISMs lower than this threshold will be collapsed into "OTHER"\n Returns\n -------\n res_dict: dictionary\n filtered ISM frequency of a location of interest\n ' res_dict = {'OTHER': [0, 0]} total = sum([int(dict_freq[ISM][1]) for ISM in dict_freq]) for ISM in dict_freq: if ((int(dict_freq[ISM][1]) / total) < threshold): res_dict['OTHER'] = [0, (res_dict['OTHER'][1] + int(dict_freq[ISM][1]))] else: res_dict[ISM] = [dict_freq[ISM][0], (int(dict_freq[ISM][1]) + res_dict.get(ISM, [0, 0])[1])] if (res_dict['OTHER'][1] == 0): del res_dict['OTHER'] return res_dict
collapse low frequency ISMs into "OTHER" per location Parameters ---------- dict_freq: dictionary ISM frequency of a location of interest threshold: float ISMs lower than this threshold will be collapsed into "OTHER" Returns ------- res_dict: dictionary filtered ISM frequency of a location of interest
ncov_ism/_visualization.py
ISM_filter
EESI/ncov_ism
1
python
def ISM_filter(dict_freq, threshold): '\n collapse low frequency ISMs into "OTHER" per location\n Parameters\n ----------\n dict_freq: dictionary\n ISM frequency of a location of interest\n threshold: float\n ISMs lower than this threshold will be collapsed into "OTHER"\n Returns\n -------\n res_dict: dictionary\n filtered ISM frequency of a location of interest\n ' res_dict = {'OTHER': [0, 0]} total = sum([int(dict_freq[ISM][1]) for ISM in dict_freq]) for ISM in dict_freq: if ((int(dict_freq[ISM][1]) / total) < threshold): res_dict['OTHER'] = [0, (res_dict['OTHER'][1] + int(dict_freq[ISM][1]))] else: res_dict[ISM] = [dict_freq[ISM][0], (int(dict_freq[ISM][1]) + res_dict.get(ISM, [0, 0])[1])] if (res_dict['OTHER'][1] == 0): del res_dict['OTHER'] return res_dict
def ISM_filter(dict_freq, threshold): '\n collapse low frequency ISMs into "OTHER" per location\n Parameters\n ----------\n dict_freq: dictionary\n ISM frequency of a location of interest\n threshold: float\n ISMs lower than this threshold will be collapsed into "OTHER"\n Returns\n -------\n res_dict: dictionary\n filtered ISM frequency of a location of interest\n ' res_dict = {'OTHER': [0, 0]} total = sum([int(dict_freq[ISM][1]) for ISM in dict_freq]) for ISM in dict_freq: if ((int(dict_freq[ISM][1]) / total) < threshold): res_dict['OTHER'] = [0, (res_dict['OTHER'][1] + int(dict_freq[ISM][1]))] else: res_dict[ISM] = [dict_freq[ISM][0], (int(dict_freq[ISM][1]) + res_dict.get(ISM, [0, 0])[1])] if (res_dict['OTHER'][1] == 0): del res_dict['OTHER'] return res_dict<|docstring|>collapse low frequency ISMs into "OTHER" per location Parameters ---------- dict_freq: dictionary ISM frequency of a location of interest threshold: float ISMs lower than this threshold will be collapsed into "OTHER" Returns ------- res_dict: dictionary filtered ISM frequency of a location of interest<|endoftext|>
e56815b5cda99685cb34f19ddd83a695c12f93b608b4c7be62af725839210985
def ISM_time_series_filter(dict_freq, threshold): '\n collapse low frequency ISMs into "OTHER" per location\n Parameters\n ----------\n dict_freq: dictionary\n ISM frequency of a location of interest\n threshold: float\n ISMs lower than this threshold will be collapsed into "OTHER"\n Returns\n -------\n res_dict: dictionary\n filtered ISM frequency of a location of interest\n ' res_dict = {'OTHER': [0, 0]} total = sum([int(dict_freq[ISM]) for ISM in dict_freq]) for ISM in dict_freq: if ((int(dict_freq[ISM]) / total) < threshold): res_dict['OTHER'] = [0, (res_dict['OTHER'][1] + int(dict_freq[ISM]))] else: res_dict[ISM] = [dict_freq[ISM], (int(dict_freq[ISM]) + res_dict.get(ISM, [0, 0])[1])] if (res_dict['OTHER'][1] == 0): del res_dict['OTHER'] return res_dict
collapse low frequency ISMs into "OTHER" per location Parameters ---------- dict_freq: dictionary ISM frequency of a location of interest threshold: float ISMs lower than this threshold will be collapsed into "OTHER" Returns ------- res_dict: dictionary filtered ISM frequency of a location of interest
ncov_ism/_visualization.py
ISM_time_series_filter
EESI/ncov_ism
1
python
def ISM_time_series_filter(dict_freq, threshold): '\n collapse low frequency ISMs into "OTHER" per location\n Parameters\n ----------\n dict_freq: dictionary\n ISM frequency of a location of interest\n threshold: float\n ISMs lower than this threshold will be collapsed into "OTHER"\n Returns\n -------\n res_dict: dictionary\n filtered ISM frequency of a location of interest\n ' res_dict = {'OTHER': [0, 0]} total = sum([int(dict_freq[ISM]) for ISM in dict_freq]) for ISM in dict_freq: if ((int(dict_freq[ISM]) / total) < threshold): res_dict['OTHER'] = [0, (res_dict['OTHER'][1] + int(dict_freq[ISM]))] else: res_dict[ISM] = [dict_freq[ISM], (int(dict_freq[ISM]) + res_dict.get(ISM, [0, 0])[1])] if (res_dict['OTHER'][1] == 0): del res_dict['OTHER'] return res_dict
def ISM_time_series_filter(dict_freq, threshold): '\n collapse low frequency ISMs into "OTHER" per location\n Parameters\n ----------\n dict_freq: dictionary\n ISM frequency of a location of interest\n threshold: float\n ISMs lower than this threshold will be collapsed into "OTHER"\n Returns\n -------\n res_dict: dictionary\n filtered ISM frequency of a location of interest\n ' res_dict = {'OTHER': [0, 0]} total = sum([int(dict_freq[ISM]) for ISM in dict_freq]) for ISM in dict_freq: if ((int(dict_freq[ISM]) / total) < threshold): res_dict['OTHER'] = [0, (res_dict['OTHER'][1] + int(dict_freq[ISM]))] else: res_dict[ISM] = [dict_freq[ISM], (int(dict_freq[ISM]) + res_dict.get(ISM, [0, 0])[1])] if (res_dict['OTHER'][1] == 0): del res_dict['OTHER'] return res_dict<|docstring|>collapse low frequency ISMs into "OTHER" per location Parameters ---------- dict_freq: dictionary ISM frequency of a location of interest threshold: float ISMs lower than this threshold will be collapsed into "OTHER" Returns ------- res_dict: dictionary filtered ISM frequency of a location of interest<|endoftext|>
c6d9c9ea9e76e3f4cb21671dc77f6ef6b8e42590684bf20ea1f463671767416a
def ISM_visualization(region_raw_count, state_raw_count, count_dict, region_list, state_list, time_series_region_list, output_folder, ISM_FILTER_THRESHOLD=0.05, ISM_TIME_SERIES_FILTER_THRESHOLD=0.025): '\n Informative Subtype Marker analysis visualization\n Parameters\n ----------\n region_raw_count: dictionary\n ISM frequency per region\n state_raw_count: dictionary\n ISM frequency per state\n count_dict: dictionary\n ISM frequency time series per region\n region_list: list\n regions of interest\n state_list: list\n states of interest\n time_series_region_list: list\n regions of interest for time series analysis\n output_folder: str\n path to the output folder\n ISM_FILTER_THRESHOLD: float\n ISM filter threshold\n ISM_TIME_SERIES_FILTER_THRESHOLD: float\n ISM filter threshold for time series\n Returns\n -------\n Objects for downstream visualization\n ' ISM_set = set([]) region_pie_chart = {} for (idx, region) in enumerate(region_list): dict_freq_filtered = ISM_filter(region_raw_count[region], ISM_FILTER_THRESHOLD) region_pie_chart[region] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) state_pie_chart = {} for (idx, state) in enumerate(state_list): dict_freq_filtered = ISM_filter(state_raw_count[state], ISM_FILTER_THRESHOLD) state_pie_chart[state] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) count_list = [] date_list = [] sorted_date = sorted(count_dict.keys()) for date in sorted_date: dict_freq = {} for region in time_series_region_list: regional_dict_freq = count_dict[date][region] dict_freq_filtered = ISM_time_series_filter(regional_dict_freq, ISM_TIME_SERIES_FILTER_THRESHOLD) ISM_set.update(list(dict_freq_filtered.keys())) dict_freq[region] = dict_freq_filtered count_list.append(dict_freq) date_list.append(date) return (ISM_set, region_pie_chart, state_pie_chart, count_list, date_list)
Informative Subtype Marker analysis visualization Parameters ---------- region_raw_count: dictionary ISM frequency per region state_raw_count: dictionary ISM frequency per state count_dict: dictionary ISM frequency time series per region region_list: list regions of interest state_list: list states of interest time_series_region_list: list regions of interest for time series analysis output_folder: str path to the output folder ISM_FILTER_THRESHOLD: float ISM filter threshold ISM_TIME_SERIES_FILTER_THRESHOLD: float ISM filter threshold for time series Returns ------- Objects for downstream visualization
ncov_ism/_visualization.py
ISM_visualization
EESI/ncov_ism
1
python
def ISM_visualization(region_raw_count, state_raw_count, count_dict, region_list, state_list, time_series_region_list, output_folder, ISM_FILTER_THRESHOLD=0.05, ISM_TIME_SERIES_FILTER_THRESHOLD=0.025): '\n Informative Subtype Marker analysis visualization\n Parameters\n ----------\n region_raw_count: dictionary\n ISM frequency per region\n state_raw_count: dictionary\n ISM frequency per state\n count_dict: dictionary\n ISM frequency time series per region\n region_list: list\n regions of interest\n state_list: list\n states of interest\n time_series_region_list: list\n regions of interest for time series analysis\n output_folder: str\n path to the output folder\n ISM_FILTER_THRESHOLD: float\n ISM filter threshold\n ISM_TIME_SERIES_FILTER_THRESHOLD: float\n ISM filter threshold for time series\n Returns\n -------\n Objects for downstream visualization\n ' ISM_set = set([]) region_pie_chart = {} for (idx, region) in enumerate(region_list): dict_freq_filtered = ISM_filter(region_raw_count[region], ISM_FILTER_THRESHOLD) region_pie_chart[region] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) state_pie_chart = {} for (idx, state) in enumerate(state_list): dict_freq_filtered = ISM_filter(state_raw_count[state], ISM_FILTER_THRESHOLD) state_pie_chart[state] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) count_list = [] date_list = [] sorted_date = sorted(count_dict.keys()) for date in sorted_date: dict_freq = {} for region in time_series_region_list: regional_dict_freq = count_dict[date][region] dict_freq_filtered = ISM_time_series_filter(regional_dict_freq, ISM_TIME_SERIES_FILTER_THRESHOLD) ISM_set.update(list(dict_freq_filtered.keys())) dict_freq[region] = dict_freq_filtered count_list.append(dict_freq) date_list.append(date) return (ISM_set, region_pie_chart, state_pie_chart, count_list, date_list)
def ISM_visualization(region_raw_count, state_raw_count, count_dict, region_list, state_list, time_series_region_list, output_folder, ISM_FILTER_THRESHOLD=0.05, ISM_TIME_SERIES_FILTER_THRESHOLD=0.025): '\n Informative Subtype Marker analysis visualization\n Parameters\n ----------\n region_raw_count: dictionary\n ISM frequency per region\n state_raw_count: dictionary\n ISM frequency per state\n count_dict: dictionary\n ISM frequency time series per region\n region_list: list\n regions of interest\n state_list: list\n states of interest\n time_series_region_list: list\n regions of interest for time series analysis\n output_folder: str\n path to the output folder\n ISM_FILTER_THRESHOLD: float\n ISM filter threshold\n ISM_TIME_SERIES_FILTER_THRESHOLD: float\n ISM filter threshold for time series\n Returns\n -------\n Objects for downstream visualization\n ' ISM_set = set([]) region_pie_chart = {} for (idx, region) in enumerate(region_list): dict_freq_filtered = ISM_filter(region_raw_count[region], ISM_FILTER_THRESHOLD) region_pie_chart[region] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) state_pie_chart = {} for (idx, state) in enumerate(state_list): dict_freq_filtered = ISM_filter(state_raw_count[state], ISM_FILTER_THRESHOLD) state_pie_chart[state] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) count_list = [] date_list = [] sorted_date = sorted(count_dict.keys()) for date in sorted_date: dict_freq = {} for region in time_series_region_list: regional_dict_freq = count_dict[date][region] dict_freq_filtered = ISM_time_series_filter(regional_dict_freq, ISM_TIME_SERIES_FILTER_THRESHOLD) ISM_set.update(list(dict_freq_filtered.keys())) dict_freq[region] = dict_freq_filtered count_list.append(dict_freq) date_list.append(date) return (ISM_set, region_pie_chart, state_pie_chart, count_list, date_list)<|docstring|>Informative Subtype Marker analysis visualization Parameters ---------- region_raw_count: dictionary ISM frequency per region 
state_raw_count: dictionary ISM frequency per state count_dict: dictionary ISM frequency time series per region region_list: list regions of interest state_list: list states of interest time_series_region_list: list regions of interest for time series analysis output_folder: str path to the output folder ISM_FILTER_THRESHOLD: float ISM filter threshold ISM_TIME_SERIES_FILTER_THRESHOLD: float ISM filter threshold for time series Returns ------- Objects for downstream visualization<|endoftext|>
d544ecde11119dff11e755213b08f1a3d96132231d40d9b3ee84b0979a570f40
def customized_ISM_visualization(region_raw_count, count_dict, region_list, output_folder, ISM_FILTER_THRESHOLD=0.05, ISM_TIME_SERIES_FILTER_THRESHOLD=0.025): '\n Informative Subtype Marker analysis visualization\n Parameters\n ----------\n region_raw_count: dictionary\n ISM frequency per region\n state_raw_count: dictionary\n ISM frequency per state\n count_dict: dictionary\n ISM frequency time series per region\n region_list: list\n regions of interest\n state_list: list\n states of interest\n time_series_region_list: list\n regions of interest for time series analysis\n output_folder: str\n path to the output folder\n ISM_FILTER_THRESHOLD: float\n ISM filter threshold\n ISM_TIME_SERIES_FILTER_THRESHOLD: float\n ISM filter threshold for time series\n Returns\n -------\n Objects for downstream visualization\n ' ISM_set = set([]) region_pie_chart = {} for (idx, region) in enumerate(region_list): dict_freq_filtered = ISM_filter(region_raw_count[region], ISM_FILTER_THRESHOLD) region_pie_chart[region] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) count_list = [] date_list = [] sorted_date = sorted(count_dict.keys()) for date in sorted_date: dict_freq = {} for region in region_list: regional_dict_freq = count_dict[date][region] dict_freq_filtered = ISM_time_series_filter(regional_dict_freq, ISM_TIME_SERIES_FILTER_THRESHOLD) ISM_set.update(list(dict_freq_filtered.keys())) dict_freq[region] = dict_freq_filtered count_list.append(dict_freq) date_list.append(date) return (ISM_set, region_pie_chart, count_list, date_list)
Informative Subtype Marker analysis visualization Parameters ---------- region_raw_count: dictionary ISM frequency per region state_raw_count: dictionary ISM frequency per state count_dict: dictionary ISM frequency time series per region region_list: list regions of interest state_list: list states of interest time_series_region_list: list regions of interest for time series analysis output_folder: str path to the output folder ISM_FILTER_THRESHOLD: float ISM filter threshold ISM_TIME_SERIES_FILTER_THRESHOLD: float ISM filter threshold for time series Returns ------- Objects for downstream visualization
ncov_ism/_visualization.py
customized_ISM_visualization
EESI/ncov_ism
1
python
def customized_ISM_visualization(region_raw_count, count_dict, region_list, output_folder, ISM_FILTER_THRESHOLD=0.05, ISM_TIME_SERIES_FILTER_THRESHOLD=0.025): '\n Informative Subtype Marker analysis visualization\n Parameters\n ----------\n region_raw_count: dictionary\n ISM frequency per region\n state_raw_count: dictionary\n ISM frequency per state\n count_dict: dictionary\n ISM frequency time series per region\n region_list: list\n regions of interest\n state_list: list\n states of interest\n time_series_region_list: list\n regions of interest for time series analysis\n output_folder: str\n path to the output folder\n ISM_FILTER_THRESHOLD: float\n ISM filter threshold\n ISM_TIME_SERIES_FILTER_THRESHOLD: float\n ISM filter threshold for time series\n Returns\n -------\n Objects for downstream visualization\n ' ISM_set = set([]) region_pie_chart = {} for (idx, region) in enumerate(region_list): dict_freq_filtered = ISM_filter(region_raw_count[region], ISM_FILTER_THRESHOLD) region_pie_chart[region] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) count_list = [] date_list = [] sorted_date = sorted(count_dict.keys()) for date in sorted_date: dict_freq = {} for region in region_list: regional_dict_freq = count_dict[date][region] dict_freq_filtered = ISM_time_series_filter(regional_dict_freq, ISM_TIME_SERIES_FILTER_THRESHOLD) ISM_set.update(list(dict_freq_filtered.keys())) dict_freq[region] = dict_freq_filtered count_list.append(dict_freq) date_list.append(date) return (ISM_set, region_pie_chart, count_list, date_list)
def customized_ISM_visualization(region_raw_count, count_dict, region_list, output_folder, ISM_FILTER_THRESHOLD=0.05, ISM_TIME_SERIES_FILTER_THRESHOLD=0.025): '\n Informative Subtype Marker analysis visualization\n Parameters\n ----------\n region_raw_count: dictionary\n ISM frequency per region\n state_raw_count: dictionary\n ISM frequency per state\n count_dict: dictionary\n ISM frequency time series per region\n region_list: list\n regions of interest\n state_list: list\n states of interest\n time_series_region_list: list\n regions of interest for time series analysis\n output_folder: str\n path to the output folder\n ISM_FILTER_THRESHOLD: float\n ISM filter threshold\n ISM_TIME_SERIES_FILTER_THRESHOLD: float\n ISM filter threshold for time series\n Returns\n -------\n Objects for downstream visualization\n ' ISM_set = set([]) region_pie_chart = {} for (idx, region) in enumerate(region_list): dict_freq_filtered = ISM_filter(region_raw_count[region], ISM_FILTER_THRESHOLD) region_pie_chart[region] = dict_freq_filtered ISM_set.update(dict_freq_filtered.keys()) count_list = [] date_list = [] sorted_date = sorted(count_dict.keys()) for date in sorted_date: dict_freq = {} for region in region_list: regional_dict_freq = count_dict[date][region] dict_freq_filtered = ISM_time_series_filter(regional_dict_freq, ISM_TIME_SERIES_FILTER_THRESHOLD) ISM_set.update(list(dict_freq_filtered.keys())) dict_freq[region] = dict_freq_filtered count_list.append(dict_freq) date_list.append(date) return (ISM_set, region_pie_chart, count_list, date_list)<|docstring|>Informative Subtype Marker analysis visualization Parameters ---------- region_raw_count: dictionary ISM frequency per region state_raw_count: dictionary ISM frequency per state count_dict: dictionary ISM frequency time series per region region_list: list regions of interest state_list: list states of interest time_series_region_list: list regions of interest for time series analysis output_folder: str path to the output folder 
ISM_FILTER_THRESHOLD: float ISM filter threshold ISM_TIME_SERIES_FILTER_THRESHOLD: float ISM filter threshold for time series Returns ------- Objects for downstream visualization<|endoftext|>
5f2c980a3a26ffefe4e89402bb77b12ba6cba009ec3d69dc7efec839ed278142
def get_color_names(CSS4_COLORS, num_colors): '\n Prepare colors for each ISM.\n ' bad_colors = set(['seashell', 'linen', 'ivory', 'oldlace', 'floralwhite', 'lightyellow', 'lightgoldenrodyellow', 'honeydew', 'mintcream', 'azure', 'lightcyan', 'aliceblue', 'ghostwhite', 'lavenderblush']) by_hsv = sorted(((tuple(mcolors.rgb_to_hsv(mcolors.to_rgb(color))), name) for (name, color) in CSS4_COLORS.items())) names = [name for (hsv, name) in by_hsv][14:] prime_names = ['red', 'orange', 'green', 'blue', 'gold', 'lightskyblue', 'brown', 'black', 'pink', 'yellow'] OTHER = 'gray' name_list = [name for name in names if ((name not in prime_names) and (name != OTHER) and (name not in bad_colors))] if (num_colors > (len(name_list) - 10)): logging.info('NOTE: Repetitive colors for different ISMs (inadequate distinctive colors)') name_list = (name_list + (ceil((num_colors / len(name_list))) * name_list)) if (num_colors > len(prime_names)): ind_list = np.linspace(0, len(name_list), (num_colors - 10), dtype=int, endpoint=False).tolist() color_names = (prime_names + [name_list[ind] for ind in ind_list]) else: color_names = prime_names[:num_colors] return color_names
Prepare colors for each ISM.
ncov_ism/_visualization.py
get_color_names
EESI/ncov_ism
1
python
def get_color_names(CSS4_COLORS, num_colors):
    """
    Build a list of ``num_colors`` distinct CSS4 color names for ISM plots.

    Ten high-contrast "prime" colors come first; the remainder are sampled
    evenly from the CSS4 palette sorted by hue, skipping near-white colors.
    """
    # Near-white colors that are unreadable on a white background.
    bad_colors = set(['seashell', 'linen', 'ivory', 'oldlace', 'floralwhite',
                      'lightyellow', 'lightgoldenrodyellow', 'honeydew', 'mintcream',
                      'azure', 'lightcyan', 'aliceblue', 'ghostwhite', 'lavenderblush'])
    # Order the palette by HSV so evenly-spaced picks spread across the hue wheel.
    by_hsv = sorted((tuple(mcolors.rgb_to_hsv(mcolors.to_rgb(color))), name)
                    for name, color in CSS4_COLORS.items())
    names = [name for hsv, name in by_hsv][14:]
    prime_names = ['red', 'orange', 'green', 'blue', 'gold', 'lightskyblue',
                   'brown', 'black', 'pink', 'yellow']
    OTHER = 'gray'
    name_list = [name for name in names
                 if name not in prime_names and name != OTHER and name not in bad_colors]
    if num_colors > len(name_list) - 10:
        logging.info('NOTE: Repetitive colors for different ISMs (inadequate distinctive colors)')
        # Tile the palette so the indexing below never runs off the end.
        name_list = name_list + ceil(num_colors / len(name_list)) * name_list
    if num_colors <= len(prime_names):
        return prime_names[:num_colors]
    # Spread the remaining picks evenly over the (possibly tiled) palette.
    ind_list = np.linspace(0, len(name_list), num_colors - 10, dtype=int, endpoint=False).tolist()
    return prime_names + [name_list[ind] for ind in ind_list]
def get_color_names(CSS4_COLORS, num_colors): '\n \n ' bad_colors = set(['seashell', 'linen', 'ivory', 'oldlace', 'floralwhite', 'lightyellow', 'lightgoldenrodyellow', 'honeydew', 'mintcream', 'azure', 'lightcyan', 'aliceblue', 'ghostwhite', 'lavenderblush']) by_hsv = sorted(((tuple(mcolors.rgb_to_hsv(mcolors.to_rgb(color))), name) for (name, color) in CSS4_COLORS.items())) names = [name for (hsv, name) in by_hsv][14:] prime_names = ['red', 'orange', 'green', 'blue', 'gold', 'lightskyblue', 'brown', 'black', 'pink', 'yellow'] OTHER = 'gray' name_list = [name for name in names if ((name not in prime_names) and (name != OTHER) and (name not in bad_colors))] if (num_colors > (len(name_list) - 10)): logging.info('NOTE: Repetitive colors for different ISMs (inadequate distinctive colors)') name_list = (name_list + (ceil((num_colors / len(name_list))) * name_list)) if (num_colors > len(prime_names)): ind_list = np.linspace(0, len(name_list), (num_colors - 10), dtype=int, endpoint=False).tolist() color_names = (prime_names + [name_list[ind] for ind in ind_list]) else: color_names = prime_names[:num_colors] return color_names<|docstring|>Prepare colors for each ISM.<|endoftext|>
37929257d6f8f9592684408e66b39ad28ed46e6a70f82b254ebb0450e02a637a
def global_color_map(COLOR_DICT, ISM_list, out_dir): '\n Plot color-ISM map for reference.\n Adapted from https://matplotlib.org/3.1.0/gallery/color/named_colors.html\n ' ncols = 3 n = len(COLOR_DICT) nrows = ((n // ncols) + int(((n % ncols) > 0))) cell_width = 1300 cell_height = 100 swatch_width = 180 margin = 30 topmargin = 40 width = ((cell_width * 3) + (2 * margin)) height = (((cell_height * nrows) + margin) + topmargin) dpi = 300 (fig, ax) = plt.subplots(figsize=((width / dpi), (height / dpi)), dpi=dpi) fig.subplots_adjust((margin / width), (margin / height), ((width - margin) / width), ((height - topmargin) / height)) ax.set_xlim(0, (cell_width * 4)) ax.set_ylim((cell_height * (nrows - 0.5)), ((- cell_height) / 2.0)) ax.yaxis.set_visible(False) ax.xaxis.set_visible(False) ax.set_axis_off() ISM_list.append('OTHER') for (i, name) in enumerate(ISM_list): row = (i % nrows) col = (i // nrows) y = (row * cell_height) swatch_start_x = (cell_width * col) swatch_end_x = ((cell_width * col) + swatch_width) text_pos_x = (((cell_width * col) + swatch_width) + 50) ax.text(text_pos_x, y, name, fontsize=14, fontname='monospace', horizontalalignment='left', verticalalignment='center') ax.hlines(y, swatch_start_x, swatch_end_x, color=COLOR_DICT[name], linewidth=18) plt.savefig('{}/COLOR_MAP.png'.format(out_dir), bbox_inches='tight', dpi=dpi) plt.close(fig)
Plot color-ISM map for reference. Adapted from https://matplotlib.org/3.1.0/gallery/color/named_colors.html
ncov_ism/_visualization.py
global_color_map
EESI/ncov_ism
1
python
def global_color_map(COLOR_DICT, ISM_list, out_dir):
    """
    Render a legend image mapping each ISM (plus 'OTHER') to its color.

    Adapted from https://matplotlib.org/3.1.0/gallery/color/named_colors.html

    Parameters
    ----------
    COLOR_DICT: dict
        ISM -> matplotlib color name (must contain an 'OTHER' entry)
    ISM_list: list
        ISMs to draw, in display order (no longer modified by this call)
    out_dir: str
        directory for the output COLOR_MAP.png
    """
    ncols = 3
    n = len(COLOR_DICT)
    nrows = n // ncols + int(n % ncols > 0)
    cell_width = 1300
    cell_height = 100
    swatch_width = 180
    margin = 30
    topmargin = 40
    width = cell_width * 3 + 2 * margin
    height = cell_height * nrows + margin + topmargin
    dpi = 300
    fig, ax = plt.subplots(figsize=(width / dpi, height / dpi), dpi=dpi)
    fig.subplots_adjust(margin / width, margin / height,
                        (width - margin) / width, (height - topmargin) / height)
    ax.set_xlim(0, cell_width * 4)
    ax.set_ylim(cell_height * (nrows - 0.5), -cell_height / 2.0)
    ax.yaxis.set_visible(False)
    ax.xaxis.set_visible(False)
    ax.set_axis_off()
    # Bug fix: iterate over a local extended sequence instead of appending
    # 'OTHER' to the caller's list, which previously grew by one entry on
    # every call.
    for i, name in enumerate(list(ISM_list) + ['OTHER']):
        row = i % nrows
        col = i // nrows
        y = row * cell_height
        swatch_start_x = cell_width * col
        swatch_end_x = cell_width * col + swatch_width
        text_pos_x = cell_width * col + swatch_width + 50
        ax.text(text_pos_x, y, name, fontsize=14, fontname='monospace',
                horizontalalignment='left', verticalalignment='center')
        ax.hlines(y, swatch_start_x, swatch_end_x, color=COLOR_DICT[name], linewidth=18)
    plt.savefig('{}/COLOR_MAP.png'.format(out_dir), bbox_inches='tight', dpi=dpi)
    plt.close(fig)
def global_color_map(COLOR_DICT, ISM_list, out_dir): '\n Plot color-ISM map for reference.\n Adapted from https://matplotlib.org/3.1.0/gallery/color/named_colors.html\n ' ncols = 3 n = len(COLOR_DICT) nrows = ((n // ncols) + int(((n % ncols) > 0))) cell_width = 1300 cell_height = 100 swatch_width = 180 margin = 30 topmargin = 40 width = ((cell_width * 3) + (2 * margin)) height = (((cell_height * nrows) + margin) + topmargin) dpi = 300 (fig, ax) = plt.subplots(figsize=((width / dpi), (height / dpi)), dpi=dpi) fig.subplots_adjust((margin / width), (margin / height), ((width - margin) / width), ((height - topmargin) / height)) ax.set_xlim(0, (cell_width * 4)) ax.set_ylim((cell_height * (nrows - 0.5)), ((- cell_height) / 2.0)) ax.yaxis.set_visible(False) ax.xaxis.set_visible(False) ax.set_axis_off() ISM_list.append('OTHER') for (i, name) in enumerate(ISM_list): row = (i % nrows) col = (i // nrows) y = (row * cell_height) swatch_start_x = (cell_width * col) swatch_end_x = ((cell_width * col) + swatch_width) text_pos_x = (((cell_width * col) + swatch_width) + 50) ax.text(text_pos_x, y, name, fontsize=14, fontname='monospace', horizontalalignment='left', verticalalignment='center') ax.hlines(y, swatch_start_x, swatch_end_x, color=COLOR_DICT[name], linewidth=18) plt.savefig('{}/COLOR_MAP.png'.format(out_dir), bbox_inches='tight', dpi=dpi) plt.close(fig)<|docstring|>Plot color-ISM map for reference. Adapted from https://matplotlib.org/3.1.0/gallery/color/named_colors.html<|endoftext|>
322885f1718605efb901f8a58ed16f49cff0fa47ce62f95834a3364dfb3979e4
def func(pct, allvals): '\n covert to absolute value for pie chart plot.\n ' absolute = int(round(((pct / 100.0) * np.sum(allvals)))) return '{:d}'.format(absolute)
Convert a percentage to the corresponding absolute value for the pie chart plot.
ncov_ism/_visualization.py
func
EESI/ncov_ism
1
python
def func(pct, allvals):
    """
    Convert a pie-wedge percentage back to an absolute count label.
    """
    total = np.sum(allvals)
    return '{:d}'.format(int(round((pct / 100.0) * total)))
def func(pct, allvals): '\n \n ' absolute = int(round(((pct / 100.0) * np.sum(allvals)))) return '{:d}'.format(absolute)<|docstring|>covert to absolute value for pie chart plot.<|endoftext|>
d435675463f2857358d6331f3c6f548d7b22d724ea9da202b1e7513df1905a49
def plot_pie_chart(sizes, labels, colors, ax): '\n plot pie chart\n Adapted from https://matplotlib.org/3.1.1/gallery/pie_and_polar_charts/pie_and_donut_labels.html#sphx-glr-gallery-pie-and-polar-charts-pie-and-donut-labels-py\n ' (wedges, texts, autotexts) = ax.pie(sizes, autopct=(lambda pct: func(pct, sizes)), colors=colors, textprops=dict(color='w')) time_labels = [('-' if (label == 'OTHER') else label.split(' ')[1]) for label in labels] ax.legend(wedges, time_labels, loc='lower left', bbox_to_anchor=(0.8, 0, 0.5, 1)) ax.axis('equal') return (wedges, labels)
plot pie chart Adapted from https://matplotlib.org/3.1.1/gallery/pie_and_polar_charts/pie_and_donut_labels.html#sphx-glr-gallery-pie-and-polar-charts-pie-and-donut-labels-py
ncov_ism/_visualization.py
plot_pie_chart
EESI/ncov_ism
1
python
def plot_pie_chart(sizes, labels, colors, ax):
    """
    Draw an ISM pie chart on *ax* and return its wedges with their labels.

    Wedges are annotated with absolute counts; the legend shows only the
    second whitespace-separated token of each label ('-' for the catch-all
    'OTHER' slice).

    Adapted from https://matplotlib.org/3.1.1/gallery/pie_and_polar_charts/pie_and_donut_labels.html#sphx-glr-gallery-pie-and-polar-charts-pie-and-donut-labels-py
    """
    wedges, _texts, _autotexts = ax.pie(
        sizes,
        autopct=lambda pct: func(pct, sizes),
        colors=colors,
        textprops=dict(color='w'),
    )
    time_labels = []
    for label in labels:
        time_labels.append('-' if label == 'OTHER' else label.split(' ')[1])
    ax.legend(wedges, time_labels, loc='lower left', bbox_to_anchor=(0.8, 0, 0.5, 1))
    ax.axis('equal')
    return (wedges, labels)
def plot_pie_chart(sizes, labels, colors, ax): '\n plot pie chart\n Adapted from https://matplotlib.org/3.1.1/gallery/pie_and_polar_charts/pie_and_donut_labels.html#sphx-glr-gallery-pie-and-polar-charts-pie-and-donut-labels-py\n ' (wedges, texts, autotexts) = ax.pie(sizes, autopct=(lambda pct: func(pct, sizes)), colors=colors, textprops=dict(color='w')) time_labels = [('-' if (label == 'OTHER') else label.split(' ')[1]) for label in labels] ax.legend(wedges, time_labels, loc='lower left', bbox_to_anchor=(0.8, 0, 0.5, 1)) ax.axis('equal') return (wedges, labels)<|docstring|>plot pie chart Adapted from https://matplotlib.org/3.1.1/gallery/pie_and_polar_charts/pie_and_donut_labels.html#sphx-glr-gallery-pie-and-polar-charts-pie-and-donut-labels-py<|endoftext|>
b62972ad7758bcdc0d9935b23cb03feb7e2ce74346b09d6740a6ee87bc9961d1
def regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER):
    """
    Stacked-area time series of ISM relative abundance for one region.

    Parameters
    ----------
    region: str
        region of interest
    ISM_df: pandas.DataFrame
        per-sequence ISM table with 'country/region' and 'date' columns
    REFERENCE_date: datetime.date
        start date; the x-axis spans from it to the region's latest sample
    count_list: list
        per-date dicts mapping region -> {ISM: (label, count)}
    date_list: list
        dates aligned with count_list
    COLOR_DICT: dict
        ISM -> matplotlib color name
    OUTPUT_FOLDER: str
        directory for the output figure
    """
    # Trim the x-axis to the span actually covered by this region's samples.
    xlim_len = (ISM_df[ISM_df['country/region'] == region]['date'].max().date() - REFERENCE_date).days
    fig = plt.figure(figsize=(30, 15))
    ax = plt.subplot(1, 1, 1)
    # Per-date totals (denominator for relative abundance) and the set of
    # ISMs that ever appear in this region.
    regional_total = []
    ISM_regional_set = set([])
    for i in range(len(count_list)):
        regional_dict_freq = count_list[i][region]
        regional_total.append(sum([regional_dict_freq[ISM][1] for ISM in regional_dict_freq]))
        ISM_regional_set.update(regional_dict_freq.keys())
    # Plot named ISMs first so 'OTHER' ends up as the top band.
    ISM_regional_list = [ISM for ISM in ISM_regional_set if ISM != 'OTHER']
    if 'OTHER' in ISM_regional_set:
        ISM_regional_list.append('OTHER')
    to_plot = []
    ISM_to_plot_list = []
    for ISM in ISM_regional_list:
        ISM_regional_growth = []
        for i in range(len(count_list)):
            regional_dict_freq = count_list[i][region]
            if ISM in regional_dict_freq and regional_dict_freq[ISM][1] != 0:
                ISM_regional_growth.append(regional_dict_freq[ISM][1] / regional_total[i])
            elif ISM == 'OTHER':
                # Aggregate everything not tracked by name into 'OTHER'.
                other_count = sum([regional_dict_freq[ISM][1] for ISM in regional_dict_freq if ISM not in ISM_regional_set])
                if regional_total[i] != 0:
                    ISM_regional_growth.append(other_count / regional_total[i])
                else:
                    ISM_regional_growth.append(0)
            else:
                ISM_regional_growth.append(0)
        to_plot.append(ISM_regional_growth)
        ISM_to_plot_list.append(ISM)
    to_plot = np.row_stack(to_plot)
    # Cumulative sums give the upper edge of each stacked band.
    y_stack = np.cumsum(to_plot, axis=0)
    x = np.arange(y_stack.shape[1])
    for i in range(y_stack.shape[0]):
        ISM = ISM_to_plot_list[i]
        if i == 0:
            ax.fill_between(x, 0, y_stack[i, :], facecolor=COLOR_DICT[ISM], alpha=0.5, label=ISM)
        else:
            ax.fill_between(x, y_stack[i - 1], y_stack[i, :], facecolor=COLOR_DICT[ISM], alpha=0.5, label=ISM)
    # Label every fifth date on the major ticks; minor ticks mark each date.
    major_ticks = np.arange(0, len(date_list), 5)
    minor_ticks = np.arange(0, len(date_list))
    major_label = [str(date_list[i]) for i in major_ticks.tolist()]
    ax.set_xticks(minor_ticks, minor=True)
    ax.set_xticks(major_ticks)
    ax.set_xticklabels(major_label)
    plt.setp(ax.get_xticklabels(), rotation=90)
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    plt.legend(loc='lower left', bbox_to_anchor=(1, 0, 0.5, 1), prop={'family': monospace_font['fontname']})
    plt.xlim([-1, xlim_len])
    plt.ylabel('Relative abundance')
    ax.grid(which='minor', alpha=0.3, linestyle='--')
    ax.grid(which='major', alpha=0.8)
    plt.savefig('{}/3_ISM_growth_{}.png'.format(OUTPUT_FOLDER, region), bbox_inches='tight')
    plt.close(fig)
time series plot for a region of interest
ncov_ism/_visualization.py
regional_growth_plot
EESI/ncov_ism
1
python
def regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER): '\n \n ' xlim_len = (ISM_df[(ISM_df['country/region'] == region)]['date'].max().date() - REFERENCE_date).days fig = plt.figure(figsize=(30, 15)) n = 4 ax = plt.subplot(1, 1, 1) regional_total = [] ISM_regional_set = set([]) for i in range(len(count_list)): regional_dict_freq = count_list[i][region] regional_total.append(sum([regional_dict_freq[ISM][1] for ISM in regional_dict_freq])) ISM_regional_set.update(regional_dict_freq.keys()) ISM_regional_list = [] for ISM in ISM_regional_set: if (ISM != 'OTHER'): ISM_regional_list.append(ISM) NONOTHER = len(ISM_regional_list) if ('OTHER' in ISM_regional_set): ISM_regional_list.append('OTHER') to_plot = [] ISM_to_plot_list = [] for ISM in ISM_regional_list: ISM_regional_growth = [] for i in range(len(count_list)): regional_dict_freq = count_list[i][region] if ((ISM in regional_dict_freq) and (regional_dict_freq[ISM][1] != 0)): ISM_regional_growth.append((regional_dict_freq[ISM][1] / regional_total[i])) elif (ISM == 'OTHER'): other_count = sum([regional_dict_freq[ISM][1] for ISM in regional_dict_freq if (ISM not in ISM_regional_set)]) if (regional_total[i] != 0): ISM_regional_growth.append((other_count / regional_total[i])) else: ISM_regional_growth.append(0) else: ISM_regional_growth.append(0) to_plot.append(ISM_regional_growth) ISM_to_plot_list.append(ISM) to_plot = np.row_stack(to_plot) y_stack = np.cumsum(to_plot, axis=0) x = np.arange(y_stack.shape[1]) for i in range(y_stack.shape[0]): ISM = ISM_to_plot_list[i] if (i == 0): ax.fill_between(x, 0, y_stack[(i, :)], facecolor=COLOR_DICT[ISM], alpha=0.5, label=ISM) else: ax.fill_between(x, y_stack[(i - 1)], y_stack[(i, :)], facecolor=COLOR_DICT[ISM], alpha=0.5, label=ISM) major_ticks = np.arange(0, len(date_list), 5) minor_ticks = np.arange(0, len(date_list)) major_label = [] for i in major_ticks.tolist(): major_label.append(str(date_list[i])) 
ax.set_xticks(minor_ticks, minor=True) ax.set_xticks(major_ticks) ax.set_xticklabels(major_label) plt.setp(ax.get_xticklabels(), rotation=90) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) plt.legend(loc='lower left', bbox_to_anchor=(1, 0, 0.5, 1), prop={'family': monospace_font['fontname']}) plt.xlim([(- 1), xlim_len]) plt.ylabel('Relative abundance') ax.grid(which='minor', alpha=0.3, linestyle='--') ax.grid(which='major', alpha=0.8) plt.savefig('{}/3_ISM_growth_{}.png'.format(OUTPUT_FOLDER, region), bbox_inches='tight') plt.close(fig)
def regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER): '\n \n ' xlim_len = (ISM_df[(ISM_df['country/region'] == region)]['date'].max().date() - REFERENCE_date).days fig = plt.figure(figsize=(30, 15)) n = 4 ax = plt.subplot(1, 1, 1) regional_total = [] ISM_regional_set = set([]) for i in range(len(count_list)): regional_dict_freq = count_list[i][region] regional_total.append(sum([regional_dict_freq[ISM][1] for ISM in regional_dict_freq])) ISM_regional_set.update(regional_dict_freq.keys()) ISM_regional_list = [] for ISM in ISM_regional_set: if (ISM != 'OTHER'): ISM_regional_list.append(ISM) NONOTHER = len(ISM_regional_list) if ('OTHER' in ISM_regional_set): ISM_regional_list.append('OTHER') to_plot = [] ISM_to_plot_list = [] for ISM in ISM_regional_list: ISM_regional_growth = [] for i in range(len(count_list)): regional_dict_freq = count_list[i][region] if ((ISM in regional_dict_freq) and (regional_dict_freq[ISM][1] != 0)): ISM_regional_growth.append((regional_dict_freq[ISM][1] / regional_total[i])) elif (ISM == 'OTHER'): other_count = sum([regional_dict_freq[ISM][1] for ISM in regional_dict_freq if (ISM not in ISM_regional_set)]) if (regional_total[i] != 0): ISM_regional_growth.append((other_count / regional_total[i])) else: ISM_regional_growth.append(0) else: ISM_regional_growth.append(0) to_plot.append(ISM_regional_growth) ISM_to_plot_list.append(ISM) to_plot = np.row_stack(to_plot) y_stack = np.cumsum(to_plot, axis=0) x = np.arange(y_stack.shape[1]) for i in range(y_stack.shape[0]): ISM = ISM_to_plot_list[i] if (i == 0): ax.fill_between(x, 0, y_stack[(i, :)], facecolor=COLOR_DICT[ISM], alpha=0.5, label=ISM) else: ax.fill_between(x, y_stack[(i - 1)], y_stack[(i, :)], facecolor=COLOR_DICT[ISM], alpha=0.5, label=ISM) major_ticks = np.arange(0, len(date_list), 5) minor_ticks = np.arange(0, len(date_list)) major_label = [] for i in major_ticks.tolist(): major_label.append(str(date_list[i])) 
ax.set_xticks(minor_ticks, minor=True) ax.set_xticks(major_ticks) ax.set_xticklabels(major_label) plt.setp(ax.get_xticklabels(), rotation=90) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) plt.legend(loc='lower left', bbox_to_anchor=(1, 0, 0.5, 1), prop={'family': monospace_font['fontname']}) plt.xlim([(- 1), xlim_len]) plt.ylabel('Relative abundance') ax.grid(which='minor', alpha=0.3, linestyle='--') ax.grid(which='major', alpha=0.8) plt.savefig('{}/3_ISM_growth_{}.png'.format(OUTPUT_FOLDER, region), bbox_inches='tight') plt.close(fig)<|docstring|>time series plot for a region of interest<|endoftext|>
52fe005bf3f19ce8d5466ed0bf1d8c4497c829ae8302ee76e4242e3e69589cb3
def _pie_panel(dict_freq, COLOR_DICT, ax, title):
    """Draw one titled pie chart for a region/state; return (wedges, labels)."""
    labels = []
    sizes = []
    colors = []
    for ISM in dict_freq:
        if ISM == 'OTHER':
            continue
        labels.append('{}: {}'.format(ISM, dict_freq[ISM][0]))
        colors.append(COLOR_DICT[ISM])
        sizes.append(dict_freq[ISM][1])
    # 'OTHER' always goes last so its gray wedge closes the pie.
    if 'OTHER' in dict_freq:
        labels.append('OTHER')
        colors.append(COLOR_DICT['OTHER'])
        sizes.append(dict_freq['OTHER'][1])
    wedges, labels = plot_pie_chart(sizes, labels, colors, ax)
    ax.set_title(title)
    return (wedges, labels)


def _figure_legend(fig, wedges_list):
    """Attach one de-duplicated ISM legend (with 'OTHER' last) to the figure."""
    labels_handles = {}
    handles_OTHER = None
    for wedges, labels in wedges_list:
        for idx, label in enumerate(labels):
            label = label.split(':')[0]
            if label == 'OTHER':
                handles_OTHER = [wedges[idx], label]
                continue
            if label not in labels_handles:
                labels_handles[label] = wedges[idx]
    if handles_OTHER:
        handles_list = list(labels_handles.values()) + [handles_OTHER[0]]
        labels_list = list(labels_handles.keys()) + [handles_OTHER[1]]
    else:
        handles_list = list(labels_handles.values())
        labels_list = list(labels_handles.keys())
    fig.legend(handles_list, labels_list, bbox_to_anchor=(0.82, 0.25),
               bbox_transform=plt.gcf().transFigure, ncol=5,
               prop={'family': monospace_font['fontname']})


def ISM_plot(ISM_df, ISM_set, region_list, region_pie_chart, state_list, state_pie_chart, REFERENCE_date, time_series_region_list, count_list, date_list, OUTPUT_FOLDER):
    """
    Generate all ISM analysis figures.

    Produces the color-map legend, the per-region pie-chart grid
    (1_regional_ISM.png), the intra-US pie-chart grid (2_intra-US_ISM.png)
    and one stacked time-series plot per region of interest.

    Parameters
    ----------
    ISM_df: pandas.DataFrame
        per-sequence ISM table (used to rank ISMs by global frequency)
    ISM_set: set
        ISMs to display across all figures
    region_list / state_list: list
        regions and US states to draw pie charts for
    region_pie_chart / state_pie_chart: dict
        name -> {ISM: (label, count)} filtered frequency dicts
    REFERENCE_date: datetime.date
        start date for the time-series x-axis
    time_series_region_list: list
        regions getting a stacked time-series figure
    count_list / date_list: list
        per-date frequency dicts and their aligned dates
    OUTPUT_FOLDER: str
        directory for all output figures
    """
    # Rank ISMs by global frequency so color assignment is stable.
    ISM_index = {}
    idx = 0
    for ISM, counts in ISM_df['ISM'].value_counts().items():
        ISM_index[ISM] = idx
        idx += 1
    logging.info('{} ISMs will show up in the visualizations'.format(len(ISM_set)))
    ISM_list = [ISM for ISM in ISM_set if ISM != 'OTHER']
    ISM_list.sort(key=lambda ISM: ISM_index[ISM])
    color_map = get_color_names(CSS4_COLORS, len(ISM_list))
    COLOR_DICT = {}
    for idx, ISM in enumerate(ISM_list):
        COLOR_DICT[ISM] = color_map[idx]
    COLOR_DICT['OTHER'] = 'gray'
    # Bug fix: close the pickle file handle (it was opened inline and leaked).
    with open('COLOR_DICT.pkl', 'wb') as fh:
        pickle.dump(COLOR_DICT, fh)
    global_color_map(COLOR_DICT, ISM_list, OUTPUT_FOLDER)
    DPI = 100
    # Figure 1: regional pie charts on a fixed 5x5 grid.
    fig = plt.figure(figsize=(25, 15))
    wedges_list = []
    for idx, region in enumerate(region_list):
        ax = plt.subplot(5, 5, idx + 1)
        wedges_list.append(_pie_panel(region_pie_chart[region], COLOR_DICT, ax, region))
    _figure_legend(fig, wedges_list)
    plt.savefig('{}/1_regional_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True)
    plt.close(fig)
    # Figure 2: intra-US pie charts on a near-square grid sized to state_list.
    fig = plt.figure(figsize=(25, 20))
    subplot_y = int(np.sqrt(len(state_list)))
    subplot_x = int(np.sqrt(len(state_list))) + 1
    if subplot_x * subplot_y < len(state_list):
        subplot_y = subplot_x
    wedges_list = []
    for idx, state in enumerate(state_list):
        ax = plt.subplot(subplot_x, subplot_y, idx + 1)
        wedges_list.append(_pie_panel(state_pie_chart[state], COLOR_DICT, ax, state))
    _figure_legend(fig, wedges_list)
    plt.savefig('{}/2_intra-US_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True)
    plt.close(fig)
    # Time-series plots use a larger global font.
    font = {'family': 'sans-serif', 'size': 25}
    matplotlib.rc('font', **font)
    for region in time_series_region_list:
        regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER)
ncov_ism/_visualization.py
ISM_plot
EESI/ncov_ism
1
python
def ISM_plot(ISM_df, ISM_set, region_list, region_pie_chart, state_list, state_pie_chart, REFERENCE_date, time_series_region_list, count_list, date_list, OUTPUT_FOLDER): '\n \n ' ISM_index = {} idx = 0 for (ISM, counts) in ISM_df['ISM'].value_counts().items(): ISM_index[ISM] = idx idx += 1 logging.info('{} ISMs will show up in the visualizations'.format(len(ISM_set))) ISM_list = [] for ISM in ISM_set: if (ISM == 'OTHER'): continue ISM_list.append((ISM, ISM_index[ISM])) ISM_list = sorted(ISM_list, key=(lambda x: x[1])) ISM_list = [item[0] for item in ISM_list] color_map = get_color_names(CSS4_COLORS, len(ISM_list)) COLOR_DICT = {} for (idx, ISM) in enumerate(ISM_list): COLOR_DICT[ISM] = color_map[idx] COLOR_DICT['OTHER'] = 'gray' pickle.dump(COLOR_DICT, open('COLOR_DICT.pkl', 'wb')) global_color_map(COLOR_DICT, ISM_list, OUTPUT_FOLDER) DPI = 100 fig = plt.figure(figsize=(25, 15)) wedges_list = [] for (idx, region) in enumerate(region_list): dict_freq = region_pie_chart[region] total = sum([dict_freq[ISM][1] for ISM in dict_freq]) labels = [] sizes = [] colors = [] for ISM in dict_freq: if (ISM == 'OTHER'): continue labels.append('{}: {}'.format(ISM, dict_freq[ISM][0])) colors.append(COLOR_DICT[ISM]) sizes.append(dict_freq[ISM][1]) if ('OTHER' in dict_freq): labels.append('OTHER') colors.append(COLOR_DICT['OTHER']) sizes.append(dict_freq['OTHER'][1]) ax = plt.subplot(5, 5, (idx + 1)) (wedges, labels) = plot_pie_chart(sizes, labels, colors, ax) ax.set_title(region) wedges_list.append((wedges, labels)) labels_handles = {} handles_OTHER = None for (wedges, labels) in wedges_list: for (idx, label) in enumerate(labels): label = label.split(':')[0] if (label == 'OTHER'): handles_OTHER = [wedges[idx], label] continue if (label not in labels_handles): labels_handles[label] = wedges[idx] if handles_OTHER: handles_list = (list(labels_handles.values()) + [handles_OTHER[0]]) labels_list = (list(labels_handles.keys()) + [handles_OTHER[1]]) fig.legend(handles_list, labels_list, 
bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) else: fig.legend(labels_handles.values(), labels_handles.keys(), bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) plt.savefig('{}/1_regional_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True) plt.close(fig) fig = plt.figure(figsize=(25, 20)) subplot_y = int(np.sqrt(len(state_list))) subplot_x = (int(np.sqrt(len(state_list))) + 1) if ((subplot_x * subplot_y) < len(state_list)): subplot_y = subplot_x wedges_list = [] for (idx, state) in enumerate(state_list): dict_freq = state_pie_chart[state] total = sum([dict_freq[ISM][1] for ISM in dict_freq]) labels = [] sizes = [] colors = [] for ISM in dict_freq: if (ISM == 'OTHER'): continue labels.append('{}: {}'.format(ISM, dict_freq[ISM][0])) colors.append(COLOR_DICT[ISM]) sizes.append(dict_freq[ISM][1]) if ('OTHER' in dict_freq): labels.append('OTHER') colors.append(COLOR_DICT['OTHER']) sizes.append(dict_freq['OTHER'][1]) ax = plt.subplot(subplot_x, subplot_y, (idx + 1)) (wedges, labels) = plot_pie_chart(sizes, labels, colors, ax) ax.set_title(state) wedges_list.append((wedges, labels)) labels_handles = {} handles_OTHER = None for (wedges, labels) in wedges_list: for (idx, label) in enumerate(labels): label = label.split(':')[0] if (label == 'OTHER'): handles_OTHER = [wedges[idx], label] continue if (label not in labels_handles): labels_handles[label] = wedges[idx] if handles_OTHER: handles_list = (list(labels_handles.values()) + [handles_OTHER[0]]) labels_list = (list(labels_handles.keys()) + [handles_OTHER[1]]) fig.legend(handles_list, labels_list, bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) else: fig.legend(labels_handles.values(), labels_handles.keys(), bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, 
prop={'family': monospace_font['fontname']}) plt.savefig('{}/2_intra-US_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True) plt.close(fig) font = {'family': 'sans-serif', 'size': 25} matplotlib.rc('font', **font) for region in time_series_region_list: regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER)
def ISM_plot(ISM_df, ISM_set, region_list, region_pie_chart, state_list, state_pie_chart, REFERENCE_date, time_series_region_list, count_list, date_list, OUTPUT_FOLDER): '\n \n ' ISM_index = {} idx = 0 for (ISM, counts) in ISM_df['ISM'].value_counts().items(): ISM_index[ISM] = idx idx += 1 logging.info('{} ISMs will show up in the visualizations'.format(len(ISM_set))) ISM_list = [] for ISM in ISM_set: if (ISM == 'OTHER'): continue ISM_list.append((ISM, ISM_index[ISM])) ISM_list = sorted(ISM_list, key=(lambda x: x[1])) ISM_list = [item[0] for item in ISM_list] color_map = get_color_names(CSS4_COLORS, len(ISM_list)) COLOR_DICT = {} for (idx, ISM) in enumerate(ISM_list): COLOR_DICT[ISM] = color_map[idx] COLOR_DICT['OTHER'] = 'gray' pickle.dump(COLOR_DICT, open('COLOR_DICT.pkl', 'wb')) global_color_map(COLOR_DICT, ISM_list, OUTPUT_FOLDER) DPI = 100 fig = plt.figure(figsize=(25, 15)) wedges_list = [] for (idx, region) in enumerate(region_list): dict_freq = region_pie_chart[region] total = sum([dict_freq[ISM][1] for ISM in dict_freq]) labels = [] sizes = [] colors = [] for ISM in dict_freq: if (ISM == 'OTHER'): continue labels.append('{}: {}'.format(ISM, dict_freq[ISM][0])) colors.append(COLOR_DICT[ISM]) sizes.append(dict_freq[ISM][1]) if ('OTHER' in dict_freq): labels.append('OTHER') colors.append(COLOR_DICT['OTHER']) sizes.append(dict_freq['OTHER'][1]) ax = plt.subplot(5, 5, (idx + 1)) (wedges, labels) = plot_pie_chart(sizes, labels, colors, ax) ax.set_title(region) wedges_list.append((wedges, labels)) labels_handles = {} handles_OTHER = None for (wedges, labels) in wedges_list: for (idx, label) in enumerate(labels): label = label.split(':')[0] if (label == 'OTHER'): handles_OTHER = [wedges[idx], label] continue if (label not in labels_handles): labels_handles[label] = wedges[idx] if handles_OTHER: handles_list = (list(labels_handles.values()) + [handles_OTHER[0]]) labels_list = (list(labels_handles.keys()) + [handles_OTHER[1]]) fig.legend(handles_list, labels_list, 
bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) else: fig.legend(labels_handles.values(), labels_handles.keys(), bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) plt.savefig('{}/1_regional_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True) plt.close(fig) fig = plt.figure(figsize=(25, 20)) subplot_y = int(np.sqrt(len(state_list))) subplot_x = (int(np.sqrt(len(state_list))) + 1) if ((subplot_x * subplot_y) < len(state_list)): subplot_y = subplot_x wedges_list = [] for (idx, state) in enumerate(state_list): dict_freq = state_pie_chart[state] total = sum([dict_freq[ISM][1] for ISM in dict_freq]) labels = [] sizes = [] colors = [] for ISM in dict_freq: if (ISM == 'OTHER'): continue labels.append('{}: {}'.format(ISM, dict_freq[ISM][0])) colors.append(COLOR_DICT[ISM]) sizes.append(dict_freq[ISM][1]) if ('OTHER' in dict_freq): labels.append('OTHER') colors.append(COLOR_DICT['OTHER']) sizes.append(dict_freq['OTHER'][1]) ax = plt.subplot(subplot_x, subplot_y, (idx + 1)) (wedges, labels) = plot_pie_chart(sizes, labels, colors, ax) ax.set_title(state) wedges_list.append((wedges, labels)) labels_handles = {} handles_OTHER = None for (wedges, labels) in wedges_list: for (idx, label) in enumerate(labels): label = label.split(':')[0] if (label == 'OTHER'): handles_OTHER = [wedges[idx], label] continue if (label not in labels_handles): labels_handles[label] = wedges[idx] if handles_OTHER: handles_list = (list(labels_handles.values()) + [handles_OTHER[0]]) labels_list = (list(labels_handles.keys()) + [handles_OTHER[1]]) fig.legend(handles_list, labels_list, bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) else: fig.legend(labels_handles.values(), labels_handles.keys(), bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, 
prop={'family': monospace_font['fontname']}) plt.savefig('{}/2_intra-US_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True) plt.close(fig) font = {'family': 'sans-serif', 'size': 25} matplotlib.rc('font', **font) for region in time_series_region_list: regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER)<|docstring|>Generate figures for ISM analysis.<|endoftext|>
d1808d47d6b6ec6e78b583adb72990c0ad7e26902599c77abaae4cd293e92a6f
def customized_ISM_plot(ISM_df, ISM_set, region_list, region_pie_chart, REFERENCE_date, count_list, date_list, OUTPUT_FOLDER): '\n Generate figures for ISM analysis.\n ' ISM_index = {} idx = 0 for (ISM, counts) in ISM_df['ISM'].value_counts().items(): ISM_index[ISM] = idx idx += 1 logging.info('{} ISMs will show up in the visualizations'.format(len(ISM_set))) ISM_list = [] for ISM in ISM_set: if (ISM == 'OTHER'): continue ISM_list.append((ISM, ISM_index[ISM])) ISM_list = sorted(ISM_list, key=(lambda x: x[1])) ISM_list = [item[0] for item in ISM_list] color_map = get_color_names(CSS4_COLORS, len(ISM_list)) COLOR_DICT = {} for (idx, ISM) in enumerate(ISM_list): COLOR_DICT[ISM] = color_map[idx] COLOR_DICT['OTHER'] = 'gray' pickle.dump(COLOR_DICT, open('COLOR_DICT.pkl', 'wb')) global_color_map(COLOR_DICT, ISM_list, OUTPUT_FOLDER) DPI = 100 fig = plt.figure(figsize=(25, 15)) wedges_list = [] for (idx, region) in enumerate(region_list): dict_freq = region_pie_chart[region] total = sum([dict_freq[ISM][1] for ISM in dict_freq]) labels = [] sizes = [] colors = [] for ISM in dict_freq: if (ISM == 'OTHER'): continue labels.append('{}: {}'.format(ISM, dict_freq[ISM][0])) colors.append(COLOR_DICT[ISM]) sizes.append(dict_freq[ISM][1]) if ('OTHER' in dict_freq): labels.append('OTHER') colors.append(COLOR_DICT['OTHER']) sizes.append(dict_freq['OTHER'][1]) ax = plt.subplot(5, 5, (idx + 1)) (wedges, labels) = plot_pie_chart(sizes, labels, colors, ax) ax.set_title(region) wedges_list.append((wedges, labels)) labels_handles = {} handles_OTHER = None for (wedges, labels) in wedges_list: for (idx, label) in enumerate(labels): label = label.split(':')[0] if (label == 'OTHER'): handles_OTHER = [wedges[idx], label] continue if (label not in labels_handles): labels_handles[label] = wedges[idx] if handles_OTHER: handles_list = (list(labels_handles.values()) + [handles_OTHER[0]]) labels_list = (list(labels_handles.keys()) + [handles_OTHER[1]]) fig.legend(handles_list, labels_list, 
bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) else: fig.legend(labels_handles.values(), labels_handles.keys(), bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) plt.savefig('{}/1_regional_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True) plt.close(fig) font = {'family': 'sans-serif', 'size': 25} matplotlib.rc('font', **font) for region in region_list: regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER)
Generate figures for ISM analysis.
ncov_ism/_visualization.py
customized_ISM_plot
EESI/ncov_ism
1
python
def customized_ISM_plot(ISM_df, ISM_set, region_list, region_pie_chart, REFERENCE_date, count_list, date_list, OUTPUT_FOLDER): '\n \n ' ISM_index = {} idx = 0 for (ISM, counts) in ISM_df['ISM'].value_counts().items(): ISM_index[ISM] = idx idx += 1 logging.info('{} ISMs will show up in the visualizations'.format(len(ISM_set))) ISM_list = [] for ISM in ISM_set: if (ISM == 'OTHER'): continue ISM_list.append((ISM, ISM_index[ISM])) ISM_list = sorted(ISM_list, key=(lambda x: x[1])) ISM_list = [item[0] for item in ISM_list] color_map = get_color_names(CSS4_COLORS, len(ISM_list)) COLOR_DICT = {} for (idx, ISM) in enumerate(ISM_list): COLOR_DICT[ISM] = color_map[idx] COLOR_DICT['OTHER'] = 'gray' pickle.dump(COLOR_DICT, open('COLOR_DICT.pkl', 'wb')) global_color_map(COLOR_DICT, ISM_list, OUTPUT_FOLDER) DPI = 100 fig = plt.figure(figsize=(25, 15)) wedges_list = [] for (idx, region) in enumerate(region_list): dict_freq = region_pie_chart[region] total = sum([dict_freq[ISM][1] for ISM in dict_freq]) labels = [] sizes = [] colors = [] for ISM in dict_freq: if (ISM == 'OTHER'): continue labels.append('{}: {}'.format(ISM, dict_freq[ISM][0])) colors.append(COLOR_DICT[ISM]) sizes.append(dict_freq[ISM][1]) if ('OTHER' in dict_freq): labels.append('OTHER') colors.append(COLOR_DICT['OTHER']) sizes.append(dict_freq['OTHER'][1]) ax = plt.subplot(5, 5, (idx + 1)) (wedges, labels) = plot_pie_chart(sizes, labels, colors, ax) ax.set_title(region) wedges_list.append((wedges, labels)) labels_handles = {} handles_OTHER = None for (wedges, labels) in wedges_list: for (idx, label) in enumerate(labels): label = label.split(':')[0] if (label == 'OTHER'): handles_OTHER = [wedges[idx], label] continue if (label not in labels_handles): labels_handles[label] = wedges[idx] if handles_OTHER: handles_list = (list(labels_handles.values()) + [handles_OTHER[0]]) labels_list = (list(labels_handles.keys()) + [handles_OTHER[1]]) fig.legend(handles_list, labels_list, bbox_to_anchor=(0.82, 0.25), 
bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) else: fig.legend(labels_handles.values(), labels_handles.keys(), bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) plt.savefig('{}/1_regional_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True) plt.close(fig) font = {'family': 'sans-serif', 'size': 25} matplotlib.rc('font', **font) for region in region_list: regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER)
def customized_ISM_plot(ISM_df, ISM_set, region_list, region_pie_chart, REFERENCE_date, count_list, date_list, OUTPUT_FOLDER): '\n \n ' ISM_index = {} idx = 0 for (ISM, counts) in ISM_df['ISM'].value_counts().items(): ISM_index[ISM] = idx idx += 1 logging.info('{} ISMs will show up in the visualizations'.format(len(ISM_set))) ISM_list = [] for ISM in ISM_set: if (ISM == 'OTHER'): continue ISM_list.append((ISM, ISM_index[ISM])) ISM_list = sorted(ISM_list, key=(lambda x: x[1])) ISM_list = [item[0] for item in ISM_list] color_map = get_color_names(CSS4_COLORS, len(ISM_list)) COLOR_DICT = {} for (idx, ISM) in enumerate(ISM_list): COLOR_DICT[ISM] = color_map[idx] COLOR_DICT['OTHER'] = 'gray' pickle.dump(COLOR_DICT, open('COLOR_DICT.pkl', 'wb')) global_color_map(COLOR_DICT, ISM_list, OUTPUT_FOLDER) DPI = 100 fig = plt.figure(figsize=(25, 15)) wedges_list = [] for (idx, region) in enumerate(region_list): dict_freq = region_pie_chart[region] total = sum([dict_freq[ISM][1] for ISM in dict_freq]) labels = [] sizes = [] colors = [] for ISM in dict_freq: if (ISM == 'OTHER'): continue labels.append('{}: {}'.format(ISM, dict_freq[ISM][0])) colors.append(COLOR_DICT[ISM]) sizes.append(dict_freq[ISM][1]) if ('OTHER' in dict_freq): labels.append('OTHER') colors.append(COLOR_DICT['OTHER']) sizes.append(dict_freq['OTHER'][1]) ax = plt.subplot(5, 5, (idx + 1)) (wedges, labels) = plot_pie_chart(sizes, labels, colors, ax) ax.set_title(region) wedges_list.append((wedges, labels)) labels_handles = {} handles_OTHER = None for (wedges, labels) in wedges_list: for (idx, label) in enumerate(labels): label = label.split(':')[0] if (label == 'OTHER'): handles_OTHER = [wedges[idx], label] continue if (label not in labels_handles): labels_handles[label] = wedges[idx] if handles_OTHER: handles_list = (list(labels_handles.values()) + [handles_OTHER[0]]) labels_list = (list(labels_handles.keys()) + [handles_OTHER[1]]) fig.legend(handles_list, labels_list, bbox_to_anchor=(0.82, 0.25), 
bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) else: fig.legend(labels_handles.values(), labels_handles.keys(), bbox_to_anchor=(0.82, 0.25), bbox_transform=plt.gcf().transFigure, ncol=5, prop={'family': monospace_font['fontname']}) plt.savefig('{}/1_regional_ISM.png'.format(OUTPUT_FOLDER), bbox_inches='tight', dpi=DPI, transparent=True) plt.close(fig) font = {'family': 'sans-serif', 'size': 25} matplotlib.rc('font', **font) for region in region_list: regional_growth_plot(region, ISM_df, REFERENCE_date, count_list, date_list, COLOR_DICT, OUTPUT_FOLDER)<|docstring|>Generate figures for ISM analysis.<|endoftext|>
0ec1e52e95b27382268e418e714eaba844f2efe693f4f4bbfe3aa9b87a0680e6
def apply_to_boxes(self, evaluation): 'System`MakeBoxes[System`CustomAtom, StandardForm|TraditionalForm|OutputForm|InputForm]' return CustomBoxConstruct(evaluation=evaluation)
System`MakeBoxes[System`CustomAtom, StandardForm|TraditionalForm|OutputForm|InputForm]
test/test_custom_boxconstruct.py
apply_to_boxes
lambdaxymox/Mathics
1,920
python
def apply_to_boxes(self, evaluation): return CustomBoxConstruct(evaluation=evaluation)
def apply_to_boxes(self, evaluation): return CustomBoxConstruct(evaluation=evaluation)<|docstring|>System`MakeBoxes[System`CustomAtom, StandardForm|TraditionalForm|OutputForm|InputForm]<|endoftext|>
005edf5b5a093f69043152a2f0c457ff45019dc62da85931c76e37780bf8eff6
def apply_box(self, elems, evaluation, options): 'System`MakeBoxes[System`Graphics[elems_, System`OptionsPattern[System`Graphics]],\n System`StandardForm|System`TraditionalForm|System`OutputForm]' instance = CustomGraphicsBox(*elems._leaves, evaluation=evaluation) return instance
System`MakeBoxes[System`Graphics[elems_, System`OptionsPattern[System`Graphics]], System`StandardForm|System`TraditionalForm|System`OutputForm]
test/test_custom_boxconstruct.py
apply_box
lambdaxymox/Mathics
1,920
python
def apply_box(self, elems, evaluation, options): 'System`MakeBoxes[System`Graphics[elems_, System`OptionsPattern[System`Graphics]],\n System`StandardForm|System`TraditionalForm|System`OutputForm]' instance = CustomGraphicsBox(*elems._leaves, evaluation=evaluation) return instance
def apply_box(self, elems, evaluation, options): 'System`MakeBoxes[System`Graphics[elems_, System`OptionsPattern[System`Graphics]],\n System`StandardForm|System`TraditionalForm|System`OutputForm]' instance = CustomGraphicsBox(*elems._leaves, evaluation=evaluation) return instance<|docstring|>System`MakeBoxes[System`Graphics[elems_, System`OptionsPattern[System`Graphics]], System`StandardForm|System`TraditionalForm|System`OutputForm]<|endoftext|>
f8d10f3cf870d4ee0165db2b53839a324cd1f7fa67a93d6158f09954abc56917
def test_check_n_jobs(): 'Test check_n_jobs function..\n ' n_jobs_valid = 1 n_jobs_safe = check_n_jobs(n_jobs_valid) assert_equal(n_jobs_valid, n_jobs_safe) n_jobs_large = 100000000 n_jobs_safe = check_n_jobs(n_jobs_large) assert_true((n_jobs_safe < n_jobs_large)) assert_true((n_jobs_safe >= 1)) n_jobs_not_enough = (- 100000000) n_jobs_safe = check_n_jobs(n_jobs_not_enough) assert_equal(n_jobs_safe, 1) n_jobs = 0 assert_raises(ValueError, check_n_jobs, n_jobs)
Test check_n_jobs function..
nilearn_sandbox/tests/test_common_checks.py
test_check_n_jobs
KamalakerDadi/nilearn_sandbox
1
python
def test_check_n_jobs(): '\n ' n_jobs_valid = 1 n_jobs_safe = check_n_jobs(n_jobs_valid) assert_equal(n_jobs_valid, n_jobs_safe) n_jobs_large = 100000000 n_jobs_safe = check_n_jobs(n_jobs_large) assert_true((n_jobs_safe < n_jobs_large)) assert_true((n_jobs_safe >= 1)) n_jobs_not_enough = (- 100000000) n_jobs_safe = check_n_jobs(n_jobs_not_enough) assert_equal(n_jobs_safe, 1) n_jobs = 0 assert_raises(ValueError, check_n_jobs, n_jobs)
def test_check_n_jobs(): '\n ' n_jobs_valid = 1 n_jobs_safe = check_n_jobs(n_jobs_valid) assert_equal(n_jobs_valid, n_jobs_safe) n_jobs_large = 100000000 n_jobs_safe = check_n_jobs(n_jobs_large) assert_true((n_jobs_safe < n_jobs_large)) assert_true((n_jobs_safe >= 1)) n_jobs_not_enough = (- 100000000) n_jobs_safe = check_n_jobs(n_jobs_not_enough) assert_equal(n_jobs_safe, 1) n_jobs = 0 assert_raises(ValueError, check_n_jobs, n_jobs)<|docstring|>Test check_n_jobs function..<|endoftext|>
bb796ebaff3587a5e5eeec0b88c78df8123e2cdef39cc8f123a8c2cf203d86a4
def get_basename_root(path): 'Return the extensionless basename of the file described by ``path``.\n\n Example:\n name = get_basename_root("/path/to/file.json")\n name == "file" # true\n\n :param path: A file path.\n :type path: str\n :returns: str\n\n ' (name, _) = os.path.splitext(os.path.basename(path)) return name
Return the extensionless basename of the file described by ``path``. Example: name = get_basename_root("/path/to/file.json") name == "file" # true :param path: A file path. :type path: str :returns: str
integration_tests/steps/compilation.py
get_basename_root
Grindizer/ptolemy
1
python
def get_basename_root(path): 'Return the extensionless basename of the file described by ``path``.\n\n Example:\n name = get_basename_root("/path/to/file.json")\n name == "file" # true\n\n :param path: A file path.\n :type path: str\n :returns: str\n\n ' (name, _) = os.path.splitext(os.path.basename(path)) return name
def get_basename_root(path): 'Return the extensionless basename of the file described by ``path``.\n\n Example:\n name = get_basename_root("/path/to/file.json")\n name == "file" # true\n\n :param path: A file path.\n :type path: str\n :returns: str\n\n ' (name, _) = os.path.splitext(os.path.basename(path)) return name<|docstring|>Return the extensionless basename of the file described by ``path``. Example: name = get_basename_root("/path/to/file.json") name == "file" # true :param path: A file path. :type path: str :returns: str<|endoftext|>
80907e49651d10629c51df6c63e9f0a208a22f7494928695a9699da955d658a7
def __init__(self, networkSecurityGroupId=None, networkSecurityGroupName=None, description=None, vpcId=None, securityGroupRules=None, createdTime=None): '\n :param networkSecurityGroupId: (Optional) 安全组ID\n :param networkSecurityGroupName: (Optional) 安全组名称\n :param description: (Optional) 安全组描述信息\n :param vpcId: (Optional) 安全组所在vpc的Id\n :param securityGroupRules: (Optional) 安全组规则信息\n :param createdTime: (Optional) 安全组创建时间\n ' self.networkSecurityGroupId = networkSecurityGroupId self.networkSecurityGroupName = networkSecurityGroupName self.description = description self.vpcId = vpcId self.securityGroupRules = securityGroupRules self.createdTime = createdTime
:param networkSecurityGroupId: (Optional) 安全组ID :param networkSecurityGroupName: (Optional) 安全组名称 :param description: (Optional) 安全组描述信息 :param vpcId: (Optional) 安全组所在vpc的Id :param securityGroupRules: (Optional) 安全组规则信息 :param createdTime: (Optional) 安全组创建时间
jdcloud_sdk/services/vpc/models/NetworkSecurityGroup.py
__init__
lidaobing/jdcloud-sdk-python
0
python
def __init__(self, networkSecurityGroupId=None, networkSecurityGroupName=None, description=None, vpcId=None, securityGroupRules=None, createdTime=None): '\n :param networkSecurityGroupId: (Optional) 安全组ID\n :param networkSecurityGroupName: (Optional) 安全组名称\n :param description: (Optional) 安全组描述信息\n :param vpcId: (Optional) 安全组所在vpc的Id\n :param securityGroupRules: (Optional) 安全组规则信息\n :param createdTime: (Optional) 安全组创建时间\n ' self.networkSecurityGroupId = networkSecurityGroupId self.networkSecurityGroupName = networkSecurityGroupName self.description = description self.vpcId = vpcId self.securityGroupRules = securityGroupRules self.createdTime = createdTime
def __init__(self, networkSecurityGroupId=None, networkSecurityGroupName=None, description=None, vpcId=None, securityGroupRules=None, createdTime=None): '\n :param networkSecurityGroupId: (Optional) 安全组ID\n :param networkSecurityGroupName: (Optional) 安全组名称\n :param description: (Optional) 安全组描述信息\n :param vpcId: (Optional) 安全组所在vpc的Id\n :param securityGroupRules: (Optional) 安全组规则信息\n :param createdTime: (Optional) 安全组创建时间\n ' self.networkSecurityGroupId = networkSecurityGroupId self.networkSecurityGroupName = networkSecurityGroupName self.description = description self.vpcId = vpcId self.securityGroupRules = securityGroupRules self.createdTime = createdTime<|docstring|>:param networkSecurityGroupId: (Optional) 安全组ID :param networkSecurityGroupName: (Optional) 安全组名称 :param description: (Optional) 安全组描述信息 :param vpcId: (Optional) 安全组所在vpc的Id :param securityGroupRules: (Optional) 安全组规则信息 :param createdTime: (Optional) 安全组创建时间<|endoftext|>
c72910a17eca28910f634d4599538017d05319539d0c66efe1d13109b813df50
@patch('hc.api.management.commands.sendalerts.Command.handle_many') def test_it_notifies_when_check_run_too_often(self, mock): '\n Tests that a check is not too often i.e.\n it should not be run before the time left\n before its timeout period expires is\n less than or equal to its grace period\n ' self.client.get(('/ping/%s/' % self.check.code)) check = Check.objects.filter(name='Test 1').first() self.assertEqual(check.runs_too_often, True) result = Command().handle_many() assert result, True
Tests that a check is not too often i.e. it should not be run before the time left before its timeout period expires is less than or equal to its grace period
hc/api/tests/test_sendalerts_running_often.py
test_it_notifies_when_check_run_too_often
andela/hc-aces-kla-
0
python
@patch('hc.api.management.commands.sendalerts.Command.handle_many') def test_it_notifies_when_check_run_too_often(self, mock): '\n Tests that a check is not too often i.e.\n it should not be run before the time left\n before its timeout period expires is\n less than or equal to its grace period\n ' self.client.get(('/ping/%s/' % self.check.code)) check = Check.objects.filter(name='Test 1').first() self.assertEqual(check.runs_too_often, True) result = Command().handle_many() assert result, True
@patch('hc.api.management.commands.sendalerts.Command.handle_many') def test_it_notifies_when_check_run_too_often(self, mock): '\n Tests that a check is not too often i.e.\n it should not be run before the time left\n before its timeout period expires is\n less than or equal to its grace period\n ' self.client.get(('/ping/%s/' % self.check.code)) check = Check.objects.filter(name='Test 1').first() self.assertEqual(check.runs_too_often, True) result = Command().handle_many() assert result, True<|docstring|>Tests that a check is not too often i.e. it should not be run before the time left before its timeout period expires is less than or equal to its grace period<|endoftext|>
e4d01a37cbceccacc854c124e34bb45019dbbf40a78e6a796815bcb788f4dd83
def _notify(self, to_return): '\n if hasattr(to_return, "_observers") and hasattr(self, "_observers"):\n Subject._notify()\n else:\n print("Error! No observers found")\n ' if (to_return is not None): if hasattr(self, '_observers'): to_return._observers = self._observers Subject._notify(self) else: Subject._notify(self)
if hasattr(to_return, "_observers") and hasattr(self, "_observers"): Subject._notify() else: print("Error! No observers found")
Py3DViewer/utils/ObservableArray.py
_notify
alexus98/py3DViewer
24
python
def _notify(self, to_return): '\n if hasattr(to_return, "_observers") and hasattr(self, "_observers"):\n Subject._notify()\n else:\n print("Error! No observers found")\n ' if (to_return is not None): if hasattr(self, '_observers'): to_return._observers = self._observers Subject._notify(self) else: Subject._notify(self)
def _notify(self, to_return): '\n if hasattr(to_return, "_observers") and hasattr(self, "_observers"):\n Subject._notify()\n else:\n print("Error! No observers found")\n ' if (to_return is not None): if hasattr(self, '_observers'): to_return._observers = self._observers Subject._notify(self) else: Subject._notify(self)<|docstring|>if hasattr(to_return, "_observers") and hasattr(self, "_observers"): Subject._notify() else: print("Error! No observers found")<|endoftext|>
cedb5b7046e55b30067f4f0a42e5eb3fc01261927fc1216f4d200bf6f38f5b6a
def to_json(self): 'Take all model attributes and render them as JSON.' return {'id': self.id, 'title': self.title, 'author': self.author, 'isbn': self.isbn, 'pub_date': (self.pub_date.strftime('%m/%d/%Y') if self.pub_date else None)}
Take all model attributes and render them as JSON.
book_api/models/book.py
to_json
musflood/zonar-book-api
0
python
def to_json(self): return {'id': self.id, 'title': self.title, 'author': self.author, 'isbn': self.isbn, 'pub_date': (self.pub_date.strftime('%m/%d/%Y') if self.pub_date else None)}
def to_json(self): return {'id': self.id, 'title': self.title, 'author': self.author, 'isbn': self.isbn, 'pub_date': (self.pub_date.strftime('%m/%d/%Y') if self.pub_date else None)}<|docstring|>Take all model attributes and render them as JSON.<|endoftext|>
854a0ed7ee9e0d06cd7f807bd6e468bcfcd0586de3ba4c49c231b76b3c51e93f
def decode_binary_rle(data): '\n decodes binary rle to integer list rle\n ' m = len(data) cnts = ([0] * m) h = 0 p = 0 while (p < m): x = 0 k = 0 more = 1 while (more > 0): c = (ord(data[p]) - 48) x |= ((c & 31) << (5 * k)) more = (c & 32) p = (p + 1) k = (k + 1) if ((more == 0) and ((c & 16) != 0)): x |= ((- 1) << (5 * k)) if (h > 2): x += cnts[(h - 2)] cnts[h] = x h += 1 return cnts[0:h]
decodes binary rle to integer list rle
darwin/importer/formats/coco.py
decode_binary_rle
sachasamama/darwin-py
28
python
def decode_binary_rle(data): '\n \n ' m = len(data) cnts = ([0] * m) h = 0 p = 0 while (p < m): x = 0 k = 0 more = 1 while (more > 0): c = (ord(data[p]) - 48) x |= ((c & 31) << (5 * k)) more = (c & 32) p = (p + 1) k = (k + 1) if ((more == 0) and ((c & 16) != 0)): x |= ((- 1) << (5 * k)) if (h > 2): x += cnts[(h - 2)] cnts[h] = x h += 1 return cnts[0:h]
def decode_binary_rle(data): '\n \n ' m = len(data) cnts = ([0] * m) h = 0 p = 0 while (p < m): x = 0 k = 0 more = 1 while (more > 0): c = (ord(data[p]) - 48) x |= ((c & 31) << (5 * k)) more = (c & 32) p = (p + 1) k = (k + 1) if ((more == 0) and ((c & 16) != 0)): x |= ((- 1) << (5 * k)) if (h > 2): x += cnts[(h - 2)] cnts[h] = x h += 1 return cnts[0:h]<|docstring|>decodes binary rle to integer list rle<|endoftext|>
547e05a5d58fba96c04103700620e689b7ee1952d4daa8e48b353eec675a6f13
def _apply_some_settings(self): '\n Applies all settings that can be applied without re-opening the ide\n :return: None\n ' self._apply_tab_length_to_all_open_editors()
Applies all settings that can be applied without re-opening the ide :return: None
ide/SettingsDialog.py
_apply_some_settings
bookofproofs/fpl
4
python
def _apply_some_settings(self): '\n Applies all settings that can be applied without re-opening the ide\n :return: None\n ' self._apply_tab_length_to_all_open_editors()
def _apply_some_settings(self): '\n Applies all settings that can be applied without re-opening the ide\n :return: None\n ' self._apply_tab_length_to_all_open_editors()<|docstring|>Applies all settings that can be applied without re-opening the ide :return: None<|endoftext|>
ddc689a529894cdb8da6dfce7adcbab914f30b5c2a6656f714230546ab7395d9
def cluster_diffs(concepts, data, graph_location, file_length_map, occurrence_matrix, file_index_map, times, edges_kept=None, use_file_dist=True, use_call_distance=True, use_data=True, use_namespace=True, use_change_coupling=True): "\n :param concepts: The number of concepts we wish to segment\n :param data: The initial diff-regions segmentation, each it's own group\n :param graph_location: The location of the dot file representing the deltaPDG of the file\n :param file_length_map: A map between filename and file line count\n :param occurrence_matrix: The matrix mapping commits to files and vice versa\n :param file_index_map: The map between filenames and occurrence_matrix indices\n :return: The proposed clustering of diff_regions\n " deltaPDG = obj_dict_to_networkx(read_graph_from_dot(graph_location)) if (edges_kept is not None): deltaPDG = remove_all_except(deltaPDG, edges_kept) context = get_context_from_nxgraph(deltaPDG) voters = [(file_distance(file_length_map) if use_file_dist else None), (call_graph_distance(deltaPDG, context) if use_call_distance else None), (data_dependency(deltaPDG) if use_data else None), (namespace_distance(deltaPDG, context) if use_namespace else None), (change_coupling(occurrence_matrix, file_index_map) if use_change_coupling else None)] voters = [v for v in voters if (v is not None)] n = len(data) t0 = time.process_time() for i in range(times): (affinity, args) = generate_empty_affinity(n, voters) with ThreadPool(processes=min((os.cpu_count() - 1), 6)) as wp: for (k, value) in wp.imap_unordered((lambda i: (i[1], i[0](data[i[(- 1)][0]], data[i[(- 1)][1]]))), args): affinity[k] += value labels = cluster_from_voter_affinity(affinity, concepts) t1 = time.process_time() time_ = ((t1 - t0) / times) return (labels, time_)
:param concepts: The number of concepts we wish to segment :param data: The initial diff-regions segmentation, each it's own group :param graph_location: The location of the dot file representing the deltaPDG of the file :param file_length_map: A map between filename and file line count :param occurrence_matrix: The matrix mapping commits to files and vice versa :param file_index_map: The map between filenames and occurrence_matrix indices :return: The proposed clustering of diff_regions
confidence_voters/confidence_voters.py
cluster_diffs
PPPI/Flexeme
3
python
def cluster_diffs(concepts, data, graph_location, file_length_map, occurrence_matrix, file_index_map, times, edges_kept=None, use_file_dist=True, use_call_distance=True, use_data=True, use_namespace=True, use_change_coupling=True): "\n :param concepts: The number of concepts we wish to segment\n :param data: The initial diff-regions segmentation, each it's own group\n :param graph_location: The location of the dot file representing the deltaPDG of the file\n :param file_length_map: A map between filename and file line count\n :param occurrence_matrix: The matrix mapping commits to files and vice versa\n :param file_index_map: The map between filenames and occurrence_matrix indices\n :return: The proposed clustering of diff_regions\n " deltaPDG = obj_dict_to_networkx(read_graph_from_dot(graph_location)) if (edges_kept is not None): deltaPDG = remove_all_except(deltaPDG, edges_kept) context = get_context_from_nxgraph(deltaPDG) voters = [(file_distance(file_length_map) if use_file_dist else None), (call_graph_distance(deltaPDG, context) if use_call_distance else None), (data_dependency(deltaPDG) if use_data else None), (namespace_distance(deltaPDG, context) if use_namespace else None), (change_coupling(occurrence_matrix, file_index_map) if use_change_coupling else None)] voters = [v for v in voters if (v is not None)] n = len(data) t0 = time.process_time() for i in range(times): (affinity, args) = generate_empty_affinity(n, voters) with ThreadPool(processes=min((os.cpu_count() - 1), 6)) as wp: for (k, value) in wp.imap_unordered((lambda i: (i[1], i[0](data[i[(- 1)][0]], data[i[(- 1)][1]]))), args): affinity[k] += value labels = cluster_from_voter_affinity(affinity, concepts) t1 = time.process_time() time_ = ((t1 - t0) / times) return (labels, time_)
def cluster_diffs(concepts, data, graph_location, file_length_map, occurrence_matrix, file_index_map, times, edges_kept=None, use_file_dist=True, use_call_distance=True, use_data=True, use_namespace=True, use_change_coupling=True): "\n :param concepts: The number of concepts we wish to segment\n :param data: The initial diff-regions segmentation, each it's own group\n :param graph_location: The location of the dot file representing the deltaPDG of the file\n :param file_length_map: A map between filename and file line count\n :param occurrence_matrix: The matrix mapping commits to files and vice versa\n :param file_index_map: The map between filenames and occurrence_matrix indices\n :return: The proposed clustering of diff_regions\n " deltaPDG = obj_dict_to_networkx(read_graph_from_dot(graph_location)) if (edges_kept is not None): deltaPDG = remove_all_except(deltaPDG, edges_kept) context = get_context_from_nxgraph(deltaPDG) voters = [(file_distance(file_length_map) if use_file_dist else None), (call_graph_distance(deltaPDG, context) if use_call_distance else None), (data_dependency(deltaPDG) if use_data else None), (namespace_distance(deltaPDG, context) if use_namespace else None), (change_coupling(occurrence_matrix, file_index_map) if use_change_coupling else None)] voters = [v for v in voters if (v is not None)] n = len(data) t0 = time.process_time() for i in range(times): (affinity, args) = generate_empty_affinity(n, voters) with ThreadPool(processes=min((os.cpu_count() - 1), 6)) as wp: for (k, value) in wp.imap_unordered((lambda i: (i[1], i[0](data[i[(- 1)][0]], data[i[(- 1)][1]]))), args): affinity[k] += value labels = cluster_from_voter_affinity(affinity, concepts) t1 = time.process_time() time_ = ((t1 - t0) / times) return (labels, time_)<|docstring|>:param concepts: The number of concepts we wish to segment :param data: The initial diff-regions segmentation, each it's own group :param graph_location: The location of the dot file representing the deltaPDG 
of the file :param file_length_map: A map between filename and file line count :param occurrence_matrix: The matrix mapping commits to files and vice versa :param file_index_map: The map between filenames and occurrence_matrix indices :return: The proposed clustering of diff_regions<|endoftext|>
2b20e432207c763e3258096bf92924509ae8a6e197a256e401e2d4f8d8395627
def calc_added_turbulence(self, dw_ijlk, cw_ijlk, D_src_il, ct_ilk, TI_ilk, **_):
    """Calculate the added turbulence intensity at locations specified by
    downstream distances (dw_ijlk) and crosswind distances (cw_ijlk)
    caused by the wake of a turbine (diameter: D_src_il, thrust coefficient: ct_ilk).

    Returns
    -------
    TI_add_ijlk : array:float
        Added turbulence intensity [-]
    """
    # np.errstate replaces the former `np.warnings.filterwarnings(...)` calls:
    # the `np.warnings` alias was removed in NumPy 1.24, and errstate is the
    # idiomatic way to silence divide-by-zero / invalid-value float warnings.
    with np.errstate(divide='ignore', invalid='ignore'):
        # ct == 0 yields inf in the denominator -> added TI of 0;
        # 0/0 yields NaN, which is zeroed explicitly below.
        TI_add_ijlk = 1 / (1.5 + (0.8 * (dw_ijlk / D_src_il[:, na, :, na])) / np.sqrt(ct_ilk)[:, na])
    TI_add_ijlk[np.isnan(TI_add_ijlk)] = 0
    weights_ijlk = self.weight(dw_ijlk, cw_ijlk, D_src_il)
    # Combine with the ambient TI in quadrature and report only the excess over ambient.
    TI_add_ijlk = weights_ijlk * (np.hypot(TI_add_ijlk, TI_ilk[:, na]) - TI_ilk[:, na])
    return TI_add_ijlk
Calculate the added turbulence intensity at locations specified by downstream distances (dw_jl) and crosswind distances (cw_jl) caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk). Returns ------- TI_eff_ijlk: array:float Effective turbulence intensity [-]
py_wake/turbulence_models/stf.py
calc_added_turbulence
hmharley/PyWake
0
python
def calc_added_turbulence(self, dw_ijlk, cw_ijlk, D_src_il, ct_ilk, TI_ilk, **_): ' Calculate the added turbulence intensity at locations specified by\n downstream distances (dw_jl) and crosswind distances (cw_jl)\n caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk).\n\n Returns\n -------\n TI_eff_ijlk: array:float\n Effective turbulence intensity [-]\n ' with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', 'divide by zero encountered in true_divide') np.warnings.filterwarnings('ignore', 'invalid value encountered in true_divide') TI_add_ijlk = (1 / (1.5 + ((0.8 * (dw_ijlk / D_src_il[(:, na, :, na)])) / np.sqrt(ct_ilk)[(:, na)]))) TI_add_ijlk[np.isnan(TI_add_ijlk)] = 0 weights_ijlk = self.weight(dw_ijlk, cw_ijlk, D_src_il) TI_add_ijlk = (weights_ijlk * (np.hypot(TI_add_ijlk, TI_ilk[(:, na)]) - TI_ilk[(:, na)])) return TI_add_ijlk
def calc_added_turbulence(self, dw_ijlk, cw_ijlk, D_src_il, ct_ilk, TI_ilk, **_): ' Calculate the added turbulence intensity at locations specified by\n downstream distances (dw_jl) and crosswind distances (cw_jl)\n caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk).\n\n Returns\n -------\n TI_eff_ijlk: array:float\n Effective turbulence intensity [-]\n ' with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', 'divide by zero encountered in true_divide') np.warnings.filterwarnings('ignore', 'invalid value encountered in true_divide') TI_add_ijlk = (1 / (1.5 + ((0.8 * (dw_ijlk / D_src_il[(:, na, :, na)])) / np.sqrt(ct_ilk)[(:, na)]))) TI_add_ijlk[np.isnan(TI_add_ijlk)] = 0 weights_ijlk = self.weight(dw_ijlk, cw_ijlk, D_src_il) TI_add_ijlk = (weights_ijlk * (np.hypot(TI_add_ijlk, TI_ilk[(:, na)]) - TI_ilk[(:, na)])) return TI_add_ijlk<|docstring|>Calculate the added turbulence intensity at locations specified by downstream distances (dw_jl) and crosswind distances (cw_jl) caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk). Returns ------- TI_eff_ijlk: array:float Effective turbulence intensity [-]<|endoftext|>
78309a837be4ed3bd1478664c3758d4857fb1a57af5f1469b9e22167938370ed
def calc_added_turbulence(self, dw_ijlk, cw_ijlk, D_src_il, WS_ilk, TI_ilk, **_):
    """Calculate the added turbulence intensity at locations specified by
    downstream distances (dw_ijlk) and crosswind distances (cw_ijlk)
    caused by the wake of a turbine (diameter: D_src_il, wind speed: WS_ilk).

    Returns
    -------
    TI_add_ijlk : array:float
        Added turbulence intensity [-]
    """
    # Maximum added TI at the wake centreline, decaying with normalised
    # downstream distance and growing with the square root of wind speed.
    denom_ijlk = 1.5 + 0.3 * (dw_ijlk / D_src_il[:, na, :, na]) * np.sqrt(WS_ilk)[:, na]
    TI_maxadd_ijlk = 0.9 / denom_ijlk
    ambient_ijlk = TI_ilk[:, na]
    w_ijlk = self.weight(dw_ijlk, cw_ijlk, D_src_il)
    # Quadrature combination with ambient TI; only the excess over ambient is returned.
    return w_ijlk * (np.hypot(TI_maxadd_ijlk, ambient_ijlk) - ambient_ijlk)
Calculate the added turbulence intensity at locations specified by downstream distances (dw_jl) and crosswind distances (cw_jl) caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk). Returns ------- TI_eff_jlk: array:float Effective turbulence intensity [-]
py_wake/turbulence_models/stf.py
calc_added_turbulence
hmharley/PyWake
0
python
def calc_added_turbulence(self, dw_ijlk, cw_ijlk, D_src_il, WS_ilk, TI_ilk, **_): ' Calculate the added turbulence intensity at locations specified by\n downstream distances (dw_jl) and crosswind distances (cw_jl)\n caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk).\n\n Returns\n -------\n TI_eff_jlk: array:float\n Effective turbulence intensity [-]\n ' TI_maxadd_ijlk = (0.9 / (1.5 + ((0.3 * (dw_ijlk / D_src_il[(:, na, :, na)])) * np.sqrt(WS_ilk)[(:, na)]))) weights_ijlk = self.weight(dw_ijlk, cw_ijlk, D_src_il) TI_add_ijlk = (weights_ijlk * (np.hypot(TI_maxadd_ijlk, TI_ilk[(:, na)]) - TI_ilk[(:, na)])) return TI_add_ijlk
def calc_added_turbulence(self, dw_ijlk, cw_ijlk, D_src_il, WS_ilk, TI_ilk, **_): ' Calculate the added turbulence intensity at locations specified by\n downstream distances (dw_jl) and crosswind distances (cw_jl)\n caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk).\n\n Returns\n -------\n TI_eff_jlk: array:float\n Effective turbulence intensity [-]\n ' TI_maxadd_ijlk = (0.9 / (1.5 + ((0.3 * (dw_ijlk / D_src_il[(:, na, :, na)])) * np.sqrt(WS_ilk)[(:, na)]))) weights_ijlk = self.weight(dw_ijlk, cw_ijlk, D_src_il) TI_add_ijlk = (weights_ijlk * (np.hypot(TI_maxadd_ijlk, TI_ilk[(:, na)]) - TI_ilk[(:, na)])) return TI_add_ijlk<|docstring|>Calculate the added turbulence intensity at locations specified by downstream distances (dw_jl) and crosswind distances (cw_jl) caused by the wake of a turbine (diameter: D_src_l, thrust coefficient: Ct_lk). Returns ------- TI_eff_jlk: array:float Effective turbulence intensity [-]<|endoftext|>