Column summary for this dataset dump (reconstructed from the flattened header). Each record below lists these fields in this order; fields whose value is empty are omitted from the record.

column              dtype          min    max
nwo                 stringlengths  5      106
sha                 stringlengths  40     40
path                stringlengths  4      174
language            stringclasses  1 value
identifier          stringlengths  1      140
parameters          stringlengths  0      87.7k
argument_list       stringclasses  1 value
return_statement    stringlengths  0      426k
docstring           stringlengths  0      64.3k
docstring_summary   stringlengths  0      26.3k
docstring_tokens    list
function            stringlengths  18     4.83M
function_tokens     list
url                 stringlengths  83     304
NervanaSystems/neon
8c3fb8a93b4a89303467b25817c60536542d08bd
neon/data/imagecaption.py
python
Flickr8k.__init__
(self, path='.', max_images=-1)
[]
def __init__(self, path='.', max_images=-1):
    url = 'https://s3-us-west-1.amazonaws.com/neon-stockdatasets/image-caption'
    super(Flickr8k, self).__init__('flickr8k.zip', url, 49165563, path=path)
    self.max_images = max_images
[ "def", "__init__", "(", "self", ",", "path", "=", "'.'", ",", "max_images", "=", "-", "1", ")", ":", "url", "=", "'https://s3-us-west-1.amazonaws.com/neon-stockdatasets/image-caption'", "super", "(", "Flickr8k", ",", "self", ")", ".", "__init__", "(", "'flickr8k.zip'", ",", "url", ",", "49165563", ",", "path", "=", "path", ")", "self", ".", "max_images", "=", "max_images" ]
https://github.com/NervanaSystems/neon/blob/8c3fb8a93b4a89303467b25817c60536542d08bd/neon/data/imagecaption.py#L378-L384
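A minimal usage sketch for the constructor above, assuming neon is installed; the target directory and image cap are hypothetical placeholders:

# Hypothetical usage (not from the dataset): downloads flickr8k.zip on first use.
from neon.data.imagecaption import Flickr8k

ds = Flickr8k(path='/tmp/data', max_images=100)  # cap how many images are loaded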
circuits/circuits
a1c404174835bc94bacb4b090280999d90f14e3b
circuits/net/events.py
python
close.__init__
(self, *args)
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
[ "x", ".", "__init__", "(", "...", ")", "initializes", "x", ";", "see", "x", ".", "__class__", ".", "__doc__", "for", "signature" ]
def __init__(self, *args):
    "x.__init__(...) initializes x; see x.__class__.__doc__ for signature"
    super(close, self).__init__(*args)
[ "def", "__init__", "(", "self", ",", "*", "args", ")", ":", "super", "(", "close", ",", "self", ")", ".", "__init__", "(", "*", "args", ")" ]
https://github.com/circuits/circuits/blob/a1c404174835bc94bacb4b090280999d90f14e3b/circuits/net/events.py#L204-L207
fake-name/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
WebMirror/management/rss_parser_funcs/feed_parse_extractOtomeshishizaWordpressCom.py
python
extractOtomeshishizaWordpressCom
(item)
return False
Parser for 'otomeshishiza.wordpress.com'
Parser for 'otomeshishiza.wordpress.com'
[ "Parser", "for", "otomeshishiza", ".", "wordpress", ".", "com" ]
def extractOtomeshishizaWordpressCom(item):
    '''
    Parser for 'otomeshishiza.wordpress.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    if item['title'].startswith("Protected: "):
        return None
    tagmap = [
        ('Love You 59 Seconds', '[Online Gaming] Love You 59 Seconds', 'translated'),
        ('The Pregnant Woman Next Door How Are You Doing?', 'The Pregnant Woman Next Door, How Are You Doing?', 'translated'),
        ('Ever Since I Take Home An Adonis Who Has Lost His Business', 'Ever Since I Take Home An Adonis Who Has Lost His Business', 'translated'),
        ('The Love Story of A Passerby', 'The Love Story of A Passerby', 'translated'),
        ('The Paternity Guard', 'The Paternity Guard', 'translated'),
        ('Reincarnation of a Superstar', 'Reincarnation of a Superstar', 'translated'),
        ('Friends With Benefits', 'Friends With Benefits', 'translated'),
        ('YDXJ10', 'Yandai Xie Jie No. 10', 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
[ "def", "extractOtomeshishizaWordpressCom", "(", "item", ")", ":", "vol", ",", "chp", ",", "frag", ",", "postfix", "=", "extractVolChapterFragmentPostfix", "(", "item", "[", "'title'", "]", ")", "if", "not", "(", "chp", "or", "vol", ")", "or", "\"preview\"", "in", "item", "[", "'title'", "]", ".", "lower", "(", ")", ":", "return", "None", "if", "item", "[", "'title'", "]", ".", "startswith", "(", "\"Protected: \"", ")", ":", "return", "None", "tagmap", "=", "[", "(", "'Love You 59 Seconds'", ",", "'[Online Gaming] Love You 59 Seconds'", ",", "'translated'", ")", ",", "(", "'The Pregnant Woman Next Door How Are You Doing?'", ",", "'The Pregnant Woman Next Door, How Are You Doing?'", ",", "'translated'", ")", ",", "(", "'Ever Since I Take Home An Adonis Who Has Lost His Business'", ",", "'Ever Since I Take Home An Adonis Who Has Lost His Business'", ",", "'translated'", ")", ",", "(", "'The Love Story of A Passerby'", ",", "'The Love Story of A Passerby'", ",", "'translated'", ")", ",", "(", "'The Paternity Guard'", ",", "'The Paternity Guard'", ",", "'translated'", ")", ",", "(", "'Reincarnation of a Superstar'", ",", "'Reincarnation of a Superstar'", ",", "'translated'", ")", ",", "(", "'Friends With Benefits'", ",", "'Friends With Benefits'", ",", "'translated'", ")", ",", "(", "'YDXJ10'", ",", "'Yandai Xie Jie No. 10'", ",", "'translated'", ")", ",", "]", "for", "tagname", ",", "name", ",", "tl_type", "in", "tagmap", ":", "if", "tagname", "in", "item", "[", "'tags'", "]", ":", "return", "buildReleaseMessageWithType", "(", "item", ",", "name", ",", "vol", ",", "chp", ",", "frag", "=", "frag", ",", "postfix", "=", "postfix", ",", "tl_type", "=", "tl_type", ")", "return", "False" ]
https://github.com/fake-name/ReadableWebProxy/blob/ed5c7abe38706acc2684a1e6cd80242a03c5f010/WebMirror/management/rss_parser_funcs/feed_parse_extractOtomeshishizaWordpressCom.py#L1-L28
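A hedged sketch of the feed-item shape this parser expects, inferred only from the keys the code reads (item['title'], item['tags']); the title text is made up and the chapter-parsing helper is external to this record:

item = {'title': 'Friends With Benefits c12', 'tags': ['Friends With Benefits']}
extractOtomeshishizaWordpressCom(item)  # -> release message, or None / False if unmatched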
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/distutils/cmd.py
python
Command.copy_tree
(self, infile, outfile, preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1)
return dir_util.copy_tree( infile, outfile, preserve_mode,preserve_times,preserve_symlinks, not self.force, dry_run=self.dry_run)
Copy an entire directory tree respecting verbose, dry-run, and force flags.
Copy an entire directory tree respecting verbose, dry-run, and force flags.
[ "Copy", "an", "entire", "directory", "tree", "respecting", "verbose", "dry", "-", "run", "and", "force", "flags", "." ]
def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1,
              preserve_symlinks=0, level=1):
    """Copy an entire directory tree respecting verbose, dry-run,
    and force flags.
    """
    return dir_util.copy_tree(infile, outfile, preserve_mode,
                              preserve_times, preserve_symlinks,
                              not self.force, dry_run=self.dry_run)
[ "def", "copy_tree", "(", "self", ",", "infile", ",", "outfile", ",", "preserve_mode", "=", "1", ",", "preserve_times", "=", "1", ",", "preserve_symlinks", "=", "0", ",", "level", "=", "1", ")", ":", "return", "dir_util", ".", "copy_tree", "(", "infile", ",", "outfile", ",", "preserve_mode", ",", "preserve_times", ",", "preserve_symlinks", ",", "not", "self", ".", "force", ",", "dry_run", "=", "self", ".", "dry_run", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/distutils/cmd.py#L367-L377
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/idlelib/query.py
python
SectionName.entry_ok
(self)
return name
Return sensible ConfigParser section name or None.
Return sensible ConfigParser section name or None.
[ "Return", "sensible", "ConfigParser", "section", "name", "or", "None", "." ]
def entry_ok(self):
    "Return sensible ConfigParser section name or None."
    name = self.entry.get().strip()
    if not name:
        self.showerror('no name specified.')
        return None
    elif len(name) > 30:
        self.showerror('name is longer than 30 characters.')
        return None
    elif name in self.used_names:
        self.showerror('name is already in use.')
        return None
    return name
[ "def", "entry_ok", "(", "self", ")", ":", "name", "=", "self", ".", "entry", ".", "get", "(", ")", ".", "strip", "(", ")", "if", "not", "name", ":", "self", ".", "showerror", "(", "'no name specified.'", ")", "return", "None", "elif", "len", "(", "name", ")", ">", "30", ":", "self", ".", "showerror", "(", "'name is longer than 30 characters.'", ")", "return", "None", "elif", "name", "in", "self", ".", "used_names", ":", "self", ".", "showerror", "(", "'name is already in use.'", ")", "return", "None", "return", "name" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/idlelib/query.py#L176-L188
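The same three validation rules in isolation, as a self-contained sketch (section_name_ok is a hypothetical helper, not idlelib API):

def section_name_ok(name, used_names):
    # nonempty after stripping, at most 30 chars, and not already taken
    name = name.strip()
    if not name or len(name) > 30 or name in used_names:
        return None
    return name

assert section_name_ok('  keys  ', set()) == 'keys'
assert section_name_ok('x' * 31, set()) is None
assert section_name_ok('keys', {'keys'}) is None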
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/idlelib/UndoDelegator.py
python
InsertCommand.__init__
(self, index1, chars, tags=None)
[]
def __init__(self, index1, chars, tags=None):
    Command.__init__(self, index1, None, chars, tags)
[ "def", "__init__", "(", "self", ",", "index1", ",", "chars", ",", "tags", "=", "None", ")", ":", "Command", ".", "__init__", "(", "self", ",", "index1", ",", "None", ",", "chars", ",", "tags", ")" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/idlelib/UndoDelegator.py#L210-L211
lspvic/jupyter_tensorboard
329cb3f27569a0751e28552ed5bad28916f28cfe
jupyter_tensorboard/application.py
python
ToggleJupyterTensorboardApp.start
(self)
Perform the App's actions as configured.
Perform the App's actions as configured.
[ "Perform", "the", "App", "s", "actions", "as", "configured", "." ]
def start(self):
    """Perform the App's actions as configured."""
    if self.extra_args:
        sys.exit('{} takes no extra arguments'.format(self.name))
    else:
        if self._toggle_value:
            nbextensions.install_nbextension_python(
                _pkg_name, overwrite=True, symlink=False,
                user=self.user, sys_prefix=self.sys_prefix, prefix=None,
                nbextensions_dir=None, logger=None)
        else:
            nbextensions.uninstall_nbextension_python(
                _pkg_name, user=self.user, sys_prefix=self.sys_prefix,
                prefix=None, nbextensions_dir=None, logger=None)
        self.toggle_nbextension_python(_pkg_name)
        self.toggle_server_extension_python(_pkg_name)
[ "def", "start", "(", "self", ")", ":", "if", "self", ".", "extra_args", ":", "sys", ".", "exit", "(", "'{} takes no extra arguments'", ".", "format", "(", "self", ".", "name", ")", ")", "else", ":", "if", "self", ".", "_toggle_value", ":", "nbextensions", ".", "install_nbextension_python", "(", "_pkg_name", ",", "overwrite", "=", "True", ",", "symlink", "=", "False", ",", "user", "=", "self", ".", "user", ",", "sys_prefix", "=", "self", ".", "sys_prefix", ",", "prefix", "=", "None", ",", "nbextensions_dir", "=", "None", ",", "logger", "=", "None", ")", "else", ":", "nbextensions", ".", "uninstall_nbextension_python", "(", "_pkg_name", ",", "user", "=", "self", ".", "user", ",", "sys_prefix", "=", "self", ".", "sys_prefix", ",", "prefix", "=", "None", ",", "nbextensions_dir", "=", "None", ",", "logger", "=", "None", ")", "self", ".", "toggle_nbextension_python", "(", "_pkg_name", ")", "self", ".", "toggle_server_extension_python", "(", "_pkg_name", ")" ]
https://github.com/lspvic/jupyter_tensorboard/blob/329cb3f27569a0751e28552ed5bad28916f28cfe/jupyter_tensorboard/application.py#L53-L70
researchmm/tasn
5dba8ccc096cedc63913730eeea14a9647911129
tasn-mxnet/python/mxnet/autograd.py
python
set_recording
(is_recording)
return bool(prev.value)
Set status to recording/not recording. When recording, graph will be constructed for gradient computation.

Parameters
----------
is_recording: bool

Returns
-------
previous state before this set.
Set status to recording/not recording. When recording, graph will be constructed for gradient computation.
[ "Set", "status", "to", "recording", "/", "not", "recording", ".", "When", "recording", "graph", "will", "be", "constructed", "for", "gradient", "computation", "." ]
def set_recording(is_recording):  # pylint: disable=redefined-outer-name
    """Set status to recording/not recording. When recording, graph will be constructed
    for gradient computation.

    Parameters
    ----------
    is_recording: bool

    Returns
    -------
    previous state before this set.
    """
    prev = ctypes.c_int()
    check_call(_LIB.MXAutogradSetIsRecording(
        ctypes.c_int(is_recording), ctypes.byref(prev)))
    return bool(prev.value)
[ "def", "set_recording", "(", "is_recording", ")", ":", "#pylint: disable=redefined-outer-name", "prev", "=", "ctypes", ".", "c_int", "(", ")", "check_call", "(", "_LIB", ".", "MXAutogradSetIsRecording", "(", "ctypes", ".", "c_int", "(", "is_recording", ")", ",", "ctypes", ".", "byref", "(", "prev", ")", ")", ")", "return", "bool", "(", "prev", ".", "value", ")" ]
https://github.com/researchmm/tasn/blob/5dba8ccc096cedc63913730eeea14a9647911129/tasn-mxnet/python/mxnet/autograd.py#L35-L50
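Because set_recording returns the previous state, it can be toggled and restored safely. A hedged usage sketch, assuming an MXNet install:

from mxnet import autograd

prev = autograd.set_recording(True)   # returns the previous recording state
# ... run forward passes here; the graph is recorded for gradients ...
autograd.set_recording(prev)          # restore whatever was set before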
stanfordnlp/stanza
e44d1c88340e33bf9813e6f5a6bd24387eefc4b2
stanza/models/common/doc.py
python
Sentence.build_ents
(self)
return self.ents
Build the list of entities by iterating over all tokens. Return all entities as a list. Note that unlike other attributes, since NER requires raw text, the actual tagging are always performed at and attached to the `Token`s, instead of `Word`s.
Build the list of entities by iterating over all tokens. Return all entities as a list.
[ "Build", "the", "list", "of", "entities", "by", "iterating", "over", "all", "tokens", ".", "Return", "all", "entities", "as", "a", "list", "." ]
def build_ents(self):
    """ Build the list of entities by iterating over all tokens. Return all entities as a list.

    Note that unlike other attributes, since NER requires raw text, the actual tagging are always
    performed at and attached to the `Token`s, instead of `Word`s.
    """
    self.ents = []
    tags = [w.ner for w in self.tokens]
    decoded = decode_from_bioes(tags)
    for e in decoded:
        ent_tokens = self.tokens[e['start']:e['end']+1]
        self.ents.append(Span(tokens=ent_tokens, type=e['type'], doc=self.doc, sent=self))
    return self.ents
[ "def", "build_ents", "(", "self", ")", ":", "self", ".", "ents", "=", "[", "]", "tags", "=", "[", "w", ".", "ner", "for", "w", "in", "self", ".", "tokens", "]", "decoded", "=", "decode_from_bioes", "(", "tags", ")", "for", "e", "in", "decoded", ":", "ent_tokens", "=", "self", ".", "tokens", "[", "e", "[", "'start'", "]", ":", "e", "[", "'end'", "]", "+", "1", "]", "self", ".", "ents", ".", "append", "(", "Span", "(", "tokens", "=", "ent_tokens", ",", "type", "=", "e", "[", "'type'", "]", ",", "doc", "=", "self", ".", "doc", ",", "sent", "=", "self", ")", ")", "return", "self", ".", "ents" ]
https://github.com/stanfordnlp/stanza/blob/e44d1c88340e33bf9813e6f5a6bd24387eefc4b2/stanza/models/common/doc.py#L498-L510
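The public surface of build_ents is the sentence-level ents attribute. A hedged usage sketch, assuming stanza is installed and the English model has been downloaded (stanza.download('en')):

import stanza

nlp = stanza.Pipeline('en', processors='tokenize,ner')
doc = nlp('Barack Obama was born in Hawaii.')
for sent in doc.sentences:
    print(sent.ents)  # Spans assembled by build_ents() from the tokens' BIOES tags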
MIC-DKFZ/nnUNet
cd9cb633e14e5d27aec43ea21c958449437c99b2
nnunet/inference/predict.py
python
predict_cases
(model, list_of_lists, output_filenames, folds, save_npz, num_threads_preprocessing, num_threads_nifti_save, segs_from_prev_stage=None, do_tta=True, mixed_precision=True, overwrite_existing=False, all_in_gpu=False, step_size=0.5, checkpoint_name="model_final_checkpoint", segmentation_export_kwargs: dict = None, disable_postprocessing: bool = False)
:param segmentation_export_kwargs:
:param model: folder where the model is saved, must contain fold_x subfolders
:param list_of_lists: [[case0_0000.nii.gz, case0_0001.nii.gz], [case1_0000.nii.gz, case1_0001.nii.gz], ...]
:param output_filenames: [output_file_case0.nii.gz, output_file_case1.nii.gz, ...]
:param folds: default: (0, 1, 2, 3, 4) (but can also be 'all' or a subset of the five folds, for example use (0, ) for using only fold_0
:param save_npz: default: False
:param num_threads_preprocessing:
:param num_threads_nifti_save:
:param segs_from_prev_stage:
:param do_tta: default: True, can be set to False for a 8x speedup at the cost of a reduced segmentation quality
:param overwrite_existing: default: True
:param mixed_precision: if None then we take no action. If True/False we overwrite what the model has in its init
:return:
:param segmentation_export_kwargs: :param model: folder where the model is saved, must contain fold_x subfolders :param list_of_lists: [[case0_0000.nii.gz, case0_0001.nii.gz], [case1_0000.nii.gz, case1_0001.nii.gz], ...] :param output_filenames: [output_file_case0.nii.gz, output_file_case1.nii.gz, ...] :param folds: default: (0, 1, 2, 3, 4) (but can also be 'all' or a subset of the five folds, for example use (0, ) for using only fold_0 :param save_npz: default: False :param num_threads_preprocessing: :param num_threads_nifti_save: :param segs_from_prev_stage: :param do_tta: default: True, can be set to False for a 8x speedup at the cost of a reduced segmentation quality :param overwrite_existing: default: True :param mixed_precision: if None then we take no action. If True/False we overwrite what the model has in its init :return:
[ ":", "param", "segmentation_export_kwargs", ":", ":", "param", "model", ":", "folder", "where", "the", "model", "is", "saved", "must", "contain", "fold_x", "subfolders", ":", "param", "list_of_lists", ":", "[[", "case0_0000", ".", "nii", ".", "gz", "case0_0001", ".", "nii", ".", "gz", "]", "[", "case1_0000", ".", "nii", ".", "gz", "case1_0001", ".", "nii", ".", "gz", "]", "...", "]", ":", "param", "output_filenames", ":", "[", "output_file_case0", ".", "nii", ".", "gz", "output_file_case1", ".", "nii", ".", "gz", "...", "]", ":", "param", "folds", ":", "default", ":", "(", "0", "1", "2", "3", "4", ")", "(", "but", "can", "also", "be", "all", "or", "a", "subset", "of", "the", "five", "folds", "for", "example", "use", "(", "0", ")", "for", "using", "only", "fold_0", ":", "param", "save_npz", ":", "default", ":", "False", ":", "param", "num_threads_preprocessing", ":", ":", "param", "num_threads_nifti_save", ":", ":", "param", "segs_from_prev_stage", ":", ":", "param", "do_tta", ":", "default", ":", "True", "can", "be", "set", "to", "False", "for", "a", "8x", "speedup", "at", "the", "cost", "of", "a", "reduced", "segmentation", "quality", ":", "param", "overwrite_existing", ":", "default", ":", "True", ":", "param", "mixed_precision", ":", "if", "None", "then", "we", "take", "no", "action", ".", "If", "True", "/", "False", "we", "overwrite", "what", "the", "model", "has", "in", "its", "init", ":", "return", ":" ]
def predict_cases(model, list_of_lists, output_filenames, folds, save_npz, num_threads_preprocessing,
                  num_threads_nifti_save, segs_from_prev_stage=None, do_tta=True, mixed_precision=True,
                  overwrite_existing=False, all_in_gpu=False, step_size=0.5,
                  checkpoint_name="model_final_checkpoint",
                  segmentation_export_kwargs: dict = None, disable_postprocessing: bool = False):
    """
    :param segmentation_export_kwargs:
    :param model: folder where the model is saved, must contain fold_x subfolders
    :param list_of_lists: [[case0_0000.nii.gz, case0_0001.nii.gz], [case1_0000.nii.gz, case1_0001.nii.gz], ...]
    :param output_filenames: [output_file_case0.nii.gz, output_file_case1.nii.gz, ...]
    :param folds: default: (0, 1, 2, 3, 4) (but can also be 'all' or a subset of the five folds, for example use
    (0, ) for using only fold_0
    :param save_npz: default: False
    :param num_threads_preprocessing:
    :param num_threads_nifti_save:
    :param segs_from_prev_stage:
    :param do_tta: default: True, can be set to False for a 8x speedup at the cost of a reduced segmentation quality
    :param overwrite_existing: default: True
    :param mixed_precision: if None then we take no action. If True/False we overwrite what the model has in its init
    :return:
    """
    assert len(list_of_lists) == len(output_filenames)
    if segs_from_prev_stage is not None:
        assert len(segs_from_prev_stage) == len(output_filenames)

    pool = Pool(num_threads_nifti_save)
    results = []

    cleaned_output_files = []
    for o in output_filenames:
        dr, f = os.path.split(o)
        if len(dr) > 0:
            maybe_mkdir_p(dr)
        if not f.endswith(".nii.gz"):
            f, _ = os.path.splitext(f)
            f = f + ".nii.gz"
        cleaned_output_files.append(join(dr, f))

    if not overwrite_existing:
        print("number of cases:", len(list_of_lists))
        # if save_npz=True then we should also check for missing npz files
        not_done_idx = [i for i, j in enumerate(cleaned_output_files) if
                        (not isfile(j)) or (save_npz and not isfile(j[:-7] + '.npz'))]

        cleaned_output_files = [cleaned_output_files[i] for i in not_done_idx]
        list_of_lists = [list_of_lists[i] for i in not_done_idx]
        if segs_from_prev_stage is not None:
            segs_from_prev_stage = [segs_from_prev_stage[i] for i in not_done_idx]

        print("number of cases that still need to be predicted:", len(cleaned_output_files))

    print("emptying cuda cache")
    torch.cuda.empty_cache()

    print("loading parameters for folds,", folds)
    trainer, params = load_model_and_checkpoint_files(model, folds, mixed_precision=mixed_precision,
                                                      checkpoint_name=checkpoint_name)

    if segmentation_export_kwargs is None:
        if 'segmentation_export_params' in trainer.plans.keys():
            force_separate_z = trainer.plans['segmentation_export_params']['force_separate_z']
            interpolation_order = trainer.plans['segmentation_export_params']['interpolation_order']
            interpolation_order_z = trainer.plans['segmentation_export_params']['interpolation_order_z']
        else:
            force_separate_z = None
            interpolation_order = 1
            interpolation_order_z = 0
    else:
        force_separate_z = segmentation_export_kwargs['force_separate_z']
        interpolation_order = segmentation_export_kwargs['interpolation_order']
        interpolation_order_z = segmentation_export_kwargs['interpolation_order_z']

    print("starting preprocessing generator")
    preprocessing = preprocess_multithreaded(trainer, list_of_lists, cleaned_output_files,
                                             num_threads_preprocessing, segs_from_prev_stage)
    print("starting prediction...")
    all_output_files = []
    for preprocessed in preprocessing:
        output_filename, (d, dct) = preprocessed
        all_output_files.append(all_output_files)
        if isinstance(d, str):
            data = np.load(d)
            os.remove(d)
            d = data

        print("predicting", output_filename)
        trainer.load_checkpoint_ram(params[0], False)
        softmax = trainer.predict_preprocessed_data_return_seg_and_softmax(
            d, do_mirroring=do_tta, mirror_axes=trainer.data_aug_params['mirror_axes'],
            use_sliding_window=True, step_size=step_size, use_gaussian=True,
            all_in_gpu=all_in_gpu, mixed_precision=mixed_precision)[1]

        for p in params[1:]:
            trainer.load_checkpoint_ram(p, False)
            softmax += trainer.predict_preprocessed_data_return_seg_and_softmax(
                d, do_mirroring=do_tta, mirror_axes=trainer.data_aug_params['mirror_axes'],
                use_sliding_window=True, step_size=step_size, use_gaussian=True,
                all_in_gpu=all_in_gpu, mixed_precision=mixed_precision)[1]

        if len(params) > 1:
            softmax /= len(params)

        transpose_forward = trainer.plans.get('transpose_forward')
        if transpose_forward is not None:
            transpose_backward = trainer.plans.get('transpose_backward')
            softmax = softmax.transpose([0] + [i + 1 for i in transpose_backward])

        if save_npz:
            npz_file = output_filename[:-7] + ".npz"
        else:
            npz_file = None

        if hasattr(trainer, 'regions_class_order'):
            region_class_order = trainer.regions_class_order
        else:
            region_class_order = None

        """There is a problem with python process communication that prevents us from communicating objects
        larger than 2 GB between processes (basically when the length of the pickle string that will be sent is
        communicated by the multiprocessing.Pipe object then the placeholder (I think) does not allow for long
        enough strings (lol). This could be fixed by changing i to l (for long) but that would require manually
        patching system python code. We circumvent that problem here by saving softmax_pred to a npy file that will
        then be read (and finally deleted) by the Process. save_segmentation_nifti_from_softmax can take either
        filename or np.ndarray and will handle this automatically"""
        bytes_per_voxel = 4
        if all_in_gpu:
            bytes_per_voxel = 2  # if all_in_gpu then the return value is half (float16)
        if np.prod(softmax.shape) > (2e9 / bytes_per_voxel * 0.85):  # * 0.85 just to be save
            print("This output is too large for python process-process communication. "
                  "Saving output temporarily to disk")
            np.save(output_filename[:-7] + ".npy", softmax)
            softmax = output_filename[:-7] + ".npy"

        results.append(pool.starmap_async(save_segmentation_nifti_from_softmax,
                                          ((softmax, output_filename, dct, interpolation_order,
                                            region_class_order, None, None, npz_file, None,
                                            force_separate_z, interpolation_order_z),)
                                          ))

    print("inference done. Now waiting for the segmentation export to finish...")
    _ = [i.get() for i in results]
    # now apply postprocessing
    # first load the postprocessing properties if they are present. Else raise a well visible warning
    if not disable_postprocessing:
        results = []
        pp_file = join(model, "postprocessing.json")
        if isfile(pp_file):
            print("postprocessing...")
            shutil.copy(pp_file, os.path.abspath(os.path.dirname(output_filenames[0])))
            # for_which_classes stores for which of the classes everything but the largest
            # connected component needs to be removed
            for_which_classes, min_valid_obj_size = load_postprocessing(pp_file)
            results.append(pool.starmap_async(load_remove_save,
                                              zip(output_filenames, output_filenames,
                                                  [for_which_classes] * len(output_filenames),
                                                  [min_valid_obj_size] * len(output_filenames))))
            _ = [i.get() for i in results]
        else:
            print("WARNING! Cannot run postprocessing because the postprocessing file is missing. Make sure to run "
                  "consolidate_folds in the output folder of the model first!\nThe folder you need to run this in is "
                  "%s" % model)

    pool.close()
    pool.join()
[ "def", "predict_cases", "(", "model", ",", "list_of_lists", ",", "output_filenames", ",", "folds", ",", "save_npz", ",", "num_threads_preprocessing", ",", "num_threads_nifti_save", ",", "segs_from_prev_stage", "=", "None", ",", "do_tta", "=", "True", ",", "mixed_precision", "=", "True", ",", "overwrite_existing", "=", "False", ",", "all_in_gpu", "=", "False", ",", "step_size", "=", "0.5", ",", "checkpoint_name", "=", "\"model_final_checkpoint\"", ",", "segmentation_export_kwargs", ":", "dict", "=", "None", ",", "disable_postprocessing", ":", "bool", "=", "False", ")", ":", "assert", "len", "(", "list_of_lists", ")", "==", "len", "(", "output_filenames", ")", "if", "segs_from_prev_stage", "is", "not", "None", ":", "assert", "len", "(", "segs_from_prev_stage", ")", "==", "len", "(", "output_filenames", ")", "pool", "=", "Pool", "(", "num_threads_nifti_save", ")", "results", "=", "[", "]", "cleaned_output_files", "=", "[", "]", "for", "o", "in", "output_filenames", ":", "dr", ",", "f", "=", "os", ".", "path", ".", "split", "(", "o", ")", "if", "len", "(", "dr", ")", ">", "0", ":", "maybe_mkdir_p", "(", "dr", ")", "if", "not", "f", ".", "endswith", "(", "\".nii.gz\"", ")", ":", "f", ",", "_", "=", "os", ".", "path", ".", "splitext", "(", "f", ")", "f", "=", "f", "+", "\".nii.gz\"", "cleaned_output_files", ".", "append", "(", "join", "(", "dr", ",", "f", ")", ")", "if", "not", "overwrite_existing", ":", "print", "(", "\"number of cases:\"", ",", "len", "(", "list_of_lists", ")", ")", "# if save_npz=True then we should also check for missing npz files", "not_done_idx", "=", "[", "i", "for", "i", ",", "j", "in", "enumerate", "(", "cleaned_output_files", ")", "if", "(", "not", "isfile", "(", "j", ")", ")", "or", "(", "save_npz", "and", "not", "isfile", "(", "j", "[", ":", "-", "7", "]", "+", "'.npz'", ")", ")", "]", "cleaned_output_files", "=", "[", "cleaned_output_files", "[", "i", "]", "for", "i", "in", "not_done_idx", "]", "list_of_lists", "=", "[", "list_of_lists", "[", "i", "]", "for", "i", "in", "not_done_idx", "]", "if", "segs_from_prev_stage", "is", "not", "None", ":", "segs_from_prev_stage", "=", "[", "segs_from_prev_stage", "[", "i", "]", "for", "i", "in", "not_done_idx", "]", "print", "(", "\"number of cases that still need to be predicted:\"", ",", "len", "(", "cleaned_output_files", ")", ")", "print", "(", "\"emptying cuda cache\"", ")", "torch", ".", "cuda", ".", "empty_cache", "(", ")", "print", "(", "\"loading parameters for folds,\"", ",", "folds", ")", "trainer", ",", "params", "=", "load_model_and_checkpoint_files", "(", "model", ",", "folds", ",", "mixed_precision", "=", "mixed_precision", ",", "checkpoint_name", "=", "checkpoint_name", ")", "if", "segmentation_export_kwargs", "is", "None", ":", "if", "'segmentation_export_params'", "in", "trainer", ".", "plans", ".", "keys", "(", ")", ":", "force_separate_z", "=", "trainer", ".", "plans", "[", "'segmentation_export_params'", "]", "[", "'force_separate_z'", "]", "interpolation_order", "=", "trainer", ".", "plans", "[", "'segmentation_export_params'", "]", "[", "'interpolation_order'", "]", "interpolation_order_z", "=", "trainer", ".", "plans", "[", "'segmentation_export_params'", "]", "[", "'interpolation_order_z'", "]", "else", ":", "force_separate_z", "=", "None", "interpolation_order", "=", "1", "interpolation_order_z", "=", "0", "else", ":", "force_separate_z", "=", "segmentation_export_kwargs", "[", "'force_separate_z'", "]", "interpolation_order", "=", "segmentation_export_kwargs", "[", "'interpolation_order'", "]", 
"interpolation_order_z", "=", "segmentation_export_kwargs", "[", "'interpolation_order_z'", "]", "print", "(", "\"starting preprocessing generator\"", ")", "preprocessing", "=", "preprocess_multithreaded", "(", "trainer", ",", "list_of_lists", ",", "cleaned_output_files", ",", "num_threads_preprocessing", ",", "segs_from_prev_stage", ")", "print", "(", "\"starting prediction...\"", ")", "all_output_files", "=", "[", "]", "for", "preprocessed", "in", "preprocessing", ":", "output_filename", ",", "(", "d", ",", "dct", ")", "=", "preprocessed", "all_output_files", ".", "append", "(", "all_output_files", ")", "if", "isinstance", "(", "d", ",", "str", ")", ":", "data", "=", "np", ".", "load", "(", "d", ")", "os", ".", "remove", "(", "d", ")", "d", "=", "data", "print", "(", "\"predicting\"", ",", "output_filename", ")", "trainer", ".", "load_checkpoint_ram", "(", "params", "[", "0", "]", ",", "False", ")", "softmax", "=", "trainer", ".", "predict_preprocessed_data_return_seg_and_softmax", "(", "d", ",", "do_mirroring", "=", "do_tta", ",", "mirror_axes", "=", "trainer", ".", "data_aug_params", "[", "'mirror_axes'", "]", ",", "use_sliding_window", "=", "True", ",", "step_size", "=", "step_size", ",", "use_gaussian", "=", "True", ",", "all_in_gpu", "=", "all_in_gpu", ",", "mixed_precision", "=", "mixed_precision", ")", "[", "1", "]", "for", "p", "in", "params", "[", "1", ":", "]", ":", "trainer", ".", "load_checkpoint_ram", "(", "p", ",", "False", ")", "softmax", "+=", "trainer", ".", "predict_preprocessed_data_return_seg_and_softmax", "(", "d", ",", "do_mirroring", "=", "do_tta", ",", "mirror_axes", "=", "trainer", ".", "data_aug_params", "[", "'mirror_axes'", "]", ",", "use_sliding_window", "=", "True", ",", "step_size", "=", "step_size", ",", "use_gaussian", "=", "True", ",", "all_in_gpu", "=", "all_in_gpu", ",", "mixed_precision", "=", "mixed_precision", ")", "[", "1", "]", "if", "len", "(", "params", ")", ">", "1", ":", "softmax", "/=", "len", "(", "params", ")", "transpose_forward", "=", "trainer", ".", "plans", ".", "get", "(", "'transpose_forward'", ")", "if", "transpose_forward", "is", "not", "None", ":", "transpose_backward", "=", "trainer", ".", "plans", ".", "get", "(", "'transpose_backward'", ")", "softmax", "=", "softmax", ".", "transpose", "(", "[", "0", "]", "+", "[", "i", "+", "1", "for", "i", "in", "transpose_backward", "]", ")", "if", "save_npz", ":", "npz_file", "=", "output_filename", "[", ":", "-", "7", "]", "+", "\".npz\"", "else", ":", "npz_file", "=", "None", "if", "hasattr", "(", "trainer", ",", "'regions_class_order'", ")", ":", "region_class_order", "=", "trainer", ".", "regions_class_order", "else", ":", "region_class_order", "=", "None", "\"\"\"There is a problem with python process communication that prevents us from communicating objects \n larger than 2 GB between processes (basically when the length of the pickle string that will be sent is \n communicated by the multiprocessing.Pipe object then the placeholder (I think) does not allow for long \n enough strings (lol). This could be fixed by changing i to l (for long) but that would require manually \n patching system python code. We circumvent that problem here by saving softmax_pred to a npy file that will \n then be read (and finally deleted) by the Process. 
save_segmentation_nifti_from_softmax can take either \n filename or np.ndarray and will handle this automatically\"\"\"", "bytes_per_voxel", "=", "4", "if", "all_in_gpu", ":", "bytes_per_voxel", "=", "2", "# if all_in_gpu then the return value is half (float16)", "if", "np", ".", "prod", "(", "softmax", ".", "shape", ")", ">", "(", "2e9", "/", "bytes_per_voxel", "*", "0.85", ")", ":", "# * 0.85 just to be save", "print", "(", "\"This output is too large for python process-process communication. Saving output temporarily to disk\"", ")", "np", ".", "save", "(", "output_filename", "[", ":", "-", "7", "]", "+", "\".npy\"", ",", "softmax", ")", "softmax", "=", "output_filename", "[", ":", "-", "7", "]", "+", "\".npy\"", "results", ".", "append", "(", "pool", ".", "starmap_async", "(", "save_segmentation_nifti_from_softmax", ",", "(", "(", "softmax", ",", "output_filename", ",", "dct", ",", "interpolation_order", ",", "region_class_order", ",", "None", ",", "None", ",", "npz_file", ",", "None", ",", "force_separate_z", ",", "interpolation_order_z", ")", ",", ")", ")", ")", "print", "(", "\"inference done. Now waiting for the segmentation export to finish...\"", ")", "_", "=", "[", "i", ".", "get", "(", ")", "for", "i", "in", "results", "]", "# now apply postprocessing", "# first load the postprocessing properties if they are present. Else raise a well visible warning", "if", "not", "disable_postprocessing", ":", "results", "=", "[", "]", "pp_file", "=", "join", "(", "model", ",", "\"postprocessing.json\"", ")", "if", "isfile", "(", "pp_file", ")", ":", "print", "(", "\"postprocessing...\"", ")", "shutil", ".", "copy", "(", "pp_file", ",", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "output_filenames", "[", "0", "]", ")", ")", ")", "# for_which_classes stores for which of the classes everything but the largest connected component needs to be", "# removed", "for_which_classes", ",", "min_valid_obj_size", "=", "load_postprocessing", "(", "pp_file", ")", "results", ".", "append", "(", "pool", ".", "starmap_async", "(", "load_remove_save", ",", "zip", "(", "output_filenames", ",", "output_filenames", ",", "[", "for_which_classes", "]", "*", "len", "(", "output_filenames", ")", ",", "[", "min_valid_obj_size", "]", "*", "len", "(", "output_filenames", ")", ")", ")", ")", "_", "=", "[", "i", ".", "get", "(", ")", "for", "i", "in", "results", "]", "else", ":", "print", "(", "\"WARNING! Cannot run postprocessing because the postprocessing file is missing. Make sure to run \"", "\"consolidate_folds in the output folder of the model first!\\nThe folder you need to run this in is \"", "\"%s\"", "%", "model", ")", "pool", ".", "close", "(", ")", "pool", ".", "join", "(", ")" ]
https://github.com/MIC-DKFZ/nnUNet/blob/cd9cb633e14e5d27aec43ea21c958449437c99b2/nnunet/inference/predict.py#L131-L292
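A hedged invocation sketch built only from the docstring above; the model folder and case paths are hypothetical placeholders:

predict_cases(
    model='/models/nnUNet/3d_fullres/Task001',   # must contain fold_x subfolders
    list_of_lists=[['case0_0000.nii.gz', 'case0_0001.nii.gz']],
    output_filenames=['out/case0.nii.gz'],
    folds=(0,),                                   # a single fold, per the docstring
    save_npz=False,
    num_threads_preprocessing=2,
    num_threads_nifti_save=2,
)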
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/gettext.py
python
_as_int
(n)
return n
[]
def _as_int(n):
    try:
        i = round(n)
    except TypeError:
        raise TypeError('Plural value must be an integer, got %s' %
                        (n.__class__.__name__,)) from None
    return n
[ "def", "_as_int", "(", "n", ")", ":", "try", ":", "i", "=", "round", "(", "n", ")", "except", "TypeError", ":", "raise", "TypeError", "(", "'Plural value must be an integer, got %s'", "%", "(", "n", ".", "__class__", ".", "__name__", ",", ")", ")", "from", "None", "return", "n" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/gettext.py#L161-L167
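A quick behavior check: numeric values pass through unchanged (the round() result is discarded; n itself is returned), while non-numeric types trip round() and raise the friendlier TypeError:

print(_as_int(3))      # -> 3
try:
    _as_int('3')       # str has no __round__, so round() raises TypeError
except TypeError as e:
    print(e)           # Plural value must be an integer, got str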
F8LEFT/DecLLVM
d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c
python/idc.py
python
GetFirstBmask
(enum_id)
return idaapi.get_first_bmask(enum_id)
Get first bitmask in the enum (bitfield)

@param enum_id: id of enum (bitfield)

@return: the smallest bitmask of constant, or -1 if no bitmasks are defined yet. All bitmasks are sorted by their values as unsigned longs.
Get first bitmask in the enum (bitfield)
[ "Get", "first", "bitmask", "in", "the", "enum", "(", "bitfield", ")" ]
def GetFirstBmask(enum_id):
    """
    Get first bitmask in the enum (bitfield)

    @param enum_id: id of enum (bitfield)

    @return: the smallest bitmask of constant, or -1 if no bitmasks
             are defined yet. All bitmasks are sorted by their values
             as unsigned longs.
    """
    return idaapi.get_first_bmask(enum_id)
[ "def", "GetFirstBmask", "(", "enum_id", ")", ":", "return", "idaapi", ".", "get_first_bmask", "(", "enum_id", ")" ]
https://github.com/F8LEFT/DecLLVM/blob/d38e45e3d0dd35634adae1d0cf7f96f3bd96e74c/python/idc.py#L5906-L5917
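A hedged sketch of walking every bitmask of an enum inside IDA. It assumes the classic idc API where GetNextBmask(enum_id, bmask) yields the successor or -1; treat that companion call as an assumption here:

bmask = GetFirstBmask(enum_id)
while bmask != -1:
    print(hex(bmask))                     # visit each bitmask in ascending order
    bmask = GetNextBmask(enum_id, bmask)  # assumed companion API, -1 when exhausted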
evilhero/mylar
dbee01d7e48e8c717afa01b2de1946c5d0b956cb
lib/js2py/legecy_translators/nodevisitor.py
python
transform_crap
(code)
return transform_crap(code[:beg]+formula+code[end:])
Transforms this ?: crap into if else python syntax
Transforms this ?: crap into if else python syntax
[ "Transforms", "this", "?", ":", "crap", "into", "if", "else", "python", "syntax" ]
def transform_crap(code):  # needs some more tests
    """Transforms this ?: crap into if else python syntax"""
    ind = code.rfind('?')
    if ind == -1:
        return code
    sep = code.find(':', ind)
    if sep == -1:
        raise SyntaxError('Invalid ?: syntax (probably missing ":" )')
    beg = max(code.rfind(':', 0, ind), code.find('?', 0, ind)) + 1
    end = code.find(':', sep + 1)
    end = len(code) if end == -1 else end
    formula = '(' + code[ind + 1:sep] + ' if ' + code[beg:ind] + ' else ' + code[sep + 1:end] + ')'
    return transform_crap(code[:beg] + formula + code[end:])
[ "def", "transform_crap", "(", "code", ")", ":", "#needs some more tests", "ind", "=", "code", ".", "rfind", "(", "'?'", ")", "if", "ind", "==", "-", "1", ":", "return", "code", "sep", "=", "code", ".", "find", "(", "':'", ",", "ind", ")", "if", "sep", "==", "-", "1", ":", "raise", "SyntaxError", "(", "'Invalid ?: syntax (probably missing \":\" )'", ")", "beg", "=", "max", "(", "code", ".", "rfind", "(", "':'", ",", "0", ",", "ind", ")", ",", "code", ".", "find", "(", "'?'", ",", "0", ",", "ind", ")", ")", "+", "1", "end", "=", "code", ".", "find", "(", "':'", ",", "sep", "+", "1", ")", "end", "=", "len", "(", "code", ")", "if", "end", "==", "-", "1", "else", "end", "formula", "=", "'('", "+", "code", "[", "ind", "+", "1", ":", "sep", "]", "+", "' if '", "+", "code", "[", "beg", ":", "ind", "]", "+", "' else '", "+", "code", "[", "sep", "+", "1", ":", "end", "]", "+", "')'", "return", "transform_crap", "(", "code", "[", ":", "beg", "]", "+", "formula", "+", "code", "[", "end", ":", "]", ")" ]
https://github.com/evilhero/mylar/blob/dbee01d7e48e8c717afa01b2de1946c5d0b956cb/lib/js2py/legecy_translators/nodevisitor.py#L445-L457
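The recursion bottoms out once no '?' remains. A small worked check of the rewrite, traced by hand against the slicing logic above:

print(transform_crap('a?b:c'))      # (b if a else c)
print(transform_crap('x ? y : z'))  # ( y  if x  else  z)  -- surrounding whitespace is preserved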
1012598167/flask_mongodb_game
60c7e0351586656ec38f851592886338e50b4110
python_flask/venv/Lib/site-packages/werkzeug/datastructures.py
python
ContentRange.unset
(self)
Sets the units to `None` which indicates that the header should no longer be used.
Sets the units to `None` which indicates that the header should no longer be used.
[ "Sets", "the", "units", "to", "None", "which", "indicates", "that", "the", "header", "should", "no", "longer", "be", "used", "." ]
def unset(self):
    """Sets the units to `None` which indicates that the header should
    no longer be used.
    """
    self.set(None, None, units=None)
[ "def", "unset", "(", "self", ")", ":", "self", ".", "set", "(", "None", ",", "None", ",", "units", "=", "None", ")" ]
https://github.com/1012598167/flask_mongodb_game/blob/60c7e0351586656ec38f851592886338e50b4110/python_flask/venv/Lib/site-packages/werkzeug/datastructures.py#L2423-L2427
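A hedged usage sketch with werkzeug: unset() clears the header by nulling the units field.

from werkzeug.datastructures import ContentRange

cr = ContentRange('bytes', 0, 500, length=1000)
cr.unset()
print(cr.units)  # None -> the Content-Range header should no longer be emitted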
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/cpdp/v20190820/cpdp_client.py
python
CpdpClient.QueryShopOpenId
(self, request)
Cloud Pay: fetch a shop's OpenId (云支付-获取门店OpenId接口)

:param request: Request instance for QueryShopOpenId.
:type request: :class:`tencentcloud.cpdp.v20190820.models.QueryShopOpenIdRequest`
:rtype: :class:`tencentcloud.cpdp.v20190820.models.QueryShopOpenIdResponse`
Cloud Pay: fetch a shop's OpenId (云支付-获取门店OpenId接口)
[ "云支付", "-", "获取门店OpenId接口" ]
def QueryShopOpenId(self, request):
    """云支付-获取门店OpenId接口 (Cloud Pay: fetch a shop's OpenId)

    :param request: Request instance for QueryShopOpenId.
    :type request: :class:`tencentcloud.cpdp.v20190820.models.QueryShopOpenIdRequest`
    :rtype: :class:`tencentcloud.cpdp.v20190820.models.QueryShopOpenIdResponse`

    """
    try:
        params = request._serialize()
        body = self.call("QueryShopOpenId", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.QueryShopOpenIdResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            raise TencentCloudSDKException(e.message, e.message)
[ "def", "QueryShopOpenId", "(", "self", ",", "request", ")", ":", "try", ":", "params", "=", "request", ".", "_serialize", "(", ")", "body", "=", "self", ".", "call", "(", "\"QueryShopOpenId\"", ",", "params", ")", "response", "=", "json", ".", "loads", "(", "body", ")", "if", "\"Error\"", "not", "in", "response", "[", "\"Response\"", "]", ":", "model", "=", "models", ".", "QueryShopOpenIdResponse", "(", ")", "model", ".", "_deserialize", "(", "response", "[", "\"Response\"", "]", ")", "return", "model", "else", ":", "code", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Code\"", "]", "message", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Message\"", "]", "reqid", "=", "response", "[", "\"Response\"", "]", "[", "\"RequestId\"", "]", "raise", "TencentCloudSDKException", "(", "code", ",", "message", ",", "reqid", ")", "except", "Exception", "as", "e", ":", "if", "isinstance", "(", "e", ",", "TencentCloudSDKException", ")", ":", "raise", "else", ":", "raise", "TencentCloudSDKException", "(", "e", ".", "message", ",", "e", ".", "message", ")" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/cpdp/v20190820/cpdp_client.py#L2782-L2807
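A hedged client-side sketch following the usual tencentcloud-sdk-python pattern; credentials and region are placeholders, and the exact region string is an assumption:

from tencentcloud.common import credential
from tencentcloud.cpdp.v20190820 import cpdp_client, models

cred = credential.Credential('SECRET_ID', 'SECRET_KEY')   # placeholder credentials
client = cpdp_client.CpdpClient(cred, 'ap-guangzhou')     # region is hypothetical
req = models.QueryShopOpenIdRequest()
resp = client.QueryShopOpenId(req)                        # returns QueryShopOpenIdResponse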
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/nx584/binary_sensor.py
python
NX584Watcher.__init__
(self, client, zone_sensors)
Initialize NX584 watcher thread.
Initialize NX584 watcher thread.
[ "Initialize", "NX584", "watcher", "thread", "." ]
def __init__(self, client, zone_sensors):
    """Initialize NX584 watcher thread."""
    super().__init__()
    self.daemon = True
    self._client = client
    self._zone_sensors = zone_sensors
[ "def", "__init__", "(", "self", ",", "client", ",", "zone_sensors", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "daemon", "=", "True", "self", ".", "_client", "=", "client", "self", ".", "_zone_sensors", "=", "zone_sensors" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/nx584/binary_sensor.py#L125-L130
mesalock-linux/mesapy
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
lib-python/2.7/lib-tk/Tkinter.py
python
Text.tag_nextrange
(self, tagName, index1, index2=None)
return self.tk.splitlist(self.tk.call( self._w, 'tag', 'nextrange', tagName, index1, index2))
Return a list of start and end index for the first sequence of characters between INDEX1 and INDEX2 which all have tag TAGNAME. The text is searched forward from INDEX1.
Return a list of start and end index for the first sequence of characters between INDEX1 and INDEX2 which all have tag TAGNAME. The text is searched forward from INDEX1.
[ "Return", "a", "list", "of", "start", "and", "end", "index", "for", "the", "first", "sequence", "of", "characters", "between", "INDEX1", "and", "INDEX2", "which", "all", "have", "tag", "TAGNAME", ".", "The", "text", "is", "searched", "forward", "from", "INDEX1", "." ]
def tag_nextrange(self, tagName, index1, index2=None):
    """Return a list of start and end index for the first sequence of
    characters between INDEX1 and INDEX2 which all have tag TAGNAME.
    The text is searched forward from INDEX1."""
    return self.tk.splitlist(self.tk.call(
        self._w, 'tag', 'nextrange', tagName, index1, index2))
[ "def", "tag_nextrange", "(", "self", ",", "tagName", ",", "index1", ",", "index2", "=", "None", ")", ":", "return", "self", ".", "tk", ".", "splitlist", "(", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'tag'", ",", "'nextrange'", ",", "tagName", ",", "index1", ",", "index2", ")", ")" ]
https://github.com/mesalock-linux/mesapy/blob/ed546d59a21b36feb93e2309d5c6b75aa0ad95c9/lib-python/2.7/lib-tk/Tkinter.py#L3203-L3208
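A hedged Tkinter sketch (Python 2, matching this lib-tk source; needs a display to run): find the first range carrying a tag after a given index.

import Tkinter

root = Tkinter.Tk()
text = Tkinter.Text(root)
text.insert('1.0', 'hello world')
text.tag_add('hl', '1.6', '1.11')        # tag the word "world"
print(text.tag_nextrange('hl', '1.0'))   # ('1.6', '1.11')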
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/packages/site-packages/jinja2/compiler.py
python
has_safe_repr
(value)
return False
Does the node have a safe representation?
Does the node have a safe representation?
[ "Does", "the", "node", "have", "a", "safe", "representation?" ]
def has_safe_repr(value):
    """Does the node have a safe representation?"""
    if value is None or value is NotImplemented or value is Ellipsis:
        return True
    if isinstance(value, (bool, int, float, complex, range_type, Markup) + string_types):
        return True
    if isinstance(value, (tuple, list, set, frozenset)):
        for item in value:
            if not has_safe_repr(item):
                return False
        return True
    elif isinstance(value, dict):
        for key, value in iteritems(value):
            if not has_safe_repr(key):
                return False
            if not has_safe_repr(value):
                return False
        return True
    return False
[ "def", "has_safe_repr", "(", "value", ")", ":", "if", "value", "is", "None", "or", "value", "is", "NotImplemented", "or", "value", "is", "Ellipsis", ":", "return", "True", "if", "isinstance", "(", "value", ",", "(", "bool", ",", "int", ",", "float", ",", "complex", ",", "range_type", ",", "Markup", ")", "+", "string_types", ")", ":", "return", "True", "if", "isinstance", "(", "value", ",", "(", "tuple", ",", "list", ",", "set", ",", "frozenset", ")", ")", ":", "for", "item", "in", "value", ":", "if", "not", "has_safe_repr", "(", "item", ")", ":", "return", "False", "return", "True", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "for", "key", ",", "value", "in", "iteritems", "(", "value", ")", ":", "if", "not", "has_safe_repr", "(", "key", ")", ":", "return", "False", "if", "not", "has_safe_repr", "(", "value", ")", ":", "return", "False", "return", "True", "return", "False" ]
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/packages/site-packages/jinja2/compiler.py#L67-L86
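A quick check of the contract: containers are safe only if every element (and, for dicts, every key and value) is safe; arbitrary objects are not.

print(has_safe_repr((1, 'a', frozenset({2}))))  # True  -- all elements recurse to safe types
print(has_safe_repr({'k': object()}))           # False -- object() has no safe repr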
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/models/v1_persistent_volume_list.py
python
V1PersistentVolumeList.to_dict
(self)
return result
Returns the model properties as a dict
Returns the model properties as a dict
[ "Returns", "the", "model", "properties", "as", "a", "dict" ]
def to_dict(self):
    """Returns the model properties as a dict"""
    result = {}

    for attr, _ in six.iteritems(self.openapi_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            result[attr] = value

    return result
[ "def", "to_dict", "(", "self", ")", ":", "result", "=", "{", "}", "for", "attr", ",", "_", "in", "six", ".", "iteritems", "(", "self", ".", "openapi_types", ")", ":", "value", "=", "getattr", "(", "self", ",", "attr", ")", "if", "isinstance", "(", "value", ",", "list", ")", ":", "result", "[", "attr", "]", "=", "list", "(", "map", "(", "lambda", "x", ":", "x", ".", "to_dict", "(", ")", "if", "hasattr", "(", "x", ",", "\"to_dict\"", ")", "else", "x", ",", "value", ")", ")", "elif", "hasattr", "(", "value", ",", "\"to_dict\"", ")", ":", "result", "[", "attr", "]", "=", "value", ".", "to_dict", "(", ")", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "result", "[", "attr", "]", "=", "dict", "(", "map", "(", "lambda", "item", ":", "(", "item", "[", "0", "]", ",", "item", "[", "1", "]", ".", "to_dict", "(", ")", ")", "if", "hasattr", "(", "item", "[", "1", "]", ",", "\"to_dict\"", ")", "else", "item", ",", "value", ".", "items", "(", ")", ")", ")", "else", ":", "result", "[", "attr", "]", "=", "value", "return", "result" ]
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_persistent_volume_list.py#L161-L183
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_adm_router.py
python
RoleBinding.role_ref
(self)
return self._role_ref
role_ref property
role_ref property
[ "role_ref", "property" ]
def role_ref(self):
    ''' role_ref property '''
    if self._role_ref is None:
        self._role_ref = self.get_role_ref()
    return self._role_ref
[ "def", "role_ref", "(", "self", ")", ":", "if", "self", ".", "_role_ref", "is", "None", ":", "self", ".", "_role_ref", "=", "self", ".", "get_role_ref", "(", ")", "return", "self", ".", "_role_ref" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_adm_router.py#L2457-L2461
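The accessor above is the standard lazy-initialization property pattern: compute on first access, cache thereafter. The same idea in isolation (a hypothetical class, not this repo's API):

class Lazy(object):
    def __init__(self):
        self._value = None

    @property
    def value(self):
        if self._value is None:
            self._value = self._compute()  # computed once, cached afterwards
        return self._value

    def _compute(self):
        return 42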
wandb/client
3963364d8112b7dedb928fa423b6878ea1b467d9
wandb/vendor/pygments/lexers/objective.py
python
objective
(baselexer)
return GeneratedObjectiveCVariant
Generate a subclass of baselexer that accepts the Objective-C syntax extensions.
Generate a subclass of baselexer that accepts the Objective-C syntax extensions.
[ "Generate", "a", "subclass", "of", "baselexer", "that", "accepts", "the", "Objective", "-", "C", "syntax", "extensions", "." ]
def objective(baselexer):
    """
    Generate a subclass of baselexer that accepts the Objective-C syntax
    extensions.
    """
    # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
    # since that's quite common in ordinary C/C++ files. It's OK to match
    # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
    #
    # The upshot of this is that we CANNOT match @class or @interface
    _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')

    # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
    # (note the identifier is *optional* when there is a ':'!)
    _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
                             r'(?:[a-zA-Z_]\w*\s*\]|'
                             r'(?:[a-zA-Z_]\w*)?:)')

    class GeneratedObjectiveCVariant(baselexer):
        """
        Implements Objective-C syntax on top of an existing C family lexer.
        """

        tokens = {
            'statements': [
                (r'@"', String, 'string'),
                (r'@(YES|NO)', Number),
                (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
                (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
                (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
                (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
                (r'@0[0-7]+[Ll]?', Number.Oct),
                (r'@\d+[Ll]?', Number.Integer),
                (r'@\(', Literal, 'literal_number'),
                (r'@\[', Literal, 'literal_array'),
                (r'@\{', Literal, 'literal_dictionary'),
                (words((
                    '@selector', '@private', '@protected', '@public', '@encode',
                    '@synchronized', '@try', '@throw', '@catch', '@finally',
                    '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
                    '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
                    'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
                    'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
                    'out', 'inout', 'release', 'class', '@dynamic', '@optional',
                    '@required', '@autoreleasepool'), suffix=r'\b'),
                 Keyword),
                (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
                        'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
                 Keyword.Type),
                (r'@(true|false|YES|NO)\n', Name.Builtin),
                (r'(YES|NO|nil|self|super)\b', Name.Builtin),
                # Carbon types
                (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
                # Carbon built-ins
                (r'(TRUE|FALSE)\b', Name.Builtin),
                (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
                 ('#pop', 'oc_classname')),
                (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
                 ('#pop', 'oc_forward_classname')),
                # @ can also prefix other expressions like @{...} or @(...)
                (r'@', Punctuation),
                inherit,
            ],
            'oc_classname': [
                # interface definition that inherits
                ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
                 bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
                 ('#pop', 'oc_ivars')),
                ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
                 bygroups(Name.Class, Text, Name.Class), '#pop'),
                # interface definition for a category
                ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
                 bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
                 ('#pop', 'oc_ivars')),
                ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
                 bygroups(Name.Class, Text, Name.Label), '#pop'),
                # simple interface / implementation
                ('([a-zA-Z$_][\w$]*)(\s*)(\{)',
                 bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
                ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
            ],
            'oc_forward_classname': [
                ('([a-zA-Z$_][\w$]*)(\s*,\s*)',
                 bygroups(Name.Class, Text), 'oc_forward_classname'),
                ('([a-zA-Z$_][\w$]*)(\s*;?)',
                 bygroups(Name.Class, Text), '#pop')
            ],
            'oc_ivars': [
                include('whitespace'),
                include('statements'),
                (';', Punctuation),
                (r'\{', Punctuation, '#push'),
                (r'\}', Punctuation, '#pop'),
            ],
            'root': [
                # methods
                (r'^([-+])(\s*)'            # method marker
                 r'(\(.*?\))?(\s*)'         # return type
                 r'([a-zA-Z$_][\w$]*:?)',   # begin of method name
                 bygroups(Punctuation, Text, using(this), Text, Name.Function),
                 'method'),
                inherit,
            ],
            'method': [
                include('whitespace'),
                # TODO unsure if ellipses are allowed elsewhere, see
                # discussion in Issue 789
                (r',', Punctuation),
                (r'\.\.\.', Punctuation),
                (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
                 bygroups(using(this), Text, Name.Variable)),
                (r'[a-zA-Z$_][\w$]*:', Name.Function),
                (';', Punctuation, '#pop'),
                (r'\{', Punctuation, 'function'),
                default('#pop'),
            ],
            'literal_number': [
                (r'\(', Punctuation, 'literal_number_inner'),
                (r'\)', Literal, '#pop'),
                include('statement'),
            ],
            'literal_number_inner': [
                (r'\(', Punctuation, '#push'),
                (r'\)', Punctuation, '#pop'),
                include('statement'),
            ],
            'literal_array': [
                (r'\[', Punctuation, 'literal_array_inner'),
                (r'\]', Literal, '#pop'),
                include('statement'),
            ],
            'literal_array_inner': [
                (r'\[', Punctuation, '#push'),
                (r'\]', Punctuation, '#pop'),
                include('statement'),
            ],
            'literal_dictionary': [
                (r'\}', Literal, '#pop'),
                include('statement'),
            ],
        }

        def analyse_text(text):
            if _oc_keywords.search(text):
                return 1.0
            elif '@"' in text:  # strings
                return 0.8
            elif re.search('@[0-9]+', text):
                return 0.7
            elif _oc_message.search(text):
                return 0.8
            return 0

        def get_tokens_unprocessed(self, text):
            from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
                COCOA_PROTOCOLS, COCOA_PRIMITIVES

            for index, token, value in \
                    baselexer.get_tokens_unprocessed(self, text):
                if token is Name or token is Name.Class:
                    if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
                            or value in COCOA_PRIMITIVES:
                        token = Name.Builtin.Pseudo

                yield index, token, value

    return GeneratedObjectiveCVariant
[ "def", "objective", "(", "baselexer", ")", ":", "# Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,", "# since that's quite common in ordinary C/C++ files. It's OK to match", "# JavaDoc/Doxygen keywords that only apply to Objective-C, mind.", "#", "# The upshot of this is that we CANNOT match @class or @interface", "_oc_keywords", "=", "re", ".", "compile", "(", "r'@(?:end|implementation|protocol)'", ")", "# Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )", "# (note the identifier is *optional* when there is a ':'!)", "_oc_message", "=", "re", ".", "compile", "(", "r'\\[\\s*[a-zA-Z_]\\w*\\s+'", "r'(?:[a-zA-Z_]\\w*\\s*\\]|'", "r'(?:[a-zA-Z_]\\w*)?:)'", ")", "class", "GeneratedObjectiveCVariant", "(", "baselexer", ")", ":", "\"\"\"\n Implements Objective-C syntax on top of an existing C family lexer.\n \"\"\"", "tokens", "=", "{", "'statements'", ":", "[", "(", "r'@\"'", ",", "String", ",", "'string'", ")", ",", "(", "r'@(YES|NO)'", ",", "Number", ")", ",", "(", "r\"@'(\\\\.|\\\\[0-7]{1,3}|\\\\x[a-fA-F0-9]{1,2}|[^\\\\\\'\\n])'\"", ",", "String", ".", "Char", ")", ",", "(", "r'@(\\d+\\.\\d*|\\.\\d+|\\d+)[eE][+-]?\\d+[lL]?'", ",", "Number", ".", "Float", ")", ",", "(", "r'@(\\d+\\.\\d*|\\.\\d+|\\d+[fF])[fF]?'", ",", "Number", ".", "Float", ")", ",", "(", "r'@0x[0-9a-fA-F]+[Ll]?'", ",", "Number", ".", "Hex", ")", ",", "(", "r'@0[0-7]+[Ll]?'", ",", "Number", ".", "Oct", ")", ",", "(", "r'@\\d+[Ll]?'", ",", "Number", ".", "Integer", ")", ",", "(", "r'@\\('", ",", "Literal", ",", "'literal_number'", ")", ",", "(", "r'@\\['", ",", "Literal", ",", "'literal_array'", ")", ",", "(", "r'@\\{'", ",", "Literal", ",", "'literal_dictionary'", ")", ",", "(", "words", "(", "(", "'@selector'", ",", "'@private'", ",", "'@protected'", ",", "'@public'", ",", "'@encode'", ",", "'@synchronized'", ",", "'@try'", ",", "'@throw'", ",", "'@catch'", ",", "'@finally'", ",", "'@end'", ",", "'@property'", ",", "'@synthesize'", ",", "'__bridge'", ",", "'__bridge_transfer'", ",", "'__autoreleasing'", ",", "'__block'", ",", "'__weak'", ",", "'__strong'", ",", "'weak'", ",", "'strong'", ",", "'copy'", ",", "'retain'", ",", "'assign'", ",", "'unsafe_unretained'", ",", "'atomic'", ",", "'nonatomic'", ",", "'readonly'", ",", "'readwrite'", ",", "'setter'", ",", "'getter'", ",", "'typeof'", ",", "'in'", ",", "'out'", ",", "'inout'", ",", "'release'", ",", "'class'", ",", "'@dynamic'", ",", "'@optional'", ",", "'@required'", ",", "'@autoreleasepool'", ")", ",", "suffix", "=", "r'\\b'", ")", ",", "Keyword", ")", ",", "(", "words", "(", "(", "'id'", ",", "'instancetype'", ",", "'Class'", ",", "'IMP'", ",", "'SEL'", ",", "'BOOL'", ",", "'IBOutlet'", ",", "'IBAction'", ",", "'unichar'", ")", ",", "suffix", "=", "r'\\b'", ")", ",", "Keyword", ".", "Type", ")", ",", "(", "r'@(true|false|YES|NO)\\n'", ",", "Name", ".", "Builtin", ")", ",", "(", "r'(YES|NO|nil|self|super)\\b'", ",", "Name", ".", "Builtin", ")", ",", "# Carbon types", "(", "r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\\b'", ",", "Keyword", ".", "Type", ")", ",", "# Carbon built-ins", "(", "r'(TRUE|FALSE)\\b'", ",", "Name", ".", "Builtin", ")", ",", "(", "r'(@interface|@implementation)(\\s+)'", ",", "bygroups", "(", "Keyword", ",", "Text", ")", ",", "(", "'#pop'", ",", "'oc_classname'", ")", ")", ",", "(", "r'(@class|@protocol)(\\s+)'", ",", "bygroups", "(", "Keyword", ",", "Text", ")", ",", "(", "'#pop'", ",", "'oc_forward_classname'", ")", ")", ",", "# @ can also prefix other expressions like @{...} or @(...)", "(", 
"r'@'", ",", "Punctuation", ")", ",", "inherit", ",", "]", ",", "'oc_classname'", ":", "[", "# interface definition that inherits", "(", "'([a-zA-Z$_][\\w$]*)(\\s*:\\s*)([a-zA-Z$_][\\w$]*)?(\\s*)(\\{)'", ",", "bygroups", "(", "Name", ".", "Class", ",", "Text", ",", "Name", ".", "Class", ",", "Text", ",", "Punctuation", ")", ",", "(", "'#pop'", ",", "'oc_ivars'", ")", ")", ",", "(", "'([a-zA-Z$_][\\w$]*)(\\s*:\\s*)([a-zA-Z$_][\\w$]*)?'", ",", "bygroups", "(", "Name", ".", "Class", ",", "Text", ",", "Name", ".", "Class", ")", ",", "'#pop'", ")", ",", "# interface definition for a category", "(", "'([a-zA-Z$_][\\w$]*)(\\s*)(\\([a-zA-Z$_][\\w$]*\\))(\\s*)(\\{)'", ",", "bygroups", "(", "Name", ".", "Class", ",", "Text", ",", "Name", ".", "Label", ",", "Text", ",", "Punctuation", ")", ",", "(", "'#pop'", ",", "'oc_ivars'", ")", ")", ",", "(", "'([a-zA-Z$_][\\w$]*)(\\s*)(\\([a-zA-Z$_][\\w$]*\\))'", ",", "bygroups", "(", "Name", ".", "Class", ",", "Text", ",", "Name", ".", "Label", ")", ",", "'#pop'", ")", ",", "# simple interface / implementation", "(", "'([a-zA-Z$_][\\w$]*)(\\s*)(\\{)'", ",", "bygroups", "(", "Name", ".", "Class", ",", "Text", ",", "Punctuation", ")", ",", "(", "'#pop'", ",", "'oc_ivars'", ")", ")", ",", "(", "'([a-zA-Z$_][\\w$]*)'", ",", "Name", ".", "Class", ",", "'#pop'", ")", "]", ",", "'oc_forward_classname'", ":", "[", "(", "'([a-zA-Z$_][\\w$]*)(\\s*,\\s*)'", ",", "bygroups", "(", "Name", ".", "Class", ",", "Text", ")", ",", "'oc_forward_classname'", ")", ",", "(", "'([a-zA-Z$_][\\w$]*)(\\s*;?)'", ",", "bygroups", "(", "Name", ".", "Class", ",", "Text", ")", ",", "'#pop'", ")", "]", ",", "'oc_ivars'", ":", "[", "include", "(", "'whitespace'", ")", ",", "include", "(", "'statements'", ")", ",", "(", "';'", ",", "Punctuation", ")", ",", "(", "r'\\{'", ",", "Punctuation", ",", "'#push'", ")", ",", "(", "r'\\}'", ",", "Punctuation", ",", "'#pop'", ")", ",", "]", ",", "'root'", ":", "[", "# methods", "(", "r'^([-+])(\\s*)'", "# method marker", "r'(\\(.*?\\))?(\\s*)'", "# return type", "r'([a-zA-Z$_][\\w$]*:?)'", ",", "# begin of method name", "bygroups", "(", "Punctuation", ",", "Text", ",", "using", "(", "this", ")", ",", "Text", ",", "Name", ".", "Function", ")", ",", "'method'", ")", ",", "inherit", ",", "]", ",", "'method'", ":", "[", "include", "(", "'whitespace'", ")", ",", "# TODO unsure if ellipses are allowed elsewhere, see", "# discussion in Issue 789", "(", "r','", ",", "Punctuation", ")", ",", "(", "r'\\.\\.\\.'", ",", "Punctuation", ")", ",", "(", "r'(\\(.*?\\))(\\s*)([a-zA-Z$_][\\w$]*)'", ",", "bygroups", "(", "using", "(", "this", ")", ",", "Text", ",", "Name", ".", "Variable", ")", ")", ",", "(", "r'[a-zA-Z$_][\\w$]*:'", ",", "Name", ".", "Function", ")", ",", "(", "';'", ",", "Punctuation", ",", "'#pop'", ")", ",", "(", "r'\\{'", ",", "Punctuation", ",", "'function'", ")", ",", "default", "(", "'#pop'", ")", ",", "]", ",", "'literal_number'", ":", "[", "(", "r'\\('", ",", "Punctuation", ",", "'literal_number_inner'", ")", ",", "(", "r'\\)'", ",", "Literal", ",", "'#pop'", ")", ",", "include", "(", "'statement'", ")", ",", "]", ",", "'literal_number_inner'", ":", "[", "(", "r'\\('", ",", "Punctuation", ",", "'#push'", ")", ",", "(", "r'\\)'", ",", "Punctuation", ",", "'#pop'", ")", ",", "include", "(", "'statement'", ")", ",", "]", ",", "'literal_array'", ":", "[", "(", "r'\\['", ",", "Punctuation", ",", "'literal_array_inner'", ")", ",", "(", "r'\\]'", ",", "Literal", ",", "'#pop'", ")", ",", "include", "(", "'statement'", ")", ",", "]", ",", 
"'literal_array_inner'", ":", "[", "(", "r'\\['", ",", "Punctuation", ",", "'#push'", ")", ",", "(", "r'\\]'", ",", "Punctuation", ",", "'#pop'", ")", ",", "include", "(", "'statement'", ")", ",", "]", ",", "'literal_dictionary'", ":", "[", "(", "r'\\}'", ",", "Literal", ",", "'#pop'", ")", ",", "include", "(", "'statement'", ")", ",", "]", ",", "}", "def", "analyse_text", "(", "text", ")", ":", "if", "_oc_keywords", ".", "search", "(", "text", ")", ":", "return", "1.0", "elif", "'@\"'", "in", "text", ":", "# strings", "return", "0.8", "elif", "re", ".", "search", "(", "'@[0-9]+'", ",", "text", ")", ":", "return", "0.7", "elif", "_oc_message", ".", "search", "(", "text", ")", ":", "return", "0.8", "return", "0", "def", "get_tokens_unprocessed", "(", "self", ",", "text", ")", ":", "from", "pygments", ".", "lexers", ".", "_cocoa_builtins", "import", "COCOA_INTERFACES", ",", "COCOA_PROTOCOLS", ",", "COCOA_PRIMITIVES", "for", "index", ",", "token", ",", "value", "in", "baselexer", ".", "get_tokens_unprocessed", "(", "self", ",", "text", ")", ":", "if", "token", "is", "Name", "or", "token", "is", "Name", ".", "Class", ":", "if", "value", "in", "COCOA_INTERFACES", "or", "value", "in", "COCOA_PROTOCOLS", "or", "value", "in", "COCOA_PRIMITIVES", ":", "token", "=", "Name", ".", "Builtin", ".", "Pseudo", "yield", "index", ",", "token", ",", "value", "return", "GeneratedObjectiveCVariant" ]
https://github.com/wandb/client/blob/3963364d8112b7dedb928fa423b6878ea1b467d9/wandb/vendor/pygments/lexers/objective.py#L24-L192
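A minimal usage sketch for the record above. The `objective()` factory is module-internal, so the sketch goes through the public `ObjectiveCLexer`, which pygments builds from exactly this generated class:

```python
# Hedged sketch: highlight a small Objective-C snippet with the lexer
# that pygments derives from objective(CLexer).
from pygments import highlight
from pygments.lexers import ObjectiveCLexer
from pygments.formatters import TerminalFormatter

code = '@interface Foo : NSObject\n- (void)bar;\n@end\n'
print(highlight(code, ObjectiveCLexer(), TerminalFormatter()))
```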
OpenNMT/OpenNMT-py
4815f07fcd482af9a1fe1d3b620d144197178bc5
onmt/modules/weight_norm.py
python
WeightNormConvTranspose2d.__init__
(self, in_channels, out_channels, kernel_size, stride=1, padding=0, output_padding=0, groups=1, init_scale=1., polyak_decay=0.9995)
[]
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, output_padding=0, groups=1, init_scale=1., polyak_decay=0.9995): super(WeightNormConvTranspose2d, self).__init__( in_channels, out_channels, kernel_size, stride, padding, output_padding, groups) # in_channels, out_channels, *kernel_size self.V = self.weight self.g = Parameter(torch.Tensor(out_channels)) self.b = self.bias self.register_buffer('V_avg', torch.zeros(self.V.size())) self.register_buffer('g_avg', torch.zeros(out_channels)) self.register_buffer('b_avg', torch.zeros(out_channels)) self.init_scale = init_scale self.polyak_decay = polyak_decay self.reset_parameters()
[ "def", "__init__", "(", "self", ",", "in_channels", ",", "out_channels", ",", "kernel_size", ",", "stride", "=", "1", ",", "padding", "=", "0", ",", "output_padding", "=", "0", ",", "groups", "=", "1", ",", "init_scale", "=", "1.", ",", "polyak_decay", "=", "0.9995", ")", ":", "super", "(", "WeightNormConvTranspose2d", ",", "self", ")", ".", "__init__", "(", "in_channels", ",", "out_channels", ",", "kernel_size", ",", "stride", ",", "padding", ",", "output_padding", ",", "groups", ")", "# in_channels, out_channels, *kernel_size", "self", ".", "V", "=", "self", ".", "weight", "self", ".", "g", "=", "Parameter", "(", "torch", ".", "Tensor", "(", "out_channels", ")", ")", "self", ".", "b", "=", "self", ".", "bias", "self", ".", "register_buffer", "(", "'V_avg'", ",", "torch", ".", "zeros", "(", "self", ".", "V", ".", "size", "(", ")", ")", ")", "self", ".", "register_buffer", "(", "'g_avg'", ",", "torch", ".", "zeros", "(", "out_channels", ")", ")", "self", ".", "register_buffer", "(", "'b_avg'", ",", "torch", ".", "zeros", "(", "out_channels", ")", ")", "self", ".", "init_scale", "=", "init_scale", "self", ".", "polyak_decay", "=", "polyak_decay", "self", ".", "reset_parameters", "(", ")" ]
https://github.com/OpenNMT/OpenNMT-py/blob/4815f07fcd482af9a1fe1d3b620d144197178bc5/onmt/modules/weight_norm.py#L175-L194
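A hedged usage sketch for the constructor above; it assumes PyTorch plus an importable OpenNMT-py checkout, and that the module's `forward` applies the weight-normalized re-parametrization transparently (as the surrounding file does):

```python
import torch
from onmt.modules.weight_norm import WeightNormConvTranspose2d

layer = WeightNormConvTranspose2d(in_channels=8, out_channels=4,
                                  kernel_size=3, stride=2, padding=1)
x = torch.randn(2, 8, 16, 16)   # (batch, channels, height, width)
y = layer(x)                    # V_avg/g_avg/b_avg track Polyak averages
print(y.shape)                  # torch.Size([2, 4, 31, 31])
```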
trailofbits/manticore
b050fdf0939f6c63f503cdf87ec0ab159dd41159
manticore/native/cpu/aarch64.py
python
Aarch64Cpu._SUBS_immediate
(cpu, res_op, reg_op, imm_op)
SUBS (immediate). :param res_op: destination register. :param reg_op: source register. :param imm_op: immediate.
SUBS (immediate).
[ "SUBS", "(", "immediate", ")", "." ]
def _SUBS_immediate(cpu, res_op, reg_op, imm_op): """ SUBS (immediate). :param res_op: destination register. :param reg_op: source register. :param imm_op: immediate. """ cpu._adds_subs_immediate(res_op, reg_op, imm_op, mnem="subs")
[ "def", "_SUBS_immediate", "(", "cpu", ",", "res_op", ",", "reg_op", ",", "imm_op", ")", ":", "cpu", ".", "_adds_subs_immediate", "(", "res_op", ",", "reg_op", ",", "imm_op", ",", "mnem", "=", "\"subs\"", ")" ]
https://github.com/trailofbits/manticore/blob/b050fdf0939f6c63f503cdf87ec0ab159dd41159/manticore/native/cpu/aarch64.py#L4690-L4698
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/barpolar/_marker.py
python
Marker.cmin
(self)
return self["cmin"]
Sets the lower bound of the color domain. Has an effect only if `marker.color` is set to a numerical array. Value should have the same units as `marker.color`, and if set, `marker.cmax` must be set as well. The 'cmin' property is a number and may be specified as: - An int or float Returns ------- int|float
Sets the lower bound of the color domain. Has an effect only if `marker.color` is set to a numerical array. Value should have the same units as `marker.color`, and if set, `marker.cmax` must be set as well. The 'cmin' property is a number and may be specified as: - An int or float
[ "Sets", "the", "lower", "bound", "of", "the", "color", "domain", ".", "Has", "an", "effect", "only", "if", "in", "marker", ".", "color", "is", "set", "to", "a", "numerical", "array", ".", "Value", "should", "have", "the", "same", "units", "as", "in", "marker", ".", "color", "and", "if", "set", "marker", ".", "cmax", "must", "be", "set", "as", "well", ".", "The", "cmin", "property", "is", "a", "number", "and", "may", "be", "specified", "as", ":", "-", "An", "int", "or", "float" ]
def cmin(self): """ Sets the lower bound of the color domain. Has an effect only if in `marker.color`is set to a numerical array. Value should have the same units as in `marker.color` and if set, `marker.cmax` must be set as well. The 'cmin' property is a number and may be specified as: - An int or float Returns ------- int|float """ return self["cmin"]
[ "def", "cmin", "(", "self", ")", ":", "return", "self", "[", "\"cmin\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/barpolar/_marker.py#L131-L145
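A small, self-contained example of the property documented above; `cmin` only takes effect when `marker.color` is a numeric array, and `cmax` must be set alongside it:

```python
import plotly.graph_objects as go

fig = go.Figure(go.Barpolar(
    r=[1, 2, 3, 4],
    theta=[0, 45, 90, 135],
    marker=dict(color=[10, 20, 30, 40],   # numeric array -> cmin/cmax apply
                cmin=0, cmax=50,          # pin the color domain to [0, 50]
                colorscale="Viridis"),
))
fig.show()
```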
phantomcyber/playbooks
9e850ecc44cb98c5dde53784744213a1ed5799bd
user_approved_ticket_creation.py
python
set_status_2
(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs)
return
[]
def set_status_2(action=None, success=None, container=None, results=None, handle=None, filtered_artifacts=None, filtered_results=None, custom_function=None, **kwargs): phantom.debug('set_status_2() called') phantom.set_status(container=container, status="closed") return
[ "def", "set_status_2", "(", "action", "=", "None", ",", "success", "=", "None", ",", "container", "=", "None", ",", "results", "=", "None", ",", "handle", "=", "None", ",", "filtered_artifacts", "=", "None", ",", "filtered_results", "=", "None", ",", "custom_function", "=", "None", ",", "*", "*", "kwargs", ")", ":", "phantom", ".", "debug", "(", "'set_status_2() called'", ")", "phantom", ".", "set_status", "(", "container", "=", "container", ",", "status", "=", "\"closed\"", ")", "return" ]
https://github.com/phantomcyber/playbooks/blob/9e850ecc44cb98c5dde53784744213a1ed5799bd/user_approved_ticket_creation.py#L36-L41
opsmop/opsmop
376ca587f8c5f9ca8ed1829909d075c339066034
opsmop/inventory/inventory.py
python
Inventory._process_hosts
(self, data)
Inventory loading has two stages. Loading loose hosts happens before groups so that the data in the groups can win out over any host-specific defaults.
Inventory loading has two stages. Loading loose hosts happens before groups so that the data in the groups can win out over any host-specific defaults.
[ "Inventory", "loading", "has", "two", "stages", ".", "Loading", "loose", "hosts", "happens", "before", "groups", "so", "that", "the", "data", "in", "the", "groups", "can", "win", "out", "over", "any", "host", "-", "specific", "defaults", "." ]
def _process_hosts(self, data): """ Inventory loading has two stages. Loading loose hosts happens before groups so that the data in the groups can win out over any host-specific defaults. """ hosts = data.get('hosts', dict()) # traverse the dictionary of host names, where each value is a variable for (host_name, host_data) in hosts.items(): host_data = self._shlex_parse(host_data) host = self._get_or_create_host(host_name, host_data)
[ "def", "_process_hosts", "(", "self", ",", "data", ")", ":", "hosts", "=", "data", ".", "get", "(", "'hosts'", ",", "dict", "(", ")", ")", "# traverse the dictionary of host names, where each value is a variable", "for", "(", "host_name", ",", "host_data", ")", "in", "hosts", ".", "items", "(", ")", ":", "host_data", "=", "self", ".", "_shlex_parse", "(", "host_data", ")", "host", "=", "self", ".", "_get_or_create_host", "(", "host_name", ",", "host_data", ")" ]
https://github.com/opsmop/opsmop/blob/376ca587f8c5f9ca8ed1829909d075c339066034/opsmop/inventory/inventory.py#L107-L117
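The method expects a `hosts` mapping whose values are shlex-style variable strings; a sketch of the input it consumes (host names and variables below are invented):

```python
# Hypothetical inventory fragment passed to Inventory._process_hosts():
data = {
    "hosts": {
        "web1.example.com": "port=22 role=web",   # parsed by _shlex_parse
        "db1.example.com":  "role=db",
    }
}
# Each entry becomes (host_name, host_data); a Host object is created or
# updated per name, before group processing can override these defaults.
```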
LonamiWebs/Telethon
f9643bf7376a5953da2050a5361c9b465f7ee7d9
telethon/tl/custom/button.py
python
Button.force_reply
(single_use=None, selective=None, placeholder=None)
return types.ReplyKeyboardForceReply( single_use=single_use, selective=selective, placeholder=placeholder)
Forces a reply to the message with this markup. If used, no other button should be present or it will be ignored. ``single_use`` and ``selective`` are as documented in `text`. Args: placeholder (str): text to show the user at typing place of message. If the placeholder is too long, Telegram applications will crop the text (for example, to 64 characters and adding an ellipsis (…) character as the 65th).
Forces a reply to the message with this markup. If used, no other button should be present or it will be ignored.
[ "Forces", "a", "reply", "to", "the", "message", "with", "this", "markup", ".", "If", "used", "no", "other", "button", "should", "be", "present", "or", "it", "will", "be", "ignored", "." ]
def force_reply(single_use=None, selective=None, placeholder=None): """ Forces a reply to the message with this markup. If used, no other button should be present or it will be ignored. ``single_use`` and ``selective`` are as documented in `text`. Args: placeholder (str): text to show the user at typing place of message. If the placeholder is too long, Telegram applications will crop the text (for example, to 64 characters and adding an ellipsis (…) character as the 65th). """ return types.ReplyKeyboardForceReply( single_use=single_use, selective=selective, placeholder=placeholder)
[ "def", "force_reply", "(", "single_use", "=", "None", ",", "selective", "=", "None", ",", "placeholder", "=", "None", ")", ":", "return", "types", ".", "ReplyKeyboardForceReply", "(", "single_use", "=", "single_use", ",", "selective", "=", "selective", ",", "placeholder", "=", "placeholder", ")" ]
https://github.com/LonamiWebs/Telethon/blob/f9643bf7376a5953da2050a5361c9b465f7ee7d9/telethon/tl/custom/button.py#L261-L279
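A usage sketch with Telethon's high-level API (the connected client and chat target are assumed):

```python
from telethon import TelegramClient, Button

async def ask_name(client: TelegramClient, chat):
    # Forces the recipient's client into reply mode for this message.
    await client.send_message(
        chat, "What's your name?",
        buttons=Button.force_reply(single_use=True,
                                   placeholder="Type your name..."),
    )
```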
pypiserver/pypiserver
bdbd839a1bbd5bdc20088074b4372531dabd5136
bin/ci_helper.py
python
parse_args
()
return parser.parse_args()
Parse cmdline args.
Parse cmdline args.
[ "Parse", "cmdline", "args", "." ]
def parse_args() -> Namespace: """Parse cmdline args.""" parser = ArgumentParser() parser.add_argument( "ref", help=( "The github ref for which CI is running. This may be a full ref " "like refs/tags/v1.2.3 or refs/heads/master, or just a tag/branch " "name like v1.2.3 or master." ), ) parser.add_argument( "action", help=("The action to perform"), choices=("docker_tags", "pypi_release", "has_tags"), ) return parser.parse_args()
[ "def", "parse_args", "(", ")", "->", "Namespace", ":", "parser", "=", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "\"ref\"", ",", "help", "=", "(", "\"The github ref for which CI is running. This may be a full ref \"", "\"like refs/tags/v1.2.3 or refs/heads/master, or just a tag/branch \"", "\"name like v1.2.3 or master.\"", ")", ",", ")", "parser", ".", "add_argument", "(", "\"action\"", ",", "help", "=", "(", "\"The action to perform\"", ")", ",", "choices", "=", "(", "\"docker_tags\"", ",", "\"pypi_release\"", ",", "\"has_tags\"", ")", ",", ")", "return", "parser", ".", "parse_args", "(", ")" ]
https://github.com/pypiserver/pypiserver/blob/bdbd839a1bbd5bdc20088074b4372531dabd5136/bin/ci_helper.py#L15-L31
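Since `argparse` reads `sys.argv` by default, the helper can be exercised like this with the function above in scope (the simulated command line is illustrative):

```python
import sys

# Simulate: python bin/ci_helper.py refs/tags/v1.2.3 docker_tags
sys.argv = ["ci_helper.py", "refs/tags/v1.2.3", "docker_tags"]
args = parse_args()
assert args.ref == "refs/tags/v1.2.3"
assert args.action == "docker_tags"
```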
pjlantz/droidbox
519ddd198ccef2e0d27e12929f25702f6a385d94
droidbox4.1.1/scripts/droidbox.py
python
getTags
(tagParam)
return tagsFound
Retrieve the tag names
Retrieve the tag names
[ "Retrieve", "the", "tag", "names" ]
def getTags(tagParam): """ Retrieve the tag names """ tagsFound = [] for tag in tags.keys(): if tagParam & tag != 0: tagsFound.append(tags[tag]) return tagsFound
[ "def", "getTags", "(", "tagParam", ")", ":", "tagsFound", "=", "[", "]", "for", "tag", "in", "tags", ".", "keys", "(", ")", ":", "if", "tagParam", "&", "tag", "!=", "0", ":", "tagsFound", ".", "append", "(", "tags", "[", "tag", "]", ")", "return", "tagsFound" ]
https://github.com/pjlantz/droidbox/blob/519ddd198ccef2e0d27e12929f25702f6a385d94/droidbox4.1.1/scripts/droidbox.py#L224-L233
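`getTags` walks a module-level bitmask-to-name table; a sketch assuming the function is defined in the same scope as this `tags` dict (the real DroidBox values differ):

```python
# Hypothetical bitmask table; the genuine one lives in droidbox.py.
tags = {0x1: "TAINT_LOCATION", 0x2: "TAINT_CONTACTS", 0x4: "TAINT_SMS"}

print(getTags(0x1 | 0x4))   # -> ['TAINT_LOCATION', 'TAINT_SMS']
print(getTags(0))           # -> [] (no bits set)
```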
replit-archive/empythoned
977ec10ced29a3541a4973dc2b59910805695752
cpython/Lib/mhlib.py
python
Folder.listsubfolders
(self)
return self.mh.listsubfolders(self.name)
Return list of direct subfolders.
Return list of direct subfolders.
[ "Return", "list", "of", "direct", "subfolders", "." ]
def listsubfolders(self): """Return list of direct subfolders.""" return self.mh.listsubfolders(self.name)
[ "def", "listsubfolders", "(", "self", ")", ":", "return", "self", ".", "mh", ".", "listsubfolders", "(", "self", ".", "name", ")" ]
https://github.com/replit-archive/empythoned/blob/977ec10ced29a3541a4973dc2b59910805695752/cpython/Lib/mhlib.py#L272-L274
thomasweng15/E.V.E.
e3bea3e45d0c549eccc6824c9cadbcc6980545f6
stt/google.py
python
Google.get_text
(self)
return str(phrase)
Send speech file to Google STT and then return text
Send speech file to Google STT and then return text
[ "Send", "speech", "file", "to", "Google", "STT", "and", "then", "return", "text" ]
def get_text(self): """Send speech file to Google STT and then return text""" # convert wav file to FLAC (_,stt_flac_filename) = tempfile.mkstemp('.flac') sound = AudioSegment.from_wav(self.audio.filename()) sound.export(stt_flac_filename, format="flac") # send to Google to interpret into text g_url = "http://www.google.com/speech-api/v1/recognize?lang=en" headers = {'Content-Type': 'audio/x-flac; rate= %d;' % self.rec_rate} recording_flac_data = open(stt_flac_filename, 'rb').read() try: r = requests.post(g_url, data=recording_flac_data, headers=headers) except requests.exceptions.ConnectionError: raise ConnectionLostException() response = r.text os.remove(stt_flac_filename) self.audio.housekeeping() if not 'hypotheses' in response: raise NotUnderstoodException() # we are only interested in the most likely utterance phrase = json.loads(response)['hypotheses'][0]['utterance'] print "Heard: " + phrase return str(phrase)
[ "def", "get_text", "(", "self", ")", ":", "# convert wav file to FLAC", "(", "_", ",", "stt_flac_filename", ")", "=", "tempfile", ".", "mkstemp", "(", "'.flac'", ")", "sound", "=", "AudioSegment", ".", "from_wav", "(", "self", ".", "audio", ".", "filename", "(", ")", ")", "sound", ".", "export", "(", "stt_flac_filename", ",", "format", "=", "\"flac\"", ")", "# send to Google to interpret into text", "g_url", "=", "\"http://www.google.com/speech-api/v1/recognize?lang=en\"", "headers", "=", "{", "'Content-Type'", ":", "'audio/x-flac; rate= %d;'", "%", "self", ".", "rec_rate", "}", "recording_flac_data", "=", "open", "(", "stt_flac_filename", ",", "'rb'", ")", ".", "read", "(", ")", "try", ":", "r", "=", "requests", ".", "post", "(", "g_url", ",", "data", "=", "recording_flac_data", ",", "headers", "=", "headers", ")", "except", "requests", ".", "exceptions", ".", "ConnectionError", ":", "raise", "ConnectionLostException", "(", ")", "response", "=", "r", ".", "text", "os", ".", "remove", "(", "stt_flac_filename", ")", "self", ".", "audio", ".", "housekeeping", "(", ")", "if", "not", "'hypotheses'", "in", "response", ":", "raise", "NotUnderstoodException", "(", ")", "# we are only interested in the most likely utterance", "phrase", "=", "json", ".", "loads", "(", "response", ")", "[", "'hypotheses'", "]", "[", "0", "]", "[", "'utterance'", "]", "print", "\"Heard: \"", "+", "phrase", "return", "str", "(", "phrase", ")" ]
https://github.com/thomasweng15/E.V.E./blob/e3bea3e45d0c549eccc6824c9cadbcc6980545f6/stt/google.py#L23-L49
aws-samples/aws-kube-codesuite
ab4e5ce45416b83bffb947ab8d234df5437f4fca
src/kubernetes/client/apis/authentication_v1beta1_api.py
python
AuthenticationV1beta1Api.create_token_review
(self, body, **kwargs)
create a TokenReview This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_token_review(body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param V1beta1TokenReview body: (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1beta1TokenReview If the method is called asynchronously, returns the request thread.
create a TokenReview This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_token_review(body, callback=callback_function)
[ "create", "a", "TokenReview", "This", "method", "makes", "a", "synchronous", "HTTP", "request", "by", "default", ".", "To", "make", "an", "asynchronous", "HTTP", "request", "please", "define", "a", "callback", "function", "to", "be", "invoked", "when", "receiving", "the", "response", ".", ">>>", "def", "callback_function", "(", "response", ")", ":", ">>>", "pprint", "(", "response", ")", ">>>", ">>>", "thread", "=", "api", ".", "create_token_review", "(", "body", "callback", "=", "callback_function", ")" ]
def create_token_review(self, body, **kwargs): """ create a TokenReview This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_token_review(body, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param V1beta1TokenReview body: (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1beta1TokenReview If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.create_token_review_with_http_info(body, **kwargs) else: (data) = self.create_token_review_with_http_info(body, **kwargs) return data
[ "def", "create_token_review", "(", "self", ",", "body", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'callback'", ")", ":", "return", "self", ".", "create_token_review_with_http_info", "(", "body", ",", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "create_token_review_with_http_info", "(", "body", ",", "*", "*", "kwargs", ")", "return", "data" ]
https://github.com/aws-samples/aws-kube-codesuite/blob/ab4e5ce45416b83bffb947ab8d234df5437f4fca/src/kubernetes/client/apis/authentication_v1beta1_api.py#L43-L67
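A hedged sketch against the (older) v1beta1 authentication API this client exposes; the cluster configuration and token value are assumptions:

```python
from kubernetes import client, config

config.load_kube_config()                     # or load_incluster_config()
api = client.AuthenticationV1beta1Api()
body = client.V1beta1TokenReview(
    spec=client.V1beta1TokenReviewSpec(token="some-bearer-token"))
review = api.create_token_review(body)        # synchronous by default
print(review.status.authenticated)
```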
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/tkinter/__init__.py
python
Listbox.index
(self, index)
return self.tk.getint(i)
Return index of item identified with INDEX.
Return index of item identified with INDEX.
[ "Return", "index", "of", "item", "identified", "with", "INDEX", "." ]
def index(self, index): """Return index of item identified with INDEX.""" i = self.tk.call(self._w, 'index', index) if i == 'none': return None return self.tk.getint(i)
[ "def", "index", "(", "self", ",", "index", ")", ":", "i", "=", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'index'", ",", "index", ")", "if", "i", "==", "'none'", ":", "return", "None", "return", "self", ".", "tk", ".", "getint", "(", "i", ")" ]
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/tkinter/__init__.py#L2799-L2803
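A quick demonstration of the index forms (requires a display, since Tk must initialize):

```python
import tkinter as tk

root = tk.Tk()
lb = tk.Listbox(root)
for item in ("alpha", "beta", "gamma"):
    lb.insert(tk.END, item)

print(lb.index(tk.END))     # 3 -- index just past the last item
print(lb.index(tk.ACTIVE))  # 0 -- the active item, none selected yet
root.destroy()
```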
jupyterhub/repo2docker
a37a205c0e8a59240933d20c1c4fb80767e71db2
repo2docker/buildpacks/base.py
python
BuildPack.get_post_build_scripts
(self)
return []
An ordered list of executable scripts to execute after build. Is run as a non-root user, and must be executable. Used for performing build time steps that can not be performed with standard tools. The scripts should be as deterministic as possible - running it twice should not produce different results!
An ordered list of executable scripts to execute after build.
[ "An", "ordered", "list", "of", "executable", "scripts", "to", "execute", "after", "build", "." ]
def get_post_build_scripts(self): """ An ordered list of executable scripts to execute after build. Is run as a non-root user, and must be executable. Used for performing build time steps that can not be performed with standard tools. The scripts should be as deterministic as possible - running it twice should not produce different results! """ return []
[ "def", "get_post_build_scripts", "(", "self", ")", ":", "return", "[", "]" ]
https://github.com/jupyterhub/repo2docker/blob/a37a205c0e8a59240933d20c1c4fb80767e71db2/repo2docker/buildpacks/base.py#L392-L402
google-research/mixmatch
1011a1d51eaa9ca6f5dba02096a848d1fe3fc38e
scripts/create_datasets.py
python
_encode_png
(images)
return raw
[]
def _encode_png(images): raw = [] with tf.Session() as sess, tf.device('cpu:0'): image_x = tf.placeholder(tf.uint8, [None, None, None], 'image_x') to_png = tf.image.encode_png(image_x) for x in trange(images.shape[0], desc='PNG Encoding', leave=False): raw.append(sess.run(to_png, feed_dict={image_x: images[x]})) return raw
[ "def", "_encode_png", "(", "images", ")", ":", "raw", "=", "[", "]", "with", "tf", ".", "Session", "(", ")", "as", "sess", ",", "tf", ".", "device", "(", "'cpu:0'", ")", ":", "image_x", "=", "tf", ".", "placeholder", "(", "tf", ".", "uint8", ",", "[", "None", ",", "None", ",", "None", "]", ",", "'image_x'", ")", "to_png", "=", "tf", ".", "image", ".", "encode_png", "(", "image_x", ")", "for", "x", "in", "trange", "(", "images", ".", "shape", "[", "0", "]", ",", "desc", "=", "'PNG Encoding'", ",", "leave", "=", "False", ")", ":", "raw", ".", "append", "(", "sess", ".", "run", "(", "to_png", ",", "feed_dict", "=", "{", "image_x", ":", "images", "[", "x", "]", "}", ")", ")", "return", "raw" ]
https://github.com/google-research/mixmatch/blob/1011a1d51eaa9ca6f5dba02096a848d1fe3fc38e/scripts/create_datasets.py#L43-L50
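A sketch of feeding the helper (TF1 graph mode, matching the snippet; `tf` and `trange` are imported at module level in the original script):

```python
import numpy as np

# Four random 32x32 RGB images, uint8 as the placeholder expects.
images = np.random.randint(0, 256, size=(4, 32, 32, 3), dtype=np.uint8)
raw = _encode_png(images)
print(len(raw))        # 4
print(raw[0][:8])      # b'\x89PNG\r\n\x1a\n' -- the PNG magic bytes
```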
meejah/txtorcon
7da6ad6f91c395951be1b4e7e1011baa2f7a689f
examples/launch_tor_with_simplehttpd.py
python
setup_complete
(config, port, proto)
[]
def setup_complete(config, port, proto): # Callback from twisted when tor has booted. # We create a reference to this function via functools.partial that # provides us with a reference to 'config' and 'port', twisted then adds # the 'proto' argument print('\nTor is now running. The hidden service is available at') print('\n\thttp://%s:%i\n' % (config.HiddenServices[0].hostname, port)) # This is probably more secure than any other httpd... print('## DO NOT RELY ON THIS SERVER TO TRANSFER FILES IN A SECURE WAY ##')
[ "def", "setup_complete", "(", "config", ",", "port", ",", "proto", ")", ":", "# Callback from twisted when tor has booted.", "# We create a reference to this function via functools.partial that", "# provides us with a reference to 'config' and 'port', twisted then adds", "# the 'proto' argument", "print", "(", "'\\nTor is now running. The hidden service is available at'", ")", "print", "(", "'\\n\\thttp://%s:%i\\n'", "%", "(", "config", ".", "HiddenServices", "[", "0", "]", ".", "hostname", ",", "port", ")", ")", "# This is probably more secure than any other httpd...", "print", "(", "'## DO NOT RELY ON THIS SERVER TO TRANSFER FILES IN A SECURE WAY ##'", ")" ]
https://github.com/meejah/txtorcon/blob/7da6ad6f91c395951be1b4e7e1011baa2f7a689f/examples/launch_tor_with_simplehttpd.py#L51-L59
YoYo000/MVSNet
3ae2cb2b72c6df58ebcb321d7d243d4efd01fbc5
cnn_wrapper/network.py
python
Network.conv
(self, input_tensor, kernel_size, filters, strides, name, relu=True, dilation_rate=1, padding=DEFAULT_PADDING, biased=True, reuse=False, separable=False)
2D/3D convolution.
2D/3D convolution.
[ "2D", "/", "3D", "convolution", "." ]
def conv(self, input_tensor, kernel_size, filters, strides, name, relu=True, dilation_rate=1, padding=DEFAULT_PADDING, biased=True, reuse=False, separable=False): """2D/3D convolution.""" kwargs = {'filters': filters, 'kernel_size': kernel_size, 'strides': strides, 'activation': tf.nn.relu if relu else None, 'use_bias': biased, 'dilation_rate': dilation_rate, 'trainable': self.trainable, 'reuse': self.reuse or reuse, 'bias_regularizer': self.regularizer if biased else None, 'name': name, 'padding': padding} if separable: kwargs['depthwise_regularizer'] = self.regularizer kwargs['pointwise_regularizer'] = self.regularizer else: kwargs['kernel_regularizer'] = self.regularizer if len(input_tensor.get_shape()) == 4: if not separable: return tf.layers.conv2d(input_tensor, **kwargs) else: return tf.layers.separable_conv2d(input_tensor, **kwargs) elif len(input_tensor.get_shape()) == 5: if not separable: return tf.layers.conv3d(input_tensor, **kwargs) else: raise NotImplementedError('No official implementation for separable_conv3d') else: raise ValueError('Improper input rank for layer: ' + name)
[ "def", "conv", "(", "self", ",", "input_tensor", ",", "kernel_size", ",", "filters", ",", "strides", ",", "name", ",", "relu", "=", "True", ",", "dilation_rate", "=", "1", ",", "padding", "=", "DEFAULT_PADDING", ",", "biased", "=", "True", ",", "reuse", "=", "False", ",", "separable", "=", "False", ")", ":", "kwargs", "=", "{", "'filters'", ":", "filters", ",", "'kernel_size'", ":", "kernel_size", ",", "'strides'", ":", "strides", ",", "'activation'", ":", "tf", ".", "nn", ".", "relu", "if", "relu", "else", "None", ",", "'use_bias'", ":", "biased", ",", "'dilation_rate'", ":", "dilation_rate", ",", "'trainable'", ":", "self", ".", "trainable", ",", "'reuse'", ":", "self", ".", "reuse", "or", "reuse", ",", "'bias_regularizer'", ":", "self", ".", "regularizer", "if", "biased", "else", "None", ",", "'name'", ":", "name", ",", "'padding'", ":", "padding", "}", "if", "separable", ":", "kwargs", "[", "'depthwise_regularizer'", "]", "=", "self", ".", "regularizer", "kwargs", "[", "'pointwise_regularizer'", "]", "=", "self", ".", "regularizer", "else", ":", "kwargs", "[", "'kernel_regularizer'", "]", "=", "self", ".", "regularizer", "if", "len", "(", "input_tensor", ".", "get_shape", "(", ")", ")", "==", "4", ":", "if", "not", "separable", ":", "return", "tf", ".", "layers", ".", "conv2d", "(", "input_tensor", ",", "*", "*", "kwargs", ")", "else", ":", "return", "tf", ".", "layers", ".", "separable_conv2d", "(", "input_tensor", ",", "*", "*", "kwargs", ")", "elif", "len", "(", "input_tensor", ".", "get_shape", "(", ")", ")", "==", "5", ":", "if", "not", "separable", ":", "return", "tf", ".", "layers", ".", "conv3d", "(", "input_tensor", ",", "*", "*", "kwargs", ")", "else", ":", "raise", "NotImplementedError", "(", "'No official implementation for separable_conv3d'", ")", "else", ":", "raise", "ValueError", "(", "'Improper input rank for layer: '", "+", "name", ")" ]
https://github.com/YoYo000/MVSNet/blob/3ae2cb2b72c6df58ebcb321d7d243d4efd01fbc5/cnn_wrapper/network.py#L160-L202
tav/pylibs
3c16b843681f54130ee6a022275289cadb2f2a69
genshi/template/base.py
python
Context.keys
(self)
return keys
Return the names of all variables in the context. :return: a list of variable names
Return the names of all variables in the context. :return: a list of variable names
[ "Return", "the", "name", "of", "all", "variables", "in", "the", "context", ".", ":", "return", ":", "a", "list", "of", "variable", "names" ]
def keys(self): """Return the name of all variables in the context. :return: a list of variable names """ keys = [] for frame in self.frames: keys += [key for key in frame if key not in keys] return keys
[ "def", "keys", "(", "self", ")", ":", "keys", "=", "[", "]", "for", "frame", "in", "self", ".", "frames", ":", "keys", "+=", "[", "key", "for", "key", "in", "frame", "if", "key", "not", "in", "keys", "]", "return", "keys" ]
https://github.com/tav/pylibs/blob/3c16b843681f54130ee6a022275289cadb2f2a69/genshi/template/base.py#L219-L227
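The dedup-across-frames behavior in one short run (genshi's `Context` also exposes `push`/`pop` for frame management):

```python
from genshi.template.base import Context

ctxt = Context(title="Hello")
ctxt.push({"user": "alice", "title": "Shadowed"})
print(ctxt.keys())   # each name listed once, e.g. ['user', 'title']
```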
moloch--/RootTheBox
097272332b9f9b7e2df31ca0823ed10c7b66ac81
handlers/AdminHandlers/AdminGameObjectHandlers.py
python
AdminCreateHandler.add_attachments
(self, flag)
Add uploaded files as attachments to flags
Add uploaded files as attachments to flags
[ "Add", "uploaded", "files", "as", "attachments", "to", "flags" ]
def add_attachments(self, flag): """ Add uploaded files as attachments to flags """ if hasattr(self.request, "files"): if "flag" not in self.request.files: return for attachment in self.request.files["flag"]: flag_attachment = FlagAttachment(file_name=attachment["filename"]) flag_attachment.data = attachment["body"] flag.flag_attachments.append(flag_attachment) self.dbsession.add(flag_attachment) self.dbsession.flush()
[ "def", "add_attachments", "(", "self", ",", "flag", ")", ":", "if", "hasattr", "(", "self", ".", "request", ",", "\"files\"", ")", ":", "if", "\"flag\"", "not", "in", "self", ".", "request", ".", "files", ":", "return", "for", "attachment", "in", "self", ".", "request", ".", "files", "[", "\"flag\"", "]", ":", "flag_attachment", "=", "FlagAttachment", "(", "file_name", "=", "attachment", "[", "\"filename\"", "]", ")", "flag_attachment", ".", "data", "=", "attachment", "[", "\"body\"", "]", "flag", ".", "flag_attachments", ".", "append", "(", "flag_attachment", ")", "self", ".", "dbsession", ".", "add", "(", "flag_attachment", ")", "self", ".", "dbsession", ".", "flush", "(", ")" ]
https://github.com/moloch--/RootTheBox/blob/097272332b9f9b7e2df31ca0823ed10c7b66ac81/handlers/AdminHandlers/AdminGameObjectHandlers.py#L361-L371
projecthamster/hamster
19d160090de30e756bdc3122ff935bdaa86e2843
waflib/Tools/qt5.py
python
rcc.scan
(self)
return (nodes, names)
Parse the *.qrc* files
Parse the *.qrc* files
[ "Parse", "the", "*", ".", "qrc", "*", "files" ]
def scan(self): """Parse the *.qrc* files""" if not has_xml: Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') return ([], []) parser = make_parser() curHandler = XMLHandler() parser.setContentHandler(curHandler) with open(self.inputs[0].abspath(), 'r') as f: parser.parse(f) nodes = [] names = [] root = self.inputs[0].parent for x in curHandler.files: nd = root.find_resource(x) if nd: nodes.append(nd) else: names.append(x) return (nodes, names)
[ "def", "scan", "(", "self", ")", ":", "if", "not", "has_xml", ":", "Logs", ".", "error", "(", "'No xml.sax support was found, rcc dependencies will be incomplete!'", ")", "return", "(", "[", "]", ",", "[", "]", ")", "parser", "=", "make_parser", "(", ")", "curHandler", "=", "XMLHandler", "(", ")", "parser", ".", "setContentHandler", "(", "curHandler", ")", "with", "open", "(", "self", ".", "inputs", "[", "0", "]", ".", "abspath", "(", ")", ",", "'r'", ")", "as", "f", ":", "parser", ".", "parse", "(", "f", ")", "nodes", "=", "[", "]", "names", "=", "[", "]", "root", "=", "self", ".", "inputs", "[", "0", "]", ".", "parent", "for", "x", "in", "curHandler", ".", "files", ":", "nd", "=", "root", ".", "find_resource", "(", "x", ")", "if", "nd", ":", "nodes", ".", "append", "(", "nd", ")", "else", ":", "names", ".", "append", "(", "x", ")", "return", "(", "nodes", ",", "names", ")" ]
https://github.com/projecthamster/hamster/blob/19d160090de30e756bdc3122ff935bdaa86e2843/waflib/Tools/qt5.py#L379-L400
DexterInd/BrickPi
0f4b64c03d734f31e07d4d462cc5347ed2fea54a
Software/BrickPi_Python/Contrib/ThreadSafeBrickPi/ThreadSafeBrickPi.py
python
BrickPiSensor.callback_setup
(self, stage)
return -1, 0, 0, 0
[]
def callback_setup(self, stage): return -1, 0, 0, 0
[ "def", "callback_setup", "(", "self", ",", "stage", ")", ":", "return", "-", "1", ",", "0", ",", "0", ",", "0" ]
https://github.com/DexterInd/BrickPi/blob/0f4b64c03d734f31e07d4d462cc5347ed2fea54a/Software/BrickPi_Python/Contrib/ThreadSafeBrickPi/ThreadSafeBrickPi.py#L726-L727
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/plugins/audit/global_redirect.py
python
global_redirect._javascript_redirect
(self, response)
return False
Test for JavaScript redirects, these are some common redirects: // These also work without the `window.` at the beginning window.location = "http://www.w3af.org/"; window.location.href = "http://www.w3af.org/"; window.location.replace("http://www.w3af.org"); window.location.assign('http://www.w3af.org'); self.location = 'http://www.w3af.org'; top.location = 'http://www.w3af.org'; // jQuery $(location).attr('href', 'http://www.w3af.org'); $(window).attr('location', 'http://www.w3af.org'); $(location).prop('href', 'http://www.w3af.org'); // Only for old IE window.navigate('http://www.w3af.org');
Test for JavaScript redirects, these are some common redirects:
[ "Test", "for", "JavaScript", "redirects", "these", "are", "some", "common", "redirects", ":" ]
def _javascript_redirect(self, response): """ Test for JavaScript redirects, these are some common redirects: // These also work without the `window.` at the beginning window.location = "http://www.w3af.org/"; window.location.href = "http://www.w3af.org/"; window.location.replace("http://www.w3af.org"); window.location.assign('http://www.w3af.org'); self.location = 'http://www.w3af.org'; top.location = 'http://www.w3af.org'; // jQuery $(location).attr('href', 'http://www.w3af.org'); $(window).attr('location', 'http://www.w3af.org'); $(location).prop('href', 'http://www.w3af.org'); // Only for old IE window.navigate('http://www.w3af.org'); """ for statement in self._extract_script_code(response): if self.TEST_DOMAIN not in statement: continue for redir_to_test_domain_re in self.REDIR_TO_TEST_DOMAIN_JS_RE: if redir_to_test_domain_re.search(statement): return True return False
[ "def", "_javascript_redirect", "(", "self", ",", "response", ")", ":", "for", "statement", "in", "self", ".", "_extract_script_code", "(", "response", ")", ":", "if", "self", ".", "TEST_DOMAIN", "not", "in", "statement", ":", "continue", "for", "redir_to_test_domain_re", "in", "self", ".", "REDIR_TO_TEST_DOMAIN_JS_RE", ":", "if", "redir_to_test_domain_re", ".", "search", "(", "statement", ")", ":", "return", "True", "return", "False" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/audit/global_redirect.py#L293-L322
openstack/nova
b49b7663e1c3073917d5844b81d38db8e86d05c4
nova/virt/driver.py
python
ComputeDriver.manages_network_binding_host_id
(self)
return False
Compute driver manages port bindings. Used to indicate whether or not the compute driver is responsible for managing port binding details, such as the host_id. By default the ComputeManager will manage port bindings and the host_id associated with a binding using the network API. However, some backends, like Ironic, will manage the port binding host_id out-of-band and the compute service should not override what is set by the backing hypervisor.
Compute driver manages port bindings.
[ "Compute", "driver", "manages", "port", "bindings", "." ]
def manages_network_binding_host_id(self): """Compute driver manages port bindings. Used to indicate whether or not the compute driver is responsible for managing port binding details, such as the host_id. By default the ComputeManager will manage port bindings and the host_id associated with a binding using the network API. However, some backends, like Ironic, will manage the port binding host_id out-of-band and the compute service should not override what is set by the backing hypervisor. """ return False
[ "def", "manages_network_binding_host_id", "(", "self", ")", ":", "return", "False" ]
https://github.com/openstack/nova/blob/b49b7663e1c3073917d5844b81d38db8e86d05c4/nova/virt/driver.py#L1761-L1772
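A sketch of a driver opting out, in the spirit of the Ironic driver the docstring mentions:

```python
class MyBareMetalDriver(ComputeDriver):
    """Hypothetical driver whose backend binds ports out-of-band."""

    def manages_network_binding_host_id(self):
        return True   # ComputeManager will leave binding host_id alone
```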
jupyterhub/jupyterhub
e58cf0670690d11631f9dc6e1ab702c60f7bfd13
jupyterhub/utils.py
python
auth_decorator
(check_auth)
return decorator
Make an authentication decorator. I heard you like decorators, so I put a decorator in your decorator, so you can decorate while you decorate.
Make an authentication decorator.
[ "Make", "an", "authentication", "decorator", "." ]
def auth_decorator(check_auth): """Make an authentication decorator. I heard you like decorators, so I put a decorator in your decorator, so you can decorate while you decorate. """ def decorator(method): def decorated(self, *args, **kwargs): check_auth(self, **kwargs) return method(self, *args, **kwargs) # Perhaps replace with functools.wrap decorated.__name__ = method.__name__ decorated.__doc__ = method.__doc__ return decorated decorator.__name__ = check_auth.__name__ decorator.__doc__ = check_auth.__doc__ return decorator
[ "def", "auth_decorator", "(", "check_auth", ")", ":", "def", "decorator", "(", "method", ")", ":", "def", "decorated", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "check_auth", "(", "self", ",", "*", "*", "kwargs", ")", "return", "method", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "# Perhaps replace with functools.wrap", "decorated", ".", "__name__", "=", "method", ".", "__name__", "decorated", ".", "__doc__", "=", "method", ".", "__doc__", "return", "decorated", "decorator", ".", "__name__", "=", "check_auth", ".", "__name__", "decorator", ".", "__doc__", "=", "check_auth", ".", "__doc__", "return", "decorator" ]
https://github.com/jupyterhub/jupyterhub/blob/e58cf0670690d11631f9dc6e1ab702c60f7bfd13/jupyterhub/utils.py#L252-L271
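A sketch of the intended use (JupyterHub defines checkers like `admin_only` this way; the handler class and its `current_user` shape are assumptions):

```python
from tornado import web

@auth_decorator
def admin_only(self, **kwargs):
    """Check that the current user is an admin."""
    if self.current_user is None or not self.current_user.admin:
        raise web.HTTPError(403)

class AdminHandler(web.RequestHandler):   # stand-in for a hub handler
    @admin_only
    def get(self):
        self.write("admins only")
```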
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/plugins/attack/db/sqlmap/lib/core/target.py
python
setupTargetEnv
()
[]
def setupTargetEnv(): _createTargetDirs() _setRequestParams() _setHashDB() _resumeHashDBValues() _setResultsFile() _setAuthCred()
[ "def", "setupTargetEnv", "(", ")", ":", "_createTargetDirs", "(", ")", "_setRequestParams", "(", ")", "_setHashDB", "(", ")", "_resumeHashDBValues", "(", ")", "_setResultsFile", "(", ")", "_setAuthCred", "(", ")" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/attack/db/sqlmap/lib/core/target.py#L737-L743
DLR-RM/stable-baselines3
e9a8979022d7005560d43b7a9c1dc1ba85f7989a
stable_baselines3/common/logger.py
python
TensorBoardOutputFormat.close
(self)
closes the file
closes the file
[ "closes", "the", "file" ]
def close(self) -> None: """ closes the file """ if self.writer: self.writer.close() self.writer = None
[ "def", "close", "(", "self", ")", "->", "None", ":", "if", "self", ".", "writer", ":", "self", ".", "writer", ".", "close", "(", ")", "self", ".", "writer", "=", "None" ]
https://github.com/DLR-RM/stable-baselines3/blob/e9a8979022d7005560d43b7a9c1dc1ba85f7989a/stable_baselines3/common/logger.py#L368-L374
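Because `close` nulls out the writer, calling it twice is safe; a short sketch (the log directory is arbitrary):

```python
from stable_baselines3.common.logger import TensorBoardOutputFormat

out = TensorBoardOutputFormat("./tb_logs")
# ... out.write(...) calls during training would go here ...
out.close()   # flushes and releases the SummaryWriter
out.close()   # no-op: self.writer is already None
```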
4shadoww/hakkuframework
409a11fc3819d251f86faa3473439f8c19066a21
lib/dns/update.py
python
UpdateMessage.__init__
(self, zone=None, rdclass=dns.rdataclass.IN, keyring=None, keyname=None, keyalgorithm=dns.tsig.default_algorithm, id=None)
Initialize a new DNS Update object. See the documentation of the Message class for a complete description of the keyring dictionary. *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone which is being updated. ``None`` should only be used by dnspython's message constructors, as a zone is required for the convenience methods like ``add()``, ``replace()``, etc. *rdclass*, an ``int`` or ``str``, the class of the zone. The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to ``use_tsig()``; see its documentation for details.
Initialize a new DNS Update object.
[ "Initialize", "a", "new", "DNS", "Update", "object", "." ]
def __init__(self, zone=None, rdclass=dns.rdataclass.IN, keyring=None, keyname=None, keyalgorithm=dns.tsig.default_algorithm, id=None): """Initialize a new DNS Update object. See the documentation of the Message class for a complete description of the keyring dictionary. *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone which is being updated. ``None`` should only be used by dnspython's message constructors, as a zone is required for the convenience methods like ``add()``, ``replace()``, etc. *rdclass*, an ``int`` or ``str``, the class of the zone. The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to ``use_tsig()``; see its documentation for details. """ super().__init__(id=id) self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) if isinstance(zone, str): zone = dns.name.from_text(zone) self.origin = zone rdclass = dns.rdataclass.RdataClass.make(rdclass) self.zone_rdclass = rdclass if self.origin: self.find_rrset(self.zone, self.origin, rdclass, dns.rdatatype.SOA, create=True, force_unique=True) if keyring is not None: self.use_tsig(keyring, keyname, algorithm=keyalgorithm)
[ "def", "__init__", "(", "self", ",", "zone", "=", "None", ",", "rdclass", "=", "dns", ".", "rdataclass", ".", "IN", ",", "keyring", "=", "None", ",", "keyname", "=", "None", ",", "keyalgorithm", "=", "dns", ".", "tsig", ".", "default_algorithm", ",", "id", "=", "None", ")", ":", "super", "(", ")", ".", "__init__", "(", "id", "=", "id", ")", "self", ".", "flags", "|=", "dns", ".", "opcode", ".", "to_flags", "(", "dns", ".", "opcode", ".", "UPDATE", ")", "if", "isinstance", "(", "zone", ",", "str", ")", ":", "zone", "=", "dns", ".", "name", ".", "from_text", "(", "zone", ")", "self", ".", "origin", "=", "zone", "rdclass", "=", "dns", ".", "rdataclass", ".", "RdataClass", ".", "make", "(", "rdclass", ")", "self", ".", "zone_rdclass", "=", "rdclass", "if", "self", ".", "origin", ":", "self", ".", "find_rrset", "(", "self", ".", "zone", ",", "self", ".", "origin", ",", "rdclass", ",", "dns", ".", "rdatatype", ".", "SOA", ",", "create", "=", "True", ",", "force_unique", "=", "True", ")", "if", "keyring", "is", "not", "None", ":", "self", ".", "use_tsig", "(", "keyring", ",", "keyname", ",", "algorithm", "=", "keyalgorithm", ")" ]
https://github.com/4shadoww/hakkuframework/blob/409a11fc3819d251f86faa3473439f8c19066a21/lib/dns/update.py#L46-L75
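A sketch of a dynamic update built on this constructor (`dns.update.Update` is the conventional alias; the server address and records are invented):

```python
import dns.query
import dns.update

update = dns.update.Update("example.com")          # zone under update
update.replace("www", 300, "A", "203.0.113.10")    # convenience method
response = dns.query.tcp(update, "198.51.100.1")   # send to the primary
print(response.rcode())
```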
KvasirSecurity/Kvasir
a5b3775184a8343240e1154a1f762f75df04dc0a
modules/zenmapCore_Kvasir/ScriptArgsParser.py
python
parse_string
(s, start)
Parses a single string that is quoted, unquoted or empty. It returns the found string along with the next starting position
Parses a single string that is quoted, unquoted or empty. It returns the found string along with the next starting position
[ "Parses", "a", "single", "string", "that", "is", "quoted", "unquoted", "or", "empty", ".", "It", "returns", "the", "found", "string", "along", "with", "the", "next", "starting", "position" ]
def parse_string(s, start): """Parses a single string that is quoted, unquoted or empty. It returns the found string along with the next starting position """ for pattern in unquoted_re, quoted_re, empty_re: m = pattern.match(s, start) or quoted_re.match(s, start) if m: return m.group(1), m.end(1) raise ValueError("No string found at %s." % repr(s[start:]))
[ "def", "parse_string", "(", "s", ",", "start", ")", ":", "for", "pattern", "in", "unquoted_re", ",", "quoted_re", ",", "empty_re", ":", "m", "=", "pattern", ".", "match", "(", "s", ",", "start", ")", "or", "quoted_re", ".", "match", "(", "s", ",", "start", ")", "if", "m", ":", "return", "m", ".", "group", "(", "1", ")", ",", "m", ".", "end", "(", "1", ")", "raise", "ValueError", "(", "\"No string found at %s.\"", "%", "repr", "(", "s", "[", "start", ":", "]", ")", ")" ]
https://github.com/KvasirSecurity/Kvasir/blob/a5b3775184a8343240e1154a1f762f75df04dc0a/modules/zenmapCore_Kvasir/ScriptArgsParser.py#L134-L141
getsentry/zeus
6d4a490c19ebe406b551641a022ca08f26c21fcb
zeus/api/resources/change_request_index.py
python
ChangeRequestIndexResource.get
(self)
return self.paginate_with_schema(schema, query)
Return a list of change requests.
Return a list of change requests.
[ "Return", "a", "list", "of", "change", "requests", "." ]
def get(self): """ Return a list of change requests. """ tenant = auth.get_current_tenant() if not tenant.repository_ids: return self.respond([]) query = ( ChangeRequest.query.options( joinedload("head_revision"), joinedload("parent_revision"), subqueryload_all("authors"), ) .filter(ChangeRequest.repository_id.in_(tenant.repository_ids)) .order_by(ChangeRequest.date_created.desc()) ) user = request.args.get("user") if user: if user == "me": user = auth.get_current_user() else: user = User.query.get(user) if not user: return self.respond([]) query = query.filter( ChangeRequest.author_id.in_( db.session.query(Author.id).filter( Author.email.in_( db.session.query(Email.email).filter( Email.user_id == user.id, Email.verified == True # NOQA ) ) ) ) ) repository = request.args.get("repository") if repository: repo = Repository.from_full_name(repository) if not repo: return self.respond([]) query = query.filter(ChangeRequest.repository_id == repo.id) schema = ChangeRequestWithBuildSchema(many=True) return self.paginate_with_schema(schema, query)
[ "def", "get", "(", "self", ")", ":", "tenant", "=", "auth", ".", "get_current_tenant", "(", ")", "if", "not", "tenant", ".", "repository_ids", ":", "return", "self", ".", "respond", "(", "[", "]", ")", "query", "=", "(", "ChangeRequest", ".", "query", ".", "options", "(", "joinedload", "(", "\"head_revision\"", ")", ",", "joinedload", "(", "\"parent_revision\"", ")", ",", "subqueryload_all", "(", "\"authors\"", ")", ",", ")", ".", "filter", "(", "ChangeRequest", ".", "repository_id", ".", "in_", "(", "tenant", ".", "repository_ids", ")", ")", ".", "order_by", "(", "ChangeRequest", ".", "date_created", ".", "desc", "(", ")", ")", ")", "user", "=", "request", ".", "args", ".", "get", "(", "\"user\"", ")", "if", "user", ":", "if", "user", "==", "\"me\"", ":", "user", "=", "auth", ".", "get_current_user", "(", ")", "else", ":", "user", "=", "User", ".", "query", ".", "get", "(", "user", ")", "if", "not", "user", ":", "return", "self", ".", "respond", "(", "[", "]", ")", "query", "=", "query", ".", "filter", "(", "ChangeRequest", ".", "author_id", ".", "in_", "(", "db", ".", "session", ".", "query", "(", "Author", ".", "id", ")", ".", "filter", "(", "Author", ".", "email", ".", "in_", "(", "db", ".", "session", ".", "query", "(", "Email", ".", "email", ")", ".", "filter", "(", "Email", ".", "user_id", "==", "user", ".", "id", ",", "Email", ".", "verified", "==", "True", "# NOQA", ")", ")", ")", ")", ")", "repository", "=", "request", ".", "args", ".", "get", "(", "\"repository\"", ")", "if", "repository", ":", "repo", "=", "Repository", ".", "from_full_name", "(", "repository", ")", "if", "not", "repo", ":", "return", "self", ".", "respond", "(", "[", "]", ")", "query", "=", "query", ".", "filter", "(", "ChangeRequest", ".", "repository_id", "==", "repo", ".", "id", ")", "schema", "=", "ChangeRequestWithBuildSchema", "(", "many", "=", "True", ")", "return", "self", ".", "paginate_with_schema", "(", "schema", ",", "query", ")" ]
https://github.com/getsentry/zeus/blob/6d4a490c19ebe406b551641a022ca08f26c21fcb/zeus/api/resources/change_request_index.py#L38-L83
ydkhatri/mac_apt
729630c8bbe7a73cce3ca330305d3301a919cb07
plugins/helpers/common.py
python
CommonFunctions.ReadMacAbsoluteTime
(mac_abs_time)
return ''
Returns datetime object, or empty string upon error
Returns datetime object, or empty string upon error
[ "Returns", "datetime", "object", "or", "empty", "string", "upon", "error" ]
def ReadMacAbsoluteTime(mac_abs_time): # Mac Absolute time is time epoch beginning 2001/1/1 '''Returns datetime object, or empty string upon error''' if mac_abs_time not in ( 0, None, ''): try: if isinstance(mac_abs_time, str): mac_abs_time = float(mac_abs_time) if mac_abs_time in (-63114076800, -63114076800000000000) : # MS & Python considers -63113904000 as 01-01-0001, Apple considers -63114076800 return datetime.datetime(1,1,1) if abs(mac_abs_time) > 0xFFFFFFFF: # more than 32 bits, this should be nano-second resolution timestamp (seen only in HighSierra) return datetime.datetime(2001, 1, 1) + datetime.timedelta(seconds=mac_abs_time/1000000000.) return datetime.datetime(2001, 1, 1) + datetime.timedelta(seconds=mac_abs_time) except (ValueError, OverflowError, TypeError) as ex: log.error("ReadMacAbsoluteTime() Failed to convert timestamp from value " + str(mac_abs_time) + " Error was: " + str(ex)) return ''
[ "def", "ReadMacAbsoluteTime", "(", "mac_abs_time", ")", ":", "# Mac Absolute time is time epoch beginning 2001/1/1", "if", "mac_abs_time", "not", "in", "(", "0", ",", "None", ",", "''", ")", ":", "try", ":", "if", "isinstance", "(", "mac_abs_time", ",", "str", ")", ":", "mac_abs_time", "=", "float", "(", "mac_abs_time", ")", "if", "mac_abs_time", "in", "(", "-", "63114076800", ",", "-", "63114076800000000000", ")", ":", "# MS & Python considers -63113904000 as 01-01-0001, Apple considers -63114076800", "return", "datetime", ".", "datetime", "(", "1", ",", "1", ",", "1", ")", "if", "abs", "(", "mac_abs_time", ")", ">", "0xFFFFFFFF", ":", "# more than 32 bits, this should be nano-second resolution timestamp (seen only in HighSierra)", "return", "datetime", ".", "datetime", "(", "2001", ",", "1", ",", "1", ")", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "mac_abs_time", "/", "1000000000.", ")", "return", "datetime", ".", "datetime", "(", "2001", ",", "1", ",", "1", ")", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "mac_abs_time", ")", "except", "(", "ValueError", ",", "OverflowError", ",", "TypeError", ")", "as", "ex", ":", "log", ".", "error", "(", "\"ReadMacAbsoluteTime() Failed to convert timestamp from value \"", "+", "str", "(", "mac_abs_time", ")", "+", "\" Error was: \"", "+", "str", "(", "ex", ")", ")", "return", "''" ]
https://github.com/ydkhatri/mac_apt/blob/729630c8bbe7a73cce3ca330305d3301a919cb07/plugins/helpers/common.py#L47-L60
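A quick check of the epoch math with the class above in scope (Mac absolute time counts seconds from 2001-01-01; values are illustrative):

```python
# 600,000,000 seconds past 2001-01-01 00:00:00:
print(CommonFunctions.ReadMacAbsoluteTime(600000000))
# -> 2020-01-06 10:40:00
print(CommonFunctions.ReadMacAbsoluteTime(0))   # -> '' (0 treated as unset)
```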
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/_heatmap.py
python
Heatmap.coloraxis
(self)
return self["coloraxis"]
Sets a reference to a shared color axis. References to these shared color axes are "coloraxis", "coloraxis2", "coloraxis3", etc. Settings for these shared color axes are set in the layout, under `layout.coloraxis`, `layout.coloraxis2`, etc. Note that multiple color scales can be linked to the same color axis. The 'coloraxis' property is an identifier of a particular subplot, of type 'coloraxis', that may be specified as the string 'coloraxis' optionally followed by an integer >= 1 (e.g. 'coloraxis', 'coloraxis1', 'coloraxis2', 'coloraxis3', etc.) Returns ------- str
Sets a reference to a shared color axis. References to these shared color axes are "coloraxis", "coloraxis2", "coloraxis3", etc. Settings for these shared color axes are set in the layout, under `layout.coloraxis`, `layout.coloraxis2`, etc. Note that multiple color scales can be linked to the same color axis. The 'coloraxis' property is an identifier of a particular subplot, of type 'coloraxis', that may be specified as the string 'coloraxis' optionally followed by an integer >= 1 (e.g. 'coloraxis', 'coloraxis1', 'coloraxis2', 'coloraxis3', etc.)
[ "Sets", "a", "reference", "to", "a", "shared", "color", "axis", ".", "References", "to", "these", "shared", "color", "axes", "are", "coloraxis", "coloraxis2", "coloraxis3", "etc", ".", "Settings", "for", "these", "shared", "color", "axes", "are", "set", "in", "the", "layout", "under", "layout", ".", "coloraxis", "layout", ".", "coloraxis2", "etc", ".", "Note", "that", "multiple", "color", "scales", "can", "be", "linked", "to", "the", "same", "color", "axis", ".", "The", "coloraxis", "property", "is", "an", "identifier", "of", "a", "particular", "subplot", "of", "type", "coloraxis", "that", "may", "be", "specified", "as", "the", "string", "coloraxis", "optionally", "followed", "by", "an", "integer", ">", "=", "1", "(", "e", ".", "g", ".", "coloraxis", "coloraxis1", "coloraxis2", "coloraxis3", "etc", ".", ")" ]
def coloraxis(self): """ Sets a reference to a shared color axis. References to these shared color axes are "coloraxis", "coloraxis2", "coloraxis3", etc. Settings for these shared color axes are set in the layout, under `layout.coloraxis`, `layout.coloraxis2`, etc. Note that multiple color scales can be linked to the same color axis. The 'coloraxis' property is an identifier of a particular subplot, of type 'coloraxis', that may be specified as the string 'coloraxis' optionally followed by an integer >= 1 (e.g. 'coloraxis', 'coloraxis1', 'coloraxis2', 'coloraxis3', etc.) Returns ------- str """ return self["coloraxis"]
[ "def", "coloraxis", "(", "self", ")", ":", "return", "self", "[", "\"coloraxis\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/_heatmap.py#L111-L129
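A minimal sketch of the linkage the docstring describes: two traces of different types reference the same shared color axis, and its settings live on the layout. The data values are illustrative only.

import plotly.graph_objects as go

fig = go.Figure()
# Both traces point at the same shared axis, so they share one color scale
# and one color bar.
fig.add_trace(go.Heatmap(z=[[1, 2], [3, 4]], coloraxis="coloraxis"))
fig.add_trace(go.Scatter(x=[0, 1], y=[0, 1], mode="markers",
                         marker={"color": [0, 5], "coloraxis": "coloraxis"}))
# Settings for the shared axis are set on the layout, under layout.coloraxis.
fig.update_layout(coloraxis={"colorscale": "Viridis"})
fig.show()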
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/IronPython/repackage/pip/pip/_vendor/distlib/locators.py
python
Page.__init__
(self, data, url)
Initialise an instance with the Unicode page contents and the URL they came from.
Initialise an instance with the Unicode page contents and the URL they came from.
[ "Initialise", "an", "instance", "with", "the", "Unicode", "page", "contents", "and", "the", "URL", "they", "came", "from", "." ]
def __init__(self, data, url): """ Initialise an instance with the Unicode page contents and the URL they came from. """ self.data = data self.base_url = self.url = url m = self._base.search(self.data) if m: self.base_url = m.group(1)
[ "def", "__init__", "(", "self", ",", "data", ",", "url", ")", ":", "self", ".", "data", "=", "data", "self", ".", "base_url", "=", "self", ".", "url", "=", "url", "m", "=", "self", ".", "_base", ".", "search", "(", "self", ".", "data", ")", "if", "m", ":", "self", ".", "base_url", "=", "m", ".", "group", "(", "1", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/IronPython/repackage/pip/pip/_vendor/distlib/locators.py#L512-L521
the4thdoctor/pg_chameleon
9d80212541559c8d0a42b3e7c1b2c67bb7606411
pg_chameleon/lib/pg_lib.py
python
pg_engine.reindex_table
(self, schema, table)
The method runs a REINDEX TABLE on the table defined by schema and name. :param schema: the table's schema :param table: the table's name
The method runs a REINDEX TABLE on the table defined by schema and name. :param schema: the table's schema :param table: the table's name
[ "The", "method", "run", "a", "REINDEX", "TABLE", "on", "the", "table", "defined", "by", "schema", "and", "name", ".", ":", "param", "schema", ":", "the", "table", "s", "schema", ":", "param", "table", ":", "the", "table", "s", "name" ]
def reindex_table(self, schema, table): """ The method run a REINDEX TABLE on the table defined by schema and name. :param schema: the table's schema :param table: the table's name """ sql_reindex = sql.SQL("REINDEX TABLE {}.{} ;").format(sql.Identifier(schema), sql.Identifier(table)) self.pgsql_cur.execute(sql_reindex)
[ "def", "reindex_table", "(", "self", ",", "schema", ",", "table", ")", ":", "sql_reindex", "=", "sql", ".", "SQL", "(", "\"REINDEX TABLE {}.{} ;\"", ")", ".", "format", "(", "sql", ".", "Identifier", "(", "schema", ")", ",", "sql", ".", "Identifier", "(", "table", ")", ")", "self", ".", "pgsql_cur", ".", "execute", "(", "sql_reindex", ")" ]
https://github.com/the4thdoctor/pg_chameleon/blob/9d80212541559c8d0a42b3e7c1b2c67bb7606411/pg_chameleon/lib/pg_lib.py#L3508-L3515
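The psycopg2 sql composition above keeps the schema and table names safely quoted instead of string-formatting them into the statement. A standalone sketch of the same pattern; the DSN, schema, and table names are hypothetical:

import psycopg2
from psycopg2 import sql

conn = psycopg2.connect("dbname=example")  # hypothetical DSN
cur = conn.cursor()
# sql.Identifier quotes each name, so mixed-case or unusual names stay safe.
reindex = sql.SQL("REINDEX TABLE {}.{} ;").format(
    sql.Identifier("my_schema"), sql.Identifier("my_table"))
cur.execute(reindex)
conn.commit()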
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-linux/x64/mako/lookup.py
python
TemplateLookup._load
(self, filename, uri)
[]
def _load(self, filename, uri): self._mutex.acquire() try: try: # try returning from collection one # more time in case concurrent thread already loaded return self._collection[uri] except KeyError: pass try: if self.modulename_callable is not None: module_filename = self.modulename_callable(filename, uri) else: module_filename = None self._collection[uri] = template = Template( uri=uri, filename=posixpath.normpath(filename), lookup=self, module_filename=module_filename, **self.template_args ) return template except: # if compilation fails etc, ensure # template is removed from collection, # re-raise self._collection.pop(uri, None) raise finally: self._mutex.release()
[ "def", "_load", "(", "self", ",", "filename", ",", "uri", ")", ":", "self", ".", "_mutex", ".", "acquire", "(", ")", "try", ":", "try", ":", "# try returning from collection one", "# more time in case concurrent thread already loaded", "return", "self", ".", "_collection", "[", "uri", "]", "except", "KeyError", ":", "pass", "try", ":", "if", "self", ".", "modulename_callable", "is", "not", "None", ":", "module_filename", "=", "self", ".", "modulename_callable", "(", "filename", ",", "uri", ")", "else", ":", "module_filename", "=", "None", "self", ".", "_collection", "[", "uri", "]", "=", "template", "=", "Template", "(", "uri", "=", "uri", ",", "filename", "=", "posixpath", ".", "normpath", "(", "filename", ")", ",", "lookup", "=", "self", ",", "module_filename", "=", "module_filename", ",", "*", "*", "self", ".", "template_args", ")", "return", "template", "except", ":", "# if compilation fails etc, ensure", "# template is removed from collection,", "# re-raise", "self", ".", "_collection", ".", "pop", "(", "uri", ",", "None", ")", "raise", "finally", ":", "self", ".", "_mutex", ".", "release", "(", ")" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/mako/lookup.py#L308-L337
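_load is the internal slow path behind the public TemplateLookup.get_template; callers normally never touch it directly. A minimal sketch of typical use, with a hypothetical template directory and file name:

from mako.lookup import TemplateLookup

lookup = TemplateLookup(directories=["/path/to/templates"])  # hypothetical path
template = lookup.get_template("hello.txt")  # _load runs on a cache miss
print(template.render(name="world"))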
shiweibsw/Translation-Tools
2fbbf902364e557fa7017f9a74a8797b7440c077
venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/six.py
python
_SixMetaPathImporter._add_module
(self, mod, *fullnames)
[]
def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod
[ "def", "_add_module", "(", "self", ",", "mod", ",", "*", "fullnames", ")", ":", "for", "fullname", "in", "fullnames", ":", "self", ".", "known_modules", "[", "self", ".", "name", "+", "\".\"", "+", "fullname", "]", "=", "mod" ]
https://github.com/shiweibsw/Translation-Tools/blob/2fbbf902364e557fa7017f9a74a8797b7440c077/venv/Lib/site-packages/pip-9.0.3-py3.6.egg/pip/_vendor/six.py#L177-L179
brython-dev/brython
9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3
www/src/Lib/optparse.py
python
OptionContainer.add_option
(self, *args, **kwargs)
return option
add_option(Option) add_option(opt_str, ..., kwarg=val, ...)
add_option(Option) add_option(opt_str, ..., kwarg=val, ...)
[ "add_option", "(", "Option", ")", "add_option", "(", "opt_str", "...", "kwarg", "=", "val", "...", ")" ]
def add_option(self, *args, **kwargs): """add_option(Option) add_option(opt_str, ..., kwarg=val, ...) """ if isinstance(args[0], str): option = self.option_class(*args, **kwargs) elif len(args) == 1 and not kwargs: option = args[0] if not isinstance(option, Option): raise TypeError("not an Option instance: %r" % option) else: raise TypeError("invalid arguments") self._check_conflict(option) self.option_list.append(option) option.container = self for opt in option._short_opts: self._short_opt[opt] = option for opt in option._long_opts: self._long_opt[opt] = option if option.dest is not None: # option has a dest, we need a default if option.default is not NO_DEFAULT: self.defaults[option.dest] = option.default elif option.dest not in self.defaults: self.defaults[option.dest] = None return option
[ "def", "add_option", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "args", "[", "0", "]", ",", "str", ")", ":", "option", "=", "self", ".", "option_class", "(", "*", "args", ",", "*", "*", "kwargs", ")", "elif", "len", "(", "args", ")", "==", "1", "and", "not", "kwargs", ":", "option", "=", "args", "[", "0", "]", "if", "not", "isinstance", "(", "option", ",", "Option", ")", ":", "raise", "TypeError", "(", "\"not an Option instance: %r\"", "%", "option", ")", "else", ":", "raise", "TypeError", "(", "\"invalid arguments\"", ")", "self", ".", "_check_conflict", "(", "option", ")", "self", ".", "option_list", ".", "append", "(", "option", ")", "option", ".", "container", "=", "self", "for", "opt", "in", "option", ".", "_short_opts", ":", "self", ".", "_short_opt", "[", "opt", "]", "=", "option", "for", "opt", "in", "option", ".", "_long_opts", ":", "self", ".", "_long_opt", "[", "opt", "]", "=", "option", "if", "option", ".", "dest", "is", "not", "None", ":", "# option has a dest, we need a default", "if", "option", ".", "default", "is", "not", "NO_DEFAULT", ":", "self", ".", "defaults", "[", "option", ".", "dest", "]", "=", "option", ".", "default", "elif", "option", ".", "dest", "not", "in", "self", ".", "defaults", ":", "self", ".", "defaults", "[", "option", ".", "dest", "]", "=", "None", "return", "option" ]
https://github.com/brython-dev/brython/blob/9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3/www/src/Lib/optparse.py#L995-L1023
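A short sketch showing both call styles from the docstring: the keyword form, where the parser builds the Option, and registration of a pre-built Option instance:

from optparse import Option, OptionParser

parser = OptionParser()
# add_option(opt_str, ..., kwarg=val, ...): the parser builds the Option.
parser.add_option("-q", "--quiet", action="store_false", dest="verbose",
                  default=True, help="suppress output")
# add_option(Option): register a pre-built Option instance directly.
parser.add_option(Option("-n", type="int", dest="count", default=1))
opts, args = parser.parse_args(["-q", "-n", "3"])
print(opts.verbose, opts.count)  # False 3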
quentinhardy/msdat
879377410f9063c58b67f4624c082faa26169e5d
Cleaner.py
python
runCleaner
(args)
Clean traces and logs
Clean traces and logs
[ "Clean", "traces", "and", "logs" ]
def runCleaner (args): ''' Clean traces and logs ''' logging.info("Cleaning files generated by PasswordGuesser ...") nbFileDeleted, nbFileToDelete = 0, 0 exts=[PASSWORD_EXTENSION_FILE] pathOfMsat = os.path.dirname(os.path.abspath(__file__)) for root, dirs, files in os.walk(pathOfMsat): for currentFile in files: logging.debug("Processing file: {0}".format(currentFile)) for ext in exts: if currentFile.lower().endswith(ext) : rep = input("Do you want to delete this file (Y for yes): {0}/{1}? ".format(root, currentFile)) if rep.replace('\n','') == 'Y' : os.remove(os.path.join(root, currentFile)) logging.info("Removing {0}/{1}".format(root, currentFile)) nbFileDeleted += 1 nbFileToDelete += 1 args['print'].goodNews("Finish: {0}/{1} file(s) deleted".format(nbFileDeleted, nbFileToDelete)) logging.info("Cleaning is finished")
[ "def", "runCleaner", "(", "args", ")", ":", "logging", ".", "info", "(", "\"Cleaning files generated by PasswordGuesser ...\"", ")", "nbFileDeleted", ",", "nbFileToDelete", "=", "0", ",", "0", "exts", "=", "[", "PASSWORD_EXTENSION_FILE", "]", "pathOfMsat", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "pathOfMsat", ")", ":", "for", "currentFile", "in", "files", ":", "logging", ".", "debug", "(", "\"Processing file: {0}\"", ".", "format", "(", "currentFile", ")", ")", "for", "ext", "in", "exts", ":", "if", "currentFile", ".", "lower", "(", ")", ".", "endswith", "(", "ext", ")", ":", "rep", "=", "input", "(", "\"Do you want to delete this file (Y for yes): {0}/{1}? \"", ".", "format", "(", "root", ",", "currentFile", ")", ")", "if", "rep", ".", "replace", "(", "'\\n'", ",", "''", ")", "==", "'Y'", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "root", ",", "currentFile", ")", ")", "logging", ".", "info", "(", "\"Removing {0}/{1}\"", ".", "format", "(", "root", ",", "currentFile", ")", ")", "nbFileDeleted", "+=", "1", "nbFileToDelete", "+=", "1", "args", "[", "'print'", "]", ".", "goodNews", "(", "\"Finish: {0}/{1} file(s) deleted\"", ".", "format", "(", "nbFileDeleted", ",", "nbFileToDelete", ")", ")", "logging", ".", "info", "(", "\"Cleaning is finished\"", ")" ]
https://github.com/quentinhardy/msdat/blob/879377410f9063c58b67f4624c082faa26169e5d/Cleaner.py#L4-L24
WerWolv/EdiZon_CheatsConfigsAndScripts
d16d36c7509c01dca770f402babd83ff2e9ae6e7
Scripts/lib/python3.5/optparse.py
python
OptionParser.disable_interspersed_args
(self)
Set parsing to stop on the first non-option. Use this if you have a command processor which runs another command that has options of its own and you want to make sure these options don't get confused.
Set parsing to stop on the first non-option. Use this if you have a command processor which runs another command that has options of its own and you want to make sure these options don't get confused.
[ "Set", "parsing", "to", "stop", "on", "the", "first", "non", "-", "option", ".", "Use", "this", "if", "you", "have", "a", "command", "processor", "which", "runs", "another", "command", "that", "has", "options", "of", "its", "own", "and", "you", "want", "to", "make", "sure", "these", "options", "don", "t", "get", "confused", "." ]
def disable_interspersed_args(self): """Set parsing to stop on the first non-option. Use this if you have a command processor which runs another command that has options of its own and you want to make sure these options don't get confused. """ self.allow_interspersed_args = False
[ "def", "disable_interspersed_args", "(", "self", ")", ":", "self", ".", "allow_interspersed_args", "=", "False" ]
https://github.com/WerWolv/EdiZon_CheatsConfigsAndScripts/blob/d16d36c7509c01dca770f402babd83ff2e9ae6e7/Scripts/lib/python3.5/optparse.py#L1282-L1288
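A small sketch of the effect: once parsing reaches the first positional argument, everything after it, options included, is left alone for the wrapped command:

from optparse import OptionParser

parser = OptionParser()
parser.add_option("-v", action="store_true", dest="verbose")
parser.disable_interspersed_args()
# "-x" follows the first non-option ("run"), so it is not parsed here.
opts, args = parser.parse_args(["-v", "run", "-x", "file"])
print(opts.verbose, args)  # True ['run', '-x', 'file']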
wwqgtxx/wwqLyParse
33136508e52821babd9294fdecffbdf02d73a6fc
wwqLyParse/lib/fallback_lib_py352/asyncio/tasks.py
python
__sleep0
()
Skip one event loop run cycle. This is a private helper for 'asyncio.sleep()', used when the 'delay' is set to 0. It uses a bare 'yield' expression (which Task.__step knows how to handle) instead of creating a Future object.
Skip one event loop run cycle.
[ "Skip", "one", "event", "loop", "run", "cycle", "." ]
def __sleep0(): """Skip one event loop run cycle. This is a private helper for 'asyncio.sleep()', used when the 'delay' is set to 0. It uses a bare 'yield' expression (which Task.__step knows how to handle) instead of creating a Future object. """ yield
[ "def", "__sleep0", "(", ")", ":", "yield" ]
https://github.com/wwqgtxx/wwqLyParse/blob/33136508e52821babd9294fdecffbdf02d73a6fc/wwqLyParse/lib/fallback_lib_py352/asyncio/tasks.py#L540-L548
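The private helper above is reached through the public asyncio.sleep whenever the delay is 0. A minimal sketch of that path, written against the standard-library asyncio that this fallback module mirrors:

import asyncio

async def cooperative_worker():
    for i in range(3):
        print("step", i)
        # A delay of 0 takes the __sleep0 fast path: one bare yield,
        # no Future and no timer, just one trip through the event loop.
        await asyncio.sleep(0)

asyncio.run(cooperative_worker())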
tomplus/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
kubernetes_asyncio/client/api/apiregistration_v1_api.py
python
ApiregistrationV1Api.delete_collection_api_service
(self, **kwargs)
return self.delete_collection_api_service_with_http_info(**kwargs)
delete_collection_api_service # noqa: E501 delete collection of APIService # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_api_service(async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :param str resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param str resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param V1DeleteOptions body: :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: V1Status If the method is called asynchronously, returns the request thread.
delete_collection_api_service # noqa: E501
[ "delete_collection_api_service", "#", "noqa", ":", "E501" ]
def delete_collection_api_service(self, **kwargs): # noqa: E501 """delete_collection_api_service # noqa: E501 delete collection of APIService # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_api_service(async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :param str resource_version: resourceVersion sets a constraint on what resource versions a request may be served from. See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param str resource_version_match: resourceVersionMatch determines how resourceVersion is applied to list calls. It is highly recommended that resourceVersionMatch be set for list calls where resourceVersion is set See https://kubernetes.io/docs/reference/using-api/api-concepts/#resource-versions for details. Defaults to unset :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param V1DeleteOptions body: :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.delete_collection_api_service_with_http_info(**kwargs)
[ "def", "delete_collection_api_service", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "return", "self", ".", "delete_collection_api_service_with_http_info", "(", "*", "*", "kwargs", ")" ]
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/api/apiregistration_v1_api.py#L312-L347
AstroPrint/AstroBox
e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75
src/astroprint/printer/marlin/__init__.py
python
PrinterMarlin.isConnected
(self)
return (bool) (self._comm and self._comm.isOperational())
[]
def isConnected(self): return (bool) (self._comm and self._comm.isOperational())
[ "def", "isConnected", "(", "self", ")", ":", "return", "(", "bool", ")", "(", "self", ".", "_comm", "and", "self", ".", "_comm", ".", "isOperational", "(", ")", ")" ]
https://github.com/AstroPrint/AstroBox/blob/e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75/src/astroprint/printer/marlin/__init__.py#L657-L658
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/databases/cremona.py
python
LargeCremonaDatabase.degphi
(self, N)
return ret
Return the degphi table for conductor N. INPUT: - ``N`` - int, the conductor OUTPUT: - ``dict`` - id:degphi, ... EXAMPLES:: sage: c = CremonaDatabase() sage: c.degphi(11) # optional - database_cremona_ellcurve {'a1': 1} sage: c.degphi(12001)['c1'] # optional - database_cremona_ellcurve 1640
Return the degphi table for conductor N.
[ "Return", "the", "degphi", "table", "for", "conductor", "N", "." ]
def degphi(self, N): """ Return the degphi table for conductor N. INPUT: - ``N`` - int, the conductor OUTPUT: - ``dict`` - id:degphi, ... EXAMPLES:: sage: c = CremonaDatabase() sage: c.degphi(11) # optional - database_cremona_ellcurve {'a1': 1} sage: c.degphi(12001)['c1'] # optional - database_cremona_ellcurve 1640 """ ret = {} for c in self.__connection__.cursor().execute('SELECT curve,deg FROM' + ' t_curve,t_class USING(class) WHERE curve=class||1 AND ' + 'conductor=?', (int(N),)): N,iso,num = parse_cremona_label(c[0]) ret[iso+str(num)] = c[1] return ret
[ "def", "degphi", "(", "self", ",", "N", ")", ":", "ret", "=", "{", "}", "for", "c", "in", "self", ".", "__connection__", ".", "cursor", "(", ")", ".", "execute", "(", "'SELECT curve,deg FROM'", "+", "' t_curve,t_class USING(class) WHERE curve=class||1 AND '", "+", "'conductor=?'", ",", "(", "int", "(", "N", ")", ",", ")", ")", ":", "N", ",", "iso", ",", "num", "=", "parse_cremona_label", "(", "c", "[", "0", "]", ")", "ret", "[", "iso", "+", "str", "(", "num", ")", "]", "=", "c", "[", "1", "]", "return", "ret" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/databases/cremona.py#L1538-L1564
CvvT/dumpDex
92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1
python/idaapi.py
python
cdo_t.__init__
(self, *args)
__init__(self) -> cdo_t
__init__(self) -> cdo_t
[ "__init__", "(", "self", ")", "-", ">", "cdo_t" ]
def __init__(self, *args): """ __init__(self) -> cdo_t """ this = _idaapi.new_cdo_t(*args) try: self.this.append(this) except: self.this = this
[ "def", "__init__", "(", "self", ",", "*", "args", ")", ":", "this", "=", "_idaapi", ".", "new_cdo_t", "(", "*", "args", ")", "try", ":", "self", ".", "this", ".", "append", "(", "this", ")", "except", ":", "self", ".", "this", "=", "this" ]
https://github.com/CvvT/dumpDex/blob/92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1/python/idaapi.py#L38042-L38048
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/Python-2.7.9/Tools/iobench/iobench.py
python
with_open_mode
(mode)
return decorate
[]
def with_open_mode(mode): def decorate(f): f.file_open_mode = mode return f return decorate
[ "def", "with_open_mode", "(", "mode", ")", ":", "def", "decorate", "(", "f", ")", ":", "f", ".", "file_open_mode", "=", "mode", "return", "f", "return", "decorate" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/Python-2.7.9/Tools/iobench/iobench.py#L43-L47
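The decorator only tags the benchmark function; the harness later reads the attribute back to decide how to open the test file. A self-contained sketch of the pattern:

def with_open_mode(mode):
    def decorate(f):
        f.file_open_mode = mode
        return f
    return decorate

@with_open_mode("rb")
def read_all(f):
    return f.read()

# The harness inspects the tag rather than the function body:
print(read_all.file_open_mode)  # rb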
ladybug-tools/butterfly
c8fc0bbe317bb41bfe5f28305782a82347b8c776
butterfly/vectormath.py
python
determinant
(v, w)
return v[0] * w[1] - v[1] * w[0]
determinant.
determinant.
[ "determinant", "." ]
def determinant(v, w): """determinant.""" return v[0] * w[1] - v[1] * w[0]
[ "def", "determinant", "(", "v", ",", "w", ")", ":", "return", "v", "[", "0", "]", "*", "w", "[", "1", "]", "-", "v", "[", "1", "]", "*", "w", "[", "0", "]" ]
https://github.com/ladybug-tools/butterfly/blob/c8fc0bbe317bb41bfe5f28305782a82347b8c776/butterfly/vectormath.py#L25-L27
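For 2D vectors this is the scalar cross product, so its sign tells whether w lies counter-clockwise of v, clockwise of it, or parallel to it. A quick check:

def determinant(v, w):
    """determinant."""
    return v[0] * w[1] - v[1] * w[0]

print(determinant((1, 0), (0, 1)))   # 1: w is counter-clockwise of v
print(determinant((1, 0), (0, -1)))  # -1: w is clockwise of v
print(determinant((1, 0), (2, 0)))   # 0: v and w are parallel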
openstack/python-keystoneclient
100253d52e0c62dffffddb6f046ad660a9bce1a9
keystoneclient/common/cms.py
python
set_subprocess
(_subprocess=None)
Set subprocess module to use. The subprocess could be eventlet.green.subprocess if using eventlet, or Python's subprocess otherwise.
Set subprocess module to use.
[ "Set", "subprocess", "module", "to", "use", "." ]
def set_subprocess(_subprocess=None): """Set subprocess module to use. The subprocess could be eventlet.green.subprocess if using eventlet, or Python's subprocess otherwise. """ global subprocess subprocess = _subprocess
[ "def", "set_subprocess", "(", "_subprocess", "=", "None", ")", ":", "global", "subprocess", "subprocess", "=", "_subprocess" ]
https://github.com/openstack/python-keystoneclient/blob/100253d52e0c62dffffddb6f046ad660a9bce1a9/keystoneclient/common/cms.py#L76-L83
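A sketch of how a caller swaps the module in, per the docstring above; the eventlet variant is left commented out because it needs eventlet installed:

import subprocess

from keystoneclient.common import cms

# Default: the blocking standard-library module.
cms.set_subprocess(subprocess)

# Under eventlet, the green (cooperative) implementation goes in instead:
# from eventlet.green import subprocess as green_subprocess
# cms.set_subprocess(green_subprocess)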
SCSSoftware/BlenderTools
96f323d3bdf2d8cb8ed7f882dcdf036277a802dd
addon/io_scs_tools/internals/shaders/eut2/truckpaint/colormask.py
python
TruckpaintColormask.init
(node_tree)
Initialize node tree with links for this shader. :param node_tree: node tree on which this shader should be created :type node_tree: bpy.types.NodeTree
Initialize node tree with links for this shader.
[ "Initialize", "node", "tree", "with", "links", "for", "this", "shader", "." ]
def init(node_tree): """Initialize node tree with links for this shader. :param node_tree: node tree on which this shader should be created :type node_tree: bpy.types.NodeTree """ # init parent Truckpaint.init(node_tree) Truckpaint.init_colormask_or_airbrush(node_tree) blend_mix_n = node_tree.nodes[Truckpaint.BLEND_MIX_NODE] blend_mix_n.inputs['Fac'].default_value = 1.0 paint_diff_mult_n = node_tree.nodes[Truckpaint.PAINT_DIFFUSE_MULT_NODE] paint_diff_mult_n.inputs['Fac'].default_value = 1.0 paint_spec_mult_n = node_tree.nodes[Truckpaint.PAINT_SPECULAR_MULT_NODE] paint_spec_mult_n.inputs['Fac'].default_value = 1.0
[ "def", "init", "(", "node_tree", ")", ":", "# init parent", "Truckpaint", ".", "init", "(", "node_tree", ")", "Truckpaint", ".", "init_colormask_or_airbrush", "(", "node_tree", ")", "blend_mix_n", "=", "node_tree", ".", "nodes", "[", "Truckpaint", ".", "BLEND_MIX_NODE", "]", "blend_mix_n", ".", "inputs", "[", "'Fac'", "]", ".", "default_value", "=", "1.0", "paint_diff_mult_n", "=", "node_tree", ".", "nodes", "[", "Truckpaint", ".", "PAINT_DIFFUSE_MULT_NODE", "]", "paint_diff_mult_n", ".", "inputs", "[", "'Fac'", "]", ".", "default_value", "=", "1.0", "paint_spec_mult_n", "=", "node_tree", ".", "nodes", "[", "Truckpaint", ".", "PAINT_SPECULAR_MULT_NODE", "]", "paint_spec_mult_n", ".", "inputs", "[", "'Fac'", "]", ".", "default_value", "=", "1.0" ]
https://github.com/SCSSoftware/BlenderTools/blob/96f323d3bdf2d8cb8ed7f882dcdf036277a802dd/addon/io_scs_tools/internals/shaders/eut2/truckpaint/colormask.py#L31-L49
deanishe/alfred-fakeum
12a7e64d9c099c0f11416ee99fae064d6360aab2
src/libs/dateutil/parser/_parser.py
python
_timelex.isword
(cls, nextchar)
return nextchar.isalpha()
Whether or not the next character is part of a word
Whether or not the next character is part of a word
[ "Whether", "or", "not", "the", "next", "character", "is", "part", "of", "a", "word" ]
def isword(cls, nextchar): """ Whether or not the next character is part of a word """ return nextchar.isalpha()
[ "def", "isword", "(", "cls", ",", "nextchar", ")", ":", "return", "nextchar", ".", "isalpha", "(", ")" ]
https://github.com/deanishe/alfred-fakeum/blob/12a7e64d9c099c0f11416ee99fae064d6360aab2/src/libs/dateutil/parser/_parser.py#L210-L212
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/google/appengine/ext/mapreduce/api/map_job/map_job_config.py
python
JobConfig._get_default_mr_params
(cls)
return mr_params
Gets default values for old API.
Gets default values for old API.
[ "Gets", "default", "values", "for", "old", "API", "." ]
def _get_default_mr_params(cls): """Gets default values for old API.""" cfg = cls(_lenient=True) mr_params = cfg._get_mr_params() mr_params["api_version"] = 0 return mr_params
[ "def", "_get_default_mr_params", "(", "cls", ")", ":", "cfg", "=", "cls", "(", "_lenient", "=", "True", ")", "mr_params", "=", "cfg", ".", "_get_mr_params", "(", ")", "mr_params", "[", "\"api_version\"", "]", "=", "0", "return", "mr_params" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/google/appengine/ext/mapreduce/api/map_job/map_job_config.py#L156-L161
hacktoolkit/django-htk
902f3780630f1308aa97a70b9b62a5682239ff2d
apps/accounts/models.py
python
BaseAbstractUserProfile.get_nonprimary_emails
(self)
return user_emails
Returns a list of UserEmail objects associated with `self.user`, besides the primary email We can just get primary email from self.get_primary_email()
Returns a list of UserEmail objects associated with `self.user`, besides the primary email. We can just get the primary email from self.get_primary_email()
Returns a list of UserEmail objects associated with `self.user`, besides the primary email. We can just get the primary email from self.get_primary_email()
def get_nonprimary_emails(self): """Returns a list of UserEmail objects associated with `self.user`, besides the primary email We can just get primary email from self.get_primary_email() """ # TODO: cache this primary_email = self.get_primary_email() if primary_email: user_emails = self.user.emails.exclude(email=primary_email).order_by('-is_confirmed', 'id') else: user_emails = self.user.emails.order_by('-is_confirmed', 'id') return user_emails
[ "def", "get_nonprimary_emails", "(", "self", ")", ":", "# TODO: cache this", "primary_email", "=", "self", ".", "get_primary_email", "(", ")", "if", "primary_email", ":", "user_emails", "=", "self", ".", "user", ".", "emails", ".", "exclude", "(", "email", "=", "primary_email", ")", ".", "order_by", "(", "'-is_confirmed'", ",", "'id'", ")", "else", ":", "user_emails", "=", "self", ".", "user", ".", "emails", ".", "order_by", "(", "'-is_confirmed'", ",", "'id'", ")", "return", "user_emails" ]
https://github.com/hacktoolkit/django-htk/blob/902f3780630f1308aa97a70b9b62a5682239ff2d/apps/accounts/models.py#L216-L226
schodet/nxt-python
b434303f098d13677bceb664810f03e8c5057c82
nxt/brick.py
python
Brick.reset_motor_position
(self, port, relative)
Reset block or program motor position for a brick output port. :param nxt.motor.Port port: Output port identifier. :param bool relative: If ``True``, reset block position, if ``False``, reset program position. .. warning:: This is a low level function, prefer to use :meth:`nxt.motor.Motor`, you can get one from :meth:`get_motor`.
Reset block or program motor position for a brick output port.
[ "Reset", "block", "or", "program", "motor", "position", "for", "a", "brick", "output", "port", "." ]
def reset_motor_position(self, port, relative): """Reset block or program motor position for a brick output port. :param nxt.motor.Port port: Output port identifier. :param bool relative: If ``True``, reset block position, if ``False``, reset program position. .. warning:: This is a low level function, prefer to use :meth:`nxt.motor.Motor`, you can get one from :meth:`get_motor`. """ tgram = Telegram(Opcode.DIRECT_RESET_POSITION) tgram.add_u8(port.value) tgram.add_bool(relative) self._cmd(tgram)
[ "def", "reset_motor_position", "(", "self", ",", "port", ",", "relative", ")", ":", "tgram", "=", "Telegram", "(", "Opcode", ".", "DIRECT_RESET_POSITION", ")", "tgram", ".", "add_u8", "(", "port", ".", "value", ")", "tgram", ".", "add_bool", "(", "relative", ")", "self", ".", "_cmd", "(", "tgram", ")" ]
https://github.com/schodet/nxt-python/blob/b434303f098d13677bceb664810f03e8c5057c82/nxt/brick.py#L552-L565
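A minimal sketch of calling the documented low-level method directly; it assumes the nxt-python 3 locator API and a connected NXT brick:

import nxt.locator
from nxt.motor import Port

with nxt.locator.find() as brick:
    # Reset the program position for output port A.
    brick.reset_motor_position(Port.A, relative=False)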
biolab/orange3
41685e1c7b1d1babe680113685a2d44bcc9fec0b
Orange/base.py
python
SklLearner._initialize_wrapped
(self)
return self.__wraps__(**self.params)
[]
def _initialize_wrapped(self): # pylint: disable=not-callable return self.__wraps__(**self.params)
[ "def", "_initialize_wrapped", "(", "self", ")", ":", "# pylint: disable=not-callable", "return", "self", ".", "__wraps__", "(", "*", "*", "self", ".", "params", ")" ]
https://github.com/biolab/orange3/blob/41685e1c7b1d1babe680113685a2d44bcc9fec0b/Orange/base.py#L577-L579
compas-dev/compas
0b33f8786481f710115fb1ae5fe79abc2a9a5175
src/compas_rhino/utilities/drawing.py
python
wrap_drawfunc
(f)
return wrapper
Wraps all ``draw_`` functions with support for recurring keyword arguments.
Wraps all ``draw_`` functions with support for recurring keyword arguments.
[ "Wraps", "all", "draw_", "functions", "with", "support", "for", "recurring", "keyword", "arguments", "." ]
def wrap_drawfunc(f): """Wraps all ``draw_`` functions with support for recurring keyword arguments.""" @wraps(f) def wrapper(*args, **kwargs): layer = kwargs.get('layer', None) clear = kwargs.get('clear', False) redraw = kwargs.get('redraw', False) if layer: if not rs.IsLayer(layer): create_layers_from_path(layer) previous = rs.CurrentLayer(layer) if clear: if not layer: clear_current_layer() else: clear_layer(layer) rs.EnableRedraw(False) res = f(*args, **kwargs) if redraw: rs.EnableRedraw(True) if layer: rs.CurrentLayer(previous) return res return wrapper
[ "def", "wrap_drawfunc", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "layer", "=", "kwargs", ".", "get", "(", "'layer'", ",", "None", ")", "clear", "=", "kwargs", ".", "get", "(", "'clear'", ",", "False", ")", "redraw", "=", "kwargs", ".", "get", "(", "'redraw'", ",", "False", ")", "if", "layer", ":", "if", "not", "rs", ".", "IsLayer", "(", "layer", ")", ":", "create_layers_from_path", "(", "layer", ")", "previous", "=", "rs", ".", "CurrentLayer", "(", "layer", ")", "if", "clear", ":", "if", "not", "layer", ":", "clear_current_layer", "(", ")", "else", ":", "clear_layer", "(", "layer", ")", "rs", ".", "EnableRedraw", "(", "False", ")", "res", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "redraw", ":", "rs", ".", "EnableRedraw", "(", "True", ")", "if", "layer", ":", "rs", ".", "CurrentLayer", "(", "previous", ")", "return", "res", "return", "wrapper" ]
https://github.com/compas-dev/compas/blob/0b33f8786481f710115fb1ae5fe79abc2a9a5175/src/compas_rhino/utilities/drawing.py#L86-L109
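A sketch of applying the wrapper to a hypothetical draw function; it only runs inside Rhino, where rhinoscriptsyntax is available:

import rhinoscriptsyntax as rs
from compas_rhino.utilities.drawing import wrap_drawfunc

@wrap_drawfunc
def draw_point(xyz, **kwargs):
    # layer/clear/redraw are consumed by the wrapper, but stay visible here.
    return rs.AddPoint(xyz)

draw_point([0, 0, 0], layer="Markers", clear=True, redraw=True)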
ClusterHQ/flocker
eaa586248986d7cd681c99c948546c2b507e44de
flocker/provision/_ssh/_model.py
python
run_remotely
( username, address, commands, port=22, log_command_filter=identity)
return Effect(RunRemotely( username=username, address=address, commands=commands, port=port, log_command_filter=log_command_filter))
Run some commands on a remote host. :param bytes address: The address of the remote host to connect to. :param bytes username: The user to connect as. :param Effect commands: The commands to run. :param int port: The port of the ssh server to connect to. :param callable log_command_filter: A filter to apply to any logging of the executed command. :return Effect:
Run some commands on a remote host.
[ "Run", "some", "commands", "on", "a", "remote", "host", "." ]
def run_remotely( username, address, commands, port=22, log_command_filter=identity): """ Run some commands on a remote host. :param bytes address: The address of the remote host to connect to. :param bytes username: The user to connect as. :param Effect commands: The commands to run. :param int port: The port of the ssh server to connect to. :param callable log_command_filter: A filter to apply to any logging of the executed command. :return Effect: """ return Effect(RunRemotely( username=username, address=address, commands=commands, port=port, log_command_filter=log_command_filter))
[ "def", "run_remotely", "(", "username", ",", "address", ",", "commands", ",", "port", "=", "22", ",", "log_command_filter", "=", "identity", ")", ":", "return", "Effect", "(", "RunRemotely", "(", "username", "=", "username", ",", "address", "=", "address", ",", "commands", "=", "commands", ",", "port", "=", "port", ",", "log_command_filter", "=", "log_command_filter", ")", ")" ]
https://github.com/ClusterHQ/flocker/blob/eaa586248986d7cd681c99c948546c2b507e44de/flocker/provision/_ssh/_model.py#L37-L53
gwpy/gwpy
82becd78d166a32985cb657a54d0d39f6a207739
gwpy/plot/plot.py
python
Plot.get_axes
(self, projection=None)
return [ax for ax in self.axes if ax.name == projection.lower()]
Find all `Axes`, optionally matching the given projection Parameters ---------- projection : `str` name of axes types to return Returns ------- axlist : `list` of `~matplotlib.axes.Axes`
Find all `Axes`, optionally matching the given projection
[ "Find", "all", "Axes", "optionally", "matching", "the", "given", "projection" ]
def get_axes(self, projection=None): """Find all `Axes`, optionally matching the given projection Parameters ---------- projection : `str` name of axes types to return Returns ------- axlist : `list` of `~matplotlib.axes.Axes` """ if projection is None: return self.axes return [ax for ax in self.axes if ax.name == projection.lower()]
[ "def", "get_axes", "(", "self", ",", "projection", "=", "None", ")", ":", "if", "projection", "is", "None", ":", "return", "self", ".", "axes", "return", "[", "ax", "for", "ax", "in", "self", ".", "axes", "if", "ax", ".", "name", "==", "projection", ".", "lower", "(", ")", "]" ]
https://github.com/gwpy/gwpy/blob/82becd78d166a32985cb657a54d0d39f6a207739/gwpy/plot/plot.py#L313-L327
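A small sketch of the filter in action, assuming an empty Plot and matplotlib's stock polar projection:

from gwpy.plot import Plot

plot = Plot()
plot.add_subplot(1, 2, 1)                      # a default cartesian Axes
plot.add_subplot(1, 2, 2, projection="polar")  # a polar Axes
print(len(plot.get_axes()))         # 2: no filter returns every Axes
print(len(plot.get_axes("polar")))  # 1: only the polar Axes matches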
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/ex-submodules/casexml/apps/case/models.py
python
CommCareCase.get_version_token
(self)
return "%s::%s" % (self.case_id, self.modified_on)
A unique token for this version.
A unique token for this version.
[ "A", "unique", "token", "for", "this", "version", "." ]
def get_version_token(self): """ A unique token for this version. """ # in theory since case ids are unique and modification dates get updated # upon any change, this is all we need return "%s::%s" % (self.case_id, self.modified_on)
[ "def", "get_version_token", "(", "self", ")", ":", "# in theory since case ids are unique and modification dates get updated", "# upon any change, this is all we need", "return", "\"%s::%s\"", "%", "(", "self", ".", "case_id", ",", "self", ".", "modified_on", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/ex-submodules/casexml/apps/case/models.py#L353-L359
tomplus/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
kubernetes_asyncio/client/models/core_v1_event_list.py
python
CoreV1EventList.__init__
(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None)
CoreV1EventList - a model defined in OpenAPI
CoreV1EventList - a model defined in OpenAPI
[ "CoreV1EventList", "-", "a", "model", "defined", "in", "OpenAPI" ]
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501 """CoreV1EventList - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_version = None self._items = None self._kind = None self._metadata = None self.discriminator = None if api_version is not None: self.api_version = api_version self.items = items if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata
[ "def", "__init__", "(", "self", ",", "api_version", "=", "None", ",", "items", "=", "None", ",", "kind", "=", "None", ",", "metadata", "=", "None", ",", "local_vars_configuration", "=", "None", ")", ":", "# noqa: E501", "# noqa: E501", "if", "local_vars_configuration", "is", "None", ":", "local_vars_configuration", "=", "Configuration", "(", ")", "self", ".", "local_vars_configuration", "=", "local_vars_configuration", "self", ".", "_api_version", "=", "None", "self", ".", "_items", "=", "None", "self", ".", "_kind", "=", "None", "self", ".", "_metadata", "=", "None", "self", ".", "discriminator", "=", "None", "if", "api_version", "is", "not", "None", ":", "self", ".", "api_version", "=", "api_version", "self", ".", "items", "=", "items", "if", "kind", "is", "not", "None", ":", "self", ".", "kind", "=", "kind", "if", "metadata", "is", "not", "None", ":", "self", ".", "metadata", "=", "metadata" ]
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/models/core_v1_event_list.py#L49-L67
pdfminer/pdfminer.six
10f6fb40c258c86fd04d86bade20f69fb07faabd
pdfminer/utils.py
python
drange
(v0: float, v1: float, d: int)
return range(int(v0) // d, int(v1 + d) // d)
Returns a discrete range.
Returns a discrete range.
[ "Returns", "a", "discrete", "range", "." ]
def drange(v0: float, v1: float, d: int) -> range: """Returns a discrete range.""" return range(int(v0) // d, int(v1 + d) // d)
[ "def", "drange", "(", "v0", ":", "float", ",", "v1", ":", "float", ",", "d", ":", "int", ")", "->", "range", ":", "return", "range", "(", "int", "(", "v0", ")", "//", "d", ",", "int", "(", "v1", "+", "d", ")", "//", "d", ")" ]
https://github.com/pdfminer/pdfminer.six/blob/10f6fb40c258c86fd04d86bade20f69fb07faabd/pdfminer/utils.py#L300-L302
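The integer division makes this a bucket-index range rather than a value range. A quick sketch:

def drange(v0: float, v1: float, d: int) -> range:
    """Returns a discrete range."""
    return range(int(v0) // d, int(v1 + d) // d)

# Indices of the width-10 buckets that cover the span 5..27: 0, 1 and 2.
print(list(drange(5, 27, 10)))  # [0, 1, 2]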
ilastik/ilastik
6acd2c554bc517e9c8ddad3623a7aaa2e6970c28
ilastik/applets/dataExport/dataExportGui.py
python
DataExportGui.exportAsync
(self, laneViewList)
Export data from lanes in a separate thread. See Also: :meth:`exportSync`.
Export data from lanes in a separate thread.
[ "Export", "data", "from", "lanes", "in", "a", "separate", "thread", "." ]
def exportAsync(self, laneViewList) -> None: """Export data from lanes in a separate thread. See Also: :meth:`exportSync`. """ threading.Thread(target=self.exportSync, name="DataExportThread", args=(laneViewList,)).start()
[ "def", "exportAsync", "(", "self", ",", "laneViewList", ")", "->", "None", ":", "threading", ".", "Thread", "(", "target", "=", "self", ".", "exportSync", ",", "name", "=", "\"DataExportThread\"", ",", "args", "=", "(", "laneViewList", ",", ")", ")", ".", "start", "(", ")" ]
https://github.com/ilastik/ilastik/blob/6acd2c554bc517e9c8ddad3623a7aaa2e6970c28/ilastik/applets/dataExport/dataExportGui.py#L460-L466
christabor/flask_jsondash
c8984790722327e86694e15409ff4dea0621aab0
example_app/endpoints.py
python
flamegraph
()
return jsonify({})
Fake endpoint.
Fake endpoint.
[ "Fake", "endpoint", "." ]
def flamegraph(): """Fake endpoint.""" chart_name = request.args.get('name', 'stacks') filename = '{}/examples/flamegraph/{}.json'.format(cwd, chart_name) try: with open(filename, 'r') as chartjson: return chartjson.read() except IOError: pass return jsonify({})
[ "def", "flamegraph", "(", ")", ":", "chart_name", "=", "request", ".", "args", ".", "get", "(", "'name'", ",", "'stacks'", ")", "filename", "=", "'{}/examples/flamegraph/{}.json'", ".", "format", "(", "cwd", ",", "chart_name", ")", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "chartjson", ":", "return", "chartjson", ".", "read", "(", ")", "except", "IOError", ":", "pass", "return", "jsonify", "(", "{", "}", ")" ]
https://github.com/christabor/flask_jsondash/blob/c8984790722327e86694e15409ff4dea0621aab0/example_app/endpoints.py#L284-L293
jesse-ai/jesse
28759547138fbc76dff12741204833e39c93b083
jesse/indicators/marketfi.py
python
marketfi
(candles: np.ndarray, sequential: bool = False)
return same_length(candles, res) if sequential else res[-1]
MARKETFI - Market Facilitation Index :param candles: np.ndarray :param sequential: bool - default: False :return: float | np.ndarray
MARKETFI - Market Facilitation Index
[ "MARKETFI", "-", "Market", "Facilitation", "Index" ]
def marketfi(candles: np.ndarray, sequential: bool = False) -> Union[float, np.ndarray]: """ MARKETFI - Market Facilitation Index :param candles: np.ndarray :param sequential: bool - default: False :return: float | np.ndarray """ candles = slice_candles(candles, sequential) res = ti.marketfi(np.ascontiguousarray(candles[:, 3]), np.ascontiguousarray(candles[:, 4]), np.ascontiguousarray(candles[:, 5])) return same_length(candles, res) if sequential else res[-1]
[ "def", "marketfi", "(", "candles", ":", "np", ".", "ndarray", ",", "sequential", ":", "bool", "=", "False", ")", "->", "Union", "[", "float", ",", "np", ".", "ndarray", "]", ":", "candles", "=", "slice_candles", "(", "candles", ",", "sequential", ")", "res", "=", "ti", ".", "marketfi", "(", "np", ".", "ascontiguousarray", "(", "candles", "[", ":", ",", "3", "]", ")", ",", "np", ".", "ascontiguousarray", "(", "candles", "[", ":", ",", "4", "]", ")", ",", "np", ".", "ascontiguousarray", "(", "candles", "[", ":", ",", "5", "]", ")", ")", "return", "same_length", "(", "candles", ",", "res", ")", "if", "sequential", "else", "res", "[", "-", "1", "]" ]
https://github.com/jesse-ai/jesse/blob/28759547138fbc76dff12741204833e39c93b083/jesse/indicators/marketfi.py#L10-L24
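A quick sketch with a synthetic candle array; jesse candles are assumed to be rows of [timestamp, open, close, high, low, volume], which matches the column indices 3, 4 and 5 used above:

import numpy as np
import jesse.indicators as ta

# A tiny, purely illustrative candle array.
candles = np.array([
    [1, 10, 11, 12, 9, 100],
    [2, 11, 12, 13, 10, 150],
    [3, 12, 11, 14, 11, 120],
], dtype=float)
print(ta.marketfi(candles))                   # latest value only
print(ta.marketfi(candles, sequential=True))  # full series, same length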
IDArlingTeam/IDArling
d15b9b7c8bdeb992c569efcc49adf7642bb82cdf
idarling/shared/packets.py
python
Event.build_event
(self, dct)
Build the event into a dictionary.
Build the event into a dictionary.
[ "Build", "the", "event", "into", "a", "dictionary", "." ]
def build_event(self, dct): """Build the event into a dictionary.""" pass
[ "def", "build_event", "(", "self", ",", "dct", ")", ":", "pass" ]
https://github.com/IDArlingTeam/IDArling/blob/d15b9b7c8bdeb992c569efcc49adf7642bb82cdf/idarling/shared/packets.py#L298-L300
TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e
tensorflow_dl_models/research/slim/nets/nasnet/nasnet.py
python
_cifar_stem
(inputs, hparams)
return net, [None, net]
Stem used for models trained on Cifar.
Stem used for models trained on Cifar.
[ "Stem", "used", "for", "models", "trained", "on", "Cifar", "." ]
def _cifar_stem(inputs, hparams): """Stem used for models trained on Cifar.""" num_stem_filters = int(hparams.num_conv_filters * hparams.stem_multiplier) net = slim.conv2d( inputs, num_stem_filters, 3, scope='l1_stem_3x3') net = slim.batch_norm(net, scope='l1_stem_bn') return net, [None, net]
[ "def", "_cifar_stem", "(", "inputs", ",", "hparams", ")", ":", "num_stem_filters", "=", "int", "(", "hparams", ".", "num_conv_filters", "*", "hparams", ".", "stem_multiplier", ")", "net", "=", "slim", ".", "conv2d", "(", "inputs", ",", "num_stem_filters", ",", "3", ",", "scope", "=", "'l1_stem_3x3'", ")", "net", "=", "slim", ".", "batch_norm", "(", "net", ",", "scope", "=", "'l1_stem_bn'", ")", "return", "net", ",", "[", "None", ",", "net", "]" ]
https://github.com/TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials/blob/5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e/tensorflow_dl_models/research/slim/nets/nasnet/nasnet.py#L270-L279
tendenci/tendenci
0f2c348cc0e7d41bc56f50b00ce05544b083bf1d
tendenci/apps/help_files/models.py
python
Topic.get_absolute_url
(self)
return reverse('help_files.topic', args=[self.pk])
[]
def get_absolute_url(self): return reverse('help_files.topic', args=[self.pk])
[ "def", "get_absolute_url", "(", "self", ")", ":", "return", "reverse", "(", "'help_files.topic'", ",", "args", "=", "[", "self", ".", "pk", "]", ")" ]
https://github.com/tendenci/tendenci/blob/0f2c348cc0e7d41bc56f50b00ce05544b083bf1d/tendenci/apps/help_files/models.py#L23-L24
largelymfs/topical_word_embeddings
1ae3d15d0afcd3fcd39cc81eec4ad9463413a9f6
TWE-2/gensim/parsing/porter.py
python
PorterStemmer._step1ab
(self)
Get rid of plurals and -ed or -ing. E.g., caresses -> caress ponies -> poni ties -> ti caress -> caress cats -> cat feed -> feed agreed -> agree disabled -> disable matting -> mat mating -> mate meeting -> meet milling -> mill messing -> mess meetings -> meet
Get rid of plurals and -ed or -ing. E.g.,
[ "Get", "rid", "of", "plurals", "and", "-", "ed", "or", "-", "ing", ".", "E", ".", "g", "." ]
def _step1ab(self): """Get rid of plurals and -ed or -ing. E.g., caresses -> caress ponies -> poni ties -> ti caress -> caress cats -> cat feed -> feed agreed -> agree disabled -> disable matting -> mat mating -> mate meeting -> meet milling -> mill messing -> mess meetings -> meet """ if self.b[self.k] == 's': if self._ends("sses"): self.k -= 2 elif self._ends("ies"): self._setto("i") elif self.b[self.k - 1] != 's': self.k -= 1 if self._ends("eed"): if self._m() > 0: self.k -= 1 elif (self._ends("ed") or self._ends("ing")) and self._vowelinstem(): self.k = self.j if self._ends("at"): self._setto("ate") elif self._ends("bl"): self._setto("ble") elif self._ends("iz"): self._setto("ize") elif self._doublec(self.k): if self.b[self.k - 1] not in "lsz": self.k -= 1 elif self._m() == 1 and self._cvc(self.k): self._setto("e")
[ "def", "_step1ab", "(", "self", ")", ":", "if", "self", ".", "b", "[", "self", ".", "k", "]", "==", "'s'", ":", "if", "self", ".", "_ends", "(", "\"sses\"", ")", ":", "self", ".", "k", "-=", "2", "elif", "self", ".", "_ends", "(", "\"ies\"", ")", ":", "self", ".", "_setto", "(", "\"i\"", ")", "elif", "self", ".", "b", "[", "self", ".", "k", "-", "1", "]", "!=", "'s'", ":", "self", ".", "k", "-=", "1", "if", "self", ".", "_ends", "(", "\"eed\"", ")", ":", "if", "self", ".", "_m", "(", ")", ">", "0", ":", "self", ".", "k", "-=", "1", "elif", "(", "self", ".", "_ends", "(", "\"ed\"", ")", "or", "self", ".", "_ends", "(", "\"ing\"", ")", ")", "and", "self", ".", "_vowelinstem", "(", ")", ":", "self", ".", "k", "=", "self", ".", "j", "if", "self", ".", "_ends", "(", "\"at\"", ")", ":", "self", ".", "_setto", "(", "\"ate\"", ")", "elif", "self", ".", "_ends", "(", "\"bl\"", ")", ":", "self", ".", "_setto", "(", "\"ble\"", ")", "elif", "self", ".", "_ends", "(", "\"iz\"", ")", ":", "self", ".", "_setto", "(", "\"ize\"", ")", "elif", "self", ".", "_doublec", "(", "self", ".", "k", ")", ":", "if", "self", ".", "b", "[", "self", ".", "k", "-", "1", "]", "not", "in", "\"lsz\"", ":", "self", ".", "k", "-=", "1", "elif", "self", ".", "_m", "(", ")", "==", "1", "and", "self", ".", "_cvc", "(", "self", ".", "k", ")", ":", "self", ".", "_setto", "(", "\"e\"", ")" ]
https://github.com/largelymfs/topical_word_embeddings/blob/1ae3d15d0afcd3fcd39cc81eec4ad9463413a9f6/TWE-2/gensim/parsing/porter.py#L141-L181
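A sketch driving the full stemmer, whose step 1ab produces the plural and -ed/-ing reductions tabulated above; the import path assumes a standard gensim install rather than the copy bundled in this repository:

from gensim.parsing.porter import PorterStemmer

stemmer = PorterStemmer()
for word in ["caresses", "ponies", "feed", "meetings"]:
    print(word, "->", stemmer.stem(word))
# caresses -> caress, ponies -> poni, feed -> feed, meetings -> meet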
benoitc/couchdbkit
6be148640c00b54ee87a2f2d502e9d67fa5b45a8
couchdbkit/schema/properties.py
python
dict_to_json
(value, item_type=None)
return dict([(k, value_to_json(v, item_type=item_type)) for k, v in value.iteritems()])
convert a dict to json
convert a dict to json
[ "convert", "a", "dict", "to", "json" ]
def dict_to_json(value, item_type=None): """ convert a dict to json """ return dict([(k, value_to_json(v, item_type=item_type)) for k, v in value.iteritems()])
[ "def", "dict_to_json", "(", "value", ",", "item_type", "=", "None", ")", ":", "return", "dict", "(", "[", "(", "k", ",", "value_to_json", "(", "v", ",", "item_type", "=", "item_type", ")", ")", "for", "k", ",", "v", "in", "value", ".", "iteritems", "(", ")", "]", ")" ]
https://github.com/benoitc/couchdbkit/blob/6be148640c00b54ee87a2f2d502e9d67fa5b45a8/couchdbkit/schema/properties.py#L1011-L1013
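The iteritems() call marks this record as Python 2 code. A Python 3 sketch of the same conversion, with a stub value_to_json standing in for the module's real recursive converter:

    def value_to_json(value, item_type=None):
        # stub: the real converter also handles dates, decimals, documents, ...
        return value

    def dict_to_json(value, item_type=None):
        return {k: value_to_json(v, item_type=item_type) for k, v in value.items()}

    print(dict_to_json({"a": 1, "b": 2}))  # {'a': 1, 'b': 2}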
NanYoMy/DHT-woodworm
e28bbff214bc3c41ea462854256dd499fb8a6eb0
btdht/btdht.py
python
DHTRequestHandler.handle_response
(self, message)
here we do not annouce a peer, so we needn't implement the code for a "announce peer" message
here we do not annouce a peer, so we needn't implement the code for a "announce peer" message
[ "here", "we", "do", "not", "annouce", "a", "peer", "so", "we", "needn", "t", "implement", "the", "code", "for", "a", "announce", "peer", "message" ]
def handle_response(self, message): trans_id = message["t"] args = message["r"] node_id = args["id"] client_host, client_port = self.client_address logger.debug("Response message from %s:%d, t:%r, id:%r" % (client_host, client_port, trans_id.encode("hex"), node_id.encode("hex"))) # Do we already know about this node? node = self.server.dht.rt.node_by_id(node_id) if not node: logger.debug("Cannot find appropriate node during simple search: %r" % (node_id.encode("hex"))) #Trying to search via transaction id #get the node who sent the request, that correspondents to the response node = self.server.dht.rt.node_by_trans(trans_id) if not node: logger.debug("Cannot find appropriate node for transaction: %r" % (trans_id.encode("hex"))) return logger.debug("We found apropriate node %r for %r" % (node, node_id.encode("hex"))) if trans_id in node.trans: logger.debug("Found and deleting transaction %r in node: %r" % (trans_id.encode("hex"), node)) #由于长时间没有响应的node会被自动删除,这里关系到多线程并发。所以可能会有bug #the server thread competes "node" resource with the iterative_thread try: trans = node.trans[trans_id] node.delete_trans(trans_id) except: logger.debug('delete trans on a deleted node') return else: logger.debug("Cannot find transaction %r in node: %r" % (trans_id.encode("hex"), node)) return if "ip" in args: logger.debug("They try to SECURE me: %s", unpack_host(args["ip"].encode('hex'))) #the server thread competes "node" resource with the iterative_thread try: t_name = trans["name"] except: logger.debug('get name on a deleted trans') return if t_name == "find_node": node.update_access() logger.debug("find_node response from %r" % (node)) if "nodes" in args: new_nodes = decode_nodes(args["nodes"]) logger.debug("We got new nodes from %r" % (node)) for new_node_id, new_node_host, new_node_port in new_nodes: logger.debug("Adding %r %s:%d as new node" % (new_node_id.encode("hex"), new_node_host, new_node_port)) self.server.dht.rt.update_node(new_node_id, Node(new_node_host, new_node_port, new_node_id)) self.server.dht.rt.update_node(new_node_id, Node(new_node_host, new_node_port, new_node_id)) # cleanup boot node if node._id == "boot": logger.debug("This is response from \"boot\" node, replacing it") # Create new node instance and move transactions from boot node to newly node new_boot_node = Node(client_host, client_port, node_id) new_boot_node.trans = node.trans self.server.dht.rt.update_node(node_id, new_boot_node) # Remove old boot node self.server.dht.rt.remove_node(node._id) elif t_name == "ping": #update the node if it can pong us!!! node.update_access() logger.debug("ping response for: %r" % (node)) elif t_name == "get_peers": node.update_access() info_hash = trans["info_hash"] logger.debug("get_peers response for %r" % (node)) if "token" in args: token = args["token"] logger.debug("Got token: %s" % (token.encode("hex"))) else: token = None #logger.error(self.server.dht.name+" No token in get_peers response from %r" % (node)) if "values" in args: logger.debug("We got new peers for %s" % (info_hash.encode("hex"))) values = args["values"] for addr in values: hp = unpack_hostport(addr) self.server.dht.ht.add_peer(info_hash, hp) logger.debug("Got new peer for %s: %r" % (info_hash.encode("hex"), hp)) if "nodes" in args: logger.debug("We got new nodes from %r" % (node)) new_nodes = decode_nodes(args["nodes"]) for new_node_id, new_node_host, new_node_port in new_nodes: logger.debug("Adding %r %s:%d as new node" % (new_node_id.encode("hex"), new_node_host, new_node_port)) self.server.dht.rt.update_node(new_node_id, Node(new_node_host, new_node_port, new_node_id)) ''' here we do not annouce a peer, so we needn't implement the code for a "announce peer" message '''
[ "def", "handle_response", "(", "self", ",", "message", ")", ":", "trans_id", "=", "message", "[", "\"t\"", "]", "args", "=", "message", "[", "\"r\"", "]", "node_id", "=", "args", "[", "\"id\"", "]", "client_host", ",", "client_port", "=", "self", ".", "client_address", "logger", ".", "debug", "(", "\"Response message from %s:%d, t:%r, id:%r\"", "%", "(", "client_host", ",", "client_port", ",", "trans_id", ".", "encode", "(", "\"hex\"", ")", ",", "node_id", ".", "encode", "(", "\"hex\"", ")", ")", ")", "# Do we already know about this node?", "node", "=", "self", ".", "server", ".", "dht", ".", "rt", ".", "node_by_id", "(", "node_id", ")", "if", "not", "node", ":", "logger", ".", "debug", "(", "\"Cannot find appropriate node during simple search: %r\"", "%", "(", "node_id", ".", "encode", "(", "\"hex\"", ")", ")", ")", "#Trying to search via transaction id", "#get the node who sent the request, that correspondents to the response", "node", "=", "self", ".", "server", ".", "dht", ".", "rt", ".", "node_by_trans", "(", "trans_id", ")", "if", "not", "node", ":", "logger", ".", "debug", "(", "\"Cannot find appropriate node for transaction: %r\"", "%", "(", "trans_id", ".", "encode", "(", "\"hex\"", ")", ")", ")", "return", "logger", ".", "debug", "(", "\"We found apropriate node %r for %r\"", "%", "(", "node", ",", "node_id", ".", "encode", "(", "\"hex\"", ")", ")", ")", "if", "trans_id", "in", "node", ".", "trans", ":", "logger", ".", "debug", "(", "\"Found and deleting transaction %r in node: %r\"", "%", "(", "trans_id", ".", "encode", "(", "\"hex\"", ")", ",", "node", ")", ")", "#由于长时间没有响应的node会被自动删除,这里关系到多线程并发。所以可能会有bug", "#the server thread competes \"node\" resource with the iterative_thread", "try", ":", "trans", "=", "node", ".", "trans", "[", "trans_id", "]", "node", ".", "delete_trans", "(", "trans_id", ")", "except", ":", "logger", ".", "debug", "(", "'delete trans on a deleted node'", ")", "return", "else", ":", "logger", ".", "debug", "(", "\"Cannot find transaction %r in node: %r\"", "%", "(", "trans_id", ".", "encode", "(", "\"hex\"", ")", ",", "node", ")", ")", "return", "if", "\"ip\"", "in", "args", ":", "logger", ".", "debug", "(", "\"They try to SECURE me: %s\"", ",", "unpack_host", "(", "args", "[", "\"ip\"", "]", ".", "encode", "(", "'hex'", ")", ")", ")", "#the server thread competes \"node\" resource with the iterative_thread", "try", ":", "t_name", "=", "trans", "[", "\"name\"", "]", "except", ":", "logger", ".", "debug", "(", "'get name on a deleted trans'", ")", "return", "if", "t_name", "==", "\"find_node\"", ":", "node", ".", "update_access", "(", ")", "logger", ".", "debug", "(", "\"find_node response from %r\"", "%", "(", "node", ")", ")", "if", "\"nodes\"", "in", "args", ":", "new_nodes", "=", "decode_nodes", "(", "args", "[", "\"nodes\"", "]", ")", "logger", ".", "debug", "(", "\"We got new nodes from %r\"", "%", "(", "node", ")", ")", "for", "new_node_id", ",", "new_node_host", ",", "new_node_port", "in", "new_nodes", ":", "logger", ".", "debug", "(", "\"Adding %r %s:%d as new node\"", "%", "(", "new_node_id", ".", "encode", "(", "\"hex\"", ")", ",", "new_node_host", ",", "new_node_port", ")", ")", "self", ".", "server", ".", "dht", ".", "rt", ".", "update_node", "(", "new_node_id", ",", "Node", "(", "new_node_host", ",", "new_node_port", ",", "new_node_id", ")", ")", "self", ".", "server", ".", "dht", ".", "rt", ".", "update_node", "(", "new_node_id", ",", "Node", "(", "new_node_host", ",", "new_node_port", ",", "new_node_id", ")", ")", "# cleanup boot node", 
"if", "node", ".", "_id", "==", "\"boot\"", ":", "logger", ".", "debug", "(", "\"This is response from \\\"boot\\\" node, replacing it\"", ")", "# Create new node instance and move transactions from boot node to newly node", "new_boot_node", "=", "Node", "(", "client_host", ",", "client_port", ",", "node_id", ")", "new_boot_node", ".", "trans", "=", "node", ".", "trans", "self", ".", "server", ".", "dht", ".", "rt", ".", "update_node", "(", "node_id", ",", "new_boot_node", ")", "# Remove old boot node", "self", ".", "server", ".", "dht", ".", "rt", ".", "remove_node", "(", "node", ".", "_id", ")", "elif", "t_name", "==", "\"ping\"", ":", "#update the node if it can pong us!!!", "node", ".", "update_access", "(", ")", "logger", ".", "debug", "(", "\"ping response for: %r\"", "%", "(", "node", ")", ")", "elif", "t_name", "==", "\"get_peers\"", ":", "node", ".", "update_access", "(", ")", "info_hash", "=", "trans", "[", "\"info_hash\"", "]", "logger", ".", "debug", "(", "\"get_peers response for %r\"", "%", "(", "node", ")", ")", "if", "\"token\"", "in", "args", ":", "token", "=", "args", "[", "\"token\"", "]", "logger", ".", "debug", "(", "\"Got token: %s\"", "%", "(", "token", ".", "encode", "(", "\"hex\"", ")", ")", ")", "else", ":", "token", "=", "None", "#logger.error(self.server.dht.name+\" No token in get_peers response from %r\" % (node))", "if", "\"values\"", "in", "args", ":", "logger", ".", "debug", "(", "\"We got new peers for %s\"", "%", "(", "info_hash", ".", "encode", "(", "\"hex\"", ")", ")", ")", "values", "=", "args", "[", "\"values\"", "]", "for", "addr", "in", "values", ":", "hp", "=", "unpack_hostport", "(", "addr", ")", "self", ".", "server", ".", "dht", ".", "ht", ".", "add_peer", "(", "info_hash", ",", "hp", ")", "logger", ".", "debug", "(", "\"Got new peer for %s: %r\"", "%", "(", "info_hash", ".", "encode", "(", "\"hex\"", ")", ",", "hp", ")", ")", "if", "\"nodes\"", "in", "args", ":", "logger", ".", "debug", "(", "\"We got new nodes from %r\"", "%", "(", "node", ")", ")", "new_nodes", "=", "decode_nodes", "(", "args", "[", "\"nodes\"", "]", ")", "for", "new_node_id", ",", "new_node_host", ",", "new_node_port", "in", "new_nodes", ":", "logger", ".", "debug", "(", "\"Adding %r %s:%d as new node\"", "%", "(", "new_node_id", ".", "encode", "(", "\"hex\"", ")", ",", "new_node_host", ",", "new_node_port", ")", ")", "self", ".", "server", ".", "dht", ".", "rt", ".", "update_node", "(", "new_node_id", ",", "Node", "(", "new_node_host", ",", "new_node_port", ",", "new_node_id", ")", ")" ]
https://github.com/NanYoMy/DHT-woodworm/blob/e28bbff214bc3c41ea462854256dd499fb8a6eb0/btdht/btdht.py#L53-L149
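The record relies on a decode_nodes helper that is not shown. Assuming the standard BEP 5 compact node format (26-byte entries: 20-byte node ID, 4-byte IPv4 address, 2-byte big-endian port), one plausible Python 3 implementation is sketched below; it is not the project's actual helper.

    import socket
    import struct

    def decode_nodes(packed):
        # packed: concatenated 26-byte compact node entries (BEP 5)
        nodes = []
        for off in range(0, len(packed) - 25, 26):
            node_id = packed[off:off + 20]
            host = socket.inet_ntoa(packed[off + 20:off + 24])
            port = struct.unpack("!H", packed[off + 24:off + 26])[0]
            nodes.append((node_id, host, port))
        return nodes

    # one fake entry: 20-byte id + 127.0.0.1 + port 6881
    entry = b"A" * 20 + socket.inet_aton("127.0.0.1") + struct.pack("!H", 6881)
    print(decode_nodes(entry))  # [(b'AAAA...', '127.0.0.1', 6881)]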
SamSchott/maestral
a32653bac7b5a76cb326d4fd5a4fb2c11f19a2fc
src/maestral/sync.py
python
SyncEngine._create_remote_entry
(self, event: SyncEvent)
return event
Applies a local file system event to the remote Dropbox and clears any existing sync errors belonging to that path. Any :class:`maestral.errors.SyncError` will be caught and logged as appropriate. This method always uses a new copy of client and closes the network session afterwards. :param event: SyncEvent for local file event. :returns: SyncEvent with updated status.
Applies a local file system event to the remote Dropbox and clears any existing sync errors belonging to that path. Any :class:`maestral.errors.SyncError` will be caught and logged as appropriate.
[ "Applies", "a", "local", "file", "system", "event", "to", "the", "remote", "Dropbox", "and", "clears", "any", "existing", "sync", "errors", "belonging", "to", "that", "path", ".", "Any", ":", "class", ":", "maestral", ".", "errors", ".", "SyncError", "will", "be", "caught", "and", "logged", "as", "appropriate", "." ]
def _create_remote_entry(self, event: SyncEvent) -> SyncEvent: """ Applies a local file system event to the remote Dropbox and clears any existing sync errors belonging to that path. Any :class:`maestral.errors.SyncError` will be caught and logged as appropriate. This method always uses a new copy of client and closes the network session afterwards. :param event: SyncEvent for local file event. :returns: SyncEvent with updated status. """ if self._cancel_requested.is_set(): raise CancelledError("Sync cancelled") self._slow_down() self.clear_sync_error(event) event.status = SyncStatus.Syncing try: with self.client.clone_with_new_session() as client: if event.is_added: res = self._on_local_created(event, client) elif event.is_moved: res = self._on_local_moved(event, client) elif event.is_changed: res = self._on_local_modified(event, client) elif event.is_deleted: res = self._on_local_deleted(event, client) else: res = None if res is not None: event.status = SyncStatus.Done else: event.status = SyncStatus.Skipped except SyncError as err: self._handle_sync_error(err, direction=SyncDirection.Up) event.status = SyncStatus.Failed finally: self.syncing.pop(event.local_path, None) # Add events to history database. if event.status == SyncStatus.Done: with self._database_access(): self._db_manager_history.save(event) return event
[ "def", "_create_remote_entry", "(", "self", ",", "event", ":", "SyncEvent", ")", "->", "SyncEvent", ":", "if", "self", ".", "_cancel_requested", ".", "is_set", "(", ")", ":", "raise", "CancelledError", "(", "\"Sync cancelled\"", ")", "self", ".", "_slow_down", "(", ")", "self", ".", "clear_sync_error", "(", "event", ")", "event", ".", "status", "=", "SyncStatus", ".", "Syncing", "try", ":", "with", "self", ".", "client", ".", "clone_with_new_session", "(", ")", "as", "client", ":", "if", "event", ".", "is_added", ":", "res", "=", "self", ".", "_on_local_created", "(", "event", ",", "client", ")", "elif", "event", ".", "is_moved", ":", "res", "=", "self", ".", "_on_local_moved", "(", "event", ",", "client", ")", "elif", "event", ".", "is_changed", ":", "res", "=", "self", ".", "_on_local_modified", "(", "event", ",", "client", ")", "elif", "event", ".", "is_deleted", ":", "res", "=", "self", ".", "_on_local_deleted", "(", "event", ",", "client", ")", "else", ":", "res", "=", "None", "if", "res", "is", "not", "None", ":", "event", ".", "status", "=", "SyncStatus", ".", "Done", "else", ":", "event", ".", "status", "=", "SyncStatus", ".", "Skipped", "except", "SyncError", "as", "err", ":", "self", ".", "_handle_sync_error", "(", "err", ",", "direction", "=", "SyncDirection", ".", "Up", ")", "event", ".", "status", "=", "SyncStatus", ".", "Failed", "finally", ":", "self", ".", "syncing", ".", "pop", "(", "event", ".", "local_path", ",", "None", ")", "# Add events to history database.", "if", "event", ".", "status", "==", "SyncStatus", ".", "Done", ":", "with", "self", ".", "_database_access", "(", ")", ":", "self", ".", "_db_manager_history", ".", "save", "(", "event", ")", "return", "event" ]
https://github.com/SamSchott/maestral/blob/a32653bac7b5a76cb326d4fd5a4fb2c11f19a2fc/src/maestral/sync.py#L2202-L2253
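The shape of this method (mark the event as syncing, dispatch on its type, record Done/Skipped/Failed, and always pop the in-flight entry in a finally block) is a reusable pattern. A stripped-down sketch with stand-in names, none of which are maestral's own:

    from enum import Enum
    from types import SimpleNamespace

    class Status(Enum):
        SYNCING = 1
        DONE = 2
        SKIPPED = 3
        FAILED = 4

    def process(event, handlers, in_flight):
        event.status = Status.SYNCING
        in_flight[event.path] = event
        try:
            result = handlers[event.kind](event)
            event.status = Status.DONE if result is not None else Status.SKIPPED
        except Exception:
            event.status = Status.FAILED
        finally:
            # always clear the in-flight marker, even on failure
            in_flight.pop(event.path, None)
        return event

    evt = SimpleNamespace(kind="added", path="/tmp/x", status=None)
    process(evt, {"added": lambda e: "ok"}, {})
    print(evt.status)  # Status.DONE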
tensorflow/neural-structured-learning
a43fcfca1f97ecc0ee99e688e5c8bf16c8fb6629
research/gam/gam/data/dataset.py
python
GCNDataset.sparse_to_tuple
(sparse_mx)
return sparse_mx
Convert sparse matrix to tuple representation.
Convert sparse matrix to tuple representation.
[ "Convert", "sparse", "matrix", "to", "tuple", "representation", "." ]
def sparse_to_tuple(sparse_mx): """Convert sparse matrix to tuple representation.""" def to_tuple(mx): if not scipy.sparse.isspmatrix_coo(mx): mx = scipy.sparse.coo_matrix(mx) coords = np.vstack((mx.row, mx.col)).transpose() values = mx.data shape = mx.shape return coords, values, shape if isinstance(sparse_mx, list): for i in range(len(sparse_mx)): sparse_mx[i] = to_tuple(sparse_mx[i]) else: sparse_mx = to_tuple(sparse_mx) return sparse_mx
[ "def", "sparse_to_tuple", "(", "sparse_mx", ")", ":", "def", "to_tuple", "(", "mx", ")", ":", "if", "not", "scipy", ".", "sparse", ".", "isspmatrix_coo", "(", "mx", ")", ":", "mx", "=", "scipy", ".", "sparse", ".", "coo_matrix", "(", "mx", ")", "coords", "=", "np", ".", "vstack", "(", "(", "mx", ".", "row", ",", "mx", ".", "col", ")", ")", ".", "transpose", "(", ")", "values", "=", "mx", ".", "data", "shape", "=", "mx", ".", "shape", "return", "coords", ",", "values", ",", "shape", "if", "isinstance", "(", "sparse_mx", ",", "list", ")", ":", "for", "i", "in", "range", "(", "len", "(", "sparse_mx", ")", ")", ":", "sparse_mx", "[", "i", "]", "=", "to_tuple", "(", "sparse_mx", "[", "i", "]", ")", "else", ":", "sparse_mx", "=", "to_tuple", "(", "sparse_mx", ")", "return", "sparse_mx" ]
https://github.com/tensorflow/neural-structured-learning/blob/a43fcfca1f97ecc0ee99e688e5c8bf16c8fb6629/research/gam/gam/data/dataset.py#L560-L577
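A quick check of the conversion on a small matrix; any SciPy sparse format is first coerced to COO, exactly as in the record:

    import numpy as np
    import scipy.sparse

    mx = scipy.sparse.csr_matrix(np.array([[0, 2], [3, 0]]))
    coo = scipy.sparse.coo_matrix(mx)
    coords = np.vstack((coo.row, coo.col)).transpose()
    print(coords)     # [[0 1] [1 0]]
    print(coo.data)   # [2 3]
    print(coo.shape)  # (2, 2)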
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/openshift_logging/library/openshift_logging_facts.py
python
OpenshiftLoggingFacts.default_keys_for
(self, kind)
Sets the default key values for kind
Sets the default key values for kind
[ "Sets", "the", "default", "key", "values", "for", "kind" ]
def default_keys_for(self, kind): ''' Sets the default key values for kind ''' for comp in COMPONENTS: self.add_facts_for(comp, kind)
[ "def", "default_keys_for", "(", "self", ",", "kind", ")", ":", "for", "comp", "in", "COMPONENTS", ":", "self", ".", "add_facts_for", "(", "comp", ",", "kind", ")" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/openshift_logging/library/openshift_logging_facts.py#L101-L104
CGATOxford/cgat
326aad4694bdfae8ddc194171bb5d73911243947
obsolete/pipeline_species_conservation.py
python
orthologPairsWithFeature
( infile, outfile)
Generate list of conserved genes associated with feature in all species
Generate list of conserved genes associated with feature in all species
[ "Generate", "list", "of", "conserved", "genes", "associated", "with", "feature", "in", "all", "species" ]
def orthologPairsWithFeature( infile, outfile): '''Generate list of conserved genes associated with feature in all species ''' tablename = "ortholog_pairs_with_feature" species_list = P.asList(PARAMS["species"]) anno_list = P.asList(PARAMS["annotations_db"]) species_lookup = dict(zip(species_list, anno_list)) # Connect to database and attach annotation databases dbhandle = sqlite3.connect( PARAMS["database"] ) for species in species_lookup.iterkeys(): species_db = species_lookup[species] cc = dbhandle.cursor() statement = '''ATTACH DATABASE '%(species_db)s' as %(species)s''' % locals() cc.execute( statement ) cc.close() # Extract data from db cc = dbhandle.cursor() cc.execute( "DROP TABLE IF EXISTS %(tablename)s" % locals() ) statement = '''CREATE TABLE %(tablename)s AS SELECT count(distinct o.schema) as species_count, group_concat(o.gene_id,",") as gene_ids, group_concat(g.gene_name,",") as gene_names, set_id, p.pattern as pattern, p.species_pair as species_pair FROM genelists_merged g, pairwise_ortholog_groups o, pattern_lookup p WHERE g.gene_id=o.gene_id AND p.pattern=o.pattern GROUP BY set_id ''' % locals() cc.execute( statement ) cc.close() statement = "touch %s" % outfile P.run()
[ "def", "orthologPairsWithFeature", "(", "infile", ",", "outfile", ")", ":", "tablename", "=", "\"ortholog_pairs_with_feature\"", "species_list", "=", "P", ".", "asList", "(", "PARAMS", "[", "\"species\"", "]", ")", "anno_list", "=", "P", ".", "asList", "(", "PARAMS", "[", "\"annotations_db\"", "]", ")", "species_lookup", "=", "dict", "(", "zip", "(", "species_list", ",", "anno_list", ")", ")", "# Connect to database and attach annotation databases", "dbhandle", "=", "sqlite3", ".", "connect", "(", "PARAMS", "[", "\"database\"", "]", ")", "for", "species", "in", "species_lookup", ".", "iterkeys", "(", ")", ":", "species_db", "=", "species_lookup", "[", "species", "]", "cc", "=", "dbhandle", ".", "cursor", "(", ")", "statement", "=", "'''ATTACH DATABASE '%(species_db)s' as %(species)s'''", "%", "locals", "(", ")", "cc", ".", "execute", "(", "statement", ")", "cc", ".", "close", "(", ")", "# Extract data from db", "cc", "=", "dbhandle", ".", "cursor", "(", ")", "cc", ".", "execute", "(", "\"DROP TABLE IF EXISTS %(tablename)s\"", "%", "locals", "(", ")", ")", "statement", "=", "'''CREATE TABLE %(tablename)s AS \n SELECT count(distinct o.schema) as species_count, \n group_concat(o.gene_id,\",\") as gene_ids,\n group_concat(g.gene_name,\",\") as gene_names,\n set_id, p.pattern as pattern, p.species_pair as species_pair\n FROM genelists_merged g, pairwise_ortholog_groups o, pattern_lookup p\n WHERE g.gene_id=o.gene_id\n AND p.pattern=o.pattern\n GROUP BY set_id '''", "%", "locals", "(", ")", "cc", ".", "execute", "(", "statement", ")", "cc", ".", "close", "(", ")", "statement", "=", "\"touch %s\"", "%", "outfile", "P", ".", "run", "(", ")" ]
https://github.com/CGATOxford/cgat/blob/326aad4694bdfae8ddc194171bb5d73911243947/obsolete/pipeline_species_conservation.py#L539-L570
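The key move in this function is attaching one annotation database per species so that a single SELECT can join across all of them. The ATTACH mechanics, demonstrated on throwaway in-memory databases (schema and table names here are invented for the demo):

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("ATTACH DATABASE ':memory:' AS mouse")
    db.execute("CREATE TABLE mouse.genes (gene_id TEXT)")
    db.execute("INSERT INTO mouse.genes VALUES ('ENSMUSG0000001')")
    # attached schemas are addressed with a schema.table prefix
    print(db.execute("SELECT gene_id FROM mouse.genes").fetchall())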
owid/covid-19-data
936aeae6cfbdc0163939ed7bd8ecdbb2582c0a92
scripts/src/cowidev/vax/cmd/generate_dataset.py
python
DatasetGenerator.pipe_manufacturer_select_cols
(self, df: pd.DataFrame)
return df[ [ "location", "date", "vaccine", "total_vaccinations", ] ].sort_values(["location", "date", "vaccine"])
[]
def pipe_manufacturer_select_cols(self, df: pd.DataFrame) -> pd.DataFrame: return df[ [ "location", "date", "vaccine", "total_vaccinations", ] ].sort_values(["location", "date", "vaccine"])
[ "def", "pipe_manufacturer_select_cols", "(", "self", ",", "df", ":", "pd", ".", "DataFrame", ")", "->", "pd", ".", "DataFrame", ":", "return", "df", "[", "[", "\"location\"", ",", "\"date\"", ",", "\"vaccine\"", ",", "\"total_vaccinations\"", ",", "]", "]", ".", "sort_values", "(", "[", "\"location\"", ",", "\"date\"", ",", "\"vaccine\"", "]", ")" ]
https://github.com/owid/covid-19-data/blob/936aeae6cfbdc0163939ed7bd8ecdbb2582c0a92/scripts/src/cowidev/vax/cmd/generate_dataset.py#L398-L406
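The pipe step is a plain column select plus sort. On a toy frame (values invented):

    import pandas as pd

    df = pd.DataFrame({
        "location": ["Chile", "Chile"],
        "date": ["2021-02-01", "2021-01-01"],
        "vaccine": ["Pfizer/BioNTech", "Pfizer/BioNTech"],
        "total_vaccinations": [200, 100],
        "source_url": ["u1", "u2"],  # dropped by the column select
    })
    out = df[["location", "date", "vaccine", "total_vaccinations"]].sort_values(
        ["location", "date", "vaccine"]
    )
    print(out)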
enthought/traitsui
b7c38c7a47bf6ae7971f9ddab70c8a358647dd25
traitsui/wx/tree_editor.py
python
SimpleEditor._get_icon
(self, node, object, is_expanded=False)
return self._image_list.GetIndex(image)
Returns the index of the specified object icon.
Returns the index of the specified object icon.
[ "Returns", "the", "index", "of", "the", "specified", "object", "icon", "." ]
def _get_icon(self, node, object, is_expanded=False): """Returns the index of the specified object icon.""" if self._image_list is None: return -1 icon_name = node.get_icon(object, is_expanded) if isinstance(icon_name, str): if icon_name.startswith("@"): image = convert_image(icon_name, 3) if image is None: return -1 else: if icon_name[:1] == "<": icon_name = icon_name[1:-1] path = self else: path = node.get_icon_path(object) if isinstance(path, str): path = [path, node] else: path.append(node) image = ImageResource(icon_name, path).absolute_path elif isinstance(icon_name, ImageResource): image = icon_name.absolute_path else: raise ValueError( "Icon value must be a string or IImageResource instance: " + "given {!r}".format(icon_name) ) return self._image_list.GetIndex(image)
[ "def", "_get_icon", "(", "self", ",", "node", ",", "object", ",", "is_expanded", "=", "False", ")", ":", "if", "self", ".", "_image_list", "is", "None", ":", "return", "-", "1", "icon_name", "=", "node", ".", "get_icon", "(", "object", ",", "is_expanded", ")", "if", "isinstance", "(", "icon_name", ",", "str", ")", ":", "if", "icon_name", ".", "startswith", "(", "\"@\"", ")", ":", "image", "=", "convert_image", "(", "icon_name", ",", "3", ")", "if", "image", "is", "None", ":", "return", "-", "1", "else", ":", "if", "icon_name", "[", ":", "1", "]", "==", "\"<\"", ":", "icon_name", "=", "icon_name", "[", "1", ":", "-", "1", "]", "path", "=", "self", "else", ":", "path", "=", "node", ".", "get_icon_path", "(", "object", ")", "if", "isinstance", "(", "path", ",", "str", ")", ":", "path", "=", "[", "path", ",", "node", "]", "else", ":", "path", ".", "append", "(", "node", ")", "image", "=", "ImageResource", "(", "icon_name", ",", "path", ")", ".", "absolute_path", "elif", "isinstance", "(", "icon_name", ",", "ImageResource", ")", ":", "image", "=", "icon_name", ".", "absolute_path", "else", ":", "raise", "ValueError", "(", "\"Icon value must be a string or IImageResource instance: \"", "+", "\"given {!r}\"", ".", "format", "(", "icon_name", ")", ")", "return", "self", ".", "_image_list", ".", "GetIndex", "(", "image", ")" ]
https://github.com/enthought/traitsui/blob/b7c38c7a47bf6ae7971f9ddab70c8a358647dd25/traitsui/wx/tree_editor.py#L554-L584
tornadomeet/mxnet-face
25d212791908c9dc0ea0c1de8e05af6d56f15e81
detection/symbol/resnet.py
python
resnet
(units, num_stage, filter_list, num_class=2, num_anchor=12, bottle_neck=True, bn_mom=0.9, bn_global=True, workspace=512)
return mx.symbol.Group([rpn_roi, cls_prob, bbox_pred])
Return ResNet symbol of cifar10 and imagenet Parameters ---------- units : list Number of units in each stage num_stage : int Number of stage filter_list : list Channel size of each stage num_class : int Ouput size of symbol dataset : str Dataset type, only cifar10 and imagenet supports workspace : int Workspace used in convolution operator
Return ResNet symbol of cifar10 and imagenet Parameters ---------- units : list Number of units in each stage num_stage : int Number of stage filter_list : list Channel size of each stage num_class : int Ouput size of symbol dataset : str Dataset type, only cifar10 and imagenet supports workspace : int Workspace used in convolution operator
[ "Return", "ResNet", "symbol", "of", "cifar10", "and", "imagenet", "Parameters", "----------", "units", ":", "list", "Number", "of", "units", "in", "each", "stage", "num_stage", ":", "int", "Number", "of", "stage", "filter_list", ":", "list", "Channel", "size", "of", "each", "stage", "num_class", ":", "int", "Ouput", "size", "of", "symbol", "dataset", ":", "str", "Dataset", "type", "only", "cifar10", "and", "imagenet", "supports", "workspace", ":", "int", "Workspace", "used", "in", "convolution", "operator" ]
def resnet(units, num_stage, filter_list, num_class=2, num_anchor=12, bottle_neck=True, bn_mom=0.9, bn_global=True, workspace=512): """Return ResNet symbol of cifar10 and imagenet Parameters ---------- units : list Number of units in each stage num_stage : int Number of stage filter_list : list Channel size of each stage num_class : int Ouput size of symbol dataset : str Dataset type, only cifar10 and imagenet supports workspace : int Workspace used in convolution operator """ num_unit = len(units) assert(num_unit == num_stage) data = mx.sym.Variable(name='data') data = mx.sym.BatchNorm(data=data, fix_gamma=True, eps=2e-5, momentum=bn_mom, use_global_stats=bn_global, name='bn_data') body = mx.sym.Convolution(data=data, num_filter=filter_list[0], kernel=(7, 7), stride=(2,2), pad=(3, 3), no_bias=True, name="conv0", workspace=workspace) body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, use_global_stats=bn_global, name='bn0') body = mx.sym.Activation(data=body, act_type='relu', name='relu0') body = mx.symbol.Pooling(data=body, kernel=(3, 3), stride=(2,2), pad=(1,1), pool_type='max') for i in range(num_stage): bn_global_ = bn_global if i < num_stage-1 else False # after roi-pooling, do not use use_global_stats body = residual_unit(body, filter_list[i+1], (1 if i==0 else 2, 1 if i==0 else 2), False, name='stage%d_unit%d' % (i + 1, 1), bottle_neck=bottle_neck, workspace=workspace, bn_global=bn_global_) for j in range(units[i]-1): body = residual_unit(body, filter_list[i+1], (1,1), True, name='stage%d_unit%d' % (i + 1, j + 2), bottle_neck=bottle_neck, workspace=workspace, bn_global=bn_global_) if i == num_stage - 2: body, rpn_roi = rpn(body, num_class=num_class, num_anchor=num_anchor) bn1 = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, use_global_stats=False, name='bn1') relu1 = mx.sym.Activation(data=bn1, act_type='relu', name='relu1') pool1 = mx.symbol.Pooling(data=relu1, global_pool=True, kernel=(7, 7), pool_type='avg', name='pool1') flat = mx.symbol.Flatten(data=pool1) # cls cls_score = mx.symbol.FullyConnected(name='cls_score', data=flat, num_hidden=num_class) cls_prob = mx.symbol.SoftmaxActivation(name='cls_prob', data=cls_score) # reg bbox_pred = mx.symbol.FullyConnected(name='bbox_pred', data=flat, num_hidden=num_class * 4) # reshape output cls_prob = mx.symbol.Reshape(data=cls_prob, shape=(1, -1, num_class), name='cls_prob_reshape') bbox_pred = mx.symbol.Reshape(data=bbox_pred, shape=(1, -1, 4 * num_class), name='bbox_pred_reshape') return mx.symbol.Group([rpn_roi, cls_prob, bbox_pred])
[ "def", "resnet", "(", "units", ",", "num_stage", ",", "filter_list", ",", "num_class", "=", "2", ",", "num_anchor", "=", "12", ",", "bottle_neck", "=", "True", ",", "bn_mom", "=", "0.9", ",", "bn_global", "=", "True", ",", "workspace", "=", "512", ")", ":", "num_unit", "=", "len", "(", "units", ")", "assert", "(", "num_unit", "==", "num_stage", ")", "data", "=", "mx", ".", "sym", ".", "Variable", "(", "name", "=", "'data'", ")", "data", "=", "mx", ".", "sym", ".", "BatchNorm", "(", "data", "=", "data", ",", "fix_gamma", "=", "True", ",", "eps", "=", "2e-5", ",", "momentum", "=", "bn_mom", ",", "use_global_stats", "=", "bn_global", ",", "name", "=", "'bn_data'", ")", "body", "=", "mx", ".", "sym", ".", "Convolution", "(", "data", "=", "data", ",", "num_filter", "=", "filter_list", "[", "0", "]", ",", "kernel", "=", "(", "7", ",", "7", ")", ",", "stride", "=", "(", "2", ",", "2", ")", ",", "pad", "=", "(", "3", ",", "3", ")", ",", "no_bias", "=", "True", ",", "name", "=", "\"conv0\"", ",", "workspace", "=", "workspace", ")", "body", "=", "mx", ".", "sym", ".", "BatchNorm", "(", "data", "=", "body", ",", "fix_gamma", "=", "False", ",", "eps", "=", "2e-5", ",", "momentum", "=", "bn_mom", ",", "use_global_stats", "=", "bn_global", ",", "name", "=", "'bn0'", ")", "body", "=", "mx", ".", "sym", ".", "Activation", "(", "data", "=", "body", ",", "act_type", "=", "'relu'", ",", "name", "=", "'relu0'", ")", "body", "=", "mx", ".", "symbol", ".", "Pooling", "(", "data", "=", "body", ",", "kernel", "=", "(", "3", ",", "3", ")", ",", "stride", "=", "(", "2", ",", "2", ")", ",", "pad", "=", "(", "1", ",", "1", ")", ",", "pool_type", "=", "'max'", ")", "for", "i", "in", "range", "(", "num_stage", ")", ":", "bn_global_", "=", "bn_global", "if", "i", "<", "num_stage", "-", "1", "else", "False", "# after roi-pooling, do not use use_global_stats", "body", "=", "residual_unit", "(", "body", ",", "filter_list", "[", "i", "+", "1", "]", ",", "(", "1", "if", "i", "==", "0", "else", "2", ",", "1", "if", "i", "==", "0", "else", "2", ")", ",", "False", ",", "name", "=", "'stage%d_unit%d'", "%", "(", "i", "+", "1", ",", "1", ")", ",", "bottle_neck", "=", "bottle_neck", ",", "workspace", "=", "workspace", ",", "bn_global", "=", "bn_global_", ")", "for", "j", "in", "range", "(", "units", "[", "i", "]", "-", "1", ")", ":", "body", "=", "residual_unit", "(", "body", ",", "filter_list", "[", "i", "+", "1", "]", ",", "(", "1", ",", "1", ")", ",", "True", ",", "name", "=", "'stage%d_unit%d'", "%", "(", "i", "+", "1", ",", "j", "+", "2", ")", ",", "bottle_neck", "=", "bottle_neck", ",", "workspace", "=", "workspace", ",", "bn_global", "=", "bn_global_", ")", "if", "i", "==", "num_stage", "-", "2", ":", "body", ",", "rpn_roi", "=", "rpn", "(", "body", ",", "num_class", "=", "num_class", ",", "num_anchor", "=", "num_anchor", ")", "bn1", "=", "mx", ".", "sym", ".", "BatchNorm", "(", "data", "=", "body", ",", "fix_gamma", "=", "False", ",", "eps", "=", "2e-5", ",", "momentum", "=", "bn_mom", ",", "use_global_stats", "=", "False", ",", "name", "=", "'bn1'", ")", "relu1", "=", "mx", ".", "sym", ".", "Activation", "(", "data", "=", "bn1", ",", "act_type", "=", "'relu'", ",", "name", "=", "'relu1'", ")", "pool1", "=", "mx", ".", "symbol", ".", "Pooling", "(", "data", "=", "relu1", ",", "global_pool", "=", "True", ",", "kernel", "=", "(", "7", ",", "7", ")", ",", "pool_type", "=", "'avg'", ",", "name", "=", "'pool1'", ")", "flat", "=", "mx", ".", "symbol", ".", "Flatten", "(", "data", "=", "pool1", ")", "# cls", "cls_score", "=", 
"mx", ".", "symbol", ".", "FullyConnected", "(", "name", "=", "'cls_score'", ",", "data", "=", "flat", ",", "num_hidden", "=", "num_class", ")", "cls_prob", "=", "mx", ".", "symbol", ".", "SoftmaxActivation", "(", "name", "=", "'cls_prob'", ",", "data", "=", "cls_score", ")", "# reg", "bbox_pred", "=", "mx", ".", "symbol", ".", "FullyConnected", "(", "name", "=", "'bbox_pred'", ",", "data", "=", "flat", ",", "num_hidden", "=", "num_class", "*", "4", ")", "# reshape output", "cls_prob", "=", "mx", ".", "symbol", ".", "Reshape", "(", "data", "=", "cls_prob", ",", "shape", "=", "(", "1", ",", "-", "1", ",", "num_class", ")", ",", "name", "=", "'cls_prob_reshape'", ")", "bbox_pred", "=", "mx", ".", "symbol", ".", "Reshape", "(", "data", "=", "bbox_pred", ",", "shape", "=", "(", "1", ",", "-", "1", ",", "4", "*", "num_class", ")", ",", "name", "=", "'bbox_pred_reshape'", ")", "return", "mx", ".", "symbol", ".", "Group", "(", "[", "rpn_roi", ",", "cls_prob", ",", "bbox_pred", "]", ")" ]
https://github.com/tornadomeet/mxnet-face/blob/25d212791908c9dc0ea0c1de8e05af6d56f15e81/detection/symbol/resnet.py#L93-L145
williballenthin/python-registry
11e857623469dd28ed14519a08d2db7c8228ca0c
samples/forensicating.py
python
users_sids
(soft_reg)
return sid_list
Return a list of subkeys containing the users SIDs
Return a list of subkeys containing the users SIDs
[ "Return", "a", "list", "of", "subkeys", "containing", "the", "users", "SIDs" ]
def users_sids(soft_reg): ''' Return a list of subkeys containing the users SIDs ''' sid_list = [] registry = Registry.Registry(soft_reg) key = registry.open("Microsoft\\Windows NT\\CurrentVersion\\ProfileList") for v in key.subkeys(): sid_list.append(v.name()) return sid_list
[ "def", "users_sids", "(", "soft_reg", ")", ":", "sid_list", "=", "[", "]", "registry", "=", "Registry", ".", "Registry", "(", "soft_reg", ")", "key", "=", "registry", ".", "open", "(", "\"Microsoft\\\\Windows NT\\\\CurrentVersion\\\\ProfileList\"", ")", "for", "v", "in", "key", ".", "subkeys", "(", ")", ":", "sid_list", ".", "append", "(", "v", ".", "name", "(", ")", ")", "return", "sid_list" ]
https://github.com/williballenthin/python-registry/blob/11e857623469dd28ed14519a08d2db7c8228ca0c/samples/forensicating.py#L66-L76
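Usage follows directly from the record's own calls. Given a SOFTWARE hive exported from a Windows machine (the path below is a placeholder):

    from Registry import Registry

    reg = Registry.Registry("SOFTWARE")  # path to an exported SOFTWARE hive
    key = reg.open("Microsoft\\Windows NT\\CurrentVersion\\ProfileList")
    for subkey in key.subkeys():
        print(subkey.name())  # one SID per profile, e.g. S-1-5-21-...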
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/distutils/command/bdist_msi.py
python
bdist_msi.initialize_options
(self)
[]
def initialize_options(self): self.bdist_dir = None self.plat_name = None self.keep_temp = 0 self.no_target_compile = 0 self.no_target_optimize = 0 self.target_version = None self.dist_dir = None self.skip_build = None self.install_script = None self.pre_install_script = None self.versions = None
[ "def", "initialize_options", "(", "self", ")", ":", "self", ".", "bdist_dir", "=", "None", "self", ".", "plat_name", "=", "None", "self", ".", "keep_temp", "=", "0", "self", ".", "no_target_compile", "=", "0", "self", ".", "no_target_optimize", "=", "0", "self", ".", "target_version", "=", "None", "self", ".", "dist_dir", "=", "None", "self", ".", "skip_build", "=", "None", "self", ".", "install_script", "=", "None", "self", ".", "pre_install_script", "=", "None", "self", ".", "versions", "=", "None" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/distutils/command/bdist_msi.py#L125-L136
Pymol-Scripts/Pymol-script-repo
bcd7bb7812dc6db1595953dfa4471fa15fb68c77
modules/idlelib/CallTipWindow.py
python
CallTip.showtip
(self, text, parenleft, parenright)
Show the calltip, bind events which will close it and reposition it.
Show the calltip, bind events which will close it and reposition it.
[ "Show", "the", "calltip", "bind", "events", "which", "will", "close", "it", "and", "reposition", "it", "." ]
def showtip(self, text, parenleft, parenright): """Show the calltip, bind events which will close it and reposition it. """ # truncate overly long calltip if len(text) >= 79: textlines = text.splitlines() for i, line in enumerate(textlines): if len(line) > 79: textlines[i] = line[:75] + ' ...' text = '\n'.join(textlines) self.text = text if self.tipwindow or not self.text: return self.widget.mark_set(MARK_RIGHT, parenright) self.parenline, self.parencol = map( int, self.widget.index(parenleft).split(".")) self.tipwindow = tw = Toplevel(self.widget) self.position_window() # remove border on calltip window tw.wm_overrideredirect(1) try: # This command is only needed and available on Tk >= 8.4.0 for OSX # Without it, call tips intrude on the typing process by grabbing # the focus. tw.tk.call("::tk::unsupported::MacWindowStyle", "style", tw._w, "help", "noActivates") except TclError: pass self.label = Label(tw, text=self.text, justify=LEFT, background="#ffffe0", relief=SOLID, borderwidth=1, font = self.widget['font']) self.label.pack() self.checkhideid = self.widget.bind(CHECKHIDE_VIRTUAL_EVENT_NAME, self.checkhide_event) for seq in CHECKHIDE_SEQUENCES: self.widget.event_add(CHECKHIDE_VIRTUAL_EVENT_NAME, seq) self.widget.after(CHECKHIDE_TIME, self.checkhide_event) self.hideid = self.widget.bind(HIDE_VIRTUAL_EVENT_NAME, self.hide_event) for seq in HIDE_SEQUENCES: self.widget.event_add(HIDE_VIRTUAL_EVENT_NAME, seq)
[ "def", "showtip", "(", "self", ",", "text", ",", "parenleft", ",", "parenright", ")", ":", "# truncate overly long calltip", "if", "len", "(", "text", ")", ">=", "79", ":", "textlines", "=", "text", ".", "splitlines", "(", ")", "for", "i", ",", "line", "in", "enumerate", "(", "textlines", ")", ":", "if", "len", "(", "line", ")", ">", "79", ":", "textlines", "[", "i", "]", "=", "line", "[", ":", "75", "]", "+", "' ...'", "text", "=", "'\\n'", ".", "join", "(", "textlines", ")", "self", ".", "text", "=", "text", "if", "self", ".", "tipwindow", "or", "not", "self", ".", "text", ":", "return", "self", ".", "widget", ".", "mark_set", "(", "MARK_RIGHT", ",", "parenright", ")", "self", ".", "parenline", ",", "self", ".", "parencol", "=", "map", "(", "int", ",", "self", ".", "widget", ".", "index", "(", "parenleft", ")", ".", "split", "(", "\".\"", ")", ")", "self", ".", "tipwindow", "=", "tw", "=", "Toplevel", "(", "self", ".", "widget", ")", "self", ".", "position_window", "(", ")", "# remove border on calltip window", "tw", ".", "wm_overrideredirect", "(", "1", ")", "try", ":", "# This command is only needed and available on Tk >= 8.4.0 for OSX", "# Without it, call tips intrude on the typing process by grabbing", "# the focus.", "tw", ".", "tk", ".", "call", "(", "\"::tk::unsupported::MacWindowStyle\"", ",", "\"style\"", ",", "tw", ".", "_w", ",", "\"help\"", ",", "\"noActivates\"", ")", "except", "TclError", ":", "pass", "self", ".", "label", "=", "Label", "(", "tw", ",", "text", "=", "self", ".", "text", ",", "justify", "=", "LEFT", ",", "background", "=", "\"#ffffe0\"", ",", "relief", "=", "SOLID", ",", "borderwidth", "=", "1", ",", "font", "=", "self", ".", "widget", "[", "'font'", "]", ")", "self", ".", "label", ".", "pack", "(", ")", "self", ".", "checkhideid", "=", "self", ".", "widget", ".", "bind", "(", "CHECKHIDE_VIRTUAL_EVENT_NAME", ",", "self", ".", "checkhide_event", ")", "for", "seq", "in", "CHECKHIDE_SEQUENCES", ":", "self", ".", "widget", ".", "event_add", "(", "CHECKHIDE_VIRTUAL_EVENT_NAME", ",", "seq", ")", "self", ".", "widget", ".", "after", "(", "CHECKHIDE_TIME", ",", "self", ".", "checkhide_event", ")", "self", ".", "hideid", "=", "self", ".", "widget", ".", "bind", "(", "HIDE_VIRTUAL_EVENT_NAME", ",", "self", ".", "hide_event", ")", "for", "seq", "in", "HIDE_SEQUENCES", ":", "self", ".", "widget", ".", "event_add", "(", "HIDE_VIRTUAL_EVENT_NAME", ",", "seq", ")" ]
https://github.com/Pymol-Scripts/Pymol-script-repo/blob/bcd7bb7812dc6db1595953dfa4471fa15fb68c77/modules/idlelib/CallTipWindow.py#L53-L96
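The truncation rule at the top of showtip (any line longer than 79 characters is cut to 75 characters plus ' ...') is self-contained enough to test on its own:

    def truncate_calltip(text, limit=79, keep=75):
        lines = text.splitlines()
        return "\n".join(
            line if len(line) <= limit else line[:keep] + " ..."
            for line in lines
        )

    print(len(truncate_calltip("x" * 100)))  # 79 (75 chars + ' ...')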
CarterBain/AlephNull
796edec7e106cd76a5a69cb6e67a1a96c7a22cf6
alephnull/transforms/vwap.py
python
VWAPEventWindow.get_vwap
(self)
Return the calculated vwap for this sid.
Return the calculated vwap for this sid.
[ "Return", "the", "calculated", "vwap", "for", "this", "sid", "." ]
def get_vwap(self): """ Return the calculated vwap for this sid. """ # By convention, vwap is None if we have no events. if len(self.ticks) == 0: return None else: return (self.flux / self.totalvolume)
[ "def", "get_vwap", "(", "self", ")", ":", "# By convention, vwap is None if we have no events.", "if", "len", "(", "self", ".", "ticks", ")", "==", "0", ":", "return", "None", "else", ":", "return", "(", "self", ".", "flux", "/", "self", ".", "totalvolume", ")" ]
https://github.com/CarterBain/AlephNull/blob/796edec7e106cd76a5a69cb6e67a1a96c7a22cf6/alephnull/transforms/vwap.py#L90-L98
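VWAP itself is just accumulated price-times-volume flux divided by total volume. A worked two-tick example with invented numbers:

    ticks = [(10.0, 100), (11.0, 300)]      # (price, volume) pairs
    flux = sum(p * v for p, v in ticks)     # 10*100 + 11*300 = 4300.0
    totalvolume = sum(v for _, v in ticks)  # 400
    vwap = flux / totalvolume if ticks else None
    print(vwap)                             # 10.75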
biopython/biopython
2dd97e71762af7b046d7f7f8a4f1e38db6b06c86
Bio/ExPASy/__init__.py
python
get_prosite_raw
(id, cgi=None)
return handle
Get a text handle to a raw PROSITE or PRODOC record at ExPASy. The cgi argument is deprecated due to changes in the ExPASy website. >>> from Bio import ExPASy >>> from Bio.ExPASy import Prosite >>> with ExPASy.get_prosite_raw('PS00001') as handle: ... record = Prosite.read(handle) ... >>> print(record.accession) PS00001 This function raises a ValueError if the identifier does not exist: >>> handle = ExPASy.get_prosite_raw("DOES_NOT_EXIST") Traceback (most recent call last): ... ValueError: Failed to find entry 'DOES_NOT_EXIST' on ExPASy
Get a text handle to a raw PROSITE or PRODOC record at ExPASy.
[ "Get", "a", "text", "handle", "to", "a", "raw", "PROSITE", "or", "PRODOC", "record", "at", "ExPASy", "." ]
def get_prosite_raw(id, cgi=None): """Get a text handle to a raw PROSITE or PRODOC record at ExPASy. The cgi argument is deprecated due to changes in the ExPASy website. >>> from Bio import ExPASy >>> from Bio.ExPASy import Prosite >>> with ExPASy.get_prosite_raw('PS00001') as handle: ... record = Prosite.read(handle) ... >>> print(record.accession) PS00001 This function raises a ValueError if the identifier does not exist: >>> handle = ExPASy.get_prosite_raw("DOES_NOT_EXIST") Traceback (most recent call last): ... ValueError: Failed to find entry 'DOES_NOT_EXIST' on ExPASy """ handle = _open(f"https://prosite.expasy.org/{id}.txt") if handle.url == "https://www.expasy.org/": raise ValueError(f"Failed to find entry '{id}' on ExPASy") from None return handle
[ "def", "get_prosite_raw", "(", "id", ",", "cgi", "=", "None", ")", ":", "handle", "=", "_open", "(", "f\"https://prosite.expasy.org/{id}.txt\"", ")", "if", "handle", ".", "url", "==", "\"https://www.expasy.org/\"", ":", "raise", "ValueError", "(", "f\"Failed to find entry '{id}' on ExPASy\"", ")", "from", "None", "return", "handle" ]
https://github.com/biopython/biopython/blob/2dd97e71762af7b046d7f7f8a4f1e38db6b06c86/Bio/ExPASy/__init__.py#L66-L91
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py
python
HTMLUnicodeInputStream.openStream
(self, source)
return stream
Produces a file object from source. source can be either a file object, local filename or a string.
Produces a file object from source.
[ "Produces", "a", "file", "object", "from", "source", "." ]
def openStream(self, source): """Produces a file object from source. source can be either a file object, local filename or a string. """ # Already a file object if hasattr(source, 'read'): stream = source else: stream = StringIO(source) return stream
[ "def", "openStream", "(", "self", ",", "source", ")", ":", "# Already a file object", "if", "hasattr", "(", "source", ",", "'read'", ")", ":", "stream", "=", "source", "else", ":", "stream", "=", "StringIO", "(", "source", ")", "return", "stream" ]
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py#L210-L222
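The hasattr(source, 'read') test is classic duck typing: anything file-like passes through untouched, and anything else is wrapped in a StringIO. A quick demonstration mirroring the record's check:

    from io import StringIO

    def open_stream(source):
        # already file-like? pass through; otherwise wrap the string
        return source if hasattr(source, "read") else StringIO(source)

    print(open_stream("<p>hi</p>").read())    # wrapped string
    print(open_stream(StringIO("x")).read())  # passed through unchanged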