Dataset column schema (name, type, value-length range):

  nwo                 stringlengths    5 .. 86
  sha                 stringlengths   40 .. 40
  path                stringlengths    4 .. 189
  language            stringclasses    1 value
  identifier          stringlengths    1 .. 94
  parameters          stringlengths    2 .. 4.03k
  argument_list       stringclasses    1 value
  return_statement    stringlengths    0 .. 11.5k
  docstring           stringlengths    1 .. 33.2k
  docstring_summary   stringlengths    0 .. 5.15k
  docstring_tokens    sequence
  function            stringlengths   34 .. 151k
  function_tokens     sequence
  url                 stringlengths   90 .. 278
google/skia
82d65d0487bd72f5f7332d002429ec2dc61d2463
infra/bots/recipe_modules/gold_upload/api.py
python
GoldUploadApi.upload
(self)
Attempt to upload files to Gold. This module assumes setup has occurred for the vars and flavor modules.
Attempt to upload files to Gold. This module assumes setup has occurred for the vars and flavor modules.
def upload(self):
  """Attempt to upload files to Gold.

  This module assumes setup has occurred for the vars and flavor modules.
  """
  revision = self.m.properties['revision']
  results_dir = self.m.flavor.host_dirs.dm_dir

  # Upload the images. It is preferred that the images are uploaded first
  # so they exist whenever the json is processed.
  image_dest_path = 'gs://%s/dm-images-v1' % self.m.properties['gs_bucket']
  for ext in ['.png']:
    files_to_upload = self.m.file.glob_paths(
        'find %s images' % ext,
        results_dir,
        '*%s' % ext,
        test_data=['someimage.png'])
    # For some reason, glob returns results_dir when it should return nothing.
    files_to_upload = [f for f in files_to_upload if str(f).endswith(ext)]
    if len(files_to_upload) > 0:
      self.m.gsutil.cp('%s images' % ext, results_dir.join('*%s' % ext),
                       image_dest_path, multithread=True)

  summary_dest_path = 'gs://%s' % self.m.properties['gs_bucket']
  ref = revision
  # Trybot results are siloed by issue/patchset.
  if self.m.vars.is_trybot:
    summary_dest_path = '/'.join([summary_dest_path, 'trybot'])
    ref = '%s_%s' % (str(self.m.vars.issue), str(self.m.vars.patchset))

  # Compute the directory to upload results to
  now = self.m.time.utcnow()
  summary_dest_path = '/'.join([
      summary_dest_path,
      'dm-json-v1',
      str(now.year).zfill(4),
      str(now.month).zfill(2),
      str(now.day).zfill(2),
      str(now.hour).zfill(2),
      ref,
      self.m.vars.builder_name,
      str(int(calendar.timegm(now.utctimetuple())))])

  # Directly upload dm.json if it exists.
  json_file = results_dir.join(DM_JSON)
  # -Z compresses the json file at rest with gzip.
  self.m.gsutil.cp('dm.json', json_file,
                   summary_dest_path + '/' + DM_JSON, extra_args=['-Z'])
https://github.com/google/skia/blob/82d65d0487bd72f5f7332d002429ec2dc61d2463/infra/bots/recipe_modules/gold_upload/api.py#L12-L58
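For illustration, the Gold destination path that upload() assembles can be reproduced in plain Python. The bucket, ref, and builder values below are hypothetical stand-ins for the recipe properties:

import calendar
from datetime import datetime

now = datetime(2023, 5, 4, 7, 0, 0)          # stand-in for self.m.time.utcnow()
parts = ['gs://skia-gold-bucket', 'dm-json-v1',
         str(now.year).zfill(4), str(now.month).zfill(2),
         str(now.day).zfill(2), str(now.hour).zfill(2),
         'abc123',                            # ref: revision or issue_patchset
         'Test-Debian10-GCC',                 # hypothetical builder name
         str(int(calendar.timegm(now.utctimetuple())))]
print('/'.join(parts))
# gs://skia-gold-bucket/dm-json-v1/2023/05/04/07/abc123/Test-Debian10-GCC/1683183600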
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
lldb/third_party/Python/module/pexpect-4.6/pexpect/popen_spawn.py
python
PopenSpawn.writelines
(self, sequence)
This calls write() for each element in the sequence. The sequence can be any iterable object producing strings, typically a list of strings. This does not add line separators. There is no return value.
This calls write() for each element in the sequence.
def writelines(self, sequence):
    '''This calls write() for each element in the sequence. The sequence
    can be any iterable object producing strings, typically a list of
    strings. This does not add line separators. There is no return value.
    '''
    for s in sequence:
        self.send(s)
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/lldb/third_party/Python/module/pexpect-4.6/pexpect/popen_spawn.py#L122-L130
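A minimal usage sketch, assuming pexpect >= 4 is installed and using 'cat' as a convenient echoing command; note that writelines() adds no separators, so the newlines must be in the strings themselves:

from pexpect.popen_spawn import PopenSpawn

child = PopenSpawn('cat')
child.writelines(['hello\n', 'world\n'])   # delegates to send() per element
child.expect('world')                      # the lines are echoed back by cat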
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBTypeSummary.SetFunctionCode
(self, *args)
return _lldb.SBTypeSummary_SetFunctionCode(self, *args)
SetFunctionCode(self, str data)
SetFunctionCode(self, str data)
def SetFunctionCode(self, *args):
    """SetFunctionCode(self, str data)"""
    return _lldb.SBTypeSummary_SetFunctionCode(self, *args)
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L11463-L11465
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/sdb/db/manager/sdbmanager.py
python
SDBManager._build_filter_part
(self, cls, filters, order_by=None, select=None)
Build the filter part
Build the filter part
def _build_filter_part(self, cls, filters, order_by=None, select=None):
    """
    Build the filter part
    """
    import types
    query_parts = []
    order_by_filtered = False

    if order_by:
        if order_by[0] == "-":
            order_by_method = "DESC"
            order_by = order_by[1:]
        else:
            order_by_method = "ASC"

    if select:
        if order_by and order_by in select:
            order_by_filtered = True
        query_parts.append("(%s)" % select)

    if isinstance(filters, six.string_types):
        query = "WHERE %s AND `__type__` = '%s'" % (filters, cls.__name__)
        if order_by in ["__id__", "itemName()"]:
            query += " ORDER BY itemName() %s" % order_by_method
        elif order_by is not None:
            query += " ORDER BY `%s` %s" % (order_by, order_by_method)
        return query

    for filter in filters:
        filter_parts = []
        filter_props = filter[0]
        if not isinstance(filter_props, list):
            filter_props = [filter_props]
        for filter_prop in filter_props:
            (name, op) = filter_prop.strip().split(" ", 1)
            value = filter[1]
            property = cls.find_property(name)
            if name == order_by:
                order_by_filtered = True
            # NOTE: types.TypeType is the Python 2 alias for type(), so this
            # checks type(value) == list.
            if types.TypeType(value) == list:
                filter_parts_sub = []
                for val in value:
                    val = self.encode_value(property, val)
                    if isinstance(val, list):
                        for v in val:
                            filter_parts_sub.append(self._build_filter(property, name, op, v))
                    else:
                        filter_parts_sub.append(self._build_filter(property, name, op, val))
                filter_parts.append("(%s)" % (" OR ".join(filter_parts_sub)))
            else:
                val = self.encode_value(property, value)
                if isinstance(val, list):
                    for v in val:
                        filter_parts.append(self._build_filter(property, name, op, v))
                else:
                    filter_parts.append(self._build_filter(property, name, op, val))
        query_parts.append("(%s)" % (" or ".join(filter_parts)))

    type_query = "(`__type__` = '%s'" % cls.__name__
    for subclass in self._get_all_decendents(cls).keys():
        type_query += " or `__type__` = '%s'" % subclass
    type_query += ")"
    query_parts.append(type_query)

    order_by_query = ""
    if order_by:
        if not order_by_filtered:
            query_parts.append("`%s` LIKE '%%'" % order_by)
        if order_by in ["__id__", "itemName()"]:
            order_by_query = " ORDER BY itemName() %s" % order_by_method
        else:
            order_by_query = " ORDER BY `%s` %s" % (order_by, order_by_method)

    if len(query_parts) > 0:
        return "WHERE %s %s" % (" AND ".join(query_parts), order_by_query)
    else:
        return ""
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/sdb/db/manager/sdbmanager.py#L565-L644
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/layers/python/layers/layers.py
python
one_hot_encoding
(labels, num_classes, on_value=1.0, off_value=0.0, outputs_collections=None, scope=None)
Transform numeric labels into onehot_labels using `tf.one_hot`. Args: labels: [batch_size] target labels. num_classes: Total number of classes. on_value: A scalar defining the on-value. off_value: A scalar defining the off-value. outputs_collections: Collection to add the outputs. scope: Optional scope for name_scope. Returns: One-hot encoding of the labels.
Transform numeric labels into onehot_labels using `tf.one_hot`.
def one_hot_encoding(labels, num_classes, on_value=1.0, off_value=0.0,
                     outputs_collections=None, scope=None):
  """Transform numeric labels into onehot_labels using `tf.one_hot`.

  Args:
    labels: [batch_size] target labels.
    num_classes: Total number of classes.
    on_value: A scalar defining the on-value.
    off_value: A scalar defining the off-value.
    outputs_collections: Collection to add the outputs.
    scope: Optional scope for name_scope.

  Returns:
    One-hot encoding of the labels.
  """
  with ops.name_scope(scope, 'OneHotEncoding', [labels, num_classes]) as sc:
    labels = ops.convert_to_tensor(labels)
    if labels.dtype == dtypes.int32:
      labels = standard_ops.to_int64(labels)
    outputs = standard_ops.one_hot(
        labels, num_classes, on_value=on_value, off_value=off_value)
    return utils.collect_named_outputs(outputs_collections, sc, outputs)
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/layers/python/layers/layers.py#L2541-L2566
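The same semantics are easy to check with a small numpy sketch (inputs invented); the one-hot op fills off_value everywhere and on_value at each label's index:

import numpy as np

labels = np.array([0, 2, 3])               # three labels over four classes
num_classes, on_value, off_value = 4, 1.0, 0.0

one_hot = np.full((labels.size, num_classes), off_value)
one_hot[np.arange(labels.size), labels] = on_value
print(one_hot)
# [[1. 0. 0. 0.]
#  [0. 0. 1. 0.]
#  [0. 0. 0. 1.]]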
sdhash/sdhash
b9eff63e4e5867e910f41fd69032bbb1c94a2a5e
sdhash-ui/cherrypy/wsgiserver/ssl_pyopenssl.py
python
pyOpenSSLAdapter.get_context
(self)
return c
Return an SSL.Context from self attributes.
Return an SSL.Context from self attributes.
def get_context(self):
    """Return an SSL.Context from self attributes."""
    # See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
    c = SSL.Context(SSL.SSLv23_METHOD)
    c.use_privatekey_file(self.private_key)
    if self.certificate_chain:
        c.load_verify_locations(self.certificate_chain)
    c.use_certificate_file(self.certificate)
    return c
https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/sdhash-ui/cherrypy/wsgiserver/ssl_pyopenssl.py#L193-L201
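A standalone sketch of the same pyOpenSSL calls, with hypothetical file names; modern code would typically prefer TLS_METHOD over the legacy SSLv23_METHOD:

from OpenSSL import SSL  # pyOpenSSL

ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.use_privatekey_file('server.key')    # hypothetical key file
ctx.use_certificate_file('server.crt')   # hypothetical certificate
# ctx.load_verify_locations('chain.pem') would add the CA chain,
# mirroring the certificate_chain branch in get_context().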
KhronosGroup/SPIRV-Tools
940127a77d3ad795a4a1422fbeaad50c9f19f2ea
utils/generate_grammar_tables.py
python
generate_capability_arrays
(caps)
return '\n'.join(arrays)
Returns the arrays of capabilities. Arguments: - caps: a sequence of sequences of capability names
Returns the arrays of capabilities.
def generate_capability_arrays(caps):
    """Returns the arrays of capabilities.

    Arguments:
      - caps: a sequence of sequences of capability names
    """
    caps = sorted(set([tuple(c) for c in caps if c]))
    arrays = [
        'static const SpvCapability {}[] = {};'.format(
            get_capability_array_name(c), compose_capability_list(c))
        for c in caps]
    return '\n'.join(arrays)
https://github.com/KhronosGroup/SPIRV-Tools/blob/940127a77d3ad795a4a1422fbeaad50c9f19f2ea/utils/generate_grammar_tables.py#L95-L106
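The dedup-and-sort step is plain Python and easy to verify in isolation (capability names invented); empty lists are dropped and duplicates collapse via the set of tuples:

caps = [['Shader'], ['Kernel', 'Shader'], ['Shader'], []]
print(sorted(set(tuple(c) for c in caps if c)))
# [('Kernel', 'Shader'), ('Shader',)]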
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
EvtHandler.ProcessEventLocally
(*args, **kwargs)
return _core_.EvtHandler_ProcessEventLocally(*args, **kwargs)
ProcessEventLocally(self, Event event) -> bool
ProcessEventLocally(self, Event event) -> bool
def ProcessEventLocally(*args, **kwargs):
    """ProcessEventLocally(self, Event event) -> bool"""
    return _core_.EvtHandler_ProcessEventLocally(*args, **kwargs)
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L4160-L4162
rtbkit/rtbkit
502d06acc3f8d90438946b6ae742190f2f4b4fbb
jml-build/jmlbuild.py
python
Parser.parse_func_default
(self, line)
return line
Functions which don't have special handlers are parsed here.
Functions which don't have special handlers are parsed here.
def parse_func_default(self, line):
    """ Functions which don't have special handlers are parsed here. """
    print_dbg("\tdefault_func: " + line)
    params, line = self.parse_func_params(line)
    return line
https://github.com/rtbkit/rtbkit/blob/502d06acc3f8d90438946b6ae742190f2f4b4fbb/jml-build/jmlbuild.py#L532-L538
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
MenuBar.FindItemById
(*args, **kwargs)
return _core_.MenuBar_FindItemById(*args, **kwargs)
FindItemById(self, int id) -> MenuItem
FindItemById(self, int id) -> MenuItem
def FindItemById(*args, **kwargs):
    """FindItemById(self, int id) -> MenuItem"""
    return _core_.MenuBar_FindItemById(*args, **kwargs)
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L12319-L12321
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/model_pruning/python/layers/rnn_cells.py
python
MaskedLSTMCell.call
(self, inputs, state)
return m, new_state
Run one step of LSTM. Args: inputs: input Tensor, 2D, `[batch, num_units]`. state: if `state_is_tuple` is False, this must be a state Tensor, `2-D, [batch, state_size]`. If `state_is_tuple` is True, this must be a tuple of state Tensors, both `2-D`, with column sizes `c_state` and `m_state`. Returns: A tuple containing: - A `2-D, [batch, output_dim]`, Tensor representing the output of the LSTM after reading `inputs` when previous state was `state`. Here output_dim is: num_proj if num_proj was set, num_units otherwise. - Tensor(s) representing the new state of LSTM after reading `inputs` when the previous state was `state`. Same type and shape(s) as `state`. Raises: ValueError: If input size cannot be inferred from inputs via static shape inference.
Run one step of LSTM.
def call(self, inputs, state):
  """Run one step of LSTM.

  Args:
    inputs: input Tensor, 2D, `[batch, num_units]`.
    state: if `state_is_tuple` is False, this must be a state Tensor,
      `2-D, [batch, state_size]`. If `state_is_tuple` is True, this must
      be a tuple of state Tensors, both `2-D`, with column sizes
      `c_state` and `m_state`.

  Returns:
    A tuple containing:

    - A `2-D, [batch, output_dim]`, Tensor representing the output of the
      LSTM after reading `inputs` when previous state was `state`.
      Here output_dim is: num_proj if num_proj was set, num_units otherwise.
    - Tensor(s) representing the new state of LSTM after reading `inputs`
      when the previous state was `state`. Same type and shape(s) as `state`.

  Raises:
    ValueError: If input size cannot be inferred from inputs via
      static shape inference.
  """
  num_proj = self._num_units if self._num_proj is None else self._num_proj
  sigmoid = math_ops.sigmoid

  if self._state_is_tuple:
    (c_prev, m_prev) = state
  else:
    c_prev = array_ops.slice(state, [0, 0], [-1, self._num_units])
    m_prev = array_ops.slice(state, [0, self._num_units], [-1, num_proj])

  input_size = inputs.get_shape().with_rank(2).dims[1]
  if input_size.value is None:
    raise ValueError("Could not infer input size from inputs.get_shape()[-1]")

  # i = input_gate, j = new_input, f = forget_gate, o = output_gate
  lstm_matrix = math_ops.matmul(
      array_ops.concat([inputs, m_prev], 1), self._masked_kernel)
  lstm_matrix = nn_ops.bias_add(lstm_matrix, self._bias)

  i, j, f, o = array_ops.split(
      value=lstm_matrix, num_or_size_splits=4, axis=1)

  # Diagonal connections
  if self._use_peepholes:
    c = (
        sigmoid(f + self._forget_bias + self._w_f_diag * c_prev) * c_prev +
        sigmoid(i + self._w_i_diag * c_prev) * self._activation(j))
  else:
    c = (
        sigmoid(f + self._forget_bias) * c_prev +
        sigmoid(i) * self._activation(j))

  if self._cell_clip is not None:
    # pylint: disable=invalid-unary-operand-type
    c = clip_ops.clip_by_value(c, -self._cell_clip, self._cell_clip)
    # pylint: enable=invalid-unary-operand-type

  if self._use_peepholes:
    m = sigmoid(o + self._w_o_diag * c) * self._activation(c)
  else:
    m = sigmoid(o) * self._activation(c)

  if self._num_proj is not None:
    m = math_ops.matmul(m, self._proj_kernel)
    if self._proj_clip is not None:
      # pylint: disable=invalid-unary-operand-type
      m = clip_ops.clip_by_value(m, -self._proj_clip, self._proj_clip)
      # pylint: enable=invalid-unary-operand-type

  new_state = (
      tf_rnn.LSTMStateTuple(c, m)
      if self._state_is_tuple else array_ops.concat([c, m], 1))
  return m, new_state
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/model_pruning/python/layers/rnn_cells.py#L273-L348
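To make the gate equations concrete, here is a toy numpy sketch of the non-peephole update above for a single batch element, assuming the usual tanh activation and with invented pre-activation values standing in for the matmul/split outputs:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

i, j, f, o = (np.array([0.2]),) * 4      # pretend i/j/f/o pre-activations
c_prev, forget_bias = np.array([0.5]), 1.0

c = sigmoid(f + forget_bias) * c_prev + sigmoid(i) * np.tanh(j)
m = sigmoid(o) * np.tanh(c)              # new cell output / hidden state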
SpenceKonde/megaTinyCore
1c4a70b18a149fe6bcb551dfa6db11ca50b8997b
megaavr/tools/libs/pyedbglib/util/binary.py
python
pack_be16
(value)
return bytearray([(value >> 8) & 0xFF, value & 0xFF])
:param value: input value :return: 16-bit big endian bytearray representation of the input value
:param value: input value :return: 16-bit big endian bytearray representation of the input value
def pack_be16(value):
    """
    :param value: input value
    :return: 16-bit big endian bytearray representation of the input value
    """
    _check_input_value(value, 16)
    return bytearray([(value >> 8) & 0xFF, value & 0xFF])
https://github.com/SpenceKonde/megaTinyCore/blob/1c4a70b18a149fe6bcb551dfa6db11ca50b8997b/megaavr/tools/libs/pyedbglib/util/binary.py#L71-L77
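Because the function is a pure byte manipulation it is easy to sanity-check; this standalone re-implementation drops the _check_input_value range check for brevity:

def pack_be16(value):
    # Big-endian: most significant byte first.
    return bytearray([(value >> 8) & 0xFF, value & 0xFF])

assert pack_be16(0x1234) == bytearray(b'\x12\x34')
assert int.from_bytes(pack_be16(513), 'big') == 513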
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/backend.py
python
arange
(start, stop=None, step=1, dtype='int32')
return result
Creates a 1D tensor containing a sequence of integers. The function arguments use the same convention as Theano's arange: if only one argument is provided, it is in fact the "stop" argument and "start" is 0. The default type of the returned tensor is `'int32'` to match TensorFlow's default. Arguments: start: Start value. stop: Stop value. step: Difference between two successive values. dtype: Integer dtype to use. Returns: An integer tensor. Example: ```python >>> tf.keras.backend.arange(start=0, stop=10, step=1.5) <tf.Tensor: id=96, shape=(7,), dtype=float32, numpy=array([0. , 1.5, 3. , 4.5, 6. , 7.5, 9. ], dtype=float32)> ```
Creates a 1D tensor containing a sequence of integers.
def arange(start, stop=None, step=1, dtype='int32'):
  """Creates a 1D tensor containing a sequence of integers.

  The function arguments use the same convention as Theano's arange: if only
  one argument is provided, it is in fact the "stop" argument and "start" is 0.

  The default type of the returned tensor is `'int32'` to match TensorFlow's
  default.

  Arguments:
      start: Start value.
      stop: Stop value.
      step: Difference between two successive values.
      dtype: Integer dtype to use.

  Returns:
      An integer tensor.

  Example:
      >>> tf.keras.backend.arange(start=0, stop=10, step=1.5)
      <tf.Tensor: id=96, shape=(7,), dtype=float32,
          numpy=array([0. , 1.5, 3. , 4.5, 6. , 7.5, 9. ], dtype=float32)>
  """
  # Match the behavior of numpy and Theano by returning an empty sequence.
  if stop is None and start < 0:
    start = 0
  result = math_ops.range(start, limit=stop, delta=step, name='arange')
  if dtype != 'int32':
    result = cast(result, dtype)
  return result
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/backend.py#L2849-L2883
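The numpy behavior the comment refers to: a single argument is interpreted as "stop", and a lone negative start therefore yields an empty sequence, which is why the code resets start to 0:

import numpy as np

print(np.arange(5))    # [0 1 2 3 4] -- the single argument is "stop"
print(np.arange(-3))   # []          -- empty, matching the start < 0 branch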
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftfunctions/dxf.py
python
getDXF
(obj, direction=None)
return get_dxf(obj, direction=direction)
Return DXF string of the object. DEPRECATED. Use 'get_dxf'.
Return DXF string of the object. DEPRECATED. Use 'get_dxf'.
def getDXF(obj, direction=None):
    """Return DXF string of the object.

    DEPRECATED. Use 'get_dxf'.
    """
    utils.use_instead("get_dxf")
    return get_dxf(obj, direction=direction)
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftfunctions/dxf.py#L136-L141
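utils.use_instead is FreeCAD-specific; in standalone Python the same deprecation-shim pattern would look roughly like this, with get_dxf as a stand-in for the real implementation:

import warnings

def get_dxf(obj, direction=None):
    return ''  # stand-in for the real implementation

def getDXF(obj, direction=None):
    """Return DXF string of the object. DEPRECATED. Use 'get_dxf'."""
    warnings.warn("getDXF is deprecated; use get_dxf", DeprecationWarning,
                  stacklevel=2)
    return get_dxf(obj, direction=direction)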
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/closure_linter/closure_linter/common/tokenizer.py
python
Tokenizer.__CreateNormalToken
(self, mode, string, line, line_number)
return self._CreateToken(string, type, line, line_number)
Creates a normal token. Args: mode: The current mode. string: The string to tokenize. line: The line of text. line_number: The line number within the file. Returns: A Token object, of the default type for the current mode.
Creates a normal token.
def __CreateNormalToken(self, mode, string, line, line_number):
  """Creates a normal token.

  Args:
    mode: The current mode.
    string: The string to tokenize.
    line: The line of text.
    line_number: The line number within the file.

  Returns:
    A Token object, of the default type for the current mode.
  """
  type = Type.NORMAL
  if mode in self.default_types:
    type = self.default_types[mode]
  return self._CreateToken(string, type, line, line_number)
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/closure_linter/closure_linter/common/tokenizer.py#L149-L164
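The mode lookup is a plain lookup-with-default; dict.get expresses the same thing in one line (a style observation, not a change to the original). A self-contained toy with invented mode names:

NORMAL = 'normal'                             # stand-in for Type.NORMAL
default_types = {'comment': 'comment_token'}  # stand-in for self.default_types

print(default_types.get('comment', NORMAL))   # comment_token
print(default_types.get('code', NORMAL))      # normal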
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py
python
train
()
Train CIFAR-10 for a number of steps.
Train CIFAR-10 for a number of steps.
def train():
  """Train CIFAR-10 for a number of steps."""
  with tf.Graph().as_default(), tf.device('/cpu:0'):
    # Create a variable to count the number of train() calls. This equals the
    # number of batches processed * FLAGS.num_gpus.
    global_step = tf.get_variable(
        'global_step', [],
        initializer=tf.constant_initializer(0), trainable=False)

    # Calculate the learning rate schedule.
    num_batches_per_epoch = (cifar10.NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN /
                             FLAGS.batch_size)
    decay_steps = int(num_batches_per_epoch * cifar10.NUM_EPOCHS_PER_DECAY)

    # Decay the learning rate exponentially based on the number of steps.
    lr = tf.train.exponential_decay(cifar10.INITIAL_LEARNING_RATE,
                                    global_step,
                                    decay_steps,
                                    cifar10.LEARNING_RATE_DECAY_FACTOR,
                                    staircase=True)

    # Create an optimizer that performs gradient descent.
    opt = tf.train.GradientDescentOptimizer(lr)

    # Calculate the gradients for each model tower.
    tower_grads = []
    for i in xrange(FLAGS.num_gpus):
      with tf.device('/gpu:%d' % i):
        with tf.name_scope('%s_%d' % (cifar10.TOWER_NAME, i)) as scope:
          # Calculate the loss for one tower of the CIFAR model. This function
          # constructs the entire CIFAR model but shares the variables across
          # all towers.
          loss = tower_loss(scope)

          # Reuse variables for the next tower.
          tf.get_variable_scope().reuse_variables()

          # Retain the summaries from the final tower.
          summaries = tf.get_collection(tf.GraphKeys.SUMMARIES, scope)

          # Calculate the gradients for the batch of data on this CIFAR tower.
          grads = opt.compute_gradients(loss)

          # Keep track of the gradients across all towers.
          tower_grads.append(grads)

    # We must calculate the mean of each gradient. Note that this is the
    # synchronization point across all towers.
    grads = average_gradients(tower_grads)

    # Add a summary to track the learning rate.
    summaries.append(tf.scalar_summary('learning_rate', lr))

    # Add histograms for gradients.
    for grad, var in grads:
      if grad is not None:
        summaries.append(
            tf.histogram_summary(var.op.name + '/gradients', grad))

    # Apply the gradients to adjust the shared variables.
    apply_gradient_op = opt.apply_gradients(grads, global_step=global_step)

    # Add histograms for trainable variables.
    for var in tf.trainable_variables():
      summaries.append(tf.histogram_summary(var.op.name, var))

    # Track the moving averages of all trainable variables.
    variable_averages = tf.train.ExponentialMovingAverage(
        cifar10.MOVING_AVERAGE_DECAY, global_step)
    variables_averages_op = variable_averages.apply(tf.trainable_variables())

    # Group all updates to into a single train op.
    train_op = tf.group(apply_gradient_op, variables_averages_op)

    # Create a saver.
    saver = tf.train.Saver(tf.all_variables())

    # Build the summary operation from the last tower summaries.
    summary_op = tf.merge_summary(summaries)

    # Build an initialization operation to run below.
    init = tf.initialize_all_variables()

    # Start running operations on the Graph. allow_soft_placement must be set
    # to True to build towers on GPU, as some of the ops do not have GPU
    # implementations.
    sess = tf.Session(config=tf.ConfigProto(
        allow_soft_placement=True,
        log_device_placement=FLAGS.log_device_placement))
    sess.run(init)

    # Start the queue runners.
    tf.train.start_queue_runners(sess=sess)

    summary_writer = tf.train.SummaryWriter(FLAGS.train_dir, sess.graph)

    for step in xrange(FLAGS.max_steps):
      start_time = time.time()
      _, loss_value = sess.run([train_op, loss])
      duration = time.time() - start_time

      assert not np.isnan(loss_value), 'Model diverged with loss = NaN'

      if step % 10 == 0:
        num_examples_per_step = FLAGS.batch_size * FLAGS.num_gpus
        examples_per_sec = num_examples_per_step / duration
        sec_per_batch = duration / FLAGS.num_gpus

        format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
                      'sec/batch)')
        print (format_str % (datetime.now(), step, loss_value,
                             examples_per_sec, sec_per_batch))

      if step % 100 == 0:
        summary_str = sess.run(summary_op)
        summary_writer.add_summary(summary_str, step)

      # Save the model checkpoint periodically.
      if step % 1000 == 0 or (step + 1) == FLAGS.max_steps:
        checkpoint_path = os.path.join(FLAGS.train_dir, 'model.ckpt')
        saver.save(sess, checkpoint_path, global_step=step)
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L148-L268
ucb-bar/esp-llvm
8aec2ae754fd66d4e73b9b777a9f20c4583a0f03
bindings/python/llvm/object.py
python
Symbol.expire
(self)
Mark the object as expired to prevent future API accesses. This is called internally by this module and it is unlikely that external callers have a legitimate reason for using it.
Mark the object as expired to prevent future API accesses.
def expire(self):
    """Mark the object as expired to prevent future API accesses.

    This is called internally by this module and it is unlikely that
    external callers have a legitimate reason for using it.
    """
    self.expired = True
https://github.com/ucb-bar/esp-llvm/blob/8aec2ae754fd66d4e73b9b777a9f20c4583a0f03/bindings/python/llvm/object.py#L350-L356
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/stats/_multivariate.py
python
wishart_gen.rvs
(self, df, scale, size=1, random_state=None)
return _squeeze_output(out)
Draw random samples from a Wishart distribution. Parameters ---------- %(_doc_default_callparams)s size : integer or iterable of integers, optional Number of samples to draw (default 1). %(_doc_random_state)s Returns ------- rvs : ndarray Random variates of shape (`size`) + (`dim`, `dim`), where `dim` is the dimension of the scale matrix. Notes ----- %(_doc_callparams_note)s
Draw random samples from a Wishart distribution.
def rvs(self, df, scale, size=1, random_state=None):
    """
    Draw random samples from a Wishart distribution.

    Parameters
    ----------
    %(_doc_default_callparams)s
    size : integer or iterable of integers, optional
        Number of samples to draw (default 1).
    %(_doc_random_state)s

    Returns
    -------
    rvs : ndarray
        Random variates of shape (`size`) + (`dim`, `dim`), where `dim` is
        the dimension of the scale matrix.

    Notes
    -----
    %(_doc_callparams_note)s
    """
    n, shape = self._process_size(size)
    dim, df, scale = self._process_parameters(df, scale)

    # Cholesky decomposition of scale
    C = scipy.linalg.cholesky(scale, lower=True)

    out = self._rvs(n, shape, dim, df, C, random_state)

    return _squeeze_output(out)
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/stats/_multivariate.py#L1915-L1945
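Typical usage through the public scipy.stats interface, with arbitrarily chosen parameters; the leading `size` axis followed by (dim, dim) matches the docstring:

import numpy as np
from scipy.stats import wishart

samples = wishart.rvs(df=3, scale=np.eye(2), size=5, random_state=0)
print(samples.shape)   # (5, 2, 2)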
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2.py
python
uCSIsYiSyllables
(code)
return ret
Check whether the character is part of YiSyllables UCS Block
Check whether the character is part of YiSyllables UCS Block
def uCSIsYiSyllables(code):
    """Check whether the character is part of YiSyllables UCS Block """
    ret = libxml2mod.xmlUCSIsYiSyllables(code)
    return ret
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L2984-L2987
may0324/DeepCompression-caffe
0aff6c1287bda4cfc7f378ed8a16524e1afabd8c
scripts/cpp_lint.py
python
ParseNolintSuppressions
(filename, raw_line, linenum, error)
Updates the global list of error-suppressions. Parses any NOLINT comments on the current line, updating the global error_suppressions store. Reports an error if the NOLINT comment was malformed. Args: filename: str, the name of the input file. raw_line: str, the line of input text, with comments. linenum: int, the number of the current line. error: function, an error handler.
Updates the global list of error-suppressions.
def ParseNolintSuppressions(filename, raw_line, linenum, error):
  """Updates the global list of error-suppressions.

  Parses any NOLINT comments on the current line, updating the global
  error_suppressions store.  Reports an error if the NOLINT comment
  was malformed.

  Args:
    filename: str, the name of the input file.
    raw_line: str, the line of input text, with comments.
    linenum: int, the number of the current line.
    error: function, an error handler.
  """
  # FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
  matched = _RE_SUPPRESSION.search(raw_line)
  if matched:
    if matched.group(1) == '_NEXT_LINE':
      linenum += 1
    category = matched.group(2)
    if category in (None, '(*)'):  # => "suppress all"
      _error_suppressions.setdefault(None, set()).add(linenum)
    else:
      if category.startswith('(') and category.endswith(')'):
        category = category[1:-1]
        if category in _ERROR_CATEGORIES:
          _error_suppressions.setdefault(category, set()).add(linenum)
        else:
          error(filename, linenum, 'readability/nolint', 5,
                'Unknown NOLINT error category: %s' % category)
https://github.com/may0324/DeepCompression-caffe/blob/0aff6c1287bda4cfc7f378ed8a16524e1afabd8c/scripts/cpp_lint.py#L464-L492
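A hedged approximation of the suppression regex (the real _RE_SUPPRESSION is defined elsewhere in cpp_lint.py and may differ in detail) shows how group(1) and group(2) feed the logic above:

import re

RE_SUPPRESSION = re.compile(r'\bNOLINT(_NEXT_LINE)?\b(\([^)]*\))?')

m = RE_SUPPRESSION.search('int x;;  // NOLINT(readability/nolint)')
print(m.group(1), m.group(2))   # None (readability/nolint)

m = RE_SUPPRESSION.search('// NOLINT_NEXT_LINE(build/include)')
print(m.group(1), m.group(2))   # _NEXT_LINE (build/include)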
papyrussolution/OpenPapyrus
bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91
Src/OSF/protobuf-3.19.1/python/google/protobuf/internal/encoder.py
python
_StructPackEncoder
(wire_type, format)
return SpecificEncoder
Return a constructor for an encoder for a fixed-width field. Args: wire_type: The field's wire type, for encoding tags. format: The format string to pass to struct.pack().
Return a constructor for an encoder for a fixed-width field.
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type: The field's wire type, for encoding tags.
      format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder
https://github.com/papyrussolution/OpenPapyrus/blob/bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91/Src/OSF/protobuf-3.19.1/python/google/protobuf/internal/encoder.py#L505-L540
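The closure factory leans entirely on the struct module; a quick standalone check of the two calls it uses:

import struct

print(struct.calcsize('<f'))    # 4 -- bytes per little-endian float
print(struct.pack('<f', 1.5))   # b'\x00\x00\xc0?'
print(struct.pack('<i', -2))    # b'\xfe\xff\xff\xff'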
mapnik/mapnik
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
scons/scons-local-4.1.0/SCons/Tool/gxx.py
python
generate
(env)
Add Builders and construction variables for g++ to an Environment.
Add Builders and construction variables for g++ to an Environment.
def generate(env):
    """Add Builders and construction variables for g++ to an Environment."""
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    if 'CXX' not in env:
        env['CXX'] = env.Detect(compilers) or compilers[0]

    cxx.generate(env)

    # platform specific settings
    if env['PLATFORM'] == 'aix':
        env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc')
        env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
        env['SHOBJSUFFIX'] = '$OBJSUFFIX'
    elif env['PLATFORM'] == 'hpux':
        env['SHOBJSUFFIX'] = '.pic.o'
    elif env['PLATFORM'] == 'sunos':
        env['SHOBJSUFFIX'] = '.pic.o'

    # determine compiler version
    version = gcc.detect_version(env, env['CXX'])
    if version:
        env['CXXVERSION'] = version
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Tool/gxx.py#L46-L67
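SCons tool modules like this one are normally activated by name rather than imported directly. A hedged usage sketch, assuming SCons is installed and g++ is on PATH:

from SCons.Environment import Environment

env = Environment(tools=['gxx'])   # runs this module's generate(env)
print(env.get('CXX'))              # e.g. 'g++'
print(env.get('CXXVERSION'))       # e.g. '10.2.1', or None if detection failed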
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/datetime.py
python
timezone.__getinitargs__
(self)
return (self._offset, self._name)
pickle support
pickle support
[ "pickle", "support" ]
def __getinitargs__(self): """pickle support""" if self._name is None: return (self._offset,) return (self._offset, self._name)
[ "def", "__getinitargs__", "(", "self", ")", ":", "if", "self", ".", "_name", "is", "None", ":", "return", "(", "self", ".", "_offset", ",", ")", "return", "(", "self", ".", "_offset", ",", "self", ".", "_name", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/datetime.py#L2163-L2167
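tzinfo.__reduce__ consults __getinitargs__ when pickling, so the two return shapes above decide whether the optional name survives a round trip. A quick standard-library check:

import pickle
from datetime import timezone, timedelta

unnamed = timezone(timedelta(hours=5, minutes=30))
named = timezone(timedelta(hours=5, minutes=30), 'IST')
for tz in (unnamed, named):
    clone = pickle.loads(pickle.dumps(tz))
    print(clone == tz, clone.tzname(None))
# True UTC+05:30
# True IST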
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/richtext.py
python
RichTextBuffer_GetFloatingLayoutMode
(*args)
return _richtext.RichTextBuffer_GetFloatingLayoutMode(*args)
RichTextBuffer_GetFloatingLayoutMode() -> bool
RichTextBuffer_GetFloatingLayoutMode() -> bool
[ "RichTextBuffer_GetFloatingLayoutMode", "()", "-", ">", "bool" ]
def RichTextBuffer_GetFloatingLayoutMode(*args): """RichTextBuffer_GetFloatingLayoutMode() -> bool""" return _richtext.RichTextBuffer_GetFloatingLayoutMode(*args)
[ "def", "RichTextBuffer_GetFloatingLayoutMode", "(", "*", "args", ")", ":", "return", "_richtext", ".", "RichTextBuffer_GetFloatingLayoutMode", "(", "*", "args", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L2729-L2731
eclipse/omr
056e7c9ce9d503649190bc5bd9931fac30b4e4bc
jitbuilder/apigen/genutils.py
python
APIClass.as_type
(self)
return APIType(self.name(), self.api)
Returns an instance of APIType corresponding to the described class.
Returns an instance of APIType corresponding to the described class.
[ "Returns", "an", "instance", "of", "APIType", "corresponding", "to", "the", "described", "class", "." ]
def as_type(self): """Returns an instance of APIType corresponding to the described class.""" return APIType(self.name(), self.api)
[ "def", "as_type", "(", "self", ")", ":", "return", "APIType", "(", "self", ".", "name", "(", ")", ",", "self", ".", "api", ")" ]
https://github.com/eclipse/omr/blob/056e7c9ce9d503649190bc5bd9931fac30b4e4bc/jitbuilder/apigen/genutils.py#L346-L348
OSGeo/gdal
3748fc4ba4fba727492774b2b908a2130c864a83
swig/python/osgeo/osr.py
python
SpatialReference.SetOS
(self, *args, **kwargs)
return _osr.SpatialReference_SetOS(self, *args, **kwargs)
r"""SetOS(SpatialReference self, double dfOriginLat, double dfCMeridian, double scale, double fe, double fn) -> OGRErr
r"""SetOS(SpatialReference self, double dfOriginLat, double dfCMeridian, double scale, double fe, double fn) -> OGRErr
[ "r", "SetOS", "(", "SpatialReference", "self", "double", "dfOriginLat", "double", "dfCMeridian", "double", "scale", "double", "fe", "double", "fn", ")", "-", ">", "OGRErr" ]
def SetOS(self, *args, **kwargs): r"""SetOS(SpatialReference self, double dfOriginLat, double dfCMeridian, double scale, double fe, double fn) -> OGRErr""" return _osr.SpatialReference_SetOS(self, *args, **kwargs)
[ "def", "SetOS", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_osr", ".", "SpatialReference_SetOS", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/OSGeo/gdal/blob/3748fc4ba4fba727492774b2b908a2130c864a83/swig/python/osgeo/osr.py#L646-L648
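A hedged usage sketch for the wrapper above: SetOS configures an Oblique Stereographic projection on a SpatialReference. The parameter values are illustrative (they resemble the Dutch RD New definition) and the osgeo bindings are assumed to be installed:

from osgeo import osr

srs = osr.SpatialReference()
srs.SetWellKnownGeogCS('WGS84')
# dfOriginLat, dfCMeridian, scale, fe, fn
srs.SetOS(52.15616, 5.38764, 0.9999079, 155000.0, 463000.0)
print(srs.ExportToWkt()[:60])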
google/swiftshader
8ccc63f045d5975fb67f9dfd3d2b8235b0526990
third_party/SPIRV-Tools/utils/generate_grammar_tables.py
python
InstInitializer.__init__
(self, opname, caps, exts, operands, version, lastVersion)
Initialization. Arguments: - opname: opcode name (with the 'Op' prefix) - caps: a sequence of capability names required by this opcode - exts: a sequence of names of extensions enabling this enumerant - operands: a sequence of (operand-kind, operand-quantifier) tuples - version: minimal SPIR-V version required for this opcode - lastVersion: last version of SPIR-V that includes this opcode
Initialization.
[ "Initialization", "." ]
def __init__(self, opname, caps, exts, operands, version, lastVersion): """Initialization. Arguments: - opname: opcode name (with the 'Op' prefix) - caps: a sequence of capability names required by this opcode - exts: a sequence of names of extensions enabling this enumerant - operands: a sequence of (operand-kind, operand-quantifier) tuples - version: minimal SPIR-V version required for this opcode - lastVersion: last version of SPIR-V that includes this opcode """ assert opname.startswith('Op') self.opname = opname[2:] # Remove the "Op" prefix. self.num_caps = len(caps) self.caps_mask = get_capability_array_name(caps) self.num_exts = len(exts) self.exts = get_extension_array_name(exts) self.operands = [convert_operand_kind(o) for o in operands] self.fix_syntax() operands = [o[0] for o in operands] self.ref_type_id = 'IdResultType' in operands self.def_result_id = 'IdResult' in operands self.version = convert_min_required_version(version) self.lastVersion = convert_max_required_version(lastVersion)
[ "def", "__init__", "(", "self", ",", "opname", ",", "caps", ",", "exts", ",", "operands", ",", "version", ",", "lastVersion", ")", ":", "assert", "opname", ".", "startswith", "(", "'Op'", ")", "self", ".", "opname", "=", "opname", "[", "2", ":", "]", "# Remove the \"Op\" prefix.", "self", ".", "num_caps", "=", "len", "(", "caps", ")", "self", ".", "caps_mask", "=", "get_capability_array_name", "(", "caps", ")", "self", ".", "num_exts", "=", "len", "(", "exts", ")", "self", ".", "exts", "=", "get_extension_array_name", "(", "exts", ")", "self", ".", "operands", "=", "[", "convert_operand_kind", "(", "o", ")", "for", "o", "in", "operands", "]", "self", ".", "fix_syntax", "(", ")", "operands", "=", "[", "o", "[", "0", "]", "for", "o", "in", "operands", "]", "self", ".", "ref_type_id", "=", "'IdResultType'", "in", "operands", "self", ".", "def_result_id", "=", "'IdResult'", "in", "operands", "self", ".", "version", "=", "convert_min_required_version", "(", "version", ")", "self", ".", "lastVersion", "=", "convert_max_required_version", "(", "lastVersion", ")" ]
https://github.com/google/swiftshader/blob/8ccc63f045d5975fb67f9dfd3d2b8235b0526990/third_party/SPIRV-Tools/utils/generate_grammar_tables.py#L216-L243
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/turtle.py
python
RawTurtle.onrelease
(self, fun, btn=1, add=None)
Bind fun to mouse-button-release event on this turtle on canvas. Arguments: fun -- a function with two arguments, to which will be assigned the coordinates of the clicked point on the canvas. num -- number of the mouse-button defaults to 1 (left mouse button). Example (for a MyTurtle instance named joe): >>> class MyTurtle(Turtle): ... def glow(self,x,y): ... self.fillcolor("red") ... def unglow(self,x,y): ... self.fillcolor("") ... >>> joe = MyTurtle() >>> joe.onclick(joe.glow) >>> joe.onrelease(joe.unglow) Clicking on joe turns fillcolor red, unclicking turns it to transparent.
Bind fun to mouse-button-release event on this turtle on canvas.
[ "Bind", "fun", "to", "mouse", "-", "button", "-", "release", "event", "on", "this", "turtle", "on", "canvas", "." ]
def onrelease(self, fun, btn=1, add=None): """Bind fun to mouse-button-release event on this turtle on canvas. Arguments: fun -- a function with two arguments, to which will be assigned the coordinates of the clicked point on the canvas. num -- number of the mouse-button defaults to 1 (left mouse button). Example (for a MyTurtle instance named joe): >>> class MyTurtle(Turtle): ... def glow(self,x,y): ... self.fillcolor("red") ... def unglow(self,x,y): ... self.fillcolor("") ... >>> joe = MyTurtle() >>> joe.onclick(joe.glow) >>> joe.onrelease(joe.unglow) Clicking on joe turns fillcolor red, unclicking turns it to transparent. """ self.screen._onrelease(self.turtle._item, fun, btn, add) self._update()
[ "def", "onrelease", "(", "self", ",", "fun", ",", "btn", "=", "1", ",", "add", "=", "None", ")", ":", "self", ".", "screen", ".", "_onrelease", "(", "self", ".", "turtle", ".", "_item", ",", "fun", ",", "btn", ",", "add", ")", "self", ".", "_update", "(", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/turtle.py#L3435-L3458
SpaceNetChallenge/BuildingDetectors
3def3c44b5847c744cd2f3356182892d92496579
qinhaifang/src/caffe-mnc/scripts/cpp_lint.py
python
IsCppString
(line)
return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
Does line terminate so, that the next symbol is in string constant. This function does not consider single-line nor multi-line comments. Args: line: is a partial line of code starting from the 0..n. Returns: True, if next character appended to 'line' is inside a string constant.
Does line terminate so, that the next symbol is in string constant.
[ "Does", "line", "terminate", "so", "that", "the", "next", "symbol", "is", "in", "string", "constant", "." ]
def IsCppString(line): """Does line terminate so, that the next symbol is in string constant. This function does not consider single-line nor multi-line comments. Args: line: is a partial line of code starting from the 0..n. Returns: True, if next character appended to 'line' is inside a string constant. """ line = line.replace(r'\\', 'XX') # after this, \\" does not match to \" return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
[ "def", "IsCppString", "(", "line", ")", ":", "line", "=", "line", ".", "replace", "(", "r'\\\\'", ",", "'XX'", ")", "# after this, \\\\\" does not match to \\\"", "return", "(", "(", "line", ".", "count", "(", "'\"'", ")", "-", "line", ".", "count", "(", "r'\\\"'", ")", "-", "line", ".", "count", "(", "\"'\\\"'\"", ")", ")", "&", "1", ")", "==", "1" ]
https://github.com/SpaceNetChallenge/BuildingDetectors/blob/3def3c44b5847c744cd2f3356182892d92496579/qinhaifang/src/caffe-mnc/scripts/cpp_lint.py#L1045-L1059
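The quote-parity trick above can be exercised directly; the function is restated so the snippet runs on its own:

def IsCppString(line):
    line = line.replace(r'\\', 'XX')  # neutralize escaped backslashes first
    return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1

print(IsCppString('x = "abc";'))            # False: the string literal is closed
print(IsCppString('printf("unterminated'))  # True: next char continues a string
print(IsCppString(r'x = "a\"b";'))          # False: the escaped quote is ignored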
tensorflow/deepmath
b5b721f54de1d5d6a02d78f5da5995237f9995f9
deepmath/deephol/prover.py
python
Prover.prove_one
(self, search_tree: proof_search_tree.ProofSearchTree, task: proof_assistant_pb2.ProverTask)
Prove a single-goal task. This method can assume an already initialized search tree with node 0 being the single goal specified in the task. Args: search_tree: The pre-initialized search tree. task: Task to be performed. Returns: Error message on error, None otherwise.
Prove a single-goal task.
[ "Prove", "a", "single", "-", "goal", "task", "." ]
def prove_one(self, search_tree: proof_search_tree.ProofSearchTree, task: proof_assistant_pb2.ProverTask) -> Optional[Text]: """Prove a single-goal task. This method can assume an already initialized search tree with node 0 being the single goal specified in the task. Args: search_tree: The pre-initialized search tree. task: Task to be performed. Returns: Error message on error, None otherwise. """ raise NotImplementedError('Must define this.')
[ "def", "prove_one", "(", "self", ",", "search_tree", ":", "proof_search_tree", ".", "ProofSearchTree", ",", "task", ":", "proof_assistant_pb2", ".", "ProverTask", ")", "->", "Optional", "[", "Text", "]", ":", "raise", "NotImplementedError", "(", "'Must define this.'", ")" ]
https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/deephol/prover.py#L110-L124
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/fractions.py
python
Fraction.__str__
(self)
str(self)
str(self)
[ "str", "(", "self", ")" ]
def __str__(self): """str(self)""" if self._denominator == 1: return str(self._numerator) else: return '%s/%s' % (self._numerator, self._denominator)
[ "def", "__str__", "(", "self", ")", ":", "if", "self", ".", "_denominator", "==", "1", ":", "return", "str", "(", "self", ".", "_numerator", ")", "else", ":", "return", "'%s/%s'", "%", "(", "self", ".", "_numerator", ",", "self", ".", "_denominator", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/fractions.py#L273-L278
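Both branches above are observable through the standard library:

from fractions import Fraction

print(str(Fraction(3, 4)))  # '3/4' -- denominator != 1
print(str(Fraction(8, 4)))  # '2'   -- normalized to denominator 1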
15172658790/Blog
46e5036f5fbcad535af2255dc0e095cebcd8d710
计算机与信息类/数据结构/students/mbinary/allOone/allOone.py
python
AllOne.inc
(self, key,n=1)
Inserts a new key <Key> with value 1. Or increments an existing key by 1. :type key: str :rtype: void
Inserts a new key <Key> with value 1. Or increments an existing key by 1. :type key: str :rtype: void
[ "Inserts", "a", "new", "key", "<Key", ">", "with", "value", "1", ".", "Or", "increments", "an", "existing", "key", "by", "1", ".", ":", "type", "key", ":", "str", ":", "rtype", ":", "void" ]
def inc(self, key,n=1): """ Inserts a new key <Key> with value 1. Or increments an existing key by 1. :type key: str :rtype: void """ if key in self: self[key]+=n else:self[key]=n for i in range(n): self.dll.incTo(key, self[key])
[ "def", "inc", "(", "self", ",", "key", ",", "n", "=", "1", ")", ":", "if", "key", "in", "self", ":", "self", "[", "key", "]", "+=", "n", "else", ":", "self", "[", "key", "]", "=", "n", "for", "i", "in", "range", "(", "n", ")", ":", "self", ".", "dll", ".", "incTo", "(", "key", ",", "self", "[", "key", "]", ")" ]
https://github.com/15172658790/Blog/blob/46e5036f5fbcad535af2255dc0e095cebcd8d710/计算机与信息类/数据结构/students/mbinary/allOone/allOone.py#L111-L120
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/protobuf/py2/google/protobuf/internal/extension_dict.py
python
_ExtensionDict.__init__
(self, extended_message)
Args: extended_message: Message instance for which we are the Extensions dict.
Args: extended_message: Message instance for which we are the Extensions dict.
[ "Args", ":", "extended_message", ":", "Message", "instance", "for", "which", "we", "are", "the", "Extensions", "dict", "." ]
def __init__(self, extended_message): """ Args: extended_message: Message instance for which we are the Extensions dict. """ self._extended_message = extended_message
[ "def", "__init__", "(", "self", ",", "extended_message", ")", ":", "self", ".", "_extended_message", "=", "extended_message" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/protobuf/py2/google/protobuf/internal/extension_dict.py#L71-L76
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/types/npytypes.py
python
Record.typeof
(self, key)
return self.fields[key].type
Get the type of a field.
Get the type of a field.
[ "Get", "the", "type", "of", "a", "field", "." ]
def typeof(self, key): """Get the type of a field. """ return self.fields[key].type
[ "def", "typeof", "(", "self", ",", "key", ")", ":", "return", "self", ".", "fields", "[", "key", "]", ".", "type" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/types/npytypes.py#L173-L176
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/decimal.py
python
Decimal.is_subnormal
(self, context=None)
return self.adjusted() < context.Emin
Return True if self is subnormal; otherwise return False.
Return True if self is subnormal; otherwise return False.
[ "Return", "True", "if", "self", "is", "subnormal", ";", "otherwise", "return", "False", "." ]
def is_subnormal(self, context=None): """Return True if self is subnormal; otherwise return False.""" if self._is_special or not self: return False if context is None: context = getcontext() return self.adjusted() < context.Emin
[ "def", "is_subnormal", "(", "self", ",", "context", "=", "None", ")", ":", "if", "self", ".", "_is_special", "or", "not", "self", ":", "return", "False", "if", "context", "is", "None", ":", "context", "=", "getcontext", "(", ")", "return", "self", ".", "adjusted", "(", ")", "<", "context", ".", "Emin" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/decimal.py#L3051-L3057
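A subnormal Decimal is nonzero, finite, and has an adjusted exponent below the context's Emin. A small check against a deliberately narrow context:

from decimal import Decimal, Context

ctx = Context(Emin=-999, Emax=999)
print(Decimal('1E-1000').is_subnormal(ctx))  # True: adjusted() == -1000 < Emin
print(Decimal('1E-999').is_subnormal(ctx))   # False: not below Emin
print(Decimal(0).is_subnormal(ctx))          # False: zeros are filtered early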
Yelp/MOE
5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c
moe/optimal_learning/python/cpp_wrappers/optimization.py
python
GradientDescentOptimizer.optimize
(self, **kwargs)
C++ does not expose this endpoint.
C++ does not expose this endpoint.
[ "C", "++", "does", "not", "expose", "this", "endpoint", "." ]
def optimize(self, **kwargs): """C++ does not expose this endpoint.""" raise NotImplementedError("C++ wrapper currently does not support optimization member functions.")
[ "def", "optimize", "(", "self", ",", "*", "*", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "\"C++ wrapper currently does not support optimization member functions.\"", ")" ]
https://github.com/Yelp/MOE/blob/5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c/moe/optimal_learning/python/cpp_wrappers/optimization.py#L436-L438
tensorflow/ngraph-bridge
ea6422491ec75504e78a63db029e7f74ec3479a5
examples/mnist/mnist_deep_simplified_distributed.py
python
weight_variable
(shape, name)
return weight_var
weight_variable generates a weight variable of a given shape.
weight_variable generates a weight variable of a given shape.
[ "weight_variable", "generates", "a", "weight", "variable", "of", "a", "given", "shape", "." ]
def weight_variable(shape, name): """weight_variable generates a weight variable of a given shape.""" weight_var = tf.compat.v1.get_variable(name, shape) return weight_var
[ "def", "weight_variable", "(", "shape", ",", "name", ")", ":", "weight_var", "=", "tf", ".", "compat", ".", "v1", ".", "get_variable", "(", "name", ",", "shape", ")", "return", "weight_var" ]
https://github.com/tensorflow/ngraph-bridge/blob/ea6422491ec75504e78a63db029e7f74ec3479a5/examples/mnist/mnist_deep_simplified_distributed.py#L136-L139
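A hedged usage sketch: the distributed MNIST example runs under TF1-style graph semantics, so eager execution is disabled here; the layer shape and variable name are illustrative:

import tensorflow as tf
tf.compat.v1.disable_eager_execution()

def weight_variable(shape, name):
    """weight_variable generates a weight variable of a given shape."""
    return tf.compat.v1.get_variable(name, shape)

W_conv1 = weight_variable([5, 5, 1, 32], 'W_conv1')  # 5x5 conv, 1 -> 32 channels
print(W_conv1.shape)  # (5, 5, 1, 32)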
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py3/sklearn/decomposition/_dict_learning.py
python
dict_learning_online
(X, n_components=2, alpha=1, n_iter=100, return_code=True, dict_init=None, callback=None, batch_size=3, verbose=False, shuffle=True, n_jobs=None, method='lars', iter_offset=0, random_state=None, return_inner_stats=False, inner_stats=None, return_n_iter=False, positive_dict=False, positive_code=False, method_max_iter=1000)
Solves a dictionary learning matrix factorization problem online. Finds the best dictionary and the corresponding sparse code for approximating the data matrix X by solving:: (U^*, V^*) = argmin 0.5 || X - U V ||_2^2 + alpha * || U ||_1 (U,V) with || V_k ||_2 = 1 for all 0 <= k < n_components where V is the dictionary and U is the sparse code. This is accomplished by repeatedly iterating over mini-batches by slicing the input data. Read more in the :ref:`User Guide <DictionaryLearning>`. Parameters ---------- X : array of shape (n_samples, n_features) Data matrix. n_components : int, Number of dictionary atoms to extract. alpha : float, Sparsity controlling parameter. n_iter : int, Number of mini-batch iterations to perform. return_code : boolean, Whether to also return the code U or just the dictionary V. dict_init : array of shape (n_components, n_features), Initial value for the dictionary for warm restart scenarios. callback : callable or None, optional (default: None) callable that gets invoked every five iterations batch_size : int, The number of samples to take in each batch. verbose : bool, optional (default: False) To control the verbosity of the procedure. shuffle : boolean, Whether to shuffle the data before splitting it in batches. n_jobs : int or None, optional (default=None) Number of parallel jobs to run. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. method : {'lars', 'cd'} lars: uses the least angle regression method to solve the lasso problem (linear_model.lars_path) cd: uses the coordinate descent method to compute the Lasso solution (linear_model.Lasso). Lars will be faster if the estimated components are sparse. iter_offset : int, default 0 Number of previous iterations completed on the dictionary used for initialization. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. return_inner_stats : boolean, optional Return the inner statistics A (dictionary covariance) and B (data approximation). Useful to restart the algorithm in an online setting. If return_inner_stats is True, return_code is ignored inner_stats : tuple of (A, B) ndarrays Inner sufficient statistics that are kept by the algorithm. Passing them at initialization is useful in online settings, to avoid losing the history of the evolution. A (n_components, n_components) is the dictionary covariance matrix. B (n_features, n_components) is the data approximation matrix return_n_iter : bool Whether or not to return the number of iterations. positive_dict : bool Whether to enforce positivity when finding the dictionary. .. versionadded:: 0.20 positive_code : bool Whether to enforce positivity when finding the code. .. versionadded:: 0.20 method_max_iter : int, optional (default=1000) Maximum number of iterations to perform when solving the lasso problem. .. versionadded:: 0.22 Returns ------- code : array of shape (n_samples, n_components), the sparse code (only returned if `return_code=True`) dictionary : array of shape (n_components, n_features), the solutions to the dictionary learning problem n_iter : int Number of iterations run. Returned only if `return_n_iter` is set to `True`. See also -------- dict_learning DictionaryLearning MiniBatchDictionaryLearning SparsePCA MiniBatchSparsePCA
Solves a dictionary learning matrix factorization problem online.
[ "Solves", "a", "dictionary", "learning", "matrix", "factorization", "problem", "online", "." ]
def dict_learning_online(X, n_components=2, alpha=1, n_iter=100, return_code=True, dict_init=None, callback=None, batch_size=3, verbose=False, shuffle=True, n_jobs=None, method='lars', iter_offset=0, random_state=None, return_inner_stats=False, inner_stats=None, return_n_iter=False, positive_dict=False, positive_code=False, method_max_iter=1000): """Solves a dictionary learning matrix factorization problem online. Finds the best dictionary and the corresponding sparse code for approximating the data matrix X by solving:: (U^*, V^*) = argmin 0.5 || X - U V ||_2^2 + alpha * || U ||_1 (U,V) with || V_k ||_2 = 1 for all 0 <= k < n_components where V is the dictionary and U is the sparse code. This is accomplished by repeatedly iterating over mini-batches by slicing the input data. Read more in the :ref:`User Guide <DictionaryLearning>`. Parameters ---------- X : array of shape (n_samples, n_features) Data matrix. n_components : int, Number of dictionary atoms to extract. alpha : float, Sparsity controlling parameter. n_iter : int, Number of mini-batch iterations to perform. return_code : boolean, Whether to also return the code U or just the dictionary V. dict_init : array of shape (n_components, n_features), Initial value for the dictionary for warm restart scenarios. callback : callable or None, optional (default: None) callable that gets invoked every five iterations batch_size : int, The number of samples to take in each batch. verbose : bool, optional (default: False) To control the verbosity of the procedure. shuffle : boolean, Whether to shuffle the data before splitting it in batches. n_jobs : int or None, optional (default=None) Number of parallel jobs to run. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. method : {'lars', 'cd'} lars: uses the least angle regression method to solve the lasso problem (linear_model.lars_path) cd: uses the coordinate descent method to compute the Lasso solution (linear_model.Lasso). Lars will be faster if the estimated components are sparse. iter_offset : int, default 0 Number of previous iterations completed on the dictionary used for initialization. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. return_inner_stats : boolean, optional Return the inner statistics A (dictionary covariance) and B (data approximation). Useful to restart the algorithm in an online setting. If return_inner_stats is True, return_code is ignored inner_stats : tuple of (A, B) ndarrays Inner sufficient statistics that are kept by the algorithm. Passing them at initialization is useful in online settings, to avoid losing the history of the evolution. A (n_components, n_components) is the dictionary covariance matrix. B (n_features, n_components) is the data approximation matrix return_n_iter : bool Whether or not to return the number of iterations. positive_dict : bool Whether to enforce positivity when finding the dictionary. .. versionadded:: 0.20 positive_code : bool Whether to enforce positivity when finding the code. .. versionadded:: 0.20 method_max_iter : int, optional (default=1000) Maximum number of iterations to perform when solving the lasso problem. .. versionadded:: 0.22 Returns ------- code : array of shape (n_samples, n_components), the sparse code (only returned if `return_code=True`) dictionary : array of shape (n_components, n_features), the solutions to the dictionary learning problem n_iter : int Number of iterations run. Returned only if `return_n_iter` is set to `True`. See also -------- dict_learning DictionaryLearning MiniBatchDictionaryLearning SparsePCA MiniBatchSparsePCA """ if n_components is None: n_components = X.shape[1] if method not in ('lars', 'cd'): raise ValueError('Coding method not supported as a fit algorithm.') _check_positive_coding(method, positive_code) method = 'lasso_' + method t0 = time.time() n_samples, n_features = X.shape # Avoid integer division problems alpha = float(alpha) random_state = check_random_state(random_state) # Init V with SVD of X if dict_init is not None: dictionary = dict_init else: _, S, dictionary = randomized_svd(X, n_components, random_state=random_state) dictionary = S[:, np.newaxis] * dictionary r = len(dictionary) if n_components <= r: dictionary = dictionary[:n_components, :] else: dictionary = np.r_[dictionary, np.zeros((n_components - r, dictionary.shape[1]))] if verbose == 1: print('[dict_learning]', end=' ') if shuffle: X_train = X.copy() random_state.shuffle(X_train) else: X_train = X dictionary = check_array(dictionary.T, order='F', dtype=np.float64, copy=False) dictionary = np.require(dictionary, requirements='W') X_train = check_array(X_train, order='C', dtype=np.float64, copy=False) batches = gen_batches(n_samples, batch_size) batches = itertools.cycle(batches) # The covariance of the dictionary if inner_stats is None: A = np.zeros((n_components, n_components)) # The data approximation B = np.zeros((n_features, n_components)) else: A = inner_stats[0].copy() B = inner_stats[1].copy() # If n_iter is zero, we need to return zero. ii = iter_offset - 1 for ii, batch in zip(range(iter_offset, iter_offset + n_iter), batches): this_X = X_train[batch] dt = (time.time() - t0) if verbose == 1: sys.stdout.write(".") sys.stdout.flush() elif verbose: if verbose > 10 or ii % ceil(100. / verbose) == 0: print("Iteration % 3i (elapsed time: % 3is, % 4.1fmn)" % (ii, dt, dt / 60)) this_code = sparse_encode(this_X, dictionary.T, algorithm=method, alpha=alpha, n_jobs=n_jobs, check_input=False, positive=positive_code, max_iter=method_max_iter, verbose=verbose).T # Update the auxiliary variables if ii < batch_size - 1: theta = float((ii + 1) * batch_size) else: theta = float(batch_size ** 2 + ii + 1 - batch_size) beta = (theta + 1 - batch_size) / (theta + 1) A *= beta A += np.dot(this_code, this_code.T) B *= beta B += np.dot(this_X.T, this_code.T) # Update dictionary dictionary = _update_dict(dictionary, B, A, verbose=verbose, random_state=random_state, positive=positive_dict) # XXX: Can the residuals be of any use? # Maybe we need a stopping criteria based on the amount of # modification in the dictionary if callback is not None: callback(locals()) if return_inner_stats: if return_n_iter: return dictionary.T, (A, B), ii - iter_offset + 1 else: return dictionary.T, (A, B) if return_code: if verbose > 1: print('Learning code...', end=' ') elif verbose == 1: print('|', end=' ') code = sparse_encode(X, dictionary.T, algorithm=method, alpha=alpha, n_jobs=n_jobs, check_input=False, positive=positive_code, max_iter=method_max_iter, verbose=verbose) if verbose > 1: dt = (time.time() - t0) print('done (total time: % 3is, % 4.1fmn)' % (dt, dt / 60)) if return_n_iter: return code, dictionary.T, ii - iter_offset + 1 else: return code, dictionary.T if return_n_iter: return dictionary.T, ii - iter_offset + 1 else: return dictionary.T
[ "def", "dict_learning_online", "(", "X", ",", "n_components", "=", "2", ",", "alpha", "=", "1", ",", "n_iter", "=", "100", ",", "return_code", "=", "True", ",", "dict_init", "=", "None", ",", "callback", "=", "None", ",", "batch_size", "=", "3", ",", "verbose", "=", "False", ",", "shuffle", "=", "True", ",", "n_jobs", "=", "None", ",", "method", "=", "'lars'", ",", "iter_offset", "=", "0", ",", "random_state", "=", "None", ",", "return_inner_stats", "=", "False", ",", "inner_stats", "=", "None", ",", "return_n_iter", "=", "False", ",", "positive_dict", "=", "False", ",", "positive_code", "=", "False", ",", "method_max_iter", "=", "1000", ")", ":", "if", "n_components", "is", "None", ":", "n_components", "=", "X", ".", "shape", "[", "1", "]", "if", "method", "not", "in", "(", "'lars'", ",", "'cd'", ")", ":", "raise", "ValueError", "(", "'Coding method not supported as a fit algorithm.'", ")", "_check_positive_coding", "(", "method", ",", "positive_code", ")", "method", "=", "'lasso_'", "+", "method", "t0", "=", "time", ".", "time", "(", ")", "n_samples", ",", "n_features", "=", "X", ".", "shape", "# Avoid integer division problems", "alpha", "=", "float", "(", "alpha", ")", "random_state", "=", "check_random_state", "(", "random_state", ")", "# Init V with SVD of X", "if", "dict_init", "is", "not", "None", ":", "dictionary", "=", "dict_init", "else", ":", "_", ",", "S", ",", "dictionary", "=", "randomized_svd", "(", "X", ",", "n_components", ",", "random_state", "=", "random_state", ")", "dictionary", "=", "S", "[", ":", ",", "np", ".", "newaxis", "]", "*", "dictionary", "r", "=", "len", "(", "dictionary", ")", "if", "n_components", "<=", "r", ":", "dictionary", "=", "dictionary", "[", ":", "n_components", ",", ":", "]", "else", ":", "dictionary", "=", "np", ".", "r_", "[", "dictionary", ",", "np", ".", "zeros", "(", "(", "n_components", "-", "r", ",", "dictionary", ".", "shape", "[", "1", "]", ")", ")", "]", "if", "verbose", "==", "1", ":", "print", "(", "'[dict_learning]'", ",", "end", "=", "' '", ")", "if", "shuffle", ":", "X_train", "=", "X", ".", "copy", "(", ")", "random_state", ".", "shuffle", "(", "X_train", ")", "else", ":", "X_train", "=", "X", "dictionary", "=", "check_array", "(", "dictionary", ".", "T", ",", "order", "=", "'F'", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "False", ")", "dictionary", "=", "np", ".", "require", "(", "dictionary", ",", "requirements", "=", "'W'", ")", "X_train", "=", "check_array", "(", "X_train", ",", "order", "=", "'C'", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "False", ")", "batches", "=", "gen_batches", "(", "n_samples", ",", "batch_size", ")", "batches", "=", "itertools", ".", "cycle", "(", "batches", ")", "# The covariance of the dictionary", "if", "inner_stats", "is", "None", ":", "A", "=", "np", ".", "zeros", "(", "(", "n_components", ",", "n_components", ")", ")", "# The data approximation", "B", "=", "np", ".", "zeros", "(", "(", "n_features", ",", "n_components", ")", ")", "else", ":", "A", "=", "inner_stats", "[", "0", "]", ".", "copy", "(", ")", "B", "=", "inner_stats", "[", "1", "]", ".", "copy", "(", ")", "# If n_iter is zero, we need to return zero.", "ii", "=", "iter_offset", "-", "1", "for", "ii", ",", "batch", "in", "zip", "(", "range", "(", "iter_offset", ",", "iter_offset", "+", "n_iter", ")", ",", "batches", ")", ":", "this_X", "=", "X_train", "[", "batch", "]", "dt", "=", "(", "time", ".", "time", "(", ")", "-", "t0", ")", "if", "verbose", "==", "1", ":", "sys", ".", "stdout", ".", "write", "(", "\".\"", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "elif", "verbose", ":", "if", "verbose", ">", "10", "or", "ii", "%", "ceil", "(", "100.", "/", "verbose", ")", "==", "0", ":", "print", "(", "\"Iteration % 3i (elapsed time: % 3is, % 4.1fmn)\"", "%", "(", "ii", ",", "dt", ",", "dt", "/", "60", ")", ")", "this_code", "=", "sparse_encode", "(", "this_X", ",", "dictionary", ".", "T", ",", "algorithm", "=", "method", ",", "alpha", "=", "alpha", ",", "n_jobs", "=", "n_jobs", ",", "check_input", "=", "False", ",", "positive", "=", "positive_code", ",", "max_iter", "=", "method_max_iter", ",", "verbose", "=", "verbose", ")", ".", "T", "# Update the auxiliary variables", "if", "ii", "<", "batch_size", "-", "1", ":", "theta", "=", "float", "(", "(", "ii", "+", "1", ")", "*", "batch_size", ")", "else", ":", "theta", "=", "float", "(", "batch_size", "**", "2", "+", "ii", "+", "1", "-", "batch_size", ")", "beta", "=", "(", "theta", "+", "1", "-", "batch_size", ")", "/", "(", "theta", "+", "1", ")", "A", "*=", "beta", "A", "+=", "np", ".", "dot", "(", "this_code", ",", "this_code", ".", "T", ")", "B", "*=", "beta", "B", "+=", "np", ".", "dot", "(", "this_X", ".", "T", ",", "this_code", ".", "T", ")", "# Update dictionary", "dictionary", "=", "_update_dict", "(", "dictionary", ",", "B", ",", "A", ",", "verbose", "=", "verbose", ",", "random_state", "=", "random_state", ",", "positive", "=", "positive_dict", ")", "# XXX: Can the residuals be of any use?", "# Maybe we need a stopping criteria based on the amount of", "# modification in the dictionary", "if", "callback", "is", "not", "None", ":", "callback", "(", "locals", "(", ")", ")", "if", "return_inner_stats", ":", "if", "return_n_iter", ":", "return", "dictionary", ".", "T", ",", "(", "A", ",", "B", ")", ",", "ii", "-", "iter_offset", "+", "1", "else", ":", "return", "dictionary", ".", "T", ",", "(", "A", ",", "B", ")", "if", "return_code", ":", "if", "verbose", ">", "1", ":", "print", "(", "'Learning code...'", ",", "end", "=", "' '", ")", "elif", "verbose", "==", "1", ":", "print", "(", "'|'", ",", "end", "=", "' '", ")", "code", "=", "sparse_encode", "(", "X", ",", "dictionary", ".", "T", ",", "algorithm", "=", "method", ",", "alpha", "=", "alpha", ",", "n_jobs", "=", "n_jobs", ",", "check_input", "=", "False", ",", "positive", "=", "positive_code", ",", "max_iter", "=", "method_max_iter", ",", "verbose", "=", "verbose", ")", "if", "verbose", ">", "1", ":", "dt", "=", "(", "time", ".", "time", "(", ")", "-", "t0", ")", "print", "(", "'done (total time: % 3is, % 4.1fmn)'", "%", "(", "dt", ",", "dt", "/", "60", ")", ")", "if", "return_n_iter", ":", "return", "code", ",", "dictionary", ".", "T", ",", "ii", "-", "iter_offset", "+", "1", "else", ":", "return", "code", ",", "dictionary", ".", "T", "if", "return_n_iter", ":", "return", "dictionary", ".", "T", ",", "ii", "-", "iter_offset", "+", "1", "else", ":", "return", "dictionary", ".", "T" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/decomposition/_dict_learning.py#L620-L875
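A hedged usage sketch against the signature above (this is the pre-1.1 scikit-learn API; later releases renamed or removed several of these arguments):

import numpy as np
from sklearn.decomposition import dict_learning_online

rng = np.random.RandomState(0)
X = rng.randn(50, 8)
code, dictionary = dict_learning_online(X, n_components=4, alpha=1.0,
                                        n_iter=100, random_state=0)
print(code.shape, dictionary.shape)  # (50, 4) (4, 8)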
rapidsai/cudf
d5b2448fc69f17509304d594f029d0df56984962
python/cudf/cudf/core/index.py
python
DatetimeIndex.year
(self)
return self._get_dt_field("year")
The year of the datetime. Examples -------- >>> import cudf >>> import pandas as pd >>> datetime_index = cudf.Index(pd.date_range("2000-01-01", ... periods=3, freq="Y")) >>> datetime_index DatetimeIndex(['2000-12-31', '2001-12-31', '2002-12-31'], dtype='datetime64[ns]') >>> datetime_index.year Int16Index([2000, 2001, 2002], dtype='int16')
The year of the datetime.
[ "The", "year", "of", "the", "datetime", "." ]
def year(self): """ The year of the datetime. Examples -------- >>> import cudf >>> import pandas as pd >>> datetime_index = cudf.Index(pd.date_range("2000-01-01", ... periods=3, freq="Y")) >>> datetime_index DatetimeIndex(['2000-12-31', '2001-12-31', '2002-12-31'], dtype='datetime64[ns]') >>> datetime_index.year Int16Index([2000, 2001, 2002], dtype='int16') """ # noqa: E501 return self._get_dt_field("year")
[ "def", "year", "(", "self", ")", ":", "# noqa: E501", "return", "self", ".", "_get_dt_field", "(", "\"year\"", ")" ]
https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/index.py#L1597-L1612
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/linalg/blas.py
python
find_best_blas_type
(arrays=(), dtype=None)
return prefix, dtype, prefer_fortran
Find best-matching BLAS/LAPACK type. Arrays are used to determine the optimal prefix of BLAS routines. Parameters ---------- arrays : sequence of ndarrays, optional Arrays can be given to determine optimal prefix of BLAS routines. If not given, double-precision routines will be used, otherwise the most generic type in arrays will be used. dtype : str or dtype, optional Data-type specifier. Not used if `arrays` is non-empty. Returns ------- prefix : str BLAS/LAPACK prefix character. dtype : dtype Inferred Numpy data type. prefer_fortran : bool Whether to prefer Fortran order routines over C order. Examples -------- >>> import scipy.linalg.blas as bla >>> a = np.random.rand(10,15) >>> b = np.asfortranarray(a) # Change the memory layout order >>> bla.find_best_blas_type((a,)) ('d', dtype('float64'), False) >>> bla.find_best_blas_type((a*1j,)) ('z', dtype('complex128'), False) >>> bla.find_best_blas_type((b,)) ('d', dtype('float64'), True)
Find best-matching BLAS/LAPACK type.
[ "Find", "best", "-", "matching", "BLAS", "/", "LAPACK", "type", "." ]
def find_best_blas_type(arrays=(), dtype=None): """Find best-matching BLAS/LAPACK type. Arrays are used to determine the optimal prefix of BLAS routines. Parameters ---------- arrays : sequence of ndarrays, optional Arrays can be given to determine optimal prefix of BLAS routines. If not given, double-precision routines will be used, otherwise the most generic type in arrays will be used. dtype : str or dtype, optional Data-type specifier. Not used if `arrays` is non-empty. Returns ------- prefix : str BLAS/LAPACK prefix character. dtype : dtype Inferred Numpy data type. prefer_fortran : bool Whether to prefer Fortran order routines over C order. Examples -------- >>> import scipy.linalg.blas as bla >>> a = np.random.rand(10,15) >>> b = np.asfortranarray(a) # Change the memory layout order >>> bla.find_best_blas_type((a,)) ('d', dtype('float64'), False) >>> bla.find_best_blas_type((a*1j,)) ('z', dtype('complex128'), False) >>> bla.find_best_blas_type((b,)) ('d', dtype('float64'), True) """ dtype = _np.dtype(dtype) prefer_fortran = False if arrays: # use the most generic type in arrays dtypes = [ar.dtype for ar in arrays] dtype = _np.find_common_type(dtypes, ()) try: index = dtypes.index(dtype) except ValueError: index = 0 if arrays[index].flags['FORTRAN']: # prefer Fortran for leading array with column major order prefer_fortran = True prefix = _type_conv.get(dtype.char, 'd') if dtype.char == 'G': # complex256 -> complex128 (i.e., C long double -> C double) dtype = _np.dtype('D') elif dtype.char not in 'fdFD': dtype = _np.dtype('d') return prefix, dtype, prefer_fortran
[ "def", "find_best_blas_type", "(", "arrays", "=", "(", ")", ",", "dtype", "=", "None", ")", ":", "dtype", "=", "_np", ".", "dtype", "(", "dtype", ")", "prefer_fortran", "=", "False", "if", "arrays", ":", "# use the most generic type in arrays", "dtypes", "=", "[", "ar", ".", "dtype", "for", "ar", "in", "arrays", "]", "dtype", "=", "_np", ".", "find_common_type", "(", "dtypes", ",", "(", ")", ")", "try", ":", "index", "=", "dtypes", ".", "index", "(", "dtype", ")", "except", "ValueError", ":", "index", "=", "0", "if", "arrays", "[", "index", "]", ".", "flags", "[", "'FORTRAN'", "]", ":", "# prefer Fortran for leading array with column major order", "prefer_fortran", "=", "True", "prefix", "=", "_type_conv", ".", "get", "(", "dtype", ".", "char", ",", "'d'", ")", "if", "dtype", ".", "char", "==", "'G'", ":", "# complex256 -> complex128 (i.e., C long double -> C double)", "dtype", "=", "_np", ".", "dtype", "(", "'D'", ")", "elif", "dtype", ".", "char", "not", "in", "'fdFD'", ":", "dtype", "=", "_np", ".", "dtype", "(", "'d'", ")", "return", "prefix", ",", "dtype", ",", "prefer_fortran" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/linalg/blas.py#L236-L294
mitsuba-renderer/mitsuba2
4e7628c6eed365904ca2ba536b795d1b03410344
src/python/__init__.py
python
variant
()
return getattr(_tls, 'variant', None)
Returns the currently active variant
Returns the currently active variant
[ "Returns", "the", "currently", "active", "variant" ]
def variant(): 'Returns the currently active variant' return getattr(_tls, 'variant', None)
[ "def", "variant", "(", ")", ":", "return", "getattr", "(", "_tls", ",", "'variant'", ",", "None", ")" ]
https://github.com/mitsuba-renderer/mitsuba2/blob/4e7628c6eed365904ca2ba536b795d1b03410344/src/python/__init__.py#L178-L180
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_gdi.py
python
PyLocale.__init__
(self, *args, **kwargs)
__init__(self, int language=-1, int flags=LOCALE_LOAD_DEFAULT) -> PyLocale
__init__(self, int language=-1, int flags=LOCALE_LOAD_DEFAULT) -> PyLocale
[ "__init__", "(", "self", "int", "language", "=", "-", "1", "int", "flags", "=", "LOCALE_LOAD_DEFAULT", ")", "-", ">", "PyLocale" ]
def __init__(self, *args, **kwargs): """__init__(self, int language=-1, int flags=LOCALE_LOAD_DEFAULT) -> PyLocale""" _gdi_.PyLocale_swiginit(self,_gdi_.new_PyLocale(*args, **kwargs)) PyLocale._setCallbackInfo(self, self, PyLocale)
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_gdi_", ".", "PyLocale_swiginit", "(", "self", ",", "_gdi_", ".", "new_PyLocale", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "PyLocale", ".", "_setCallbackInfo", "(", "self", ",", "self", ",", "PyLocale", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_gdi.py#L3152-L3155
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_misc.py
python
NotificationMessage.__init__
(self, *args)
__init__(self) -> NotificationMessage __init__(self, String title, String message=wxEmptyString, Window parent=None) -> NotificationMessage
__init__(self) -> NotificationMessage __init__(self, String title, String message=wxEmptyString, Window parent=None) -> NotificationMessage
[ "__init__", "(", "self", ")", "-", ">", "NotificationMessage", "__init__", "(", "self", "String", "title", "String", "message", "=", "wxEmptyString", "Window", "parent", "=", "None", ")", "-", ">", "NotificationMessage" ]
def __init__(self, *args): """ __init__(self) -> NotificationMessage __init__(self, String title, String message=wxEmptyString, Window parent=None) -> NotificationMessage """ _misc_.NotificationMessage_swiginit(self,_misc_.new_NotificationMessage(*args))
[ "def", "__init__", "(", "self", ",", "*", "args", ")", ":", "_misc_", ".", "NotificationMessage_swiginit", "(", "self", ",", "_misc_", ".", "new_NotificationMessage", "(", "*", "args", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_misc.py#L1210-L1215
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/RNN/rnn_quantizer/nndct_shared/nndct_graph/base_graph.py
python
GraphBase.children
(self, node)
Get successors of a node in graph Returns: list: list of successors
Get successors of a node in graph
[ "Get", "successors", "of", "a", "node", "in", "graph" ]
def children(self, node): """Get successors of a node in graph Returns: list: list of successors """
[ "def", "children", "(", "self", ",", "node", ")", ":" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/RNN/rnn_quantizer/nndct_shared/nndct_graph/base_graph.py#L27-L32
Tencent/CMONGO
c40380caa14e05509f46993aa8b8da966b09b0b5
src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Executor.py
python
Executor.get_timestamp
(self)
return 0
Fetch a time stamp for this Executor. We don't have one, of course (only files do), but this is the interface used by the timestamp module.
Fetch a time stamp for this Executor. We don't have one, of course (only files do), but this is the interface used by the timestamp module.
[ "Fetch", "a", "time", "stamp", "for", "this", "Executor", ".", "We", "don", "t", "have", "one", "of", "course", "(", "only", "files", "do", ")", "but", "this", "is", "the", "interface", "used", "by", "the", "timestamp", "module", "." ]
def get_timestamp(self): """Fetch a time stamp for this Executor. We don't have one, of course (only files do), but this is the interface used by the timestamp module. """ return 0
[ "def", "get_timestamp", "(", "self", ")", ":", "return", "0" ]
https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Executor.py#L465-L470
jeog/TDAmeritradeAPI
91c738afd7d57b54f6231170bd64c2550fafd34d
python/tdma_api/get.py
python
TransactionHistoryGetter.get_symbol
(self)
return clib.get_str(self._abi('GetSymbol'), self._obj)
Returns search symbol being used.
Returns search symbol being used.
[ "Returns", "search", "symbol", "being", "used", "." ]
def get_symbol(self): """Returns search symbol being used.""" return clib.get_str(self._abi('GetSymbol'), self._obj)
[ "def", "get_symbol", "(", "self", ")", ":", "return", "clib", ".", "get_str", "(", "self", ".", "_abi", "(", "'GetSymbol'", ")", ",", "self", ".", "_obj", ")" ]
https://github.com/jeog/TDAmeritradeAPI/blob/91c738afd7d57b54f6231170bd64c2550fafd34d/python/tdma_api/get.py#L1181-L1183
trilinos/Trilinos
6168be6dd51e35e1cd681e9c4b24433e709df140
packages/muelu/doc/Tutorial/tex/prepareTexTutorial.py
python
deleteDir
(path)
deletes the path entirely
deletes the path entirely
[ "deletes", "the", "path", "entirely" ]
def deleteDir(path): """deletes the path entirely""" cmd = "rm -rf "+path result = getstatusoutput(cmd) if(result[0]!=0): raise RuntimeError(result[1])
[ "def", "deleteDir", "(", "path", ")", ":", "cmd", "=", "\"rm -rf \"", "+", "path", "result", "=", "getstatusoutput", "(", "cmd", ")", "if", "(", "result", "[", "0", "]", "!=", "0", ")", ":", "raise", "RuntimeError", "(", "result", "[", "1", "]", ")" ]
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/muelu/doc/Tutorial/tex/prepareTexTutorial.py#L17-L22
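deleteDir shells out to rm -rf (and relies on getstatusoutput being imported elsewhere in the script). A portable standard-library sketch with the same effect, offered as an alternative rather than the tutorial's own code:

import shutil

def delete_dir(path):
    """Delete path and everything under it; ignore a missing path."""
    shutil.rmtree(path, ignore_errors=True)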
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/ros/roslib/src/roslib/names.py
python
resource_name_package
(name)
return name[:name.find(PRN_SEPARATOR)]
pkg/typeName -> pkg, typeName -> None @param name: package resource name, e.g. 'std_msgs/String' @type name: str @return: package name of resource @rtype: str
pkg/typeName -> pkg, typeName -> None
[ "pkg", "/", "typeName", "-", ">", "pkg", "typeName", "-", ">", "None" ]
def resource_name_package(name): """ pkg/typeName -> pkg, typeName -> None @param name: package resource name, e.g. 'std_msgs/String' @type name: str @return: package name of resource @rtype: str """ if not PRN_SEPARATOR in name: return None return name[:name.find(PRN_SEPARATOR)]
[ "def", "resource_name_package", "(", "name", ")", ":", "if", "not", "PRN_SEPARATOR", "in", "name", ":", "return", "None", "return", "name", "[", ":", "name", ".", "find", "(", "PRN_SEPARATOR", ")", "]" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros/roslib/src/roslib/names.py#L242-L254
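A self-contained check of the split logic, with PRN_SEPARATOR assumed to be '/' as defined in roslib.names:

PRN_SEPARATOR = '/'

def resource_name_package(name):
    if PRN_SEPARATOR not in name:
        return None
    return name[:name.find(PRN_SEPARATOR)]

print(resource_name_package('std_msgs/String'))  # 'std_msgs'
print(resource_name_package('String'))           # None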
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/ops/sparse_ops.py
python
sparse_maximum
(sp_a, sp_b, name=None)
return sparse_tensor.SparseTensor(out_indices, out_values, sp_a.dense_shape)
Returns the element-wise max of two SparseTensors. Assumes the two SparseTensors have the same shape, i.e., no broadcasting. Example: ```python sp_zero = sparse_tensor.SparseTensor([[0]], [0], [7]) sp_one = sparse_tensor.SparseTensor([[1]], [1], [7]) res = tf.sparse_maximum(sp_zero, sp_one).eval() # "res" should be equal to SparseTensor([[0], [1]], [0, 1], [7]). ``` Args: sp_a: a `SparseTensor` operand whose dtype is real, and indices lexicographically ordered. sp_b: the other `SparseTensor` operand with the same requirements (and the same shape). name: optional name of the operation. Returns: output: the output SparseTensor.
Returns the element-wise max of two SparseTensors.
[ "Returns", "the", "element", "-", "wise", "max", "of", "two", "SparseTensors", "." ]
def sparse_maximum(sp_a, sp_b, name=None): """Returns the element-wise max of two SparseTensors. Assumes the two SparseTensors have the same shape, i.e., no broadcasting. Example: ```python sp_zero = sparse_tensor.SparseTensor([[0]], [0], [7]) sp_one = sparse_tensor.SparseTensor([[1]], [1], [7]) res = tf.sparse_maximum(sp_zero, sp_one).eval() # "res" should be equal to SparseTensor([[0], [1]], [0, 1], [7]). ``` Args: sp_a: a `SparseTensor` operand whose dtype is real, and indices lexicographically ordered. sp_b: the other `SparseTensor` operand with the same requirements (and the same shape). name: optional name of the operation. Returns: output: the output SparseTensor. """ with ops.name_scope(name, "SparseSparseMaximum", [sp_a.indices, sp_a.values, sp_b.indices, sp_b.values]) as name: out_indices, out_values = gen_sparse_ops.sparse_sparse_maximum( sp_a.indices, sp_a.values, sp_a.dense_shape, sp_b.indices, sp_b.values, sp_b.dense_shape, name=name) return sparse_tensor.SparseTensor(out_indices, out_values, sp_a.dense_shape)
[ "def", "sparse_maximum", "(", "sp_a", ",", "sp_b", ",", "name", "=", "None", ")", ":", "with", "ops", ".", "name_scope", "(", "name", ",", "\"SparseSparseMaximum\"", ",", "[", "sp_a", ".", "indices", ",", "sp_a", ".", "values", ",", "sp_b", ".", "indices", ",", "sp_b", ".", "values", "]", ")", "as", "name", ":", "out_indices", ",", "out_values", "=", "gen_sparse_ops", ".", "sparse_sparse_maximum", "(", "sp_a", ".", "indices", ",", "sp_a", ".", "values", ",", "sp_a", ".", "dense_shape", ",", "sp_b", ".", "indices", ",", "sp_b", ".", "values", ",", "sp_b", ".", "dense_shape", ",", "name", "=", "name", ")", "return", "sparse_tensor", ".", "SparseTensor", "(", "out_indices", ",", "out_values", ",", "sp_a", ".", "dense_shape", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/sparse_ops.py#L1774-L1807
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/_osx_support.py
python
_find_executable
(executable, path=None)
Tries to find 'executable' in the directories listed in 'path'. A string listing directories separated by 'os.pathsep'; defaults to os.environ['PATH']. Returns the complete filename or None if not found.
Tries to find 'executable' in the directories listed in 'path'.
[ "Tries", "to", "find", "executable", "in", "the", "directories", "listed", "in", "path", "." ]
def _find_executable(executable, path=None): """Tries to find 'executable' in the directories listed in 'path'. A string listing directories separated by 'os.pathsep'; defaults to os.environ['PATH']. Returns the complete filename or None if not found. """ if path is None: path = os.environ['PATH'] paths = path.split(os.pathsep) base, ext = os.path.splitext(executable) if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'): executable = executable + '.exe' if not os.path.isfile(executable): for p in paths: f = os.path.join(p, executable) if os.path.isfile(f): # the file exists, we have a shot at spawn working return f return None else: return executable
[ "def", "_find_executable", "(", "executable", ",", "path", "=", "None", ")", ":", "if", "path", "is", "None", ":", "path", "=", "os", ".", "environ", "[", "'PATH'", "]", "paths", "=", "path", ".", "split", "(", "os", ".", "pathsep", ")", "base", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "executable", ")", "if", "(", "sys", ".", "platform", "==", "'win32'", "or", "os", ".", "name", "==", "'os2'", ")", "and", "(", "ext", "!=", "'.exe'", ")", ":", "executable", "=", "executable", "+", "'.exe'", "if", "not", "os", ".", "path", ".", "isfile", "(", "executable", ")", ":", "for", "p", "in", "paths", ":", "f", "=", "os", ".", "path", ".", "join", "(", "p", ",", "executable", ")", "if", "os", ".", "path", ".", "isfile", "(", "f", ")", ":", "# the file exists, we have a shot at spawn working", "return", "f", "return", "None", "else", ":", "return", "executable" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/_osx_support.py#L29-L52
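On Python 3.3+ the same lookup is available as shutil.which, which is an easy way to sanity-check the helper above:

import shutil

print(shutil.which('python'))                   # e.g. '/usr/bin/python', or None
print(shutil.which('python', path='/usr/bin'))  # restrict the search path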
tcpexmachina/remy
687b5db29b81df7ae8737889c78b47e7f9788297
scripts/plot.py
python
process_replot_argument
(replot_dir, results_dir)
return remyccs, link_ppt_range, console_dir
Reads the args.json file in a results directory, copies it to an appropriate location in the current results directory and returns the link speed range and a list of RemyCC files.
Reads the args.json file in a results directory, copies it to an appropriate location in the current results directory and returns the link speed range and a list of RemyCC files.
[ "Reads", "the", "args", ".", "json", "file", "in", "a", "results", "directory", "copies", "it", "to", "an", "appropriate", "location", "in", "the", "current", "results", "directory", "and", "returns", "the", "link", "speed", "range", "and", "a", "list", "of", "RemyCC", "files", "." ]
def process_replot_argument(replot_dir, results_dir): """Reads the args.json file in a results directory, copies it to an appropriate location in the current results directory and returns the link speed range and a list of RemyCC files.""" argsfilename = os.path.join(replot_dir, "args.json") argsfile = open(argsfilename) jsondict = json.load(argsfile) argsfile.close() args = jsondict["args"] remyccs = args["remycc"] link_ppt_range = np.logspace(np.log10(args["link_ppt"][0]), np.log10(args["link_ppt"][1]), args["num_points"]) console_dir = os.path.join(replot_dir, "outputs") replots_dirname = os.path.join(results_dir, "replots", os.path.basename(replot_dir)) os.makedirs(replots_dirname, exist_ok=True) target_filename = os.path.join(replots_dirname, "args.json") shutil.copy(argsfilename, target_filename) return remyccs, link_ppt_range, console_dir
[ "def", "process_replot_argument", "(", "replot_dir", ",", "results_dir", ")", ":", "argsfilename", "=", "os", ".", "path", ".", "join", "(", "replot_dir", ",", "\"args.json\"", ")", "argsfile", "=", "open", "(", "argsfilename", ")", "jsondict", "=", "json", ".", "load", "(", "argsfile", ")", "argsfile", ".", "close", "(", ")", "args", "=", "jsondict", "[", "\"args\"", "]", "remyccs", "=", "args", "[", "\"remycc\"", "]", "link_ppt_range", "=", "np", ".", "logspace", "(", "np", ".", "log10", "(", "args", "[", "\"link_ppt\"", "]", "[", "0", "]", ")", ",", "np", ".", "log10", "(", "args", "[", "\"link_ppt\"", "]", "[", "1", "]", ")", ",", "args", "[", "\"num_points\"", "]", ")", "console_dir", "=", "os", ".", "path", ".", "join", "(", "replot_dir", ",", "\"outputs\"", ")", "replots_dirname", "=", "os", ".", "path", ".", "join", "(", "results_dir", ",", "\"replots\"", ",", "os", ".", "path", ".", "basename", "(", "replot_dir", ")", ")", "os", ".", "makedirs", "(", "replots_dirname", ",", "exist_ok", "=", "True", ")", "target_filename", "=", "os", ".", "path", ".", "join", "(", "replots_dirname", ",", "\"args.json\"", ")", "shutil", ".", "copy", "(", "argsfilename", ",", "target_filename", ")", "return", "remyccs", ",", "link_ppt_range", ",", "console_dir" ]
https://github.com/tcpexmachina/remy/blob/687b5db29b81df7ae8737889c78b47e7f9788297/scripts/plot.py#L226-L244
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_misc.py
python
FileTypeInfoSequence
(*args, **kwargs)
return val
FileTypeInfoSequence(wxArrayString sArray) -> FileTypeInfo
FileTypeInfoSequence(wxArrayString sArray) -> FileTypeInfo
[ "FileTypeInfoSequence", "(", "wxArrayString", "sArray", ")", "-", ">", "FileTypeInfo" ]
def FileTypeInfoSequence(*args, **kwargs): """FileTypeInfoSequence(wxArrayString sArray) -> FileTypeInfo""" val = _misc_.new_FileTypeInfoSequence(*args, **kwargs) return val
[ "def", "FileTypeInfoSequence", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "val", "=", "_misc_", ".", "new_FileTypeInfoSequence", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "val" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_misc.py#L2562-L2565
albertz/openlierox
d316c14a8eb57848ef56e9bfa7b23a56f694a51b
tools/DedicatedServerVideo/gdata/base/service.py
python
GBaseService.__init__
(self, email=None, password=None, source=None, server='base.google.com', api_key=None, additional_headers=None, handler=None, **kwargs)
Creates a client for the Google Base service. Args: email: string (optional) The user's email address, used for authentication. password: string (optional) The user's password. source: string (optional) The name of the user's application. server: string (optional) The name of the server to which a connection will be opened. Default value: 'base.google.com'. api_key: string (optional) The Google Base API key to use. **kwargs: The other parameters to pass to gdata.service.GDataService constructor.
Creates a client for the Google Base service.
[ "Creates", "a", "client", "for", "the", "Google", "Base", "service", "." ]
def __init__(self, email=None, password=None, source=None, server='base.google.com', api_key=None, additional_headers=None, handler=None, **kwargs): """Creates a client for the Google Base service. Args: email: string (optional) The user's email address, used for authentication. password: string (optional) The user's password. source: string (optional) The name of the user's application. server: string (optional) The name of the server to which a connection will be opened. Default value: 'base.google.com'. api_key: string (optional) The Google Base API key to use. **kwargs: The other parameters to pass to gdata.service.GDataService constructor. """ gdata.service.GDataService.__init__( self, email=email, password=password, service='gbase', source=source, server=server, additional_headers=additional_headers, handler=handler, **kwargs) self.api_key = api_key
[ "def", "__init__", "(", "self", ",", "email", "=", "None", ",", "password", "=", "None", ",", "source", "=", "None", ",", "server", "=", "'base.google.com'", ",", "api_key", "=", "None", ",", "additional_headers", "=", "None", ",", "handler", "=", "None", ",", "*", "*", "kwargs", ")", ":", "gdata", ".", "service", ".", "GDataService", ".", "__init__", "(", "self", ",", "email", "=", "email", ",", "password", "=", "password", ",", "service", "=", "'gbase'", ",", "source", "=", "source", ",", "server", "=", "server", ",", "additional_headers", "=", "additional_headers", ",", "handler", "=", "handler", ",", "*", "*", "kwargs", ")", "self", ".", "api_key", "=", "api_key" ]
https://github.com/albertz/openlierox/blob/d316c14a8eb57848ef56e9bfa7b23a56f694a51b/tools/DedicatedServerVideo/gdata/base/service.py#L52-L72
danxuhk/ContinuousCRF-CNN
2b6dcaf179620f118b225ed12c890414ca828e21
scripts/cpp_lint.py
python
FileInfo.Split
(self)
return (project,) + os.path.splitext(rest)
Splits the file into the directory, basename, and extension. For 'chrome/browser/browser.cc', Split() would return ('chrome/browser', 'browser', '.cc') Returns: A tuple of (directory, basename, extension).
Splits the file into the directory, basename, and extension.
[ "Splits", "the", "file", "into", "the", "directory", "basename", "and", "extension", "." ]
def Split(self): """Splits the file into the directory, basename, and extension. For 'chrome/browser/browser.cc', Split() would return ('chrome/browser', 'browser', '.cc') Returns: A tuple of (directory, basename, extension). """ googlename = self.RepositoryName() project, rest = os.path.split(googlename) return (project,) + os.path.splitext(rest)
[ "def", "Split", "(", "self", ")", ":", "googlename", "=", "self", ".", "RepositoryName", "(", ")", "project", ",", "rest", "=", "os", ".", "path", ".", "split", "(", "googlename", ")", "return", "(", "project", ",", ")", "+", "os", ".", "path", ".", "splitext", "(", "rest", ")" ]
https://github.com/danxuhk/ContinuousCRF-CNN/blob/2b6dcaf179620f118b225ed12c890414ca828e21/scripts/cpp_lint.py#L934-L946
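The Split() contract is easy to verify standalone; `split_path` below is a hypothetical free-function version of the same (directory, basename, extension) tuple shape:

```python
import os

def split_path(name):
    # Mirrors FileInfo.Split(): (directory, basename, extension).
    directory, rest = os.path.split(name)
    return (directory,) + os.path.splitext(rest)

assert split_path("chrome/browser/browser.cc") == ("chrome/browser", "browser", ".cc")
```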
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Source/ThirdParty/FakeIt/2.0.2/build/coveralls.py
python
Repository.git
(self, *arguments)
return process.communicate()[0].decode('UTF-8')
Return output from git.
Return output from git.
[ "Return", "output", "from", "git", "." ]
def git(self, *arguments): """Return output from git.""" process = subprocess.Popen(['git'] + list(arguments), stdout=subprocess.PIPE, cwd=self.cwd) return process.communicate()[0].decode('UTF-8')
[ "def", "git", "(", "self", ",", "*", "arguments", ")", ":", "process", "=", "subprocess", ".", "Popen", "(", "[", "'git'", "]", "+", "list", "(", "arguments", ")", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "cwd", "=", "self", ".", "cwd", ")", "return", "process", ".", "communicate", "(", ")", "[", "0", "]", ".", "decode", "(", "'UTF-8'", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/FakeIt/2.0.2/build/coveralls.py#L63-L68
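A standalone version of the same capture pattern, runnable inside any git checkout (the helper name is illustrative, not the coveralls tool's API):

```python
import subprocess

def run_git(cwd, *arguments):
    # Same contract as Repository.git(): return stdout decoded as UTF-8.
    process = subprocess.Popen(["git"] + list(arguments),
                               stdout=subprocess.PIPE, cwd=cwd)
    return process.communicate()[0].decode("UTF-8")

# Example, from inside a repository:
# print(run_git(".", "rev-parse", "HEAD"))
```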
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_windows.py
python
PrintPreview.GetFrame
(*args, **kwargs)
return _windows_.PrintPreview_GetFrame(*args, **kwargs)
GetFrame(self) -> Frame
GetFrame(self) -> Frame
[ "GetFrame", "(", "self", ")", "-", ">", "Frame" ]
def GetFrame(*args, **kwargs): """GetFrame(self) -> Frame""" return _windows_.PrintPreview_GetFrame(*args, **kwargs)
[ "def", "GetFrame", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "PrintPreview_GetFrame", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L5593-L5595
google/shaka-packager
e1b0c7c45431327fd3ce193514a5407d07b39b22
packager/third_party/protobuf/python/google/protobuf/text_format.py
python
_Parser._MergeMessageField
(self, tokenizer, message, field)
Merges a single scalar field into a message. Args: tokenizer: A tokenizer to parse the field value. message: The message of which field is a member. field: The descriptor of the field to be merged. Raises: ParseError: In case of text parsing problems.
Merges a single scalar field into a message.
[ "Merges", "a", "single", "scalar", "field", "into", "a", "message", "." ]
def _MergeMessageField(self, tokenizer, message, field): """Merges a single scalar field into a message. Args: tokenizer: A tokenizer to parse the field value. message: The message of which field is a member. field: The descriptor of the field to be merged. Raises: ParseError: In case of text parsing problems. """ is_map_entry = _IsMapEntry(field) if tokenizer.TryConsume('<'): end_token = '>' else: tokenizer.Consume('{') end_token = '}' if (field.message_type.full_name == _ANY_FULL_TYPE_NAME and tokenizer.TryConsume('[')): packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) tokenizer.Consume(']') tokenizer.TryConsume(':') if tokenizer.TryConsume('<'): expanded_any_end_token = '>' else: tokenizer.Consume('{') expanded_any_end_token = '}' if not self.descriptor_pool: raise ParseError('Descriptor pool required to parse expanded Any field') expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, self.descriptor_pool) if not expanded_any_sub_message: raise ParseError('Type %s not found in descriptor pool' % packed_type_name) while not tokenizer.TryConsume(expanded_any_end_token): if tokenizer.AtEnd(): raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (expanded_any_end_token,)) self._MergeField(tokenizer, expanded_any_sub_message) if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: any_message = getattr(message, field.name).add() else: any_message = getattr(message, field.name) any_message.Pack(expanded_any_sub_message) elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: if field.is_extension: sub_message = message.Extensions[field].add() elif is_map_entry: sub_message = getattr(message, field.name).GetEntryClass()() else: sub_message = getattr(message, field.name).add() else: if field.is_extension: sub_message = message.Extensions[field] else: sub_message = getattr(message, field.name) sub_message.SetInParent() while not tokenizer.TryConsume(end_token): if tokenizer.AtEnd(): raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) self._MergeField(tokenizer, sub_message) if is_map_entry: value_cpptype = field.message_type.fields_by_name['value'].cpp_type if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: value = getattr(message, field.name)[sub_message.key] value.MergeFrom(sub_message.value) else: getattr(message, field.name)[sub_message.key] = sub_message.value
[ "def", "_MergeMessageField", "(", "self", ",", "tokenizer", ",", "message", ",", "field", ")", ":", "is_map_entry", "=", "_IsMapEntry", "(", "field", ")", "if", "tokenizer", ".", "TryConsume", "(", "'<'", ")", ":", "end_token", "=", "'>'", "else", ":", "tokenizer", ".", "Consume", "(", "'{'", ")", "end_token", "=", "'}'", "if", "(", "field", ".", "message_type", ".", "full_name", "==", "_ANY_FULL_TYPE_NAME", "and", "tokenizer", ".", "TryConsume", "(", "'['", ")", ")", ":", "packed_type_name", "=", "self", ".", "_ConsumeAnyTypeUrl", "(", "tokenizer", ")", "tokenizer", ".", "Consume", "(", "']'", ")", "tokenizer", ".", "TryConsume", "(", "':'", ")", "if", "tokenizer", ".", "TryConsume", "(", "'<'", ")", ":", "expanded_any_end_token", "=", "'>'", "else", ":", "tokenizer", ".", "Consume", "(", "'{'", ")", "expanded_any_end_token", "=", "'}'", "if", "not", "self", ".", "descriptor_pool", ":", "raise", "ParseError", "(", "'Descriptor pool required to parse expanded Any field'", ")", "expanded_any_sub_message", "=", "_BuildMessageFromTypeName", "(", "packed_type_name", ",", "self", ".", "descriptor_pool", ")", "if", "not", "expanded_any_sub_message", ":", "raise", "ParseError", "(", "'Type %s not found in descriptor pool'", "%", "packed_type_name", ")", "while", "not", "tokenizer", ".", "TryConsume", "(", "expanded_any_end_token", ")", ":", "if", "tokenizer", ".", "AtEnd", "(", ")", ":", "raise", "tokenizer", ".", "ParseErrorPreviousToken", "(", "'Expected \"%s\".'", "%", "(", "expanded_any_end_token", ",", ")", ")", "self", ".", "_MergeField", "(", "tokenizer", ",", "expanded_any_sub_message", ")", "if", "field", ".", "label", "==", "descriptor", ".", "FieldDescriptor", ".", "LABEL_REPEATED", ":", "any_message", "=", "getattr", "(", "message", ",", "field", ".", "name", ")", ".", "add", "(", ")", "else", ":", "any_message", "=", "getattr", "(", "message", ",", "field", ".", "name", ")", "any_message", ".", "Pack", "(", "expanded_any_sub_message", ")", "elif", "field", ".", "label", "==", "descriptor", ".", "FieldDescriptor", ".", "LABEL_REPEATED", ":", "if", "field", ".", "is_extension", ":", "sub_message", "=", "message", ".", "Extensions", "[", "field", "]", ".", "add", "(", ")", "elif", "is_map_entry", ":", "sub_message", "=", "getattr", "(", "message", ",", "field", ".", "name", ")", ".", "GetEntryClass", "(", ")", "(", ")", "else", ":", "sub_message", "=", "getattr", "(", "message", ",", "field", ".", "name", ")", ".", "add", "(", ")", "else", ":", "if", "field", ".", "is_extension", ":", "sub_message", "=", "message", ".", "Extensions", "[", "field", "]", "else", ":", "sub_message", "=", "getattr", "(", "message", ",", "field", ".", "name", ")", "sub_message", ".", "SetInParent", "(", ")", "while", "not", "tokenizer", ".", "TryConsume", "(", "end_token", ")", ":", "if", "tokenizer", ".", "AtEnd", "(", ")", ":", "raise", "tokenizer", ".", "ParseErrorPreviousToken", "(", "'Expected \"%s\".'", "%", "(", "end_token", ",", ")", ")", "self", ".", "_MergeField", "(", "tokenizer", ",", "sub_message", ")", "if", "is_map_entry", ":", "value_cpptype", "=", "field", ".", "message_type", ".", "fields_by_name", "[", "'value'", "]", ".", "cpp_type", "if", "value_cpptype", "==", "descriptor", ".", "FieldDescriptor", ".", "CPPTYPE_MESSAGE", ":", "value", "=", "getattr", "(", "message", ",", "field", ".", "name", ")", "[", "sub_message", ".", "key", "]", "value", ".", "MergeFrom", "(", "sub_message", ".", "value", ")", "else", ":", "getattr", "(", "message", ",", "field", ".", "name", ")", "[", "sub_message", ".", "key", "]", "=", "sub_message", ".", "value" ]
https://github.com/google/shaka-packager/blob/e1b0c7c45431327fd3ce193514a5407d07b39b22/packager/third_party/protobuf/python/google/protobuf/text_format.py#L710-L781
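_MergeMessageField is internal to text_format; the public entry points that exercise it are text_format.Merge and text_format.Parse. A minimal round trip through the map-entry branch, assuming the protobuf package is installed:

```python
from google.protobuf import text_format
from google.protobuf.struct_pb2 import Struct

msg = Struct()
# "fields" is a map field, so parsing this drives the is_map_entry path above.
text_format.Merge('fields { key: "a" value { number_value: 1 } }', msg)
print(text_format.MessageToString(msg))
```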
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/tools/jinja2/utils.py
python
LRUCache.__delitem__
(self, key)
Remove an item from the cache dict. Raise a `KeyError` if it does not exist.
Remove an item from the cache dict. Raise a `KeyError` if it does not exist.
[ "Remove", "an", "item", "from", "the", "cache", "dict", ".", "Raise", "a", "KeyError", "if", "it", "does", "not", "exist", "." ]
def __delitem__(self, key): """Remove an item from the cache dict. Raise a `KeyError` if it does not exist. """ self._wlock.acquire() try: del self._mapping[key] try: self._remove(key) except ValueError: # __getitem__ is not locked, it might happen pass finally: self._wlock.release()
[ "def", "__delitem__", "(", "self", ",", "key", ")", ":", "self", ".", "_wlock", ".", "acquire", "(", ")", "try", ":", "del", "self", ".", "_mapping", "[", "key", "]", "try", ":", "self", ".", "_remove", "(", "key", ")", "except", "ValueError", ":", "# __getitem__ is not locked, it might happen", "pass", "finally", ":", "self", ".", "_wlock", ".", "release", "(", ")" ]
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/jinja2/utils.py#L429-L442
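The interesting part of the record above is the concurrency posture: writes hold a lock, reads do not, so the internal LRU order can already be missing the key when a delete arrives. A stripped-down sketch of that pattern (TinyLRU is a stand-in, not jinja2's class):

```python
import threading

class TinyLRU:
    """Minimal sketch of the write-lock / tolerant-remove pattern."""
    def __init__(self):
        self._mapping = {}
        self._queue = []          # stand-in for the internal LRU order
        self._wlock = threading.Lock()

    def __setitem__(self, key, value):
        with self._wlock:
            self._mapping[key] = value
            self._queue.append(key)

    def __delitem__(self, key):
        with self._wlock:
            del self._mapping[key]    # KeyError propagates, as documented
            try:
                self._queue.remove(key)
            except ValueError:        # reads are unlocked; tolerate a miss
                pass

c = TinyLRU()
c["a"] = 1
del c["a"]
```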
RamadhanAmizudin/malware
2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1
Fuzzbunch/Resources/ST1.14/Tools/sentrytribe.py
python
Sentrytribe.resend_message
(self, msg_id)
Resend fragments requires a ping done first to find missing fragments
Resend fragments requires a ping done first to find missing fragments
[ "Resend", "fragments", "requires", "a", "ping", "done", "first", "to", "find", "missing", "fragments" ]
def resend_message(self, msg_id): """ Resend fragments requires a ping done first to find missing fragments """ if self.pending_msg_id == msg_id and msg_id in self.pending_messages.keys(): print "[+] Found saved message, only resending missing fragments" for i in self.pending_messages[msg_id].keys(): if not self.pending_fragments[i-1]: # fragments starts at offset 0 self.send_data(CMD_EXECUTE, self.pending_messages[msg_id][i], msg_id, i, len(self.pending_messages[msg_id].keys())) else: print "Skipping", i elif msg_id in self.pending_messages.keys(): print "[+] Found saved message, but couldn't find ping or missing fragments, resending everything" frag_count = len(self.pending_messages[msg_id].keys()) for i in self.pending_messages[msg_id].keys(): self.send_data(CMD_EXECUTE, self.pending_messages[msg_id][i], msg_id, i, frag_count) else: raise Exception("Couldn't find pending message, did you ping? "+repr(msg_id)+" not in "+str(self.pending_messages.keys()))
[ "def", "resend_message", "(", "self", ",", "msg_id", ")", ":", "if", "self", ".", "pending_msg_id", "==", "msg_id", "and", "msg_id", "in", "self", ".", "pending_messages", ".", "keys", "(", ")", ":", "print", "\"[+] Found saved message, only resending missing fragments\"", "for", "i", "in", "self", ".", "pending_messages", "[", "msg_id", "]", ".", "keys", "(", ")", ":", "if", "not", "self", ".", "pending_fragments", "[", "i", "-", "1", "]", ":", "# fragments starts at offset 0", "self", ".", "send_data", "(", "CMD_EXECUTE", ",", "self", ".", "pending_messages", "[", "msg_id", "]", "[", "i", "]", ",", "msg_id", ",", "i", ",", "len", "(", "self", ".", "pending_messages", "[", "msg_id", "]", ".", "keys", "(", ")", ")", ")", "else", ":", "print", "\"Skipping\"", ",", "i", "elif", "msg_id", "in", "self", ".", "pending_messages", ".", "keys", "(", ")", ":", "print", "\"[+] Found saved message, but couldn't find ping or missing fragments, resending everything\"", "frag_count", "=", "len", "(", "self", ".", "pending_messages", "[", "msg_id", "]", ".", "keys", "(", ")", ")", "for", "i", "in", "self", ".", "pending_messages", "[", "msg_id", "]", ".", "keys", "(", ")", ":", "self", ".", "send_data", "(", "CMD_EXECUTE", ",", "self", ".", "pending_messages", "[", "msg_id", "]", "[", "i", "]", ",", "msg_id", ",", "i", ",", "frag_count", ")", "else", ":", "raise", "Exception", "(", "\"Couldn't find pending message, did you ping? \"", "+", "repr", "(", "msg_id", ")", "+", "\" not in \"", "+", "str", "(", "self", ".", "pending_messages", ".", "keys", "(", ")", ")", ")" ]
https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/Fuzzbunch/Resources/ST1.14/Tools/sentrytribe.py#L302-L320
p4lang/p4c
3272e79369f20813cc1a555a5eb26f44432f84a4
tools/cpplint.py
python
NestingState.InNamespaceBody
(self)
return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
Check if we are currently one level inside a namespace body. Returns: True if top of the stack is a namespace block, False otherwise.
Check if we are currently one level inside a namespace body.
[ "Check", "if", "we", "are", "currently", "one", "level", "inside", "a", "namespace", "body", "." ]
def InNamespaceBody(self): """Check if we are currently one level inside a namespace body. Returns: True if top of the stack is a namespace block, False otherwise. """ return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
[ "def", "InNamespaceBody", "(", "self", ")", ":", "return", "self", ".", "stack", "and", "isinstance", "(", "self", ".", "stack", "[", "-", "1", "]", ",", "_NamespaceInfo", ")" ]
https://github.com/p4lang/p4c/blob/3272e79369f20813cc1a555a5eb26f44432f84a4/tools/cpplint.py#L2940-L2946
intel-iot-devkit/how-to-code-samples
b4ea616f36bbfa2e042beb1698f968cfd651d79f
access-control/python/iot_access_control/log.py
python
log
(event)
Publish message to MQTT server and data store.
Publish message to MQTT server and data store.
[ "Publish", "message", "to", "MQTT", "server", "and", "data", "store", "." ]
def log(event): """ Publish message to MQTT server and data store. """ message = "{0} {1}".format(datetime.utcnow().isoformat(), event) payload = {"value": message} print(message) send(payload)
[ "def", "log", "(", "event", ")", ":", "message", "=", "\"{0} {1}\"", ".", "format", "(", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", ",", "event", ")", "payload", "=", "{", "\"value\"", ":", "message", "}", "print", "(", "message", ")", "send", "(", "payload", ")" ]
https://github.com/intel-iot-devkit/how-to-code-samples/blob/b4ea616f36bbfa2e042beb1698f968cfd651d79f/access-control/python/iot_access_control/log.py#L45-L54
MVIG-SJTU/RMPE
5188c230ec800c12be7369c3619615bc9b020aa4
scripts/cpp_lint.py
python
ProcessFileData
(filename, file_extension, lines, error, extra_check_functions=[])
Performs lint checks and reports any errors to the given error function. Args: filename: Filename of the file that is being processed. file_extension: The extension (dot not included) of the file. lines: An array of strings, each representing a line of the file, with the last element being empty if the file is terminated with a newline. error: A callable to which errors are reported, which takes 4 arguments: filename, line number, error level, and message extra_check_functions: An array of additional check functions that will be run on each source line. Each function takes 4 arguments: filename, clean_lines, line, error
Performs lint checks and reports any errors to the given error function.
[ "Performs", "lint", "checks", "and", "reports", "any", "errors", "to", "the", "given", "error", "function", "." ]
def ProcessFileData(filename, file_extension, lines, error, extra_check_functions=[]): """Performs lint checks and reports any errors to the given error function. Args: filename: Filename of the file that is being processed. file_extension: The extension (dot not included) of the file. lines: An array of strings, each representing a line of the file, with the last element being empty if the file is terminated with a newline. error: A callable to which errors are reported, which takes 4 arguments: filename, line number, error level, and message extra_check_functions: An array of additional check functions that will be run on each source line. Each function takes 4 arguments: filename, clean_lines, line, error """ lines = (['// marker so line numbers and indices both start at 1'] + lines + ['// marker so line numbers end in a known way']) include_state = _IncludeState() function_state = _FunctionState() nesting_state = _NestingState() ResetNolintSuppressions() CheckForCopyright(filename, lines, error) if file_extension == 'h': CheckForHeaderGuard(filename, lines, error) RemoveMultiLineComments(filename, lines, error) clean_lines = CleansedLines(lines) for line in xrange(clean_lines.NumLines()): ProcessLine(filename, file_extension, clean_lines, line, include_state, function_state, nesting_state, error, extra_check_functions) nesting_state.CheckCompletedBlocks(filename, error) CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error) # We check here rather than inside ProcessLine so that we see raw # lines rather than "cleaned" lines. CheckForBadCharacters(filename, lines, error) CheckForNewlineAtEOF(filename, lines, error)
[ "def", "ProcessFileData", "(", "filename", ",", "file_extension", ",", "lines", ",", "error", ",", "extra_check_functions", "=", "[", "]", ")", ":", "lines", "=", "(", "[", "'// marker so line numbers and indices both start at 1'", "]", "+", "lines", "+", "[", "'// marker so line numbers end in a known way'", "]", ")", "include_state", "=", "_IncludeState", "(", ")", "function_state", "=", "_FunctionState", "(", ")", "nesting_state", "=", "_NestingState", "(", ")", "ResetNolintSuppressions", "(", ")", "CheckForCopyright", "(", "filename", ",", "lines", ",", "error", ")", "if", "file_extension", "==", "'h'", ":", "CheckForHeaderGuard", "(", "filename", ",", "lines", ",", "error", ")", "RemoveMultiLineComments", "(", "filename", ",", "lines", ",", "error", ")", "clean_lines", "=", "CleansedLines", "(", "lines", ")", "for", "line", "in", "xrange", "(", "clean_lines", ".", "NumLines", "(", ")", ")", ":", "ProcessLine", "(", "filename", ",", "file_extension", ",", "clean_lines", ",", "line", ",", "include_state", ",", "function_state", ",", "nesting_state", ",", "error", ",", "extra_check_functions", ")", "nesting_state", ".", "CheckCompletedBlocks", "(", "filename", ",", "error", ")", "CheckForIncludeWhatYouUse", "(", "filename", ",", "clean_lines", ",", "include_state", ",", "error", ")", "# We check here rather than inside ProcessLine so that we see raw", "# lines rather than \"cleaned\" lines.", "CheckForBadCharacters", "(", "filename", ",", "lines", ",", "error", ")", "CheckForNewlineAtEOF", "(", "filename", ",", "lines", ",", "error", ")" ]
https://github.com/MVIG-SJTU/RMPE/blob/5188c230ec800c12be7369c3619615bc9b020aa4/scripts/cpp_lint.py#L4648-L4691
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/dataview.py
python
DataViewRenderer.GetAlignment
(*args, **kwargs)
return _dataview.DataViewRenderer_GetAlignment(*args, **kwargs)
GetAlignment(self) -> int
GetAlignment(self) -> int
[ "GetAlignment", "(", "self", ")", "-", ">", "int" ]
def GetAlignment(*args, **kwargs): """GetAlignment(self) -> int""" return _dataview.DataViewRenderer_GetAlignment(*args, **kwargs)
[ "def", "GetAlignment", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_dataview", ".", "DataViewRenderer_GetAlignment", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/dataview.py#L1193-L1195
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/collections.py
python
Counter.__init__
(self, iterable=None, **kwds)
Create a new, empty Counter object. And if given, count elements from an input iterable. Or, initialize the count from another mapping of elements to their counts. >>> c = Counter() # a new, empty counter >>> c = Counter('gallahad') # a new counter from an iterable >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping >>> c = Counter(a=4, b=2) # a new counter from keyword args
Create a new, empty Counter object. And if given, count elements from an input iterable. Or, initialize the count from another mapping of elements to their counts.
[ "Create", "a", "new", "empty", "Counter", "object", ".", "And", "if", "given", "count", "elements", "from", "an", "input", "iterable", ".", "Or", "initialize", "the", "count", "from", "another", "mapping", "of", "elements", "to", "their", "counts", "." ]
def __init__(self, iterable=None, **kwds): '''Create a new, empty Counter object. And if given, count elements from an input iterable. Or, initialize the count from another mapping of elements to their counts. >>> c = Counter() # a new, empty counter >>> c = Counter('gallahad') # a new counter from an iterable >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping >>> c = Counter(a=4, b=2) # a new counter from keyword args ''' super(Counter, self).__init__() self.update(iterable, **kwds)
[ "def", "__init__", "(", "self", ",", "iterable", "=", "None", ",", "*", "*", "kwds", ")", ":", "super", "(", "Counter", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "update", "(", "iterable", ",", "*", "*", "kwds", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/collections.py#L432-L444
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wptserve/wptserve/request.py
python
RequestHeaders.get_list
(self, key, default=missing)
Get all the header values for a particular field name as a list
Get all the header values for a particular field name as a list
[ "Get", "all", "the", "header", "values", "for", "a", "particular", "field", "name", "as", "a", "list" ]
def get_list(self, key, default=missing): """Get all the header values for a particular field name as a list""" try: return dict.__getitem__(self, key.lower()) except KeyError: if default is not missing: return default else: raise
[ "def", "get_list", "(", "self", ",", "key", ",", "default", "=", "missing", ")", ":", "try", ":", "return", "dict", ".", "__getitem__", "(", "self", ",", "key", ".", "lower", "(", ")", ")", "except", "KeyError", ":", "if", "default", "is", "not", "missing", ":", "return", "default", "else", ":", "raise" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wptserve/wptserve/request.py#L369-L378
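A self-contained sketch of the multi-value, case-insensitive lookup that RequestHeaders.get_list implements (the class and sentinel names here are illustrative, not wptserve's):

```python
_missing = object()

class Headers(dict):
    def __init__(self, items):
        super().__init__()
        for name, value in items:
            self.setdefault(name.lower(), []).append(value)

    def get_list(self, key, default=_missing):
        try:
            return dict.__getitem__(self, key.lower())
        except KeyError:
            if default is not _missing:
                return default
            raise

h = Headers([("Accept", "text/html"), ("ACCEPT", "application/json")])
assert h.get_list("accept") == ["text/html", "application/json"]
assert h.get_list("X-Absent", default=[]) == []
```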
okex/V3-Open-API-SDK
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_internal/pyproject.py
python
load_pyproject_toml
( use_pep517, # type: Optional[bool] pyproject_toml, # type: str setup_py, # type: str req_name # type: str )
return (requires, backend, check)
Load the pyproject.toml file. Parameters: use_pep517 - Has the user requested PEP 517 processing? None means the user hasn't explicitly specified. pyproject_toml - Location of the project's pyproject.toml file setup_py - Location of the project's setup.py file req_name - The name of the requirement we're processing (for error reporting) Returns: None if we should use the legacy code path, otherwise a tuple ( requirements from pyproject.toml, name of PEP 517 backend, requirements we should check are installed after setting up the build environment )
Load the pyproject.toml file.
[ "Load", "the", "pyproject", ".", "toml", "file", "." ]
def load_pyproject_toml( use_pep517, # type: Optional[bool] pyproject_toml, # type: str setup_py, # type: str req_name # type: str ): # type: (...) -> Optional[Tuple[List[str], str, List[str]]] """Load the pyproject.toml file. Parameters: use_pep517 - Has the user requested PEP 517 processing? None means the user hasn't explicitly specified. pyproject_toml - Location of the project's pyproject.toml file setup_py - Location of the project's setup.py file req_name - The name of the requirement we're processing (for error reporting) Returns: None if we should use the legacy code path, otherwise a tuple ( requirements from pyproject.toml, name of PEP 517 backend, requirements we should check are installed after setting up the build environment ) """ has_pyproject = os.path.isfile(pyproject_toml) has_setup = os.path.isfile(setup_py) if has_pyproject: with io.open(pyproject_toml, encoding="utf-8") as f: pp_toml = pytoml.load(f) build_system = pp_toml.get("build-system") else: build_system = None # The following cases must use PEP 517 # We check for use_pep517 being non-None and falsey because that means # the user explicitly requested --no-use-pep517. The value 0 as # opposed to False can occur when the value is provided via an # environment variable or config file option (due to the quirk of # strtobool() returning an integer in pip's configuration code). if has_pyproject and not has_setup: if use_pep517 is not None and not use_pep517: raise InstallationError( "Disabling PEP 517 processing is invalid: " "project does not have a setup.py" ) use_pep517 = True elif build_system and "build-backend" in build_system: if use_pep517 is not None and not use_pep517: raise InstallationError( "Disabling PEP 517 processing is invalid: " "project specifies a build backend of {} " "in pyproject.toml".format( build_system["build-backend"] ) ) use_pep517 = True # If we haven't worked out whether to use PEP 517 yet, # and the user hasn't explicitly stated a preference, # we do so if the project has a pyproject.toml file. elif use_pep517 is None: use_pep517 = has_pyproject # At this point, we know whether we're going to use PEP 517. assert use_pep517 is not None # If we're using the legacy code path, there is nothing further # for us to do here. if not use_pep517: return None if build_system is None: # Either the user has a pyproject.toml with no build-system # section, or the user has no pyproject.toml, but has opted in # explicitly via --use-pep517. # In the absence of any explicit backend specification, we # assume the setuptools backend that most closely emulates the # traditional direct setup.py execution, and require wheel and # a version of setuptools that supports that backend. build_system = { "requires": ["setuptools>=40.8.0", "wheel"], "build-backend": "setuptools.build_meta:__legacy__", } # If we're using PEP 517, we have build system information (either # from pyproject.toml, or defaulted by the code above). # Note that at this point, we do not know if the user has actually # specified a backend, though. assert build_system is not None # Ensure that the build-system section in pyproject.toml conforms # to PEP 518. error_template = ( "{package} has a pyproject.toml file that does not comply " "with PEP 518: {reason}" ) # Specifying the build-system table but not the requires key is invalid if "requires" not in build_system: raise InstallationError( error_template.format(package=req_name, reason=( "it has a 'build-system' table but not " "'build-system.requires' which is mandatory in the table" )) ) # Error out if requires is not a list of strings requires = build_system["requires"] if not _is_list_of_str(requires): raise InstallationError(error_template.format( package=req_name, reason="'build-system.requires' is not a list of strings.", )) backend = build_system.get("build-backend") check = [] # type: List[str] if backend is None: # If the user didn't specify a backend, we assume they want to use # the setuptools backend. But we can't be sure they have included # a version of setuptools which supplies the backend, or wheel # (which is needed by the backend) in their requirements. So we # make a note to check that those requirements are present once # we have set up the environment. # This is quite a lot of work to check for a very specific case. But # the problem is, that case is potentially quite common - projects that # adopted PEP 518 early for the ability to specify requirements to # execute setup.py, but never considered needing to mention the build # tools themselves. The original PEP 518 code had a similar check (but # implemented in a different way). backend = "setuptools.build_meta:__legacy__" check = ["setuptools>=40.8.0", "wheel"] return (requires, backend, check)
[ "def", "load_pyproject_toml", "(", "use_pep517", ",", "# type: Optional[bool]", "pyproject_toml", ",", "# type: str", "setup_py", ",", "# type: str", "req_name", "# type: str", ")", ":", "# type: (...) -> Optional[Tuple[List[str], str, List[str]]]", "has_pyproject", "=", "os", ".", "path", ".", "isfile", "(", "pyproject_toml", ")", "has_setup", "=", "os", ".", "path", ".", "isfile", "(", "setup_py", ")", "if", "has_pyproject", ":", "with", "io", ".", "open", "(", "pyproject_toml", ",", "encoding", "=", "\"utf-8\"", ")", "as", "f", ":", "pp_toml", "=", "pytoml", ".", "load", "(", "f", ")", "build_system", "=", "pp_toml", ".", "get", "(", "\"build-system\"", ")", "else", ":", "build_system", "=", "None", "# The following cases must use PEP 517", "# We check for use_pep517 being non-None and falsey because that means", "# the user explicitly requested --no-use-pep517. The value 0 as", "# opposed to False can occur when the value is provided via an", "# environment variable or config file option (due to the quirk of", "# strtobool() returning an integer in pip's configuration code).", "if", "has_pyproject", "and", "not", "has_setup", ":", "if", "use_pep517", "is", "not", "None", "and", "not", "use_pep517", ":", "raise", "InstallationError", "(", "\"Disabling PEP 517 processing is invalid: \"", "\"project does not have a setup.py\"", ")", "use_pep517", "=", "True", "elif", "build_system", "and", "\"build-backend\"", "in", "build_system", ":", "if", "use_pep517", "is", "not", "None", "and", "not", "use_pep517", ":", "raise", "InstallationError", "(", "\"Disabling PEP 517 processing is invalid: \"", "\"project specifies a build backend of {} \"", "\"in pyproject.toml\"", ".", "format", "(", "build_system", "[", "\"build-backend\"", "]", ")", ")", "use_pep517", "=", "True", "# If we haven't worked out whether to use PEP 517 yet,", "# and the user hasn't explicitly stated a preference,", "# we do so if the project has a pyproject.toml file.", "elif", "use_pep517", "is", "None", ":", "use_pep517", "=", "has_pyproject", "# At this point, we know whether we're going to use PEP 517.", "assert", "use_pep517", "is", "not", "None", "# If we're using the legacy code path, there is nothing further", "# for us to do here.", "if", "not", "use_pep517", ":", "return", "None", "if", "build_system", "is", "None", ":", "# Either the user has a pyproject.toml with no build-system", "# section, or the user has no pyproject.toml, but has opted in", "# explicitly via --use-pep517.", "# In the absence of any explicit backend specification, we", "# assume the setuptools backend that most closely emulates the", "# traditional direct setup.py execution, and require wheel and", "# a version of setuptools that supports that backend.", "build_system", "=", "{", "\"requires\"", ":", "[", "\"setuptools>=40.8.0\"", ",", "\"wheel\"", "]", ",", "\"build-backend\"", ":", "\"setuptools.build_meta:__legacy__\"", ",", "}", "# If we're using PEP 517, we have build system information (either", "# from pyproject.toml, or defaulted by the code above).", "# Note that at this point, we do not know if the user has actually", "# specified a backend, though.", "assert", "build_system", "is", "not", "None", "# Ensure that the build-system section in pyproject.toml conforms", "# to PEP 518.", "error_template", "=", "(", "\"{package} has a pyproject.toml file that does not comply \"", "\"with PEP 518: {reason}\"", ")", "# Specifying the build-system table but not the requires key is invalid", "if", "\"requires\"", "not", "in", "build_system", ":", "raise", "InstallationError", "(", "error_template", ".", "format", "(", "package", "=", "req_name", ",", "reason", "=", "(", "\"it has a 'build-system' table but not \"", "\"'build-system.requires' which is mandatory in the table\"", ")", ")", ")", "# Error out if requires is not a list of strings", "requires", "=", "build_system", "[", "\"requires\"", "]", "if", "not", "_is_list_of_str", "(", "requires", ")", ":", "raise", "InstallationError", "(", "error_template", ".", "format", "(", "package", "=", "req_name", ",", "reason", "=", "\"'build-system.requires' is not a list of strings.\"", ",", ")", ")", "backend", "=", "build_system", ".", "get", "(", "\"build-backend\"", ")", "check", "=", "[", "]", "# type: List[str]", "if", "backend", "is", "None", ":", "# If the user didn't specify a backend, we assume they want to use", "# the setuptools backend. But we can't be sure they have included", "# a version of setuptools which supplies the backend, or wheel", "# (which is needed by the backend) in their requirements. So we", "# make a note to check that those requirements are present once", "# we have set up the environment.", "# This is quite a lot of work to check for a very specific case. But", "# the problem is, that case is potentially quite common - projects that", "# adopted PEP 518 early for the ability to specify requirements to", "# execute setup.py, but never considered needing to mention the build", "# tools themselves. The original PEP 518 code had a similar check (but", "# implemented in a different way).", "backend", "=", "\"setuptools.build_meta:__legacy__\"", "check", "=", "[", "\"setuptools>=40.8.0\"", ",", "\"wheel\"", "]", "return", "(", "requires", ",", "backend", ",", "check", ")" ]
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_internal/pyproject.py#L35-L171
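The heart of the function is the PEP 518 read plus the setuptools legacy fallback. A miniature sketch of that part using the stdlib tomllib (Python 3.11+) instead of pip's vendored pytoml; the helper name is illustrative and the fallback table mirrors the one in the function above:

```python
import tomllib

def read_build_system(pyproject_path):
    # Load the [build-system] table if pyproject.toml exists.
    try:
        with open(pyproject_path, "rb") as f:
            table = tomllib.load(f).get("build-system")
    except FileNotFoundError:
        table = None
    if table is None:
        # No explicit backend: assume the setuptools legacy shim.
        table = {"requires": ["setuptools>=40.8.0", "wheel"],
                 "build-backend": "setuptools.build_meta:__legacy__"}
    if "requires" not in table:
        raise ValueError("build-system table must declare 'requires' (PEP 518)")
    return table
```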
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_gdi.py
python
StockGDI.GetCursor
(*args, **kwargs)
return _gdi_.StockGDI_GetCursor(*args, **kwargs)
GetCursor(int item) -> Cursor
GetCursor(int item) -> Cursor
[ "GetCursor", "(", "int", "item", ")", "-", ">", "Cursor" ]
def GetCursor(*args, **kwargs): """GetCursor(int item) -> Cursor""" return _gdi_.StockGDI_GetCursor(*args, **kwargs)
[ "def", "GetCursor", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "StockGDI_GetCursor", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_gdi.py#L6854-L6856
chromiumembedded/cef
80caf947f3fe2210e5344713c5281d8af9bdc295
tools/exec_util.py
python
exec_cmd
(cmd, path, input_string=None)
return {'out': out.decode('utf-8'), 'err': err.decode('utf-8'), 'ret': ret}
Execute the specified command and return the result.
Execute the specified command and return the result.
[ "Execute", "the", "specified", "command", "and", "return", "the", "result", "." ]
def exec_cmd(cmd, path, input_string=None): """ Execute the specified command and return the result. """ out = '' err = '' ret = -1 parts = cmd.split() try: if input_string is None: process = Popen( parts, cwd=path, stdout=PIPE, stderr=PIPE, shell=(sys.platform == 'win32')) out, err = process.communicate() ret = process.returncode else: process = Popen( parts, cwd=path, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=(sys.platform == 'win32')) out, err = process.communicate(input=input_string) ret = process.returncode except IOError as e: (errno, strerror) = e.args raise except: raise return {'out': out.decode('utf-8'), 'err': err.decode('utf-8'), 'ret': ret}
[ "def", "exec_cmd", "(", "cmd", ",", "path", ",", "input_string", "=", "None", ")", ":", "out", "=", "''", "err", "=", "''", "ret", "=", "-", "1", "parts", "=", "cmd", ".", "split", "(", ")", "try", ":", "if", "input_string", "is", "None", ":", "process", "=", "Popen", "(", "parts", ",", "cwd", "=", "path", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "(", "sys", ".", "platform", "==", "'win32'", ")", ")", "out", ",", "err", "=", "process", ".", "communicate", "(", ")", "ret", "=", "process", ".", "returncode", "else", ":", "process", "=", "Popen", "(", "parts", ",", "cwd", "=", "path", ",", "stdin", "=", "PIPE", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "(", "sys", ".", "platform", "==", "'win32'", ")", ")", "out", ",", "err", "=", "process", ".", "communicate", "(", "input", "=", "input_string", ")", "ret", "=", "process", ".", "returncode", "except", "IOError", "as", "e", ":", "(", "errno", ",", "strerror", ")", "=", "e", ".", "args", "raise", "except", ":", "raise", "return", "{", "'out'", ":", "out", ".", "decode", "(", "'utf-8'", ")", ",", "'err'", ":", "err", ".", "decode", "(", "'utf-8'", ")", ",", "'ret'", ":", "ret", "}" ]
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/exec_util.py#L10-L41
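The same capture-and-decode contract written with the newer subprocess.run, for comparison (a sketch, not the cef tool's code; note that cmd.split() means quoted arguments are not supported):

```python
import subprocess
import sys

def run_cmd(cmd, path, input_string=None):
    # Capture stdout/stderr as bytes, then decode, like exec_cmd above.
    completed = subprocess.run(
        cmd.split(), cwd=path, input=input_string,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        shell=(sys.platform == 'win32'))
    return {'out': completed.stdout.decode('utf-8'),
            'err': completed.stderr.decode('utf-8'),
            'ret': completed.returncode}

print(run_cmd('git --version', '.')['out'].strip())  # needs git on PATH
```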
albertz/openlierox
d316c14a8eb57848ef56e9bfa7b23a56f694a51b
tools/DedicatedServerVideo/gdata/photos/__init__.py
python
TagEntry.GetAlbumUri
(self)
return href[:pos]
Return the uri to the AlbumEntry containing this tag
Return the uri to the AlbumEntry containing this tag
[ "Return", "the", "uri", "to", "the", "AlbumEntry", "containing", "this", "tag" ]
def GetAlbumUri(self): """Return the uri to the AlbumEntry containing this tag""" href = self.GetSelfLink().href pos = href.find('/photoid') if pos == -1: return None return href[:pos]
[ "def", "GetAlbumUri", "(", "self", ")", ":", "href", "=", "self", ".", "GetSelfLink", "(", ")", ".", "href", "pos", "=", "href", ".", "find", "(", "'/photoid'", ")", "if", "pos", "==", "-", "1", ":", "return", "None", "return", "href", "[", ":", "pos", "]" ]
https://github.com/albertz/openlierox/blob/d316c14a8eb57848ef56e9bfa7b23a56f694a51b/tools/DedicatedServerVideo/gdata/photos/__init__.py#L844-L851
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/jinja2/utils.py
python
generate_lorem_ipsum
(n=5, html=True, min=20, max=100)
return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result))
Generate some lorem impsum for the template.
Generate some lorem impsum for the template.
[ "Generate", "some", "lorem", "impsum", "for", "the", "template", "." ]
def generate_lorem_ipsum(n=5, html=True, min=20, max=100): """Generate some lorem impsum for the template.""" from jinja2.constants import LOREM_IPSUM_WORDS from random import choice, randrange words = LOREM_IPSUM_WORDS.split() result = [] for _ in range(n): next_capitalized = True last_comma = last_fullstop = 0 word = None last = None p = [] # each paragraph contains out of 20 to 100 words. for idx, _ in enumerate(range(randrange(min, max))): while True: word = choice(words) if word != last: last = word break if next_capitalized: word = word.capitalize() next_capitalized = False # add commas if idx - randrange(3, 8) > last_comma: last_comma = idx last_fullstop += 2 word += ',' # add end of sentences if idx - randrange(10, 20) > last_fullstop: last_comma = last_fullstop = idx word += '.' next_capitalized = True p.append(word) # ensure that the paragraph ends with a dot. p = u' '.join(p) if p.endswith(','): p = p[:-1] + '.' elif not p.endswith('.'): p += '.' result.append(p) if not html: return u'\n\n'.join(result) return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result))
[ "def", "generate_lorem_ipsum", "(", "n", "=", "5", ",", "html", "=", "True", ",", "min", "=", "20", ",", "max", "=", "100", ")", ":", "from", "jinja2", ".", "constants", "import", "LOREM_IPSUM_WORDS", "from", "random", "import", "choice", ",", "randrange", "words", "=", "LOREM_IPSUM_WORDS", ".", "split", "(", ")", "result", "=", "[", "]", "for", "_", "in", "range", "(", "n", ")", ":", "next_capitalized", "=", "True", "last_comma", "=", "last_fullstop", "=", "0", "word", "=", "None", "last", "=", "None", "p", "=", "[", "]", "# each paragraph contains out of 20 to 100 words.", "for", "idx", ",", "_", "in", "enumerate", "(", "range", "(", "randrange", "(", "min", ",", "max", ")", ")", ")", ":", "while", "True", ":", "word", "=", "choice", "(", "words", ")", "if", "word", "!=", "last", ":", "last", "=", "word", "break", "if", "next_capitalized", ":", "word", "=", "word", ".", "capitalize", "(", ")", "next_capitalized", "=", "False", "# add commas", "if", "idx", "-", "randrange", "(", "3", ",", "8", ")", ">", "last_comma", ":", "last_comma", "=", "idx", "last_fullstop", "+=", "2", "word", "+=", "','", "# add end of sentences", "if", "idx", "-", "randrange", "(", "10", ",", "20", ")", ">", "last_fullstop", ":", "last_comma", "=", "last_fullstop", "=", "idx", "word", "+=", "'.'", "next_capitalized", "=", "True", "p", ".", "append", "(", "word", ")", "# ensure that the paragraph ends with a dot.", "p", "=", "u' '", ".", "join", "(", "p", ")", "if", "p", ".", "endswith", "(", "','", ")", ":", "p", "=", "p", "[", ":", "-", "1", "]", "+", "'.'", "elif", "not", "p", ".", "endswith", "(", "'.'", ")", ":", "p", "+=", "'.'", "result", ".", "append", "(", "p", ")", "if", "not", "html", ":", "return", "u'\\n\\n'", ".", "join", "(", "result", ")", "return", "Markup", "(", "u'\\n'", ".", "join", "(", "u'<p>%s</p>'", "%", "escape", "(", "x", ")", "for", "x", "in", "result", ")", ")" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/jinja2/utils.py#L230-L276
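In templates this function backs the lipsum() global; it can also be called directly from Python (assumes the jinja2 package is installed):

```python
from jinja2.utils import generate_lorem_ipsum

# Two plain-text paragraphs of roughly 10-20 words each.
print(generate_lorem_ipsum(n=2, html=False, min=10, max=20))
```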
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/parso/py3/parso/tree.py
python
search_ancestor
(node: 'NodeOrLeaf', *node_types: str)
return None
Recursively looks at the parents of a node and returns the first found node that matches ``node_types``. Returns ``None`` if no matching node is found. This function is deprecated, use :meth:`NodeOrLeaf.search_ancestor` instead. :param node: The ancestors of this node will be checked. :param node_types: type names that are searched for.
Recursively looks at the parents of a node and returns the first found node that matches ``node_types``. Returns ``None`` if no matching node is found.
[ "Recursively", "looks", "at", "the", "parents", "of", "a", "node", "and", "returns", "the", "first", "found", "node", "that", "matches", "node_types", ".", "Returns", "None", "if", "no", "matching", "node", "is", "found", "." ]
def search_ancestor(node: 'NodeOrLeaf', *node_types: str) -> 'Optional[BaseNode]': """ Recursively looks at the parents of a node and returns the first found node that matches ``node_types``. Returns ``None`` if no matching node is found. This function is deprecated, use :meth:`NodeOrLeaf.search_ancestor` instead. :param node: The ancestors of this node will be checked. :param node_types: type names that are searched for. """ n = node.parent while n is not None: if n.type in node_types: return n n = n.parent return None
[ "def", "search_ancestor", "(", "node", ":", "'NodeOrLeaf'", ",", "*", "node_types", ":", "str", ")", "->", "'Optional[BaseNode]'", ":", "n", "=", "node", ".", "parent", "while", "n", "is", "not", "None", ":", "if", "n", ".", "type", "in", "node_types", ":", "return", "n", "n", "=", "n", ".", "parent", "return", "None" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/parso/py3/parso/tree.py#L7-L22
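The walk itself needs nothing from parso beyond .parent and .type, so it can be checked with a tiny stand-in node class (a sketch; parso's real nodes carry much more):

```python
class Node:
    def __init__(self, node_type, parent=None):
        self.type = node_type
        self.parent = parent

def search_ancestor(node, *node_types):
    # Climb .parent links until a node's type matches, else None.
    n = node.parent
    while n is not None:
        if n.type in node_types:
            return n
        n = n.parent
    return None

mod = Node("file_input")
func = Node("funcdef", parent=mod)
name = Node("name", parent=func)
assert search_ancestor(name, "funcdef") is func
assert search_ancestor(name, "classdef") is None
```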
francinexue/xuefu
b6ff79747a42e020588c0c0a921048e08fe4680c
cnx/tickds.py
python
TickDataSeries.getHighDataSeries
(self)
return self.__highDS
Returns a :class:`pyalgotrade.dataseries.DataSeries` with the high prices.
Returns a :class:`pyalgotrade.dataseries.DataSeries` with the high prices.
[ "Returns", "a", ":", "class", ":", "pyalgotrade", ".", "dataseries", ".", "DataSeries", "with", "the", "high", "prices", "." ]
def getHighDataSeries(self): """Returns a :class:`pyalgotrade.dataseries.DataSeries` with the high prices.""" return self.__highDS
[ "def", "getHighDataSeries", "(", "self", ")", ":", "return", "self", ".", "__highDS" ]
https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/cnx/tickds.py#L141-L143
oracle/graaljs
36a56e8e993d45fc40939a3a4d9c0c24990720f1
graal-nodejs/tools/gyp/pylib/gyp/MSVSSettings.py
python
ConvertVCMacrosToMSBuild
(s)
return s
Convert the MSVS macros found in the string to the MSBuild equivalent. This list is probably not exhaustive. Add as needed.
Convert the MSVS macros found in the string to the MSBuild equivalent.
[ "Convert", "the", "MSVS", "macros", "found", "in", "the", "string", "to", "the", "MSBuild", "equivalent", "." ]
def ConvertVCMacrosToMSBuild(s): """Convert the MSVS macros found in the string to the MSBuild equivalent. This list is probably not exhaustive. Add as needed. """ if "$" in s: replace_map = { "$(ConfigurationName)": "$(Configuration)", "$(InputDir)": "%(RelativeDir)", "$(InputExt)": "%(Extension)", "$(InputFileName)": "%(Filename)%(Extension)", "$(InputName)": "%(Filename)", "$(InputPath)": "%(Identity)", "$(ParentName)": "$(ProjectFileName)", "$(PlatformName)": "$(Platform)", "$(SafeInputName)": "%(Filename)", } for old, new in replace_map.items(): s = s.replace(old, new) s = FixVCMacroSlashes(s) return s
[ "def", "ConvertVCMacrosToMSBuild", "(", "s", ")", ":", "if", "\"$\"", "in", "s", ":", "replace_map", "=", "{", "\"$(ConfigurationName)\"", ":", "\"$(Configuration)\"", ",", "\"$(InputDir)\"", ":", "\"%(RelativeDir)\"", ",", "\"$(InputExt)\"", ":", "\"%(Extension)\"", ",", "\"$(InputFileName)\"", ":", "\"%(Filename)%(Extension)\"", ",", "\"$(InputName)\"", ":", "\"%(Filename)\"", ",", "\"$(InputPath)\"", ":", "\"%(Identity)\"", ",", "\"$(ParentName)\"", ":", "\"$(ProjectFileName)\"", ",", "\"$(PlatformName)\"", ":", "\"$(Platform)\"", ",", "\"$(SafeInputName)\"", ":", "\"%(Filename)\"", ",", "}", "for", "old", ",", "new", "in", "replace_map", ".", "items", "(", ")", ":", "s", "=", "s", ".", "replace", "(", "old", ",", "new", ")", "s", "=", "FixVCMacroSlashes", "(", "s", ")", "return", "s" ]
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/tools/gyp/pylib/gyp/MSVSSettings.py#L422-L442
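The conversion is a plain substitution table; spot-checking a subset of it inline (the mappings below are copied from the function above, the input string is fabricated):

```python
replace_map = {
    "$(ConfigurationName)": "$(Configuration)",
    "$(InputName)": "%(Filename)",
    "$(InputExt)": "%(Extension)",
}
s = "$(ConfigurationName)/$(InputName)$(InputExt).obj"
for old, new in replace_map.items():
    s = s.replace(old, new)
print(s)  # $(Configuration)/%(Filename)%(Extension).obj
```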
ceph/ceph
959663007321a369c83218414a29bd9dbc8bda3a
qa/tasks/ceph_manager.py
python
CephManager.expand_pool
(self, pool_name, by, max_pgs)
Increase the number of pgs in a pool
Increase the number of pgs in a pool
[ "Increase", "the", "number", "of", "pgs", "in", "a", "pool" ]
def expand_pool(self, pool_name, by, max_pgs): """ Increase the number of pgs in a pool """ with self.lock: assert isinstance(pool_name, str) assert isinstance(by, int) assert pool_name in self.pools if self.get_num_creating() > 0: return False if (self.pools[pool_name] + by) > max_pgs: return False self.log("increase pool size by %d" % (by,)) new_pg_num = self.pools[pool_name] + by self.set_pool_property(pool_name, "pg_num", new_pg_num) self.pools[pool_name] = new_pg_num return True
[ "def", "expand_pool", "(", "self", ",", "pool_name", ",", "by", ",", "max_pgs", ")", ":", "with", "self", ".", "lock", ":", "assert", "isinstance", "(", "pool_name", ",", "str", ")", "assert", "isinstance", "(", "by", ",", "int", ")", "assert", "pool_name", "in", "self", ".", "pools", "if", "self", ".", "get_num_creating", "(", ")", ">", "0", ":", "return", "False", "if", "(", "self", ".", "pools", "[", "pool_name", "]", "+", "by", ")", ">", "max_pgs", ":", "return", "False", "self", ".", "log", "(", "\"increase pool size by %d\"", "%", "(", "by", ",", ")", ")", "new_pg_num", "=", "self", ".", "pools", "[", "pool_name", "]", "+", "by", "self", ".", "set_pool_property", "(", "pool_name", ",", "\"pg_num\"", ",", "new_pg_num", ")", "self", ".", "pools", "[", "pool_name", "]", "=", "new_pg_num", "return", "True" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/qa/tasks/ceph_manager.py#L2234-L2250
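Outside the teuthology harness, set_pool_property corresponds to the ceph CLI's pool-set command; a hedged standalone sketch of the same guard-then-grow logic (assumes a reachable cluster and the ceph binary on PATH; the function signature is illustrative):

```python
import subprocess

def expand_pool(pool_name, by, current_pg_num, max_pgs):
    # Grow a pool's pg_num via the CLI, mirroring the bounds check above.
    new_pg_num = current_pg_num + by
    if new_pg_num > max_pgs:
        return False
    subprocess.run(["ceph", "osd", "pool", "set", pool_name,
                    "pg_num", str(new_pg_num)], check=True)
    return True
```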
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_gdi.py
python
BitmapFromBuffer
(width, height, dataBuffer, alphaBuffer=None)
Creates a `wx.Bitmap` from the data in dataBuffer. The dataBuffer parameter must be a Python object that implements the buffer interface, such as a string, array, etc. The dataBuffer object is expected to contain a series of RGB bytes and be width*height*3 bytes long. A buffer object can optionally be supplied for the image's alpha channel data, and it is expected to be width*height bytes long. On Windows and Mac the RGB values are 'premultiplied' by the alpha values. (The other platforms do the multiplication themselves.) Unlike `wx.ImageFromBuffer` the bitmap created with this function does not share the memory buffer with the buffer object. This is because the native pixel buffer format varies on different platforms, and so instead an efficient as possible copy of the data is made from the buffer objects to the bitmap's native pixel buffer. For direct access to a bitmap's pixel buffer see `wx.NativePixelData` and `wx.AlphaPixelData`. :see: `wx.Bitmap`, `wx.BitmapFromBufferRGBA`, `wx.NativePixelData`, `wx.AlphaPixelData`, `wx.ImageFromBuffer`
Creates a `wx.Bitmap` from the data in dataBuffer. The dataBuffer parameter must be a Python object that implements the buffer interface, such as a string, array, etc. The dataBuffer object is expected to contain a series of RGB bytes and be width*height*3 bytes long. A buffer object can optionally be supplied for the image's alpha channel data, and it is expected to be width*height bytes long. On Windows and Mac the RGB values are 'premultiplied' by the alpha values. (The other platforms do the multiplication themselves.)
[ "Creates", "a", "wx", ".", "Bitmap", "from", "the", "data", "in", "dataBuffer", ".", "The", "dataBuffer", "parameter", "must", "be", "a", "Python", "object", "that", "implements", "the", "buffer", "interface", "such", "as", "a", "string", "array", "etc", ".", "The", "dataBuffer", "object", "is", "expected", "to", "contain", "a", "series", "of", "RGB", "bytes", "and", "be", "width", "*", "height", "*", "3", "bytes", "long", ".", "A", "buffer", "object", "can", "optionally", "be", "supplied", "for", "the", "image", "s", "alpha", "channel", "data", "and", "it", "is", "expected", "to", "be", "width", "*", "height", "bytes", "long", ".", "On", "Windows", "and", "Mac", "the", "RGB", "values", "are", "premultiplied", "by", "the", "alpha", "values", ".", "(", "The", "other", "platforms", "do", "the", "multiplication", "themselves", ".", ")" ]
def BitmapFromBuffer(width, height, dataBuffer, alphaBuffer=None): """ Creates a `wx.Bitmap` from the data in dataBuffer. The dataBuffer parameter must be a Python object that implements the buffer interface, such as a string, array, etc. The dataBuffer object is expected to contain a series of RGB bytes and be width*height*3 bytes long. A buffer object can optionally be supplied for the image's alpha channel data, and it is expected to be width*height bytes long. On Windows and Mac the RGB values are 'premultiplied' by the alpha values. (The other platforms do the multiplication themselves.) Unlike `wx.ImageFromBuffer` the bitmap created with this function does not share the memory buffer with the buffer object. This is because the native pixel buffer format varies on different platforms, and so instead an efficient as possible copy of the data is made from the buffer objects to the bitmap's native pixel buffer. For direct access to a bitmap's pixel buffer see `wx.NativePixelData` and `wx.AlphaPixelData`. :see: `wx.Bitmap`, `wx.BitmapFromBufferRGBA`, `wx.NativePixelData`, `wx.AlphaPixelData`, `wx.ImageFromBuffer` """ if alphaBuffer is not None: return _gdi_._BitmapFromBufferAlpha(width, height, dataBuffer, alphaBuffer) else: return _gdi_._BitmapFromBuffer(width, height, dataBuffer)
[ "def", "BitmapFromBuffer", "(", "width", ",", "height", ",", "dataBuffer", ",", "alphaBuffer", "=", "None", ")", ":", "if", "alphaBuffer", "is", "not", "None", ":", "return", "_gdi_", ".", "_BitmapFromBufferAlpha", "(", "width", ",", "height", ",", "dataBuffer", ",", "alphaBuffer", ")", "else", ":", "return", "_gdi_", ".", "_BitmapFromBuffer", "(", "width", ",", "height", ",", "dataBuffer", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_gdi.py#L912-L938
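A minimal usage sketch for the record above, assuming a classic wxPython install; the 32x32 size, solid red fill, and half-transparent alpha are illustrative only.

import wx

width, height = 32, 32
rgb_data = bytearray([255, 0, 0] * width * height)   # width*height*3 RGB bytes
alpha_data = bytearray([128] * width * height)       # width*height alpha bytes

app = wx.App(False)  # a wx.App must exist before bitmaps can be created
bmp = wx.BitmapFromBuffer(width, height, rgb_data, alpha_data)
assert bmp.IsOk()
# On Windows and Mac the RGB values are premultiplied by the alpha values
# during the copy; the source buffers themselves are left untouched.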
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_gdi.py
python
Pen.SetJoin
(*args, **kwargs)
return _gdi_.Pen_SetJoin(*args, **kwargs)
SetJoin(self, int join_style)
SetJoin(self, int join_style)
[ "SetJoin", "(", "self", "int", "join_style", ")" ]
def SetJoin(*args, **kwargs): """SetJoin(self, int join_style)""" return _gdi_.Pen_SetJoin(*args, **kwargs)
[ "def", "SetJoin", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "Pen_SetJoin", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L433-L435
NicknineTheEagle/TF2-Base
20459c5a7fbc995b6bf54fa85c2f62a101e9fb64
src/thirdparty/protobuf-2.3.0/python/google/protobuf/service.py
python
RpcController.Reset
(self)
Resets the RpcController to its initial state. After the RpcController has been reset, it may be reused in a new call. Must not be called while an RPC is in progress.
Resets the RpcController to its initial state.
[ "Resets", "the", "RpcController", "to", "its", "initial", "state", "." ]
def Reset(self): """Resets the RpcController to its initial state. After the RpcController has been reset, it may be reused in a new call. Must not be called while an RPC is in progress. """ raise NotImplementedError
[ "def", "Reset", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/NicknineTheEagle/TF2-Base/blob/20459c5a7fbc995b6bf54fa85c2f62a101e9fb64/src/thirdparty/protobuf-2.3.0/python/google/protobuf/service.py#L132-L138
carla-simulator/carla
8854804f4d7748e14d937ec763a2912823a7e5f5
PythonAPI/carla/agents/navigation/local_planner.py
python
LocalPlanner.__init__
(self, vehicle, opt_dict={})
:param vehicle: actor to apply the local planner logic onto :param opt_dict: dictionary of arguments with different parameters: dt: time between simulation steps target_speed: desired cruise speed in Km/h sampling_radius: distance between the waypoints part of the plan lateral_control_dict: values of the lateral PID controller longitudinal_control_dict: values of the longitudinal PID controller max_throttle: maximum throttle applied to the vehicle max_brake: maximum brake applied to the vehicle max_steering: maximum steering applied to the vehicle offset: distance between the route waypoints and the center of the lane
:param vehicle: actor to apply the local planner logic onto :param opt_dict: dictionary of arguments with different parameters: dt: time between simulation steps target_speed: desired cruise speed in Km/h sampling_radius: distance between the waypoints part of the plan lateral_control_dict: values of the lateral PID controller longitudinal_control_dict: values of the longitudinal PID controller max_throttle: maximum throttle applied to the vehicle max_brake: maximum brake applied to the vehicle max_steering: maximum steering applied to the vehicle offset: distance between the route waypoints and the center of the lane
[ ":", "param", "vehicle", ":", "actor", "to", "apply", "to", "local", "planner", "logic", "onto", ":", "param", "opt_dict", ":", "dictionary", "of", "arguments", "with", "different", "parameters", ":", "dt", ":", "time", "between", "simulation", "steps", "target_speed", ":", "desired", "cruise", "speed", "in", "Km", "/", "h", "sampling_radius", ":", "distance", "between", "the", "waypoints", "part", "of", "the", "plan", "lateral_control_dict", ":", "values", "of", "the", "lateral", "PID", "controller", "longitudinal_control_dict", ":", "values", "of", "the", "longitudinal", "PID", "controller", "max_throttle", ":", "maximum", "throttle", "applied", "to", "the", "vehicle", "max_brake", ":", "maximum", "brake", "applied", "to", "the", "vehicle", "max_steering", ":", "maximum", "steering", "applied", "to", "the", "vehicle", "offset", ":", "distance", "between", "the", "route", "waypoints", "and", "the", "center", "of", "the", "lane" ]
def __init__(self, vehicle, opt_dict={}): """ :param vehicle: actor to apply to local planner logic onto :param opt_dict: dictionary of arguments with different parameters: dt: time between simulation steps target_speed: desired cruise speed in Km/h sampling_radius: distance between the waypoints part of the plan lateral_control_dict: values of the lateral PID controller longitudinal_control_dict: values of the longitudinal PID controller max_throttle: maximum throttle applied to the vehicle max_brake: maximum brake applied to the vehicle max_steering: maximum steering applied to the vehicle offset: distance between the route waypoints and the center of the lane """ self._vehicle = vehicle self._world = self._vehicle.get_world() self._map = self._world.get_map() self._vehicle_controller = None self.target_waypoint = None self.target_road_option = None self._waypoints_queue = deque(maxlen=10000) self._min_waypoint_queue_length = 100 self._stop_waypoint_creation = False # Base parameters self._dt = 1.0 / 20.0 self._target_speed = 20.0 # Km/h self._sampling_radius = 2.0 self._args_lateral_dict = {'K_P': 1.95, 'K_I': 0.05, 'K_D': 0.2, 'dt': self._dt} self._args_longitudinal_dict = {'K_P': 1.0, 'K_I': 0.05, 'K_D': 0, 'dt': self._dt} self._max_throt = 0.75 self._max_brake = 0.3 self._max_steer = 0.8 self._offset = 0 self._base_min_distance = 3.0 self._follow_speed_limits = False # Overload parameters if opt_dict: if 'dt' in opt_dict: self._dt = opt_dict['dt'] if 'target_speed' in opt_dict: self._target_speed = opt_dict['target_speed'] if 'sampling_radius' in opt_dict: self._sampling_radius = opt_dict['sampling_radius'] if 'lateral_control_dict' in opt_dict: self._args_lateral_dict = opt_dict['lateral_control_dict'] if 'longitudinal_control_dict' in opt_dict: self._args_longitudinal_dict = opt_dict['longitudinal_control_dict'] if 'max_throttle' in opt_dict: self._max_throt = opt_dict['max_throttle'] if 'max_brake' in opt_dict: self._max_brake = opt_dict['max_brake'] if 'max_steering' in opt_dict: self._max_steer = opt_dict['max_steering'] if 'offset' in opt_dict: self._offset = opt_dict['offset'] if 'base_min_distance' in opt_dict: self._base_min_distance = opt_dict['base_min_distance'] if 'follow_speed_limits' in opt_dict: self._follow_speed_limits = opt_dict['follow_speed_limits'] # initializing controller self._init_controller()
[ "def", "__init__", "(", "self", ",", "vehicle", ",", "opt_dict", "=", "{", "}", ")", ":", "self", ".", "_vehicle", "=", "vehicle", "self", ".", "_world", "=", "self", ".", "_vehicle", ".", "get_world", "(", ")", "self", ".", "_map", "=", "self", ".", "_world", ".", "get_map", "(", ")", "self", ".", "_vehicle_controller", "=", "None", "self", ".", "target_waypoint", "=", "None", "self", ".", "target_road_option", "=", "None", "self", ".", "_waypoints_queue", "=", "deque", "(", "maxlen", "=", "10000", ")", "self", ".", "_min_waypoint_queue_length", "=", "100", "self", ".", "_stop_waypoint_creation", "=", "False", "# Base parameters", "self", ".", "_dt", "=", "1.0", "/", "20.0", "self", ".", "_target_speed", "=", "20.0", "# Km/h", "self", ".", "_sampling_radius", "=", "2.0", "self", ".", "_args_lateral_dict", "=", "{", "'K_P'", ":", "1.95", ",", "'K_I'", ":", "0.05", ",", "'K_D'", ":", "0.2", ",", "'dt'", ":", "self", ".", "_dt", "}", "self", ".", "_args_longitudinal_dict", "=", "{", "'K_P'", ":", "1.0", ",", "'K_I'", ":", "0.05", ",", "'K_D'", ":", "0", ",", "'dt'", ":", "self", ".", "_dt", "}", "self", ".", "_max_throt", "=", "0.75", "self", ".", "_max_brake", "=", "0.3", "self", ".", "_max_steer", "=", "0.8", "self", ".", "_offset", "=", "0", "self", ".", "_base_min_distance", "=", "3.0", "self", ".", "_follow_speed_limits", "=", "False", "# Overload parameters", "if", "opt_dict", ":", "if", "'dt'", "in", "opt_dict", ":", "self", ".", "_dt", "=", "opt_dict", "[", "'dt'", "]", "if", "'target_speed'", "in", "opt_dict", ":", "self", ".", "_target_speed", "=", "opt_dict", "[", "'target_speed'", "]", "if", "'sampling_radius'", "in", "opt_dict", ":", "self", ".", "_sampling_radius", "=", "opt_dict", "[", "'sampling_radius'", "]", "if", "'lateral_control_dict'", "in", "opt_dict", ":", "self", ".", "_args_lateral_dict", "=", "opt_dict", "[", "'lateral_control_dict'", "]", "if", "'longitudinal_control_dict'", "in", "opt_dict", ":", "self", ".", "_args_longitudinal_dict", "=", "opt_dict", "[", "'longitudinal_control_dict'", "]", "if", "'max_throttle'", "in", "opt_dict", ":", "self", ".", "_max_throt", "=", "opt_dict", "[", "'max_throttle'", "]", "if", "'max_brake'", "in", "opt_dict", ":", "self", ".", "_max_brake", "=", "opt_dict", "[", "'max_brake'", "]", "if", "'max_steering'", "in", "opt_dict", ":", "self", ".", "_max_steer", "=", "opt_dict", "[", "'max_steering'", "]", "if", "'offset'", "in", "opt_dict", ":", "self", ".", "_offset", "=", "opt_dict", "[", "'offset'", "]", "if", "'base_min_distance'", "in", "opt_dict", ":", "self", ".", "_base_min_distance", "=", "opt_dict", "[", "'base_min_distance'", "]", "if", "'follow_speed_limits'", "in", "opt_dict", ":", "self", ".", "_follow_speed_limits", "=", "opt_dict", "[", "'follow_speed_limits'", "]", "# initializing controller", "self", ".", "_init_controller", "(", ")" ]
https://github.com/carla-simulator/carla/blob/8854804f4d7748e14d937ec763a2912823a7e5f5/PythonAPI/carla/agents/navigation/local_planner.py#L43-L108
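A usage sketch for the constructor above, assuming a running CARLA server on the default port and an already-spawned vehicle actor; the opt_dict keys are the documented ones, the values are illustrative.

import carla
from agents.navigation.local_planner import LocalPlanner

client = carla.Client('localhost', 2000)
client.set_timeout(10.0)
world = client.get_world()
vehicle = world.get_actors().filter('vehicle.*')[0]  # assumes one vehicle exists

planner = LocalPlanner(vehicle, opt_dict={
    'dt': 0.05,              # simulation step, seconds
    'target_speed': 30.0,    # km/h
    'sampling_radius': 2.0,  # meters between planned waypoints
    'max_throttle': 0.7,
    'offset': 0.0,           # stay centered in the lane
})
control = planner.run_step()   # one planning tick -> carla.VehicleControl
vehicle.apply_control(control)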
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
third_party/protobuf/python/google/protobuf/internal/containers.py
python
RepeatedScalarFieldContainer.extend
(self, elem_seq)
Extends by appending the given sequence. Similar to list.extend().
Extends by appending the given sequence. Similar to list.extend().
[ "Extends", "by", "appending", "the", "given", "sequence", ".", "Similar", "to", "list", ".", "extend", "()", "." ]
def extend(self, elem_seq): """Extends by appending the given sequence. Similar to list.extend().""" if not elem_seq: return new_values = [] for elem in elem_seq: self._type_checker.CheckValue(elem) new_values.append(elem) self._values.extend(new_values) self._message_listener.Modified()
[ "def", "extend", "(", "self", ",", "elem_seq", ")", ":", "if", "not", "elem_seq", ":", "return", "new_values", "=", "[", "]", "for", "elem", "in", "elem_seq", ":", "self", ".", "_type_checker", ".", "CheckValue", "(", "elem", ")", "new_values", ".", "append", "(", "elem", ")", "self", ".", "_values", ".", "extend", "(", "new_values", ")", "self", ".", "_message_listener", ".", "Modified", "(", ")" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/third_party/protobuf/python/google/protobuf/internal/containers.py#L118-L128
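A sketch of how this container method surfaces through the generated-message API; example_pb2 with a `repeated int32 values = 1;` field is hypothetical, used only for illustration.

import example_pb2  # hypothetical module compiled from a .proto definition

msg = example_pb2.Example()
msg.values.extend([1, 2, 3])   # each element is type-checked, then appended in one batch
msg.values.extend([])          # empty sequence: the method returns early
try:
    msg.values.extend(['x'])   # wrong type: the type checker raises before any append
except TypeError as e:
    print(e)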
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/traitlets/py2/traitlets/traitlets.py
python
_validate_link
(*tuples)
Validate arguments for traitlet link functions
Validate arguments for traitlet link functions
[ "Validate", "arguments", "for", "traitlet", "link", "functions" ]
def _validate_link(*tuples): """Validate arguments for traitlet link functions""" for t in tuples: if not len(t) == 2: raise TypeError("Each linked traitlet must be specified as (HasTraits, 'trait_name'), not %r" % t) obj, trait_name = t if not isinstance(obj, HasTraits): raise TypeError("Each object must be HasTraits, not %r" % type(obj)) if not trait_name in obj.traits(): raise TypeError("%r has no trait %r" % (obj, trait_name))
[ "def", "_validate_link", "(", "*", "tuples", ")", ":", "for", "t", "in", "tuples", ":", "if", "not", "len", "(", "t", ")", "==", "2", ":", "raise", "TypeError", "(", "\"Each linked traitlet must be specified as (HasTraits, 'trait_name'), not %r\"", "%", "t", ")", "obj", ",", "trait_name", "=", "t", "if", "not", "isinstance", "(", "obj", ",", "HasTraits", ")", ":", "raise", "TypeError", "(", "\"Each object must be HasTraits, not %r\"", "%", "type", "(", "obj", ")", ")", "if", "not", "trait_name", "in", "obj", ".", "traits", "(", ")", ":", "raise", "TypeError", "(", "\"%r has no trait %r\"", "%", "(", "obj", ",", "trait_name", ")", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/traitlets/py2/traitlets/traitlets.py#L243-L252
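A sketch of what this validator accepts and rejects, exercised through the public traitlets.link API that calls it.

from traitlets import HasTraits, Int, link

class Model(HasTraits):
    value = Int(0)

a, b = Model(), Model()
lnk = link((a, 'value'), (b, 'value'))  # both tuples pass _validate_link
a.value = 5
assert b.value == 5                     # the traits are now kept in sync

try:
    link((a, 'value'), (object(), 'value'))  # second object is not HasTraits
except TypeError as e:
    print(e)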
kiwix/kiwix-xulrunner
38f4a10ae4b1585c16cb11730bb0dcc4924ae19f
android/gen-std-icon.py
python
copy_to
(src, dst)
copy source content (local or remote) to local file
copy source content (local or remote) to local file
[ "copy", "source", "content", "(", "local", "or", "remote", ")", "to", "local", "file" ]
def copy_to(src, dst): ''' copy source content (local or remote) to local file ''' local = None if is_remote_path(src): local = tempfile.NamedTemporaryFile(delete=False) download_remote_file(src, local.name) src = local.name shutil.copy(src, dst) if local is not None: os.remove(local.name)
[ "def", "copy_to", "(", "src", ",", "dst", ")", ":", "local", "=", "None", "if", "is_remote_path", "(", "src", ")", ":", "local", "=", "tempfile", ".", "NamedTemporaryFile", "(", "delete", "=", "False", ")", "download_remote_file", "(", "src", ",", "local", ".", "name", ")", "src", "=", "local", ".", "name", "shutil", ".", "copy", "(", "src", ",", "dst", ")", "if", "local", "is", "not", "None", ":", "os", ".", "remove", "(", "local", ".", "name", ")" ]
https://github.com/kiwix/kiwix-xulrunner/blob/38f4a10ae4b1585c16cb11730bb0dcc4924ae19f/android/gen-std-icon.py#L123-L132
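A usage sketch for the helper above; the paths and URL are illustrative, and is_remote_path/download_remote_file are assumed to come from the same script.

copy_to('res/icon.png', '/tmp/icon.png')                 # plain local copy
copy_to('http://example.com/icon.png', '/tmp/icon.png')  # downloaded to a temp
                                                         # file, copied, cleaned up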
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/xml/etree/ElementTree.py
python
Element.iterfind
(self, path, namespaces=None)
return ElementPath.iterfind(self, path, namespaces)
Find all matching subelements by tag name or path. *path* is a string having either an element tag or an XPath, *namespaces* is an optional mapping from namespace prefix to full name. Return an iterable yielding all matching elements in document order.
Find all matching subelements by tag name or path.
[ "Find", "all", "matching", "subelements", "by", "tag", "name", "or", "path", "." ]
def iterfind(self, path, namespaces=None): """Find all matching subelements by tag name or path. *path* is a string having either an element tag or an XPath, *namespaces* is an optional mapping from namespace prefix to full name. Return an iterable yielding all matching elements in document order. """ return ElementPath.iterfind(self, path, namespaces)
[ "def", "iterfind", "(", "self", ",", "path", ",", "namespaces", "=", "None", ")", ":", "return", "ElementPath", ".", "iterfind", "(", "self", ",", "path", ",", "namespaces", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/xml/etree/ElementTree.py#L319-L328
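A usage sketch for iterfind with an explicit namespace map; the document is inline so the example is self-contained.

import xml.etree.ElementTree as ET

root = ET.fromstring(
    '<library xmlns:b="urn:books">'
    '<b:book title="A"/><b:book title="B"/><shelf/>'
    '</library>')

ns = {'b': 'urn:books'}
for book in root.iterfind('b:book', ns):   # lazy iteration, document order
    print(book.get('title'))               # A, then B

titles = [e.get('title') for e in root.iterfind('.//b:book', ns)]  # XPath-style path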
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/lib2to3/fixes/fix_import.py
python
traverse_imports
(names)
Walks over all the names imported in a dotted_as_names node.
Walks over all the names imported in a dotted_as_names node.
[ "Walks", "over", "all", "the", "names", "imported", "in", "a", "dotted_as_names", "node", "." ]
def traverse_imports(names): """ Walks over all the names imported in a dotted_as_names node. """ pending = [names] while pending: node = pending.pop() if node.type == token.NAME: yield node.value elif node.type == syms.dotted_name: yield "".join([ch.value for ch in node.children]) elif node.type == syms.dotted_as_name: pending.append(node.children[0]) elif node.type == syms.dotted_as_names: pending.extend(node.children[::-2]) else: raise AssertionError("unknown node type")
[ "def", "traverse_imports", "(", "names", ")", ":", "pending", "=", "[", "names", "]", "while", "pending", ":", "node", "=", "pending", ".", "pop", "(", ")", "if", "node", ".", "type", "==", "token", ".", "NAME", ":", "yield", "node", ".", "value", "elif", "node", ".", "type", "==", "syms", ".", "dotted_name", ":", "yield", "\"\"", ".", "join", "(", "[", "ch", ".", "value", "for", "ch", "in", "node", ".", "children", "]", ")", "elif", "node", ".", "type", "==", "syms", ".", "dotted_as_name", ":", "pending", ".", "append", "(", "node", ".", "children", "[", "0", "]", ")", "elif", "node", ".", "type", "==", "syms", ".", "dotted_as_names", ":", "pending", ".", "extend", "(", "node", ".", "children", "[", ":", ":", "-", "2", "]", ")", "else", ":", "raise", "AssertionError", "(", "\"unknown node type\"", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/lib2to3/fixes/fix_import.py#L19-L35
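A usage sketch driving the generator above with a real lib2to3 parse; the node indexing in the comments follows the lib2to3 grammar, where the dotted_as_names node sits under import_name.

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver
from lib2to3.fixes.fix_import import traverse_imports

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string('import a.b as x, c\n')

import_name = tree.children[0].children[0]  # file_input -> simple_stmt -> import_name
names = import_name.children[1]             # the dotted_as_names node
print(list(traverse_imports(names)))        # ['a.b', 'c']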
gemrb/gemrb
730206eed8d1dd358ca5e69a62f9e099aa22ffc6
gemrb/GUIScripts/GUIOPT.py
python
OpenVideoOptionsWindow
()
return
Open video options window
Open video options window
[ "Open", "video", "options", "window" ]
def OpenVideoOptionsWindow (): """Open video options window""" global HelpTextArea #GemRB.GetView("SUB_WIN", 0).Close() Window = GemRB.LoadWindow (6, "GUIOPT") Window.AddAlias("SUB_WIN", 0) Window.SetFlags (WF_BORDERLESS, OP_OR) HelpTextArea = GUIOPTControls.OptHelpText ('VideoOptions', Window, 33, 18038) GUIOPTControls.OptDone (CloseVideoOptionsWindow, Window, 21) GUIOPTControls.OptCancel (CloseVideoOptionsWindow, Window, 32) GUIOPTControls.OptSlider (18038, 17203, HelpTextArea, Window, 3, 35, 17129, 'Brightness Correction', DisplayHelpBrightness, 4) GUIOPTControls.OptSlider (18038, 17204, HelpTextArea, Window, 22, 36, 17128, 'Gamma Correction', DisplayHelpContrast) GUIOPTControls.OptRadio (DisplayHelpBPP, Window, 5, 37, 'BitsPerPixel', 16) GUIOPTControls.OptRadio (DisplayHelpBPP, Window, 6, 37, 'BitsPerPixel', 24) GUIOPTControls.OptRadio (DisplayHelpBPP, Window, 7, 37, 'BitsPerPixel', 32) GUIOPTControls.OptCheckbox (18038, 18000, HelpTextArea, Window, 9, 38, 17131, 'Full Screen', DisplayHelpFullScreen) GUIOPTControls.OptCheckbox (18038, 20620, HelpTextArea, Window, 51, 50, 20617, 'Translucent Shadows') GUIOPTControls.OptCheckbox (18038, 18004, HelpTextArea, Window, 40, 44, 17134, 'SoftMirrorBlt') GUIOPTControls.OptCheckbox (18038, 18006, HelpTextArea, Window, 41, 46, 17136, 'SoftSrcKeyBlt') # software standard blit GUIOPTControls.OptCheckbox (18038, 18007, HelpTextArea, Window, 42, 48, 17135, 'SoftBltFast') # software transparent blit Window.ShowModal (MODAL_SHADOW_GRAY) return
[ "def", "OpenVideoOptionsWindow", "(", ")", ":", "global", "HelpTextArea", "#GemRB.GetView(\"SUB_WIN\", 0).Close()", "Window", "=", "GemRB", ".", "LoadWindow", "(", "6", ",", "\"GUIOPT\"", ")", "Window", ".", "AddAlias", "(", "\"SUB_WIN\"", ",", "0", ")", "Window", ".", "SetFlags", "(", "WF_BORDERLESS", ",", "OP_OR", ")", "HelpTextArea", "=", "GUIOPTControls", ".", "OptHelpText", "(", "'VideoOptions'", ",", "Window", ",", "33", ",", "18038", ")", "GUIOPTControls", ".", "OptDone", "(", "CloseVideoOptionsWindow", ",", "Window", ",", "21", ")", "GUIOPTControls", ".", "OptCancel", "(", "CloseVideoOptionsWindow", ",", "Window", ",", "32", ")", "GUIOPTControls", ".", "OptSlider", "(", "18038", ",", "17203", ",", "HelpTextArea", ",", "Window", ",", "3", ",", "35", ",", "17129", ",", "'Brightness Correction'", ",", "DisplayHelpBrightness", ",", "4", ")", "GUIOPTControls", ".", "OptSlider", "(", "18038", ",", "17204", ",", "HelpTextArea", ",", "Window", ",", "22", ",", "36", ",", "17128", ",", "'Gamma Correction'", ",", "DisplayHelpContrast", ")", "GUIOPTControls", ".", "OptRadio", "(", "DisplayHelpBPP", ",", "Window", ",", "5", ",", "37", ",", "'BitsPerPixel'", ",", "16", ")", "GUIOPTControls", ".", "OptRadio", "(", "DisplayHelpBPP", ",", "Window", ",", "6", ",", "37", ",", "'BitsPerPixel'", ",", "24", ")", "GUIOPTControls", ".", "OptRadio", "(", "DisplayHelpBPP", ",", "Window", ",", "7", ",", "37", ",", "'BitsPerPixel'", ",", "32", ")", "GUIOPTControls", ".", "OptCheckbox", "(", "18038", ",", "18000", ",", "HelpTextArea", ",", "Window", ",", "9", ",", "38", ",", "17131", ",", "'Full Screen'", ",", "DisplayHelpFullScreen", ")", "GUIOPTControls", ".", "OptCheckbox", "(", "18038", ",", "20620", ",", "HelpTextArea", ",", "Window", ",", "51", ",", "50", ",", "20617", ",", "'Translucent Shadows'", ")", "GUIOPTControls", ".", "OptCheckbox", "(", "18038", ",", "18004", ",", "HelpTextArea", ",", "Window", ",", "40", ",", "44", ",", "17134", ",", "'SoftMirrorBlt'", ")", "GUIOPTControls", ".", "OptCheckbox", "(", "18038", ",", "18006", ",", "HelpTextArea", ",", "Window", ",", "41", ",", "46", ",", "17136", ",", "'SoftSrcKeyBlt'", ")", "# software standard blit", "GUIOPTControls", ".", "OptCheckbox", "(", "18038", ",", "18007", ",", "HelpTextArea", ",", "Window", ",", "42", ",", "48", ",", "17135", ",", "'SoftBltFast'", ")", "# software transparent blit", "Window", ".", "ShowModal", "(", "MODAL_SHADOW_GRAY", ")", "return" ]
https://github.com/gemrb/gemrb/blob/730206eed8d1dd358ca5e69a62f9e099aa22ffc6/gemrb/GUIScripts/GUIOPT.py#L124-L154
scanner-research/scanner
04a0c4b4196341995985acd729c0788aab823e1c
python/scannerpy/client.py
python
Client.load_op
(self, so_path: str, proto_path: str = None)
r"""Loads a custom op into the Scanner runtime. Parameters ---------- so_path Path to the custom op's shared library (.so). proto_path Path to the custom op's arguments protobuf if one exists. Raises ------ ScannerException Raised when the master fails to load the op.
r"""Loads a custom op into the Scanner runtime.
[ "r", "Loads", "a", "custom", "op", "into", "the", "Scanner", "runtime", "." ]
def load_op(self, so_path: str, proto_path: str = None): r"""Loads a custom op into the Scanner runtime. Parameters ---------- so_path Path to the custom op's shared library (.so). proto_path Path to the custom op's arguments protobuf if one exists. Raises ------ ScannerException Raised when the master fails to load the op. """ if proto_path is not None: protobufs.add_module(proto_path) op_path = protobufs.OpPath() op_path.path = so_path self._try_rpc( lambda: self._master.LoadOp(op_path, timeout=self._grpc_timeout)) self._modules.add((so_path, proto_path))
[ "def", "load_op", "(", "self", ",", "so_path", ":", "str", ",", "proto_path", ":", "str", "=", "None", ")", ":", "if", "proto_path", "is", "not", "None", ":", "protobufs", ".", "add_module", "(", "proto_path", ")", "op_path", "=", "protobufs", ".", "OpPath", "(", ")", "op_path", ".", "path", "=", "so_path", "self", ".", "_try_rpc", "(", "lambda", ":", "self", ".", "_master", ".", "LoadOp", "(", "op_path", ",", "timeout", "=", "self", ".", "_grpc_timeout", ")", ")", "self", ".", "_modules", ".", "add", "(", "(", "so_path", ",", "proto_path", ")", ")" ]
https://github.com/scanner-research/scanner/blob/04a0c4b4196341995985acd729c0788aab823e1c/python/scannerpy/client.py#L514-L537
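A usage sketch, assuming a reachable Scanner master and default client settings; the shared-library and protobuf paths are hypothetical.

import scannerpy

cl = scannerpy.Client()  # connection settings elided; defaults assumed
cl.load_op('/opt/scanner_ops/libresize_op.so',
           proto_path='/opt/scanner_ops/resize_args.proto')
# The op is now registered with the master, and the (so_path, proto_path)
# pair is recorded on the client for later re-registration.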
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/asyncio/locks.py
python
Condition.wait
(self)
Wait until notified. If the calling coroutine has not acquired the lock when this method is called, a RuntimeError is raised. This method releases the underlying lock, and then blocks until it is awakened by a notify() or notify_all() call for the same condition variable in another coroutine. Once awakened, it re-acquires the lock and returns True.
Wait until notified.
[ "Wait", "until", "notified", "." ]
async def wait(self): """Wait until notified. If the calling coroutine has not acquired the lock when this method is called, a RuntimeError is raised. This method releases the underlying lock, and then blocks until it is awakened by a notify() or notify_all() call for the same condition variable in another coroutine. Once awakened, it re-acquires the lock and returns True. """ if not self.locked(): raise RuntimeError('cannot wait on un-acquired lock') self.release() try: fut = self._loop.create_future() self._waiters.append(fut) try: await fut return True finally: self._waiters.remove(fut) finally: # Must reacquire lock even if wait is cancelled cancelled = False while True: try: await self.acquire() break except futures.CancelledError: cancelled = True if cancelled: raise futures.CancelledError
[ "async", "def", "wait", "(", "self", ")", ":", "if", "not", "self", ".", "locked", "(", ")", ":", "raise", "RuntimeError", "(", "'cannot wait on un-acquired lock'", ")", "self", ".", "release", "(", ")", "try", ":", "fut", "=", "self", ".", "_loop", ".", "create_future", "(", ")", "self", ".", "_waiters", ".", "append", "(", "fut", ")", "try", ":", "await", "fut", "return", "True", "finally", ":", "self", ".", "_waiters", ".", "remove", "(", "fut", ")", "finally", ":", "# Must reacquire lock even if wait is cancelled", "cancelled", "=", "False", "while", "True", ":", "try", ":", "await", "self", ".", "acquire", "(", ")", "break", "except", "futures", ".", "CancelledError", ":", "cancelled", "=", "True", "if", "cancelled", ":", "raise", "futures", ".", "CancelledError" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/asyncio/locks.py#L335-L370
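A usage sketch of the acquire/wait/notify pattern this method implements; the while-loop guard is the standard defense against spurious wakeups.

import asyncio

async def consumer(cond, items):
    async with cond:              # wait() requires the lock to be held
        while not items:
            await cond.wait()     # releases the lock, blocks, re-acquires
        print('got', items.pop())

async def producer(cond, items):
    async with cond:
        items.append(42)
        cond.notify()             # wake one waiter

async def main():
    cond = asyncio.Condition()
    items = []
    await asyncio.gather(consumer(cond, items), producer(cond, items))

asyncio.run(main())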
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
python/mozbuild/mozbuild/configure/libstdcxx.py
python
find_version
(e)
return encode_ver(last_version)
Given the value of environment variable CXX or HOST_CXX, find the version of the libstdc++ it uses.
Given the value of environment variable CXX or HOST_CXX, find the version of the libstdc++ it uses.
[ "Given", "the", "value", "of", "environment", "variable", "CXX", "or", "HOST_CXX", "find", "the", "version", "of", "the", "libstdc", "++", "it", "uses", "." ]
def find_version(e): """Given the value of environment variable CXX or HOST_CXX, find the version of the libstdc++ it uses. """ args = e.split() args += ['-shared', '-Wl,-t'] p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE) candidates = [x for x in p.stdout if 'libstdc++.so' in x] if not candidates: return '' assert len(candidates) == 1 libstdcxx = parse_ld_line(candidates[-1]) p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE) versions = [parse_readelf_line(x) for x in p.stdout.readlines() if 'Name: GLIBCXX' in x] last_version = sorted(versions, cmp = cmp_ver)[-1] return encode_ver(last_version)
[ "def", "find_version", "(", "e", ")", ":", "args", "=", "e", ".", "split", "(", ")", "args", "+=", "[", "'-shared'", ",", "'-Wl,-t'", "]", "p", "=", "subprocess", ".", "Popen", "(", "args", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "candidates", "=", "[", "x", "for", "x", "in", "p", ".", "stdout", "if", "'libstdc++.so'", "in", "x", "]", "if", "not", "candidates", ":", "return", "''", "assert", "len", "(", "candidates", ")", "==", "1", "libstdcxx", "=", "parse_ld_line", "(", "candidates", "[", "-", "1", "]", ")", "p", "=", "subprocess", ".", "Popen", "(", "[", "'readelf'", ",", "'-V'", ",", "libstdcxx", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "versions", "=", "[", "parse_readelf_line", "(", "x", ")", "for", "x", "in", "p", ".", "stdout", ".", "readlines", "(", ")", "if", "'Name: GLIBCXX'", "in", "x", "]", "last_version", "=", "sorted", "(", "versions", ",", "cmp", "=", "cmp_ver", ")", "[", "-", "1", "]", "return", "encode_ver", "(", "last_version", ")" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/python/mozbuild/mozbuild/configure/libstdcxx.py#L56-L73
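A usage sketch, assuming it runs in the same module so parse_ld_line, parse_readelf_line and encode_ver are in scope; the fallback compiler name is illustrative.

import os

encoded = find_version(os.environ.get('CXX', 'g++'))
print('newest GLIBCXX symbol version (encoded): %s' % encoded)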
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
buildscripts/resmokelib/hang_analyzer/dumper.py
python
Dumper._find_debugger
(self, debugger)
Find the installed debugger. :param debugger: debugger executable.
Find the installed debugger.
[ "Find", "the", "installed", "debugger", "." ]
def _find_debugger(self, debugger): """ Find the installed debugger. :param debugger: debugger executable. """ raise NotImplementedError("_find_debugger must be implemented in OS-specific subclasses")
[ "def", "_find_debugger", "(", "self", ",", "debugger", ")", ":", "raise", "NotImplementedError", "(", "\"_find_debugger must be implemented in OS-specific subclasses\"", ")" ]
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/hang_analyzer/dumper.py#L74-L80
turi-code/SFrame
796b9bdfb2fa1b881d82080754643c7e68629cd2
oss_src/unity/python/sframe/data_structures/sframe.py
python
SFrame.__getitem__
(self, key)
The behavior of this method depends on the type of `key`. If `key` is: * str selects column with name 'key' * type selects all columns with types matching the type * list of str or type selects all columns with names or type in the list * SArray Performs a logical filter. Expects given SArray to be the same length as all columns in current SFrame. Every row corresponding with an entry in the given SArray that is equivalent to False is filtered from the result. * int Returns a single row of the SFrame (the `key`th one) as a dictionary. * slice Returns an SFrame including only the sliced rows.
The behavior of this method depends on the type of `key`.
[ "This", "method", "does", "things", "based", "on", "the", "type", "of", "key", "." ]
def __getitem__(self, key): """ This method does things based on the type of `key`. If `key` is: * str selects column with name 'key' * type selects all columns with types matching the type * list of str or type selects all columns with names or type in the list * SArray Performs a logical filter. Expects given SArray to be the same length as all columns in current SFrame. Every row corresponding with an entry in the given SArray that is equivalent to False is filtered from the result. * int Returns a single row of the SFrame (the `key`th one) as a dictionary. * slice Returns an SFrame including only the sliced rows. """ if type(key) is SArray: return self._row_selector(key) elif type(key) is str: return self.select_column(key) elif type(key) is type: return self.select_columns([key]) elif _is_non_string_iterable(key): return self.select_columns(key) elif isinstance(key, numbers.Integral): sf_len = len(self) if key < 0: key = sf_len + key if key >= sf_len: raise IndexError("SFrame index out of range") if not hasattr(self, '_cache') or self._cache is None: self._cache = {} try: lb, ub, value_list = self._cache["getitem_cache"] if lb <= key < ub: return value_list[int(key - lb)] except KeyError: pass # Not in cache, need to grab it. Smaller here than with sarray # Do we have a good block size that won't cause memory to blow up? if not "getitem_cache_blocksize" in self._cache: block_size = \ (8*1024) // sum( (2 if dt in [int, long, float] else 8) for dt in self.column_types()) block_size = max(16, block_size) self._cache["getitem_cache_blocksize"] = block_size else: block_size = self._cache["getitem_cache_blocksize"] block_num = int(key // block_size) lb = block_num * block_size ub = min(sf_len, lb + block_size) val_list = list(SFrame(_proxy = self.__proxy__.copy_range(lb, 1, ub))) self._cache["getitem_cache"] = (lb, ub, val_list) return val_list[int(key - lb)] elif type(key) is slice: start = key.start stop = key.stop step = key.step if start is None: start = 0 if stop is None: stop = len(self) if step is None: step = 1 # handle negative indices if start < 0: start = len(self) + start if stop < 0: stop = len(self) + stop return SFrame(_proxy = self.__proxy__.copy_range(start, step, stop)) else: raise TypeError("Invalid index type: must be SArray, list, int, or str")
[ "def", "__getitem__", "(", "self", ",", "key", ")", ":", "if", "type", "(", "key", ")", "is", "SArray", ":", "return", "self", ".", "_row_selector", "(", "key", ")", "elif", "type", "(", "key", ")", "is", "str", ":", "return", "self", ".", "select_column", "(", "key", ")", "elif", "type", "(", "key", ")", "is", "type", ":", "return", "self", ".", "select_columns", "(", "[", "key", "]", ")", "elif", "_is_non_string_iterable", "(", "key", ")", ":", "return", "self", ".", "select_columns", "(", "key", ")", "elif", "isinstance", "(", "key", ",", "numbers", ".", "Integral", ")", ":", "sf_len", "=", "len", "(", "self", ")", "if", "key", "<", "0", ":", "key", "=", "sf_len", "+", "key", "if", "key", ">=", "sf_len", ":", "raise", "IndexError", "(", "\"SFrame index out of range\"", ")", "if", "not", "hasattr", "(", "self", ",", "'_cache'", ")", "or", "self", ".", "_cache", "is", "None", ":", "self", ".", "_cache", "=", "{", "}", "try", ":", "lb", ",", "ub", ",", "value_list", "=", "self", ".", "_cache", "[", "\"getitem_cache\"", "]", "if", "lb", "<=", "key", "<", "ub", ":", "return", "value_list", "[", "int", "(", "key", "-", "lb", ")", "]", "except", "KeyError", ":", "pass", "# Not in cache, need to grab it. Smaller here than with sarray", "# Do we have a good block size that won't cause memory to blow up?", "if", "not", "\"getitem_cache_blocksize\"", "in", "self", ".", "_cache", ":", "block_size", "=", "(", "8", "*", "1024", ")", "//", "sum", "(", "(", "2", "if", "dt", "in", "[", "int", ",", "long", ",", "float", "]", "else", "8", ")", "for", "dt", "in", "self", ".", "column_types", "(", ")", ")", "block_size", "=", "max", "(", "16", ",", "block_size", ")", "self", ".", "_cache", "[", "\"getitem_cache_blocksize\"", "]", "=", "block_size", "else", ":", "block_size", "=", "self", ".", "_cache", "[", "\"getitem_cache_blocksize\"", "]", "block_num", "=", "int", "(", "key", "//", "block_size", ")", "lb", "=", "block_num", "*", "block_size", "ub", "=", "min", "(", "sf_len", ",", "lb", "+", "block_size", ")", "val_list", "=", "list", "(", "SFrame", "(", "_proxy", "=", "self", ".", "__proxy__", ".", "copy_range", "(", "lb", ",", "1", ",", "ub", ")", ")", ")", "self", ".", "_cache", "[", "\"getitem_cache\"", "]", "=", "(", "lb", ",", "ub", ",", "val_list", ")", "return", "val_list", "[", "int", "(", "key", "-", "lb", ")", "]", "elif", "type", "(", "key", ")", "is", "slice", ":", "start", "=", "key", ".", "start", "stop", "=", "key", ".", "stop", "step", "=", "key", ".", "step", "if", "start", "is", "None", ":", "start", "=", "0", "if", "stop", "is", "None", ":", "stop", "=", "len", "(", "self", ")", "if", "step", "is", "None", ":", "step", "=", "1", "# handle negative indices", "if", "start", "<", "0", ":", "start", "=", "len", "(", "self", ")", "+", "start", "if", "stop", "<", "0", ":", "stop", "=", "len", "(", "self", ")", "+", "stop", "return", "SFrame", "(", "_proxy", "=", "self", ".", "__proxy__", ".", "copy_range", "(", "start", ",", "step", ",", "stop", ")", ")", "else", ":", "raise", "TypeError", "(", "\"Invalid index type: must be SArray, list, int, or str\"", ")" ]
https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/data_structures/sframe.py#L3974-L4060
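A usage sketch covering each accepted key type; the import is an assumption, since this module has shipped under more than one package name (sframe, graphlab).

from sframe import SFrame  # package name varies by distribution

sf = SFrame({'id': [1, 2, 3], 'name': ['a', 'b', 'c']})

col = sf['id']               # str    -> one column, as an SArray
ints = sf[int]               # type   -> all int-typed columns
pair = sf[['id', 'name']]    # list   -> several columns
rows = sf[sf['id'] > 1]      # SArray -> logical row filter
row = sf[0]                  # int    -> one row, as a dict (cached in blocks)
head = sf[0:2]               # slice  -> rows 0 and 1, as an SFrame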
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/contrib/learn/python/learn/estimators/dnn_sampled_softmax_classifier.py
python
_DNNSampledSoftmaxClassifier.export
(self, export_dir, signature_fn=None, input_fn=None, default_batch_size=1, exports_to_keep=None)
return self._estimator.export(export_dir=export_dir, signature_fn=signature_fn, input_fn=input_fn or default_input_fn, default_batch_size=default_batch_size, exports_to_keep=exports_to_keep)
Exports inference graph into given dir. Args: export_dir: A string containing a directory to write the exported graph and checkpoints. signature_fn: Function that returns a default signature and a named signature map, given `Tensor` of `Example` strings, `dict` of `Tensor`s for features and `Tensor` or `dict` of `Tensor`s for predictions. input_fn: If `use_deprecated_input_fn` is true, then a function that given `Tensor` of `Example` strings, parses it into features that are then passed to the model. Otherwise, a function that takes no argument and returns a tuple of (features, targets), where features is a dict of string key to `Tensor` and targets is a `Tensor` that's currently not used (and so can be `None`). default_batch_size: Default batch size of the `Example` placeholder. exports_to_keep: Number of exports to keep. Returns: The string path to the exported directory. NB: this functionality was added ca. 2016/09/25; clients that depend on the return value may need to handle the case where this function returns None because subclasses are not returning a value.
Exports inference graph into given dir.
[ "Exports", "inference", "graph", "into", "given", "dir", "." ]
def export(self, export_dir, signature_fn=None, input_fn=None, default_batch_size=1, exports_to_keep=None): """Exports inference graph into given dir. Args: export_dir: A string containing a directory to write the exported graph and checkpoints. signature_fn: Function that returns a default signature and a named signature map, given `Tensor` of `Example` strings, `dict` of `Tensor`s for features and `Tensor` or `dict` of `Tensor`s for predictions. input_fn: If `use_deprecated_input_fn` is true, then a function that given `Tensor` of `Example` strings, parses it into features that are then passed to the model. Otherwise, a function that takes no argument and returns a tuple of (features, targets), where features is a dict of string key to `Tensor` and targets is a `Tensor` that's currently not used (and so can be `None`). default_batch_size: Default batch size of the `Example` placeholder. exports_to_keep: Number of exports to keep. Returns: The string path to the exported directory. NB: this functionality was added ca. 2016/09/25; clients that depend on the return value may need to handle the case where this function returns None because subclasses are not returning a value. """ def default_input_fn(unused_estimator, examples): return layers.parse_feature_columns_from_examples( examples, self._feature_columns) return self._estimator.export(export_dir=export_dir, signature_fn=signature_fn, input_fn=input_fn or default_input_fn, default_batch_size=default_batch_size, exports_to_keep=exports_to_keep)
[ "def", "export", "(", "self", ",", "export_dir", ",", "signature_fn", "=", "None", ",", "input_fn", "=", "None", ",", "default_batch_size", "=", "1", ",", "exports_to_keep", "=", "None", ")", ":", "def", "default_input_fn", "(", "unused_estimator", ",", "examples", ")", ":", "return", "layers", ".", "parse_feature_columns_from_examples", "(", "examples", ",", "self", ".", "_feature_columns", ")", "return", "self", ".", "_estimator", ".", "export", "(", "export_dir", "=", "export_dir", ",", "signature_fn", "=", "signature_fn", ",", "input_fn", "=", "input_fn", "or", "default_input_fn", ",", "default_batch_size", "=", "default_batch_size", ",", "exports_to_keep", "=", "exports_to_keep", ")" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/learn/python/learn/estimators/dnn_sampled_softmax_classifier.py#L528-L561
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_gdi.py
python
PixelDataBase.GetRowStride
(*args, **kwargs)
return _gdi_.PixelDataBase_GetRowStride(*args, **kwargs)
GetRowStride(self) -> int
GetRowStride(self) -> int
[ "GetRowStride", "(", "self", ")", "-", ">", "int" ]
def GetRowStride(*args, **kwargs): """GetRowStride(self) -> int""" return _gdi_.PixelDataBase_GetRowStride(*args, **kwargs)
[ "def", "GetRowStride", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "PixelDataBase_GetRowStride", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_gdi.py#L1006-L1008
BitMEX/api-connectors
37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
auto-generated/python/swagger_client/models/instrument.py
python
Instrument.high_price
(self, high_price)
Sets the high_price of this Instrument. :param high_price: The high_price of this Instrument. # noqa: E501 :type: float
Sets the high_price of this Instrument.
[ "Sets", "the", "high_price", "of", "this", "Instrument", "." ]
def high_price(self, high_price): """Sets the high_price of this Instrument. :param high_price: The high_price of this Instrument. # noqa: E501 :type: float """ self._high_price = high_price
[ "def", "high_price", "(", "self", ",", "high_price", ")", ":", "self", ".", "_high_price", "=", "high_price" ]
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/models/instrument.py#L2189-L2197
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/aquabutton.py
python
__ToggleMixin.SetToggle
(self, flag)
Sets the button as toggled/not toggled. :param bool `flag`: ``True`` to set the button as toggled, ``False`` otherwise.
Sets the button as toggled/not toggled.
[ "Sets", "the", "button", "as", "toggled", "/", "not", "toggled", "." ]
def SetToggle(self, flag): """ Sets the button as toggled/not toggled. :param bool `flag`: ``True`` to set the button as toggled, ``False`` otherwise. """ self.up = not flag self.Refresh()
[ "def", "SetToggle", "(", "self", ",", "flag", ")", ":", "self", ".", "up", "=", "not", "flag", "self", ".", "Refresh", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aquabutton.py#L872-L880
ideawu/ssdb-rocks
a3cbb322cafb2f493252829c608e2239df98c9ac
deps/cpy/antlr3/treewizard.py
python
TreeWizard._visitPattern
(self, tree, pattern, visitor)
For all subtrees that match the pattern, execute the visit action.
For all subtrees that match the pattern, execute the visit action.
[ "For", "all", "subtrees", "that", "match", "the", "pattern", "execute", "the", "visit", "action", "." ]
def _visitPattern(self, tree, pattern, visitor): """ For all subtrees that match the pattern, execute the visit action. """ # Create a TreePattern from the pattern tokenizer = TreePatternLexer(pattern) parser = TreePatternParser(tokenizer, self, TreePatternTreeAdaptor()) tpattern = parser.pattern() # don't allow invalid patterns if (tpattern is None or tpattern.isNil() or isinstance(tpattern, WildcardTreePattern)): return rootTokenType = tpattern.getType() def rootvisitor(tree, parent, childIndex, labels): labels = {} if self._parse(tree, tpattern, labels): visitor(tree, parent, childIndex, labels) self.visit(tree, rootTokenType, rootvisitor)
[ "def", "_visitPattern", "(", "self", ",", "tree", ",", "pattern", ",", "visitor", ")", ":", "# Create a TreePattern from the pattern", "tokenizer", "=", "TreePatternLexer", "(", "pattern", ")", "parser", "=", "TreePatternParser", "(", "tokenizer", ",", "self", ",", "TreePatternTreeAdaptor", "(", ")", ")", "tpattern", "=", "parser", ".", "pattern", "(", ")", "# don't allow invalid patterns", "if", "(", "tpattern", "is", "None", "or", "tpattern", ".", "isNil", "(", ")", "or", "isinstance", "(", "tpattern", ",", "WildcardTreePattern", ")", ")", ":", "return", "rootTokenType", "=", "tpattern", ".", "getType", "(", ")", "def", "rootvisitor", "(", "tree", ",", "parent", ",", "childIndex", ",", "labels", ")", ":", "labels", "=", "{", "}", "if", "self", ".", "_parse", "(", "tree", ",", "tpattern", ",", "labels", ")", ":", "visitor", "(", "tree", ",", "parent", ",", "childIndex", ",", "labels", ")", "self", ".", "visit", "(", "tree", ",", "rootTokenType", ",", "rootvisitor", ")" ]
https://github.com/ideawu/ssdb-rocks/blob/a3cbb322cafb2f493252829c608e2239df98c9ac/deps/cpy/antlr3/treewizard.py#L491-L513
PaddlePaddle/PaddleOCR
b756bf5f8c90142e0d89d3db0163965c686b6ffe
ppocr/utils/e2e_utils/extract_textpoint_slow.py
python
add_id
(pos_list, image_id=0)
return new_list
Add id for gather feature, for inference.
Add id for gather feature, for inference.
[ "Add", "id", "for", "gather", "feature", "for", "inference", "." ]
def add_id(pos_list, image_id=0): """ Add id for gather feature, for inference. """ new_list = [] for item in pos_list: new_list.append((image_id, item[0], item[1])) return new_list
[ "def", "add_id", "(", "pos_list", ",", "image_id", "=", "0", ")", ":", "new_list", "=", "[", "]", "for", "item", "in", "pos_list", ":", "new_list", ".", "append", "(", "(", "image_id", ",", "item", "[", "0", "]", ",", "item", "[", "1", "]", ")", ")", "return", "new_list" ]
https://github.com/PaddlePaddle/PaddleOCR/blob/b756bf5f8c90142e0d89d3db0163965c686b6ffe/ppocr/utils/e2e_utils/extract_textpoint_slow.py#L218-L225
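A usage sketch for the helper above.

pos_list = [(10, 20), (11, 21)]
print(add_id(pos_list, image_id=3))  # [(3, 10, 20), (3, 11, 21)]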
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/tools/gyp/pylib/gyp/xcode_emulation.py
python
XcodeSettings.GetLdflags
(self, configname, product_dir, gyp_to_build_path, arch=None)
return ldflags
Returns flags that need to be passed to the linker. Args: configname: The name of the configuration to get ld flags for. product_dir: The directory where products such as static and dynamic libraries are placed. This is added to the library search path. gyp_to_build_path: A function that converts paths relative to the current gyp file to paths relative to the build directory.
Returns flags that need to be passed to the linker.
[ "Returns", "flags", "that", "need", "to", "be", "passed", "to", "the", "linker", "." ]
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): """Returns flags that need to be passed to the linker. Args: configname: The name of the configuration to get ld flags for. product_dir: The directory where products such static and dynamic libraries are placed. This is added to the library search path. gyp_to_build_path: A function that converts paths relative to the current gyp file to paths relative to the build direcotry. """ self.configname = configname ldflags = [] # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS # can contain entries that depend on this. Explicitly absolutify these. for ldflag in self._Settings().get('OTHER_LDFLAGS', []): ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'): ldflags.append('-Wl,-dead_strip') if self._Test('PREBINDING', 'YES', default='NO'): ldflags.append('-Wl,-prebind') self._Appendf( ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s') self._Appendf( ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s') self._AppendPlatformVersionMinFlags(ldflags) if 'SDKROOT' in self._Settings() and self._SdkPath(): ldflags.append('-isysroot ' + self._SdkPath()) for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): ldflags.append('-L' + gyp_to_build_path(library_path)) if 'ORDER_FILE' in self._Settings(): ldflags.append('-Wl,-order_file ' + '-Wl,' + gyp_to_build_path( self._Settings()['ORDER_FILE'])) if arch is not None: archs = [arch] else: assert self.configname archs = self.GetActiveArchs(self.configname) if len(archs) != 1: # TODO: Supporting fat binaries will be annoying. self._WarnUnimplemented('ARCHS') archs = ['i386'] ldflags.append('-arch ' + archs[0]) # Xcode adds the product directory by default. # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838 ldflags.append('-L' + (product_dir if product_dir != '.' else './')) install_name = self.GetInstallName() if install_name and self.spec['type'] != 'loadable_module': ldflags.append('-install_name ' + install_name.replace(' ', r'\ ')) for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []): ldflags.append('-Wl,-rpath,' + rpath) sdk_root = self._SdkPath() if not sdk_root: sdk_root = '' config = self.spec['configurations'][self.configname] framework_dirs = config.get('mac_framework_dirs', []) for directory in framework_dirs: ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) if self._IsXCTest(): platform_root = self._XcodePlatformPath(configname) if sdk_root and platform_root: ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') ldflags.append('-framework XCTest') is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() if sdk_root and is_extension: # Adds the link flags for extensions. These flags are common for all # extensions and provide loader and main function. # These flags reflect the compilation options used by xcode to compile # extensions. if XcodeVersion() < '0900': ldflags.append('-lpkstart') ldflags.append(sdk_root + '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit') else: ldflags.append('-e _NSExtensionMain') ldflags.append('-fapplication-extension') self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s') self.configname = None return ldflags
[ "def", "GetLdflags", "(", "self", ",", "configname", ",", "product_dir", ",", "gyp_to_build_path", ",", "arch", "=", "None", ")", ":", "self", ".", "configname", "=", "configname", "ldflags", "=", "[", "]", "# The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS", "# can contain entries that depend on this. Explicitly absolutify these.", "for", "ldflag", "in", "self", ".", "_Settings", "(", ")", ".", "get", "(", "'OTHER_LDFLAGS'", ",", "[", "]", ")", ":", "ldflags", ".", "append", "(", "self", ".", "_MapLinkerFlagFilename", "(", "ldflag", ",", "gyp_to_build_path", ")", ")", "if", "self", ".", "_Test", "(", "'DEAD_CODE_STRIPPING'", ",", "'YES'", ",", "default", "=", "'NO'", ")", ":", "ldflags", ".", "append", "(", "'-Wl,-dead_strip'", ")", "if", "self", ".", "_Test", "(", "'PREBINDING'", ",", "'YES'", ",", "default", "=", "'NO'", ")", ":", "ldflags", ".", "append", "(", "'-Wl,-prebind'", ")", "self", ".", "_Appendf", "(", "ldflags", ",", "'DYLIB_COMPATIBILITY_VERSION'", ",", "'-compatibility_version %s'", ")", "self", ".", "_Appendf", "(", "ldflags", ",", "'DYLIB_CURRENT_VERSION'", ",", "'-current_version %s'", ")", "self", ".", "_AppendPlatformVersionMinFlags", "(", "ldflags", ")", "if", "'SDKROOT'", "in", "self", ".", "_Settings", "(", ")", "and", "self", ".", "_SdkPath", "(", ")", ":", "ldflags", ".", "append", "(", "'-isysroot '", "+", "self", ".", "_SdkPath", "(", ")", ")", "for", "library_path", "in", "self", ".", "_Settings", "(", ")", ".", "get", "(", "'LIBRARY_SEARCH_PATHS'", ",", "[", "]", ")", ":", "ldflags", ".", "append", "(", "'-L'", "+", "gyp_to_build_path", "(", "library_path", ")", ")", "if", "'ORDER_FILE'", "in", "self", ".", "_Settings", "(", ")", ":", "ldflags", ".", "append", "(", "'-Wl,-order_file '", "+", "'-Wl,'", "+", "gyp_to_build_path", "(", "self", ".", "_Settings", "(", ")", "[", "'ORDER_FILE'", "]", ")", ")", "if", "arch", "is", "not", "None", ":", "archs", "=", "[", "arch", "]", "else", ":", "assert", "self", ".", "configname", "archs", "=", "self", ".", "GetActiveArchs", "(", "self", ".", "configname", ")", "if", "len", "(", "archs", ")", "!=", "1", ":", "# TODO: Supporting fat binaries will be annoying.", "self", ".", "_WarnUnimplemented", "(", "'ARCHS'", ")", "archs", "=", "[", "'i386'", "]", "ldflags", ".", "append", "(", "'-arch '", "+", "archs", "[", "0", "]", ")", "# Xcode adds the product directory by default.", "# Rewrite -L. 
to -L./ to work around http://www.openradar.me/25313838", "ldflags", ".", "append", "(", "'-L'", "+", "(", "product_dir", "if", "product_dir", "!=", "'.'", "else", "'./'", ")", ")", "install_name", "=", "self", ".", "GetInstallName", "(", ")", "if", "install_name", "and", "self", ".", "spec", "[", "'type'", "]", "!=", "'loadable_module'", ":", "ldflags", ".", "append", "(", "'-install_name '", "+", "install_name", ".", "replace", "(", "' '", ",", "r'\\ '", ")", ")", "for", "rpath", "in", "self", ".", "_Settings", "(", ")", ".", "get", "(", "'LD_RUNPATH_SEARCH_PATHS'", ",", "[", "]", ")", ":", "ldflags", ".", "append", "(", "'-Wl,-rpath,'", "+", "rpath", ")", "sdk_root", "=", "self", ".", "_SdkPath", "(", ")", "if", "not", "sdk_root", ":", "sdk_root", "=", "''", "config", "=", "self", ".", "spec", "[", "'configurations'", "]", "[", "self", ".", "configname", "]", "framework_dirs", "=", "config", ".", "get", "(", "'mac_framework_dirs'", ",", "[", "]", ")", "for", "directory", "in", "framework_dirs", ":", "ldflags", ".", "append", "(", "'-F'", "+", "directory", ".", "replace", "(", "'$(SDKROOT)'", ",", "sdk_root", ")", ")", "if", "self", ".", "_IsXCTest", "(", ")", ":", "platform_root", "=", "self", ".", "_XcodePlatformPath", "(", "configname", ")", "if", "sdk_root", "and", "platform_root", ":", "ldflags", ".", "append", "(", "'-F'", "+", "platform_root", "+", "'/Developer/Library/Frameworks/'", ")", "ldflags", ".", "append", "(", "'-framework XCTest'", ")", "is_extension", "=", "self", ".", "_IsIosAppExtension", "(", ")", "or", "self", ".", "_IsIosWatchKitExtension", "(", ")", "if", "sdk_root", "and", "is_extension", ":", "# Adds the link flags for extensions. These flags are common for all", "# extensions and provide loader and main function.", "# These flags reflect the compilation options used by xcode to compile", "# extensions.", "if", "XcodeVersion", "(", ")", "<", "'0900'", ":", "ldflags", ".", "append", "(", "'-lpkstart'", ")", "ldflags", ".", "append", "(", "sdk_root", "+", "'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit'", ")", "else", ":", "ldflags", ".", "append", "(", "'-e _NSExtensionMain'", ")", "ldflags", ".", "append", "(", "'-fapplication-extension'", ")", "self", ".", "_Appendf", "(", "ldflags", ",", "'CLANG_CXX_LIBRARY'", ",", "'-stdlib=%s'", ")", "self", ".", "configname", "=", "None", "return", "ldflags" ]
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/gyp/pylib/gyp/xcode_emulation.py#L845-L940
blocknetdx/blocknet
f85bdf3eeebb1ed8c2321ebd928232d4885b30b6
contrib/devtools/security-check.py
python
check_ELF_Canary
(executable)
return ok
Check for use of stack canary
Check for use of stack canary
[ "Check", "for", "use", "of", "stack", "canary" ]
def check_ELF_Canary(executable): ''' Check for use of stack canary ''' p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: raise IOError('Error opening file') ok = False for line in stdout.splitlines(): if '__stack_chk_fail' in line: ok = True return ok
[ "def", "check_ELF_Canary", "(", "executable", ")", ":", "p", "=", "subprocess", ".", "Popen", "(", "[", "READELF_CMD", ",", "'--dyn-syms'", ",", "'-W'", ",", "executable", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ")", "(", "stdout", ",", "stderr", ")", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", ":", "raise", "IOError", "(", "'Error opening file'", ")", "ok", "=", "False", "for", "line", "in", "stdout", ".", "splitlines", "(", ")", ":", "if", "'__stack_chk_fail'", "in", "line", ":", "ok", "=", "True", "return", "ok" ]
https://github.com/blocknetdx/blocknet/blob/f85bdf3eeebb1ed8c2321ebd928232d4885b30b6/contrib/devtools/security-check.py#L104-L116
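A usage sketch, assuming READELF_CMD is configured as elsewhere in this script; the binary paths are illustrative.

for binary in ['src/blocknetd', 'src/blocknet-cli']:
    if not check_ELF_Canary(binary):
        print('%s: no stack canary (__stack_chk_fail not found)' % binary)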
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/python/training/session_manager.py
python
SessionManager.recover_session
(self, master, saver=None, checkpoint_dir=None, wait_for_checkpoint=False, max_wait_secs=7200, config=None)
Creates a `Session`, recovering if possible. Creates a new session on 'master'. If the session is not initialized and can be recovered from a checkpoint, recover it. Args: master: `String` representation of the TensorFlow master to use. saver: A `Saver` object used to restore a model. checkpoint_dir: Path to the checkpoint files. wait_for_checkpoint: Whether to wait for checkpoint to become available. max_wait_secs: Maximum time to wait for checkpoints to become available. config: Optional `ConfigProto` proto used to configure the session. Returns: A pair (sess, initialized) where 'initialized' is `True` if the session could be recovered, `False` otherwise.
Creates a `Session`, recovering if possible.
[ "Creates", "a", "Session", "recovering", "if", "possible", "." ]
def recover_session(self, master, saver=None, checkpoint_dir=None, wait_for_checkpoint=False, max_wait_secs=7200, config=None): """Creates a `Session`, recovering if possible. Creates a new session on 'master'. If the session is not initialized and can be recovered from a checkpoint, recover it. Args: master: `String` representation of the TensorFlow master to use. saver: A `Saver` object used to restore a model. checkpoint_dir: Path to the checkpoint files. wait_for_checkpoint: Whether to wait for checkpoint to become available. max_wait_secs: Maximum time to wait for checkpoints to become available. config: Optional `ConfigProto` proto used to configure the session. Returns: A pair (sess, initialized) where 'initialized' is `True` if the session could be recovered, `False` otherwise. """ self._target = master sess = session.Session(self._target, graph=self._graph, config=config) if self._local_init_op: sess.run([self._local_init_op]) # If either saver or checkpoint_dir is not specified, cannot restore. Just # return. if not saver or not checkpoint_dir: not_ready = self._model_not_ready(sess) return sess, not_ready is None # Waits up until max_wait_secs for checkpoint to become available. wait_time = 0 ckpt = saver_mod.get_checkpoint_state(checkpoint_dir) while not ckpt or not ckpt.model_checkpoint_path: if wait_for_checkpoint and wait_time < max_wait_secs: logging.info("Waiting for checkpoint to be available.") time.sleep(self._recovery_wait_secs) wait_time += self._recovery_wait_secs ckpt = saver_mod.get_checkpoint_state(checkpoint_dir) else: return sess, False # Loads the checkpoint and verifies that it makes the model ready. saver.restore(sess, ckpt.model_checkpoint_path) last_checkpoints = [] for fname in ckpt.all_model_checkpoint_paths: fnames = gfile.Glob(fname) if fnames: mtime = gfile.Stat(fnames[0]).mtime last_checkpoints.append((fname, mtime)) saver.set_last_checkpoints_with_time(last_checkpoints) not_ready = self._model_not_ready(sess) if not_ready: logging.info("Restoring model from %s did not make model ready: %s", ckpt.model_checkpoint_path, not_ready) return sess, False else: logging.info("Restored model from %s", ckpt.model_checkpoint_path) return sess, True
[ "def", "recover_session", "(", "self", ",", "master", ",", "saver", "=", "None", ",", "checkpoint_dir", "=", "None", ",", "wait_for_checkpoint", "=", "False", ",", "max_wait_secs", "=", "7200", ",", "config", "=", "None", ")", ":", "self", ".", "_target", "=", "master", "sess", "=", "session", ".", "Session", "(", "self", ".", "_target", ",", "graph", "=", "self", ".", "_graph", ",", "config", "=", "config", ")", "if", "self", ".", "_local_init_op", ":", "sess", ".", "run", "(", "[", "self", ".", "_local_init_op", "]", ")", "# If either saver or checkpoint_dir is not specified, cannot restore. Just", "# return.", "if", "not", "saver", "or", "not", "checkpoint_dir", ":", "not_ready", "=", "self", ".", "_model_not_ready", "(", "sess", ")", "return", "sess", ",", "not_ready", "is", "None", "# Waits up until max_wait_secs for checkpoint to become available.", "wait_time", "=", "0", "ckpt", "=", "saver_mod", ".", "get_checkpoint_state", "(", "checkpoint_dir", ")", "while", "not", "ckpt", "or", "not", "ckpt", ".", "model_checkpoint_path", ":", "if", "wait_for_checkpoint", "and", "wait_time", "<", "max_wait_secs", ":", "logging", ".", "info", "(", "\"Waiting for checkpoint to be available.\"", ")", "time", ".", "sleep", "(", "self", ".", "_recovery_wait_secs", ")", "wait_time", "+=", "self", ".", "_recovery_wait_secs", "ckpt", "=", "saver_mod", ".", "get_checkpoint_state", "(", "checkpoint_dir", ")", "else", ":", "return", "sess", ",", "False", "# Loads the checkpoint and verifies that it makes the model ready.", "saver", ".", "restore", "(", "sess", ",", "ckpt", ".", "model_checkpoint_path", ")", "last_checkpoints", "=", "[", "]", "for", "fname", "in", "ckpt", ".", "all_model_checkpoint_paths", ":", "fnames", "=", "gfile", ".", "Glob", "(", "fname", ")", "if", "fnames", ":", "mtime", "=", "gfile", ".", "Stat", "(", "fnames", "[", "0", "]", ")", ".", "mtime", "last_checkpoints", ".", "append", "(", "(", "fname", ",", "mtime", ")", ")", "saver", ".", "set_last_checkpoints_with_time", "(", "last_checkpoints", ")", "not_ready", "=", "self", ".", "_model_not_ready", "(", "sess", ")", "if", "not_ready", ":", "logging", ".", "info", "(", "\"Restoring model from %s did not make model ready: %s\"", ",", "ckpt", ".", "model_checkpoint_path", ",", "not_ready", ")", "return", "sess", ",", "False", "else", ":", "logging", ".", "info", "(", "\"Restored model from %s\"", ",", "ckpt", ".", "model_checkpoint_path", ")", "return", "sess", ",", "True" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/training/session_manager.py#L180-L239
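A hedged usage sketch for recover_session, written against the TF 0.x-era API this snapshot wraps; the graph, the counter variable, and the checkpoint directory are all hypothetical:

import tensorflow as tf

with tf.Graph().as_default() as graph:
    counter = tf.Variable(0, name='counter')  # hypothetical model state
    saver = tf.train.Saver()
    sm = tf.train.SessionManager(graph=graph)
    # '' targets an in-process master; '/tmp/my_model' is an assumed path.
    sess, initialized = sm.recover_session(
        '', saver=saver, checkpoint_dir='/tmp/my_model')
    if not initialized:
        # No usable checkpoint was found: initialize from scratch instead.
        sess.run(tf.initialize_all_variables())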
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/mixins/rubberband.py
python
normalizeBox
(box)
return (x, y, w, h)
Convert any negative measurements in the current box to positive, and adjust the origin.
Convert any negative measurements in the current box to positive, and adjust the origin.
[ "Convert", "any", "negative", "measurements", "in", "the", "current", "box", "to", "positive", "and", "adjust", "the", "origin", "." ]
def normalizeBox(box): """ Convert any negative measurements in the current box to positive, and adjust the origin. """ x, y, w, h = box if w < 0: x += (w+1) w *= -1 if h < 0: y += (h+1) h *= -1 return (x, y, w, h)
[ "def", "normalizeBox", "(", "box", ")", ":", "x", ",", "y", ",", "w", ",", "h", "=", "box", "if", "w", "<", "0", ":", "x", "+=", "(", "w", "+", "1", ")", "w", "*=", "-", "1", "if", "h", "<", "0", ":", "y", "+=", "(", "h", "+", "1", ")", "h", "*=", "-", "1", "return", "(", "x", ",", "y", ",", "w", ",", "h", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/mixins/rubberband.py#L38-L50
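A worked example for normalizeBox: dragging a rubber band up and to the left yields negative extents, which the function folds back into a positive box with a shifted origin. The +1 in the adjustment keeps the normalized box covering the same pixels as the original:

box = (100, 100, -30, -20)   # dragged from (100, 100) toward the upper left
print(normalizeBox(box))     # -> (71, 81, 30, 20); spans pixels 71..100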
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/html.py
python
HtmlCell.IsBefore
(*args, **kwargs)
return _html.HtmlCell_IsBefore(*args, **kwargs)
IsBefore(self, HtmlCell cell) -> bool
IsBefore(self, HtmlCell cell) -> bool
[ "IsBefore", "(", "self", "HtmlCell", "cell", ")", "-", ">", "bool" ]
def IsBefore(*args, **kwargs): """IsBefore(self, HtmlCell cell) -> bool""" return _html.HtmlCell_IsBefore(*args, **kwargs)
[ "def", "IsBefore", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_html", ".", "HtmlCell_IsBefore", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/html.py#L738-L740
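A hedged sketch for HtmlCell.IsBefore, which reports document order between two cells in the same cell tree. The cell-walking calls (GetInternalRepresentation, GetFirstChild, GetNext) are assumed from wxPython Classic's HTML API, and the page content is hypothetical:

import wx
import wx.html

app = wx.App(False)
frame = wx.Frame(None)
win = wx.html.HtmlWindow(frame)
win.SetPage('<p>one</p><p>two</p>')
root = win.GetInternalRepresentation()  # top-level HtmlContainerCell
first = root.GetFirstChild()
second = first.GetNext()                # next sibling cell in the tree
print(first.IsBefore(second))           # expected True: 'one' precedes 'two'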