Dataset schema (column types and length ranges as reported by the dataset viewer):

| column | type | length / classes |
|---|---|---|
| nwo | string | 5–86 |
| sha | string | 40 |
| path | string | 4–189 |
| language | string | 1 class (value: "python") |
| identifier | string | 1–94 |
| parameters | string | 2–4.03k |
| argument_list | string | 1 class |
| return_statement | string | 0–11.5k |
| docstring | string | 1–33.2k |
| docstring_summary | string | 0–5.15k |
| docstring_tokens | sequence | — |
| function | string | 34–151k |
| function_tokens | sequence | — |
| url | string | 90–278 |

| nwo | sha | path | language | identifier | parameters | argument_list | return_statement | docstring | docstring_summary | docstring_tokens | function | function_tokens | url |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Path/PathScripts/PathWaterline.py | python | ObjectWaterline.opPropertyDefaults | (self, obj, job) | return defaults | opPropertyDefaults(obj, job) ... returns a dictionary
of default values for the operation's properties. | opPropertyDefaults(obj, job) ... returns a dictionary
of default values for the operation's properties. | [
"opPropertyDefaults",
"(",
"obj",
"job",
")",
"...",
"returns",
"a",
"dictionary",
"of",
"default",
"values",
"for",
"the",
"operation",
"s",
"properties",
"."
] | def opPropertyDefaults(self, obj, job):
    """opPropertyDefaults(obj, job) ... returns a dictionary
    of default values for the operation's properties."""
    defaults = {
        "OptimizeLinearPaths": True,
        "InternalFeaturesCut": True,
        "OptimizeStepOverTransitions": False,
        "BoundaryEnforcement": True,
        "UseStartPoint": False,
        "AvoidLastX_InternalFeatures": True,
        "CutPatternReversed": False,
        "IgnoreOuterAbove": obj.StartDepth.Value + 0.00001,
        "StartPoint": FreeCAD.Vector(0.0, 0.0, obj.ClearanceHeight.Value),
        "Algorithm": "OCL Dropcutter",
        "LayerMode": "Single-pass",
        "CutMode": "Conventional",
        "CutPattern": "None",
        "HandleMultipleFeatures": "Collectively",
        "PatternCenterAt": "CenterOfMass",
        "GapSizes": "No gaps identified.",
        "ClearLastLayer": "Off",
        "StepOver": 100.0,
        "CutPatternAngle": 0.0,
        "DepthOffset": 0.0,
        "SampleInterval": 1.0,
        "BoundaryAdjustment": 0.0,
        "InternalFeaturesAdjustment": 0.0,
        "AvoidLastX_Faces": 0,
        "PatternCenterCustom": FreeCAD.Vector(0.0, 0.0, 0.0),
        "GapThreshold": 0.005,
        "AngularDeflection": 0.25,
        "LinearDeflection": 0.0001,
        # For debugging
        "ShowTempObjects": False,
    }

    warn = True
    if hasattr(job, "GeometryTolerance"):
        if job.GeometryTolerance.Value != 0.0:
            warn = False
            defaults["LinearDeflection"] = job.GeometryTolerance.Value
    if warn:
        msg = translate(
            "PathWaterline", "The GeometryTolerance for this Job is 0.0."
        )
        msg += translate(
            "PathWaterline", "Initializing LinearDeflection to 0.0001 mm."
        )
        FreeCAD.Console.PrintWarning(msg + "\n")

    return defaults | [
"def",
"opPropertyDefaults",
"(",
"self",
",",
"obj",
",",
"job",
")",
":",
"defaults",
"=",
"{",
"\"OptimizeLinearPaths\"",
":",
"True",
",",
"\"InternalFeaturesCut\"",
":",
"True",
",",
"\"OptimizeStepOverTransitions\"",
":",
"False",
",",
"\"BoundaryEnforcement\"",
":",
"True",
",",
"\"UseStartPoint\"",
":",
"False",
",",
"\"AvoidLastX_InternalFeatures\"",
":",
"True",
",",
"\"CutPatternReversed\"",
":",
"False",
",",
"\"IgnoreOuterAbove\"",
":",
"obj",
".",
"StartDepth",
".",
"Value",
"+",
"0.00001",
",",
"\"StartPoint\"",
":",
"FreeCAD",
".",
"Vector",
"(",
"0.0",
",",
"0.0",
",",
"obj",
".",
"ClearanceHeight",
".",
"Value",
")",
",",
"\"Algorithm\"",
":",
"\"OCL Dropcutter\"",
",",
"\"LayerMode\"",
":",
"\"Single-pass\"",
",",
"\"CutMode\"",
":",
"\"Conventional\"",
",",
"\"CutPattern\"",
":",
"\"None\"",
",",
"\"HandleMultipleFeatures\"",
":",
"\"Collectively\"",
",",
"\"PatternCenterAt\"",
":",
"\"CenterOfMass\"",
",",
"\"GapSizes\"",
":",
"\"No gaps identified.\"",
",",
"\"ClearLastLayer\"",
":",
"\"Off\"",
",",
"\"StepOver\"",
":",
"100.0",
",",
"\"CutPatternAngle\"",
":",
"0.0",
",",
"\"DepthOffset\"",
":",
"0.0",
",",
"\"SampleInterval\"",
":",
"1.0",
",",
"\"BoundaryAdjustment\"",
":",
"0.0",
",",
"\"InternalFeaturesAdjustment\"",
":",
"0.0",
",",
"\"AvoidLastX_Faces\"",
":",
"0",
",",
"\"PatternCenterCustom\"",
":",
"FreeCAD",
".",
"Vector",
"(",
"0.0",
",",
"0.0",
",",
"0.0",
")",
",",
"\"GapThreshold\"",
":",
"0.005",
",",
"\"AngularDeflection\"",
":",
"0.25",
",",
"\"LinearDeflection\"",
":",
"0.0001",
",",
"# For debugging",
"\"ShowTempObjects\"",
":",
"False",
",",
"}",
"warn",
"=",
"True",
"if",
"hasattr",
"(",
"job",
",",
"\"GeometryTolerance\"",
")",
":",
"if",
"job",
".",
"GeometryTolerance",
".",
"Value",
"!=",
"0.0",
":",
"warn",
"=",
"False",
"defaults",
"[",
"\"LinearDeflection\"",
"]",
"=",
"job",
".",
"GeometryTolerance",
".",
"Value",
"if",
"warn",
":",
"msg",
"=",
"translate",
"(",
"\"PathWaterline\"",
",",
"\"The GeometryTolerance for this Job is 0.0.\"",
")",
"msg",
"+=",
"translate",
"(",
"\"PathWaterline\"",
",",
"\"Initializing LinearDeflection to 0.0001 mm.\"",
")",
"FreeCAD",
".",
"Console",
".",
"PrintWarning",
"(",
"msg",
"+",
"\"\\n\"",
")",
"return",
"defaults"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Path/PathScripts/PathWaterline.py#L464-L514 |
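A minimal sketch of how a defaults table like the one above can be consumed when initializing operation properties; `apply_defaults` and the unset-property check are illustrative assumptions, not part of the FreeCAD record:

```python
# Hypothetical consumer of an opPropertyDefaults()-style dictionary.
def apply_defaults(obj, defaults):
    for name, value in defaults.items():
        # Only fill properties that are still unset on the operation object.
        if getattr(obj, name, None) is None:
            setattr(obj, name, value)
```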
|
google/earthenterprise | 0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9 | earth_enterprise/src/server/wsgi/wms/ogc/common/geom.py | python | Rect.FromLowerLeftAndUpperRight | (xy0, xy1) | return Rect(xy0.x, xy0.y, xy1.x, xy1.y) | Make a Rect from the lower left and upper right corners.
Args:
xy0: One corner (not always lower-left)
xy1: The other, opposite corner.
Returns:
A Rect with the given corners. | Make a Rect from the lower left and upper right corners. | [
"Make",
"a",
"Rect",
"from",
"the",
"lower",
"left",
"and",
"upper",
"right",
"corners",
"."
] | def FromLowerLeftAndUpperRight(xy0, xy1):
    """Make a Rect from the lower left and upper right corners.

    Args:
      xy0: One corner (not always lower-left)
      xy1: The other, opposite corner.
    Returns:
      A Rect with the given corners.
    """
    return Rect(xy0.x, xy0.y, xy1.x, xy1.y) | [
"def",
"FromLowerLeftAndUpperRight",
"(",
"xy0",
",",
"xy1",
")",
":",
"return",
"Rect",
"(",
"xy0",
".",
"x",
",",
"xy0",
".",
"y",
",",
"xy1",
".",
"x",
",",
"xy1",
".",
"y",
")"
] | https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/server/wsgi/wms/ogc/common/geom.py#L92-L101 |
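A brief usage sketch; the two-field point type below is an assumption standing in for whatever coordinate type the `geom` module uses:

```python
from collections import namedtuple

XY = namedtuple("XY", "x y")  # stand-in point type exposing .x / .y
r = Rect.FromLowerLeftAndUpperRight(XY(0.0, 0.0), XY(10.0, 5.0))
# Per the docstring, the corners need not actually be lower-left/upper-right.
```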
|
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/python/training/queue_runner.py | python | QueueRunner.__init__ | (self, queue=None, enqueue_ops=None, close_op=None,
cancel_op=None, queue_runner_def=None) | Create a QueueRunner.
On construction the `QueueRunner` adds an op to close the queue. That op
will be run if the enqueue ops raise exceptions.
When you later call the `create_threads()` method, the `QueueRunner` will
create one thread for each op in `enqueue_ops`. Each thread will run its
enqueue op in parallel with the other threads. The enqueue ops do not have
to all be the same op, but it is expected that they all enqueue tensors in
`queue`.
Args:
queue: A `Queue`.
enqueue_ops: List of enqueue ops to run in threads later.
close_op: Op to close the queue. Pending enqueue ops are preserved.
cancel_op: Op to close the queue and cancel pending enqueue ops.
queue_runner_def: Optional `QueueRunnerDef` protocol buffer. If specified,
recreates the QueueRunner from its contents. `queue_runner_def` and the
other arguments are mutually exclusive.
Raises:
ValueError: If both `queue_runner_def` and `queue` are both specified.
ValueError: If `queue` or `enqueue_ops` are not provided when not
restoring from `queue_runner_def`. | Create a QueueRunner. | [
"Create",
"a",
"QueueRunner",
"."
] | def __init__(self, queue=None, enqueue_ops=None, close_op=None,
             cancel_op=None, queue_runner_def=None):
  """Create a QueueRunner.

  On construction the `QueueRunner` adds an op to close the queue.  That op
  will be run if the enqueue ops raise exceptions.

  When you later call the `create_threads()` method, the `QueueRunner` will
  create one thread for each op in `enqueue_ops`.  Each thread will run its
  enqueue op in parallel with the other threads.  The enqueue ops do not have
  to all be the same op, but it is expected that they all enqueue tensors in
  `queue`.

  Args:
    queue: A `Queue`.
    enqueue_ops: List of enqueue ops to run in threads later.
    close_op: Op to close the queue. Pending enqueue ops are preserved.
    cancel_op: Op to close the queue and cancel pending enqueue ops.
    queue_runner_def: Optional `QueueRunnerDef` protocol buffer. If specified,
      recreates the QueueRunner from its contents. `queue_runner_def` and the
      other arguments are mutually exclusive.

  Raises:
    ValueError: If both `queue_runner_def` and `queue` are specified.
    ValueError: If `queue` or `enqueue_ops` are not provided when not
      restoring from `queue_runner_def`.
  """
  if queue_runner_def:
    if queue or enqueue_ops:
      raise ValueError("queue_runner_def and queue are mutually exclusive.")
    self._init_from_proto(queue_runner_def)
  else:
    self._init_from_args(queue=queue, enqueue_ops=enqueue_ops,
                         close_op=close_op, cancel_op=cancel_op)
  # Protect the count of runs to wait for.
  self._lock = threading.Lock()
  self._runs = 0
  # List of exceptions raised by the running threads.
  self._exceptions_raised = [] | [
"def",
"__init__",
"(",
"self",
",",
"queue",
"=",
"None",
",",
"enqueue_ops",
"=",
"None",
",",
"close_op",
"=",
"None",
",",
"cancel_op",
"=",
"None",
",",
"queue_runner_def",
"=",
"None",
")",
":",
"if",
"queue_runner_def",
":",
"if",
"queue",
"or",
"enqueue_ops",
":",
"raise",
"ValueError",
"(",
"\"queue_runner_def and queue are mutually exclusive.\"",
")",
"self",
".",
"_init_from_proto",
"(",
"queue_runner_def",
")",
"else",
":",
"self",
".",
"_init_from_args",
"(",
"queue",
"=",
"queue",
",",
"enqueue_ops",
"=",
"enqueue_ops",
",",
"close_op",
"=",
"close_op",
",",
"cancel_op",
"=",
"cancel_op",
")",
"# Protect the count of runs to wait for.",
"self",
".",
"_lock",
"=",
"threading",
".",
"Lock",
"(",
")",
"self",
".",
"_runs",
"=",
"0",
"# List of exceptions raised by the running threads.",
"self",
".",
"_exceptions_raised",
"=",
"[",
"]"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/training/queue_runner.py#L46-L84 |
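A legacy TF 1.x graph-mode usage sketch (queue runners were removed in TF 2.x); this assumes the `tf.train.QueueRunner` API that this vendored file implements:

```python
import tensorflow.compat.v1 as tf

tf.disable_eager_execution()
q = tf.FIFOQueue(capacity=32, dtypes=tf.float32)
enqueue_op = q.enqueue(tf.random_uniform([]))
runner = tf.train.QueueRunner(q, [enqueue_op] * 2)  # two enqueue threads
```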
||
turi-code/SFrame | 796b9bdfb2fa1b881d82080754643c7e68629cd2 | oss_src/unity/python/sframe/data_structures/sarray.py | python | SArray._count_ngrams | (self, n=2, method="word", to_lower=True, ignore_space=True) | For documentation, see graphlab.text_analytics.count_ngrams().
..WARNING:: This function is deprecated, and will be removed in future
versions of GraphLab Create. Please use the `text_analytics.count_words`
function instead. | For documentation, see graphlab.text_analytics.count_ngrams(). | [
"For",
"documentation",
"see",
"graphlab",
".",
"text_analytics",
".",
"count_ngrams",
"()",
"."
] | def _count_ngrams(self, n=2, method="word", to_lower=True, ignore_space=True):
    """
    For documentation, see graphlab.text_analytics.count_ngrams().

    ..WARNING:: This function is deprecated, and will be removed in future
    versions of GraphLab Create. Please use the `text_analytics.count_words`
    function instead.
    """
    if (self.dtype() != str):
        raise TypeError("Only SArray of string type is supported for counting n-grams")

    if (type(n) != int):
        raise TypeError("Input 'n' must be of type int")

    if (n < 1):
        raise ValueError("Input 'n' must be greater than 0")

    if (n > 5):
        warnings.warn("It is unusual for n-grams to be of size larger than 5.")

    # construct options, will extend over time
    options = dict()
    options["to_lower"] = to_lower == True
    options["ignore_space"] = ignore_space == True

    if method == "word":
        with cython_context():
            return SArray(_proxy=self.__proxy__.count_ngrams(n, options))
    elif method == "character" :
        with cython_context():
            return SArray(_proxy=self.__proxy__.count_character_ngrams(n, options))
    else:
        raise ValueError("Invalid 'method' input value. Please input either 'word' or 'character' ") | [
"def",
"_count_ngrams",
"(",
"self",
",",
"n",
"=",
"2",
",",
"method",
"=",
"\"word\"",
",",
"to_lower",
"=",
"True",
",",
"ignore_space",
"=",
"True",
")",
":",
"if",
"(",
"self",
".",
"dtype",
"(",
")",
"!=",
"str",
")",
":",
"raise",
"TypeError",
"(",
"\"Only SArray of string type is supported for counting n-grams\"",
")",
"if",
"(",
"type",
"(",
"n",
")",
"!=",
"int",
")",
":",
"raise",
"TypeError",
"(",
"\"Input 'n' must be of type int\"",
")",
"if",
"(",
"n",
"<",
"1",
")",
":",
"raise",
"ValueError",
"(",
"\"Input 'n' must be greater than 0\"",
")",
"if",
"(",
"n",
">",
"5",
")",
":",
"warnings",
".",
"warn",
"(",
"\"It is unusual for n-grams to be of size larger than 5.\"",
")",
"# construct options, will extend over time",
"options",
"=",
"dict",
"(",
")",
"options",
"[",
"\"to_lower\"",
"]",
"=",
"to_lower",
"==",
"True",
"options",
"[",
"\"ignore_space\"",
"]",
"=",
"ignore_space",
"==",
"True",
"if",
"method",
"==",
"\"word\"",
":",
"with",
"cython_context",
"(",
")",
":",
"return",
"SArray",
"(",
"_proxy",
"=",
"self",
".",
"__proxy__",
".",
"count_ngrams",
"(",
"n",
",",
"options",
")",
")",
"elif",
"method",
"==",
"\"character\"",
":",
"with",
"cython_context",
"(",
")",
":",
"return",
"SArray",
"(",
"_proxy",
"=",
"self",
".",
"__proxy__",
".",
"count_character_ngrams",
"(",
"n",
",",
"options",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Invalid 'method' input value. Please input either 'word' or 'character' \"",
")"
] | https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/data_structures/sarray.py#L1534-L1568 |
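A call-pattern sketch for this deprecated helper; the import path is an assumption and a working SFrame installation is required:

```python
from sframe import SArray  # assumed import path for this fork of the library

sa = SArray(["the quick brown fox", "the quick red fox"])
bigrams = sa._count_ngrams(n=2, method="word")
# Each row becomes a count dict such as {"the quick": 1, "quick brown": 1, ...}.
```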
||
miyosuda/TensorFlowAndroidDemo | 35903e0221aa5f109ea2dbef27f20b52e317f42d | jni-build/jni/include/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py | python | average_gradients | (tower_grads) | return average_grads | Calculate the average gradient for each shared variable across all towers.
Note that this function provides a synchronization point across all towers.
Args:
tower_grads: List of lists of (gradient, variable) tuples. The outer list
is over individual gradients. The inner list is over the gradient
calculation for each tower.
Returns:
List of pairs of (gradient, variable) where the gradient has been averaged
across all towers. | Calculate the average gradient for each shared variable across all towers. | [
"Calculate",
"the",
"average",
"gradient",
"for",
"each",
"shared",
"variable",
"across",
"all",
"towers",
"."
] | def average_gradients(tower_grads):
  """Calculate the average gradient for each shared variable across all towers.

  Note that this function provides a synchronization point across all towers.

  Args:
    tower_grads: List of lists of (gradient, variable) tuples. The outer list
      is over individual gradients. The inner list is over the gradient
      calculation for each tower.
  Returns:
    List of pairs of (gradient, variable) where the gradient has been averaged
    across all towers.
  """
  average_grads = []
  for grad_and_vars in zip(*tower_grads):
    # Note that each grad_and_vars looks like the following:
    #   ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
    grads = []
    for g, _ in grad_and_vars:
      # Add 0 dimension to the gradients to represent the tower.
      expanded_g = tf.expand_dims(g, 0)

      # Append on a 'tower' dimension which we will average over below.
      grads.append(expanded_g)

    # Average over the 'tower' dimension.
    grad = tf.concat(0, grads)
    grad = tf.reduce_mean(grad, 0)

    # Keep in mind that the Variables are redundant because they are shared
    # across towers. So .. we will just return the first tower's pointer to
    # the Variable.
    v = grad_and_vars[0][1]
    grad_and_var = (grad, v)
    average_grads.append(grad_and_var)
  return average_grads | [
"def",
"average_gradients",
"(",
"tower_grads",
")",
":",
"average_grads",
"=",
"[",
"]",
"for",
"grad_and_vars",
"in",
"zip",
"(",
"*",
"tower_grads",
")",
":",
"# Note that each grad_and_vars looks like the following:",
"# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))",
"grads",
"=",
"[",
"]",
"for",
"g",
",",
"_",
"in",
"grad_and_vars",
":",
"# Add 0 dimension to the gradients to represent the tower.",
"expanded_g",
"=",
"tf",
".",
"expand_dims",
"(",
"g",
",",
"0",
")",
"# Append on a 'tower' dimension which we will average over below.",
"grads",
".",
"append",
"(",
"expanded_g",
")",
"# Average over the 'tower' dimension.",
"grad",
"=",
"tf",
".",
"concat",
"(",
"0",
",",
"grads",
")",
"grad",
"=",
"tf",
".",
"reduce_mean",
"(",
"grad",
",",
"0",
")",
"# Keep in mind that the Variables are redundant because they are shared",
"# across towers. So .. we will just return the first tower's pointer to",
"# the Variable.",
"v",
"=",
"grad_and_vars",
"[",
"0",
"]",
"[",
"1",
"]",
"grad_and_var",
"=",
"(",
"grad",
",",
"v",
")",
"average_grads",
".",
"append",
"(",
"grad_and_var",
")",
"return",
"average_grads"
] | https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/models/image/cifar10/cifar10_multi_gpu_train.py#L110-L145 |
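A shape-level sketch of the expected input; the gradient/variable names are placeholders. Note that `tf.concat(0, grads)` is the pre-1.0 TensorFlow argument order; modern TensorFlow writes `tf.concat(grads, axis=0)`:

```python
# tower_grads for one shared variable across two GPUs (names illustrative):
tower_grads = [
    [(grad0_gpu0, var0)],  # tower 0: one (gradient, variable) pair
    [(grad0_gpu1, var0)],  # tower 1: same variable, its own gradient
]
averaged = average_gradients(tower_grads)  # -> [(mean_of_grads, var0)]
```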
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/ttk.py | python | Treeview.identify_column | (self, x) | return self.identify("column", x, 0) | Returns the data column identifier of the cell at position x.
The tree column has ID #0. | Returns the data column identifier of the cell at position x. | [
"Returns",
"the",
"data",
"column",
"identifier",
"of",
"the",
"cell",
"at",
"position",
"x",
"."
] | def identify_column(self, x):
    """Returns the data column identifier of the cell at position x.

    The tree column has ID #0."""
    return self.identify("column", x, 0) | [
"def",
"identify_column",
"(",
"self",
",",
"x",
")",
":",
"return",
"self",
".",
"identify",
"(",
"\"column\"",
",",
"x",
",",
"0",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/ttk.py#L1321-L1325 |
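A short usage sketch with the standard-library Tk API (widget layout kept minimal):

```python
import tkinter as tk
from tkinter import ttk

root = tk.Tk()
tree = ttk.Treeview(root, columns=("size",))
tree.pack()
# Prints e.g. "#0" or "#1" depending on which column was clicked.
tree.bind("<Button-1>", lambda e: print(tree.identify_column(e.x)))
root.mainloop()
```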
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/logging/__init__.py | python | setLoggerClass | (klass) | Set the class to be used when instantiating a logger. The class should
define __init__() such that only a name argument is required, and the
__init__() should call Logger.__init__() | Set the class to be used when instantiating a logger. The class should
define __init__() such that only a name argument is required, and the
__init__() should call Logger.__init__() | [
"Set",
"the",
"class",
"to",
"be",
"used",
"when",
"instantiating",
"a",
"logger",
".",
"The",
"class",
"should",
"define",
"__init__",
"()",
"such",
"that",
"only",
"a",
"name",
"argument",
"is",
"required",
"and",
"the",
"__init__",
"()",
"should",
"call",
"Logger",
".",
"__init__",
"()"
] | def setLoggerClass(klass):
    """
    Set the class to be used when instantiating a logger. The class should
    define __init__() such that only a name argument is required, and the
    __init__() should call Logger.__init__()
    """
    if klass != Logger:
        if not issubclass(klass, Logger):
            raise TypeError("logger not derived from logging.Logger: "
                            + klass.__name__)
    global _loggerClass
    _loggerClass = klass | [
"def",
"setLoggerClass",
"(",
"klass",
")",
":",
"if",
"klass",
"!=",
"Logger",
":",
"if",
"not",
"issubclass",
"(",
"klass",
",",
"Logger",
")",
":",
"raise",
"TypeError",
"(",
"\"logger not derived from logging.Logger: \"",
"+",
"klass",
".",
"__name__",
")",
"global",
"_loggerClass",
"_loggerClass",
"=",
"klass"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/logging/__init__.py#L972-L983 |
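Usage with the real standard-library `logging` module (shown in Python 3 form for brevity; the Python 2.7 original would call `logging.Logger.__init__(self, name)` directly):

```python
import logging

class AppLogger(logging.Logger):
    def __init__(self, name):
        super().__init__(name)  # only a name argument, per the contract above

logging.setLoggerClass(AppLogger)
logger = logging.getLogger("app")  # now an AppLogger instance
```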
||
rapidsai/cudf | d5b2448fc69f17509304d594f029d0df56984962 | python/cudf/cudf/core/frame.py | python | Frame.mod | (self, other, axis, level=None, fill_value=None) | return self._binaryop(other, "mod", fill_value) | Get Modulo division of dataframe or series and other, element-wise
(binary operator `mod`).
Equivalent to ``frame % other``, but with support to substitute a
fill_value for missing data in one of the inputs. With reverse
version, `rmod`.
Parameters
----------
other : scalar, sequence, Series, or DataFrame
Any single or multiple element data structure, or list-like object.
axis : int or string
Only ``0`` is supported for series, ``1`` or ``columns`` supported
for dataframe
fill_value : float or None, default None
Fill existing missing (NaN) values, and any new element needed
for successful DataFrame alignment, with this value before
computation. If data in both corresponding DataFrame locations
is missing the result will be missing.
Returns
-------
DataFrame or Series
Result of the arithmetic operation.
Examples
--------
**DataFrame**
>>> import cudf
>>> df = cudf.DataFrame({'angles': [0, 3, 4],
... 'degrees': [360, 180, 360]},
... index=['circle', 'triangle', 'rectangle'])
>>> df % 100
angles degrees
circle 0 60
triangle 3 80
rectangle 4 60
>>> df.mod(100)
angles degrees
circle 0 60
triangle 3 80
rectangle 4 60
**Series**
>>> import cudf
>>> series = cudf.Series([10, 20, 30])
>>> series
0 10
1 20
2 30
dtype: int64
>>> series.mod(4)
0 2
1 0
2 2
dtype: int64 | Get Modulo division of dataframe or series and other, element-wise
(binary operator `mod`). | [
"Get",
"Modulo",
"division",
"of",
"dataframe",
"or",
"series",
"and",
"other",
"element",
"-",
"wise",
"(",
"binary",
"operator",
"mod",
")",
"."
] | def mod(self, other, axis, level=None, fill_value=None):
    """
    Get Modulo division of dataframe or series and other, element-wise
    (binary operator `mod`).

    Equivalent to ``frame % other``, but with support to substitute a
    fill_value for missing data in one of the inputs. With reverse
    version, `rmod`.

    Parameters
    ----------
    other : scalar, sequence, Series, or DataFrame
        Any single or multiple element data structure, or list-like object.
    axis : int or string
        Only ``0`` is supported for series, ``1`` or ``columns`` supported
        for dataframe
    fill_value : float or None, default None
        Fill existing missing (NaN) values, and any new element needed
        for successful DataFrame alignment, with this value before
        computation. If data in both corresponding DataFrame locations
        is missing the result will be missing.

    Returns
    -------
    DataFrame or Series
        Result of the arithmetic operation.

    Examples
    --------
    **DataFrame**

    >>> import cudf
    >>> df = cudf.DataFrame({'angles': [0, 3, 4],
    ...                      'degrees': [360, 180, 360]},
    ...                     index=['circle', 'triangle', 'rectangle'])
    >>> df % 100
               angles  degrees
    circle          0       60
    triangle        3       80
    rectangle       4       60
    >>> df.mod(100)
               angles  degrees
    circle          0       60
    triangle        3       80
    rectangle       4       60

    **Series**

    >>> import cudf
    >>> series = cudf.Series([10, 20, 30])
    >>> series
    0    10
    1    20
    2    30
    dtype: int64
    >>> series.mod(4)
    0    2
    1    0
    2    2
    dtype: int64
    """
    if level is not None:
        raise NotImplementedError("level parameter is not supported yet.")

    return self._binaryop(other, "mod", fill_value) | [
"def",
"mod",
"(",
"self",
",",
"other",
",",
"axis",
",",
"level",
"=",
"None",
",",
"fill_value",
"=",
"None",
")",
":",
"if",
"level",
"is",
"not",
"None",
":",
"raise",
"NotImplementedError",
"(",
"\"level parameter is not supported yet.\"",
")",
"return",
"self",
".",
"_binaryop",
"(",
"other",
",",
"\"mod\"",
",",
"fill_value",
")"
] | https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/frame.py#L5341-L5409 |
|
CNugteren/CLBlast | 4500a03440e2cc54998c0edab366babf5e504d67 | scripts/generator/generator/routine.py | python | Routine.arguments_def_netlib | (self, flavour) | return result | As above, but for the Netlib CBLAS API | As above, but for the Netlib CBLAS API | [
"As",
"above",
"but",
"for",
"the",
"Netlib",
"CBLAS",
"API"
] | def arguments_def_netlib(self, flavour):
    """As above, but for the Netlib CBLAS API"""
    result=(self.options_def_c() + self.sizes_def_netlib() +
            self.scalar_def_void("alpha", flavour) +
            list(chain(*[self.buffer_def_pointer(b, flavour) for b in self.buffers_first()])) +
            self.scalar_def_void("beta", flavour) +
            list(chain(*[self.buffer_def_pointer(b, flavour) for b in self.buffers_second()])) +
            list(chain(*[self.buffer_def_pointer(b, flavour) for b in self.scalar_buffers_second()])) +
            list(chain(*[self.scalar_def(s, flavour) for s in self.other_scalars()])))
    if self.name in self.routines_scalar_no_return():
        result += list(chain(*[self.buffer_def_pointer(b, flavour) for b in self.scalar_buffers_first()]))
    result += self.batch_count_def()
    return result | [
"def",
"arguments_def_netlib",
"(",
"self",
",",
"flavour",
")",
":",
"result",
"=",
"(",
"self",
".",
"options_def_c",
"(",
")",
"+",
"self",
".",
"sizes_def_netlib",
"(",
")",
"+",
"self",
".",
"scalar_def_void",
"(",
"\"alpha\"",
",",
"flavour",
")",
"+",
"list",
"(",
"chain",
"(",
"*",
"[",
"self",
".",
"buffer_def_pointer",
"(",
"b",
",",
"flavour",
")",
"for",
"b",
"in",
"self",
".",
"buffers_first",
"(",
")",
"]",
")",
")",
"+",
"self",
".",
"scalar_def_void",
"(",
"\"beta\"",
",",
"flavour",
")",
"+",
"list",
"(",
"chain",
"(",
"*",
"[",
"self",
".",
"buffer_def_pointer",
"(",
"b",
",",
"flavour",
")",
"for",
"b",
"in",
"self",
".",
"buffers_second",
"(",
")",
"]",
")",
")",
"+",
"list",
"(",
"chain",
"(",
"*",
"[",
"self",
".",
"buffer_def_pointer",
"(",
"b",
",",
"flavour",
")",
"for",
"b",
"in",
"self",
".",
"scalar_buffers_second",
"(",
")",
"]",
")",
")",
"+",
"list",
"(",
"chain",
"(",
"*",
"[",
"self",
".",
"scalar_def",
"(",
"s",
",",
"flavour",
")",
"for",
"s",
"in",
"self",
".",
"other_scalars",
"(",
")",
"]",
")",
")",
")",
"if",
"self",
".",
"name",
"in",
"self",
".",
"routines_scalar_no_return",
"(",
")",
":",
"result",
"+=",
"list",
"(",
"chain",
"(",
"*",
"[",
"self",
".",
"buffer_def_pointer",
"(",
"b",
",",
"flavour",
")",
"for",
"b",
"in",
"self",
".",
"scalar_buffers_first",
"(",
")",
"]",
")",
")",
"result",
"+=",
"self",
".",
"batch_count_def",
"(",
")",
"return",
"result"
] | https://github.com/CNugteren/CLBlast/blob/4500a03440e2cc54998c0edab366babf5e504d67/scripts/generator/generator/routine.py#L741-L753 |
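The repeated `list(chain(*[...]))` construct flattens a list of per-argument string lists into one flat list; a standalone illustration with made-up argument strings:

```python
from itertools import chain

groups = [["const float alpha"], ["const float* a", "const int a_ld"]]
flat = list(chain(*groups))
# -> ['const float alpha', 'const float* a', 'const int a_ld']
```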
|
tomahawk-player/tomahawk-resolvers | 7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d | archive/spotify/breakpad/third_party/protobuf/protobuf/python/mox.py | python | MockObject.__init__ | (self, class_to_mock) | Initialize a mock object.
This determines the methods and properties of the class and stores them.
Args:
# class_to_mock: class to be mocked
class_to_mock: class | Initialize a mock object. | [
"Initialize",
"a",
"mock",
"object",
"."
] | def __init__(self, class_to_mock):
  """Initialize a mock object.

  This determines the methods and properties of the class and stores them.

  Args:
    # class_to_mock: class to be mocked
    class_to_mock: class
  """

  # This is used to hack around the mixin/inheritance of MockAnything, which
  # is not a proper object (it can be anything. :-)
  MockAnything.__dict__['__init__'](self)

  # Get a list of all the public and special methods we should mock.
  self._known_methods = set()
  self._known_vars = set()
  self._class_to_mock = class_to_mock
  for method in dir(class_to_mock):
    if callable(getattr(class_to_mock, method)):
      self._known_methods.add(method)
    else:
      self._known_vars.add(method) | [
"def",
"__init__",
"(",
"self",
",",
"class_to_mock",
")",
":",
"# This is used to hack around the mixin/inheritance of MockAnything, which",
"# is not a proper object (it can be anything. :-)",
"MockAnything",
".",
"__dict__",
"[",
"'__init__'",
"]",
"(",
"self",
")",
"# Get a list of all the public and special methods we should mock.",
"self",
".",
"_known_methods",
"=",
"set",
"(",
")",
"self",
".",
"_known_vars",
"=",
"set",
"(",
")",
"self",
".",
"_class_to_mock",
"=",
"class_to_mock",
"for",
"method",
"in",
"dir",
"(",
"class_to_mock",
")",
":",
"if",
"callable",
"(",
"getattr",
"(",
"class_to_mock",
",",
"method",
")",
")",
":",
"self",
".",
"_known_methods",
".",
"add",
"(",
"method",
")",
"else",
":",
"self",
".",
"_known_vars",
".",
"add",
"(",
"method",
")"
] | https://github.com/tomahawk-player/tomahawk-resolvers/blob/7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d/archive/spotify/breakpad/third_party/protobuf/protobuf/python/mox.py#L362-L384 |
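A minimal illustration of what the constructor records; `Dog` is a made-up target class:

```python
class Dog(object):
    legs = 4          # not callable -> recorded in _known_vars

    def bark(self):   # callable -> recorded in _known_methods
        pass

mock = MockObject(Dog)
assert "bark" in mock._known_methods and "legs" in mock._known_vars
```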
||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/auto_bisect/bisect_perf_regression.py | python | BisectPerformanceMetrics.CheckIfRevisionsInProperOrder | (
self, target_depot, good_revision, bad_revision) | return good_position <= bad_position | Checks that |good_revision| is an earlier revision than |bad_revision|.
Args:
good_revision: Number/tag of the known good revision.
bad_revision: Number/tag of the known bad revision.
Returns:
True if the revisions are in the proper order (good earlier than bad). | Checks that |good_revision| is an earlier revision than |bad_revision|. | [
"Checks",
"that",
"|good_revision|",
"is",
"an",
"earlier",
"revision",
"than",
"|bad_revision|",
"."
] | def CheckIfRevisionsInProperOrder(
    self, target_depot, good_revision, bad_revision):
  """Checks that |good_revision| is an earlier revision than |bad_revision|.

  Args:
    good_revision: Number/tag of the known good revision.
    bad_revision: Number/tag of the known bad revision.

  Returns:
    True if the revisions are in the proper order (good earlier than bad).
  """
  cwd = self.depot_registry.GetDepotDir(target_depot)
  good_position = source_control.GetCommitPosition(good_revision, cwd)
  bad_position = source_control.GetCommitPosition(bad_revision, cwd)
  # Compare commit timestamp for repos that don't support commit position.
  if not (bad_position and good_position):
    logging.info('Could not get commit positions for revisions %s and %s in '
                 'depot %s', good_position, bad_position, target_depot)
    good_position = source_control.GetCommitTime(good_revision, cwd=cwd)
    bad_position = source_control.GetCommitTime(bad_revision, cwd=cwd)
  return good_position <= bad_position | [
"def",
"CheckIfRevisionsInProperOrder",
"(",
"self",
",",
"target_depot",
",",
"good_revision",
",",
"bad_revision",
")",
":",
"cwd",
"=",
"self",
".",
"depot_registry",
".",
"GetDepotDir",
"(",
"target_depot",
")",
"good_position",
"=",
"source_control",
".",
"GetCommitPosition",
"(",
"good_revision",
",",
"cwd",
")",
"bad_position",
"=",
"source_control",
".",
"GetCommitPosition",
"(",
"bad_revision",
",",
"cwd",
")",
"# Compare commit timestamp for repos that don't support commit position.",
"if",
"not",
"(",
"bad_position",
"and",
"good_position",
")",
":",
"logging",
".",
"info",
"(",
"'Could not get commit positions for revisions %s and %s in '",
"'depot %s'",
",",
"good_position",
",",
"bad_position",
",",
"target_depot",
")",
"good_position",
"=",
"source_control",
".",
"GetCommitTime",
"(",
"good_revision",
",",
"cwd",
"=",
"cwd",
")",
"bad_position",
"=",
"source_control",
".",
"GetCommitTime",
"(",
"bad_revision",
",",
"cwd",
"=",
"cwd",
")",
"return",
"good_position",
"<=",
"bad_position"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/auto_bisect/bisect_perf_regression.py#L1974-L1995 |
|
jolibrain/deepdetect | 9bc840f0b1055426670d64b5285701d6faceabb9 | demo/imgsearch/dd_client.py | python | LOG | (msg) | Output a log message. | Output a log message. | [
"Output",
"a",
"log",
"message",
"."
] | def LOG(msg):
    """Output a log message."""
    # XXX: may want to use python log manager classes instead of this stupid print
    if VERBOSE:
        msg = str(datetime.datetime.now()) + ' ' + msg
        print (msg) | [
"def",
"LOG",
"(",
"msg",
")",
":",
"# XXX: may want to use python log manager classes instead of this stupid print",
"if",
"VERBOSE",
":",
"msg",
"=",
"str",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
")",
"+",
"' '",
"+",
"msg",
"print",
"(",
"msg",
")"
] | https://github.com/jolibrain/deepdetect/blob/9bc840f0b1055426670d64b5285701d6faceabb9/demo/imgsearch/dd_client.py#L35-L40 |
||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tix.py | python | TixWidget._subwidget_name | (self,name) | Get a subwidget name (returns a String, not a Widget !) | Get a subwidget name (returns a String, not a Widget !) | [
"Get",
"a",
"subwidget",
"name",
"(",
"returns",
"a",
"String",
"not",
"a",
"Widget",
"!",
")"
] | def _subwidget_name(self,name):
    """Get a subwidget name (returns a String, not a Widget !)"""
    try:
        return self.tk.call(self._w, 'subwidget', name)
    except TclError:
        return None | [
"def",
"_subwidget_name",
"(",
"self",
",",
"name",
")",
":",
"try",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'subwidget'",
",",
"name",
")",
"except",
"TclError",
":",
"return",
"None"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tix.py#L372-L377 |
||
Tencent/CMONGO | c40380caa14e05509f46993aa8b8da966b09b0b5 | src/third_party/mozjs-38/extract/js/src/jit/arm/gen-double-encoder-table.py | python | encodeDouble | (value) | return (a << 31) | (B << 30) | (rep(b, 8) << 22) | cdefgh << 16 | Generate an ARM ARM 'VFP modified immediate constant' with format:
aBbbbbbb bbcdefgh 000...
We will return the top 32 bits of the double; the rest are 0. | Generate an ARM ARM 'VFP modified immediate constant' with format:
aBbbbbbb bbcdefgh 000... | [
"Generate",
"an",
"ARM",
"ARM",
"VFP",
"modified",
"immediate",
"constant",
"with",
"format",
":",
"aBbbbbbb",
"bbcdefgh",
"000",
"..."
] | def encodeDouble(value):
    """Generate an ARM ARM 'VFP modified immediate constant' with format:
    aBbbbbbb bbcdefgh 000...
    We will return the top 32 bits of the double; the rest are 0."""
    assert (0 <= value) and (value <= 255)
    a = value >> 7
    b = (value >> 6) & 1
    B = int(b == 0)
    cdefgh = value & 0x3f
    return (a << 31) | (B << 30) | (rep(b, 8) << 22) | cdefgh << 16 | [
"def",
"encodeDouble",
"(",
"value",
")",
":",
"assert",
"(",
"0",
"<=",
"value",
")",
"and",
"(",
"value",
"<=",
"255",
")",
"a",
"=",
"value",
">>",
"7",
"b",
"=",
"(",
"value",
">>",
"6",
")",
"&",
"1",
"B",
"=",
"int",
"(",
"b",
"==",
"0",
")",
"cdefgh",
"=",
"value",
"&",
"0x3f",
"return",
"(",
"a",
"<<",
"31",
")",
"|",
"(",
"B",
"<<",
"30",
")",
"|",
"(",
"rep",
"(",
"b",
",",
"8",
")",
"<<",
"22",
")",
"|",
"cdefgh",
"<<",
"16"
] | https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/mozjs-38/extract/js/src/jit/arm/gen-double-encoder-table.py#L18-L28 |
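A worked example; `rep` is reproduced here under the assumption that, as in the generator script, it repeats a bit n times:

```python
def rep(bit, count):
    # assumed behavior of the script's helper: repeat `bit` `count` times
    return (1 << count) - 1 if bit else 0

# value 112 = 0b01110000: a=0, b=1 (so B=0), rep(b, 8)=0xff, cdefgh=0b110000
print(hex(encodeDouble(112)))  # 0x3ff00000 — the top word of the double 1.0
```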
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/_abcoll.py | python | Mapping.itervalues | (self) | D.itervalues() -> an iterator over the values of D | D.itervalues() -> an iterator over the values of D | [
"D",
".",
"itervalues",
"()",
"-",
">",
"an",
"iterator",
"over",
"the",
"values",
"of",
"D"
] | def itervalues(self):
    'D.itervalues() -> an iterator over the values of D'
    for key in self:
        yield self[key] | [
"def",
"itervalues",
"(",
"self",
")",
":",
"for",
"key",
"in",
"self",
":",
"yield",
"self",
"[",
"key",
"]"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/_abcoll.py#L398-L401 |
||
kushview/Element | 1cc16380caa2ab79461246ba758b9de1f46db2a5 | waflib/extras/doxygen.py | python | configure | (conf) | Check if doxygen and tar commands are present in the system
If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
variables will be set. Detection can be controlled by setting DOXYGEN and
TAR environmental variables. | Check if doxygen and tar commands are present in the system | [
"Check",
"if",
"doxygen",
"and",
"tar",
"commands",
"are",
"present",
"in",
"the",
"system"
] | def configure(conf):
    '''
    Check if doxygen and tar commands are present in the system

    If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
    variables will be set. Detection can be controlled by setting DOXYGEN and
    TAR environmental variables.
    '''
    conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
    conf.find_program('tar', var='TAR', mandatory=False) | [
"def",
"configure",
"(",
"conf",
")",
":",
"conf",
".",
"find_program",
"(",
"'doxygen'",
",",
"var",
"=",
"'DOXYGEN'",
",",
"mandatory",
"=",
"False",
")",
"conf",
".",
"find_program",
"(",
"'tar'",
",",
"var",
"=",
"'TAR'",
",",
"mandatory",
"=",
"False",
")"
] | https://github.com/kushview/Element/blob/1cc16380caa2ab79461246ba758b9de1f46db2a5/waflib/extras/doxygen.py#L226-L236 |
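How a project would typically pull this tool in from its build script, following standard waf conventions (a sketch):

```python
# wscript (illustrative)
def configure(conf):
    conf.load('doxygen')  # runs the configure() above; sets conf.env.DOXYGEN / conf.env.TAR
```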
||
Tencent/CMONGO | c40380caa14e05509f46993aa8b8da966b09b0b5 | src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Node/FS.py | python | Dir._create | (self) | Create this directory, silently and without worrying about
whether the builder is the default or not. | Create this directory, silently and without worrying about
whether the builder is the default or not. | [
"Create",
"this",
"directory",
"silently",
"and",
"without",
"worrying",
"about",
"whether",
"the",
"builder",
"is",
"the",
"default",
"or",
"not",
"."
] | def _create(self):
    """Create this directory, silently and without worrying about
    whether the builder is the default or not."""
    listDirs = []
    parent = self
    while parent:
        if parent.exists():
            break
        listDirs.append(parent)
        p = parent.up()
        if p is None:
            # Don't use while: - else: for this condition because
            # if so, then parent is None and has no .path attribute.
            raise SCons.Errors.StopError(parent._path)
        parent = p
    listDirs.reverse()
    for dirnode in listDirs:
        try:
            # Don't call dirnode.build(), call the base Node method
            # directly because we definitely *must* create this
            # directory. The dirnode.build() method will suppress
            # the build if it's the default builder.
            SCons.Node.Node.build(dirnode)
            dirnode.get_executor().nullify()
            # The build() action may or may not have actually
            # created the directory, depending on whether the -n
            # option was used or not. Delete the _exists and
            # _rexists attributes so they can be reevaluated.
            dirnode.clear()
        except OSError:
            pass | [
"def",
"_create",
"(",
"self",
")",
":",
"listDirs",
"=",
"[",
"]",
"parent",
"=",
"self",
"while",
"parent",
":",
"if",
"parent",
".",
"exists",
"(",
")",
":",
"break",
"listDirs",
".",
"append",
"(",
"parent",
")",
"p",
"=",
"parent",
".",
"up",
"(",
")",
"if",
"p",
"is",
"None",
":",
"# Don't use while: - else: for this condition because",
"# if so, then parent is None and has no .path attribute.",
"raise",
"SCons",
".",
"Errors",
".",
"StopError",
"(",
"parent",
".",
"_path",
")",
"parent",
"=",
"p",
"listDirs",
".",
"reverse",
"(",
")",
"for",
"dirnode",
"in",
"listDirs",
":",
"try",
":",
"# Don't call dirnode.build(), call the base Node method",
"# directly because we definitely *must* create this",
"# directory. The dirnode.build() method will suppress",
"# the build if it's the default builder.",
"SCons",
".",
"Node",
".",
"Node",
".",
"build",
"(",
"dirnode",
")",
"dirnode",
".",
"get_executor",
"(",
")",
".",
"nullify",
"(",
")",
"# The build() action may or may not have actually",
"# created the directory, depending on whether the -n",
"# option was used or not. Delete the _exists and",
"# _rexists attributes so they can be reevaluated.",
"dirnode",
".",
"clear",
"(",
")",
"except",
"OSError",
":",
"pass"
] | https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Node/FS.py#L1807-L1837 |
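The same collect-missing-ancestors-then-create-outermost-first pattern, restated with the standard library purely for illustration (the SCons-specific build/executor handling is omitted):

```python
import os

def create_with_ancestors(path):
    missing = []
    cur = os.path.abspath(path)
    while cur and not os.path.exists(cur):
        missing.append(cur)
        parent = os.path.dirname(cur)
        if parent == cur:          # reached the filesystem root
            break
        cur = parent
    for d in reversed(missing):    # outermost first, like listDirs.reverse()
        os.mkdir(d)
```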
||
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/framework/traceable_stack.py | python | TraceableObject.set_filename_and_line_from_caller | (self, offset=0) | Set filename and line using the caller's stack frame.
If the requested stack information is not available, a heuristic may
be applied and self.HEURISTIC USED will be returned. If the heuristic
fails then no change will be made to the filename and lineno members
(None by default) and self.FAILURE will be returned.
Args:
offset: Integer. If 0, the caller's stack frame is used. If 1,
the caller's caller's stack frame is used. Larger values are
permissible but if out-of-range (larger than the number of stack
frames available) the outermost stack frame will be used.
Returns:
TraceableObject.SUCCESS if appropriate stack information was found,
TraceableObject.HEURISTIC_USED if the offset was larger than the stack,
and TraceableObject.FAILURE if the stack was empty. | Set filename and line using the caller's stack frame. | [
"Set",
"filename",
"and",
"line",
"using",
"the",
"caller",
"s",
"stack",
"frame",
"."
] | def set_filename_and_line_from_caller(self, offset=0):
  """Set filename and line using the caller's stack frame.

  If the requested stack information is not available, a heuristic may
  be applied and self.HEURISTIC_USED will be returned.  If the heuristic
  fails then no change will be made to the filename and lineno members
  (None by default) and self.FAILURE will be returned.

  Args:
    offset: Integer.  If 0, the caller's stack frame is used.  If 1,
      the caller's caller's stack frame is used.  Larger values are
      permissible but if out-of-range (larger than the number of stack
      frames available) the outermost stack frame will be used.

  Returns:
    TraceableObject.SUCCESS if appropriate stack information was found,
    TraceableObject.HEURISTIC_USED if the offset was larger than the stack,
    and TraceableObject.FAILURE if the stack was empty.
  """
  # Offset is defined in "Args" as relative to the caller. We are one frame
  # beyond the caller.
  local_offset = offset + 1

  frame_records = tf_stack.extract_stack(
      limit=local_offset + 1)
  if not frame_records:
    return self.FAILURE

  if len(frame_records) > local_offset:
    frame = frame_records[len(frame_records) - (local_offset + 1)]
    self.filename = frame.filename
    self.lineno = frame.lineno
    return self.SUCCESS
  else:
    # If the offset is too large then we use the largest offset possible,
    # meaning we use the outermost stack frame at index 0.
    frame = frame_records[0]
    self.filename = frame.filename
    self.lineno = frame.lineno
    return self.HEURISTIC_USED | [
"def",
"set_filename_and_line_from_caller",
"(",
"self",
",",
"offset",
"=",
"0",
")",
":",
"# Offset is defined in \"Args\" as relative to the caller. We are one frame",
"# beyond the caller.",
"local_offset",
"=",
"offset",
"+",
"1",
"frame_records",
"=",
"tf_stack",
".",
"extract_stack",
"(",
"limit",
"=",
"local_offset",
"+",
"1",
")",
"if",
"not",
"frame_records",
":",
"return",
"self",
".",
"FAILURE",
"if",
"len",
"(",
"frame_records",
")",
">",
"local_offset",
":",
"frame",
"=",
"frame_records",
"[",
"len",
"(",
"frame_records",
")",
"-",
"(",
"local_offset",
"+",
"1",
")",
"]",
"self",
".",
"filename",
"=",
"frame",
".",
"filename",
"self",
".",
"lineno",
"=",
"frame",
".",
"lineno",
"return",
"self",
".",
"SUCCESS",
"else",
":",
"# If the offset is too large then we use the largest offset possible,",
"# meaning we use the outermost stack frame at index 0.",
"frame",
"=",
"frame_records",
"[",
"0",
"]",
"self",
".",
"filename",
"=",
"frame",
".",
"filename",
"self",
".",
"lineno",
"=",
"frame",
".",
"lineno",
"return",
"self",
".",
"HEURISTIC_USED"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/framework/traceable_stack.py#L35-L73 |
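A call-site sketch; the `TraceableObject` constructor is assumed here to take the wrapped payload as its first argument:

```python
t = TraceableObject(None)                       # wraps some payload object
status = t.set_filename_and_line_from_caller()  # records this call site
if status is not TraceableObject.FAILURE:
    print(t.filename, t.lineno)
```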
||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/training/rmsprop.py | python | RMSPropOptimizer.__init__ | (self,
learning_rate,
decay=0.9,
momentum=0.0,
epsilon=1e-10,
use_locking=False,
centered=False,
name="RMSProp") | Construct a new RMSProp optimizer.
Note that in the dense implementation of this algorithm, variables and their
corresponding accumulators (momentum, gradient moving average, square
gradient moving average) will be updated even if the gradient is zero
(i.e. accumulators will decay, momentum will be applied). The sparse
implementation (used when the gradient is an `IndexedSlices` object,
typically because of `tf.gather` or an embedding lookup in the forward pass)
will not update variable slices or their accumulators unless those slices
were used in the forward pass (nor is there an "eventual" correction to
account for these omitted updates). This leads to more efficient updates for
large embedding lookup tables (where most of the slices are not accessed in
a particular graph execution), but differs from the published algorithm.
Args:
learning_rate: A Tensor or a floating point value. The learning rate.
decay: Discounting factor for the history/coming gradient
momentum: A scalar tensor.
epsilon: Small value to avoid zero denominator.
use_locking: If True use locks for update operation.
centered: If True, gradients are normalized by the estimated variance of
the gradient; if False, by the uncentered second moment. Setting this to
True may help with training, but is slightly more expensive in terms of
computation and memory. Defaults to False.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "RMSProp". | Construct a new RMSProp optimizer. | [
"Construct",
"a",
"new",
"RMSProp",
"optimizer",
"."
] | def __init__(self,
             learning_rate,
             decay=0.9,
             momentum=0.0,
             epsilon=1e-10,
             use_locking=False,
             centered=False,
             name="RMSProp"):
  """Construct a new RMSProp optimizer.

  Note that in the dense implementation of this algorithm, variables and their
  corresponding accumulators (momentum, gradient moving average, square
  gradient moving average) will be updated even if the gradient is zero
  (i.e. accumulators will decay, momentum will be applied). The sparse
  implementation (used when the gradient is an `IndexedSlices` object,
  typically because of `tf.gather` or an embedding lookup in the forward pass)
  will not update variable slices or their accumulators unless those slices
  were used in the forward pass (nor is there an "eventual" correction to
  account for these omitted updates). This leads to more efficient updates for
  large embedding lookup tables (where most of the slices are not accessed in
  a particular graph execution), but differs from the published algorithm.

  Args:
    learning_rate: A Tensor or a floating point value.  The learning rate.
    decay: Discounting factor for the history/coming gradient
    momentum: A scalar tensor.
    epsilon: Small value to avoid zero denominator.
    use_locking: If True use locks for update operation.
    centered: If True, gradients are normalized by the estimated variance of
      the gradient; if False, by the uncentered second moment. Setting this to
      True may help with training, but is slightly more expensive in terms of
      computation and memory. Defaults to False.
    name: Optional name prefix for the operations created when applying
      gradients. Defaults to "RMSProp".
  """
  super(RMSPropOptimizer, self).__init__(use_locking, name)
  self._learning_rate = learning_rate
  self._decay = decay
  self._momentum = momentum
  self._epsilon = epsilon
  self._centered = centered

  # Tensors for learning rate and momentum.  Created in _prepare.
  self._learning_rate_tensor = None
  self._decay_tensor = None
  self._momentum_tensor = None
  self._epsilon_tensor = None | [
"def",
"__init__",
"(",
"self",
",",
"learning_rate",
",",
"decay",
"=",
"0.9",
",",
"momentum",
"=",
"0.0",
",",
"epsilon",
"=",
"1e-10",
",",
"use_locking",
"=",
"False",
",",
"centered",
"=",
"False",
",",
"name",
"=",
"\"RMSProp\"",
")",
":",
"super",
"(",
"RMSPropOptimizer",
",",
"self",
")",
".",
"__init__",
"(",
"use_locking",
",",
"name",
")",
"self",
".",
"_learning_rate",
"=",
"learning_rate",
"self",
".",
"_decay",
"=",
"decay",
"self",
".",
"_momentum",
"=",
"momentum",
"self",
".",
"_epsilon",
"=",
"epsilon",
"self",
".",
"_centered",
"=",
"centered",
"# Tensors for learning rate and momentum. Created in _prepare.",
"self",
".",
"_learning_rate_tensor",
"=",
"None",
"self",
".",
"_decay_tensor",
"=",
"None",
"self",
".",
"_momentum_tensor",
"=",
"None",
"self",
".",
"_epsilon_tensor",
"=",
"None"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/training/rmsprop.py#L58-L104 |
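Typical TF 1.x graph-mode usage (a sketch; `loss` comes from the surrounding model definition):

```python
opt = tf.train.RMSPropOptimizer(learning_rate=0.001,
                                decay=0.9,
                                momentum=0.9,
                                centered=True)
train_op = opt.minimize(loss)  # `loss` assumed defined elsewhere in the graph
```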
||
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/lib/grpc_debug_server.py | python | EventListenerBaseServicer.request_unwatch | (self, node_name, output_slot, debug_op) | Request disabling a debug tensor watchpoint or breakpoint.
This is the opposite of `request_watch()`.
Args:
node_name: (`str`) name of the node that the to-be-watched tensor belongs
to, e.g., "hidden/Weights".
output_slot: (`int`) output slot index of the tensor to watch.
debug_op: (`str`) name of the debug op to enable. This should not include
any attribute substrings. | Request disabling a debug tensor watchpoint or breakpoint. | [
"Request",
"disabling",
"a",
"debug",
"tensor",
"watchpoint",
"or",
"breakpoint",
"."
] | def request_unwatch(self, node_name, output_slot, debug_op):
  """Request disabling a debug tensor watchpoint or breakpoint.

  This is the opposite of `request_watch()`.

  Args:
    node_name: (`str`) name of the node that the to-be-watched tensor belongs
      to, e.g., "hidden/Weights".
    output_slot: (`int`) output slot index of the tensor to watch.
    debug_op: (`str`) name of the debug op to enable. This should not include
      any attribute substrings.
  """
  self._debug_ops_state_change_queue.put(
      _state_change(
          debug_service_pb2.EventReply.DebugOpStateChange.DISABLED, node_name,
          output_slot, debug_op)) | [
"def",
"request_unwatch",
"(",
"self",
",",
"node_name",
",",
"output_slot",
",",
"debug_op",
")",
":",
"self",
".",
"_debug_ops_state_change_queue",
".",
"put",
"(",
"_state_change",
"(",
"debug_service_pb2",
".",
"EventReply",
".",
"DebugOpStateChange",
".",
"DISABLED",
",",
"node_name",
",",
"output_slot",
",",
"debug_op",
")",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/lib/grpc_debug_server.py#L423-L438 |
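Paired usage with its counterpart (the docstring references `request_watch()` on the same class); `server` stands in for a servicer instance:

```python
# Enable, then later disable, a watchpoint on output slot 0 of hidden/Weights.
server.request_watch("hidden/Weights", 0, "DebugIdentity")
# ... run the debugged step(s) ...
server.request_unwatch("hidden/Weights", 0, "DebugIdentity")
```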
||
firebreath/FireBreath | d79534ef5a91d78debd7b102a640ff7857750e48 | fbgen/gen_templates.py | python | Base.updateCfg | (self, cfg) | Override in sub-classes. Updates a configuration object with current
values. | Override in sub-classes. Updates a configuration object with current
values. | [
"Override",
"in",
"sub",
"-",
"classes",
".",
"Updates",
"a",
"configuration",
"object",
"with",
"current",
"values",
"."
] | def updateCfg(self, cfg):
    """
    Override in sub-classes. Updates a configuration object with current
    values.
    """
    pass | [
"def",
"updateCfg",
"(",
"self",
",",
"cfg",
")",
":",
"pass"
] | https://github.com/firebreath/FireBreath/blob/d79534ef5a91d78debd7b102a640ff7857750e48/fbgen/gen_templates.py#L122-L127 |
||
neoml-lib/neoml | a0d370fba05269a1b2258cef126f77bbd2054a3e | NeoML/Python/neoml/Dnn/Conv.py | python | TimeConv.filter_count | (self, new_filter_count) | Sets the number of filters. | Sets the number of filters. | [
"Sets",
"the",
"number",
"of",
"filters",
"."
] | def filter_count(self, new_filter_count):
    """Sets the number of filters.
    """
    self._internal.set_filter_count(int(new_filter_count)) | [
"def",
"filter_count",
"(",
"self",
",",
"new_filter_count",
")",
":",
"self",
".",
"_internal",
".",
"set_filter_count",
"(",
"int",
"(",
"new_filter_count",
")",
")"
] | https://github.com/neoml-lib/neoml/blob/a0d370fba05269a1b2258cef126f77bbd2054a3e/NeoML/Python/neoml/Dnn/Conv.py#L1077-L1080 |
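A property-setter sketch; the constructor arguments below are assumptions about the NeoML Python API, not taken from the record:

```python
# Hypothetical layer construction; only the last line exercises the setter.
conv = neoml.Dnn.TimeConv(input_layer, filter_count=16, filter_size=3)
conv.filter_count = 32  # dispatches to the set_filter_count() call above
```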
||
nlohmann/json | eb2182414749825be086c825edb5229e5c28503d | third_party/cpplint/cpplint.py | python | FilesBelongToSameModule | (filename_cc, filename_h) | return files_belong_to_same_module, common_path | Check if these two filenames belong to the same module.
The concept of a 'module' here is a as follows:
foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
same 'module' if they are in the same directory.
some/path/public/xyzzy and some/path/internal/xyzzy are also considered
to belong to the same module here.
If the filename_cc contains a longer path than the filename_h, for example,
'/absolute/path/to/base/sysinfo.cc', and this file would include
'base/sysinfo.h', this function also produces the prefix needed to open the
header. This is used by the caller of this function to more robustly open the
header file. We don't have access to the real include paths in this context,
so we need this guesswork here.
Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
according to this implementation. Because of this, this function gives
some false positives. This should be sufficiently rare in practice.
Args:
filename_cc: is the path for the source (e.g. .cc) file
filename_h: is the path for the header path
Returns:
Tuple with a bool and a string:
bool: True if filename_cc and filename_h belong to the same module.
string: the additional prefix needed to open the header file. | Check if these two filenames belong to the same module. | [
"Check",
"if",
"these",
"two",
"filenames",
"belong",
"to",
"the",
"same",
"module",
"."
] | def FilesBelongToSameModule(filename_cc, filename_h):
  """Check if these two filenames belong to the same module.

  The concept of a 'module' here is as follows:
  foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
  same 'module' if they are in the same directory.
  some/path/public/xyzzy and some/path/internal/xyzzy are also considered
  to belong to the same module here.

  If the filename_cc contains a longer path than the filename_h, for example,
  '/absolute/path/to/base/sysinfo.cc', and this file would include
  'base/sysinfo.h', this function also produces the prefix needed to open the
  header. This is used by the caller of this function to more robustly open the
  header file. We don't have access to the real include paths in this context,
  so we need this guesswork here.

  Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
  according to this implementation. Because of this, this function gives
  some false positives. This should be sufficiently rare in practice.

  Args:
    filename_cc: is the path for the source (e.g. .cc) file
    filename_h: is the path for the header path

  Returns:
    Tuple with a bool and a string:
    bool: True if filename_cc and filename_h belong to the same module.
    string: the additional prefix needed to open the header file.
  """
  fileinfo_cc = FileInfo(filename_cc)
  if not fileinfo_cc.Extension().lstrip('.') in GetNonHeaderExtensions():
    return (False, '')

  fileinfo_h = FileInfo(filename_h)
  if not IsHeaderExtension(fileinfo_h.Extension().lstrip('.')):
    return (False, '')

  filename_cc = filename_cc[:-(len(fileinfo_cc.Extension()))]
  matched_test_suffix = Search(_TEST_FILE_SUFFIX, fileinfo_cc.BaseName())
  if matched_test_suffix:
    filename_cc = filename_cc[:-len(matched_test_suffix.group(1))]

  filename_cc = filename_cc.replace('/public/', '/')
  filename_cc = filename_cc.replace('/internal/', '/')

  filename_h = filename_h[:-(len(fileinfo_h.Extension()))]
  if filename_h.endswith('-inl'):
    filename_h = filename_h[:-len('-inl')]
  filename_h = filename_h.replace('/public/', '/')
  filename_h = filename_h.replace('/internal/', '/')

  files_belong_to_same_module = filename_cc.endswith(filename_h)
  common_path = ''
  if files_belong_to_same_module:
    common_path = filename_cc[:-len(filename_h)]
  return files_belong_to_same_module, common_path | [
"def",
"FilesBelongToSameModule",
"(",
"filename_cc",
",",
"filename_h",
")",
":",
"fileinfo_cc",
"=",
"FileInfo",
"(",
"filename_cc",
")",
"if",
"not",
"fileinfo_cc",
".",
"Extension",
"(",
")",
".",
"lstrip",
"(",
"'.'",
")",
"in",
"GetNonHeaderExtensions",
"(",
")",
":",
"return",
"(",
"False",
",",
"''",
")",
"fileinfo_h",
"=",
"FileInfo",
"(",
"filename_h",
")",
"if",
"not",
"IsHeaderExtension",
"(",
"fileinfo_h",
".",
"Extension",
"(",
")",
".",
"lstrip",
"(",
"'.'",
")",
")",
":",
"return",
"(",
"False",
",",
"''",
")",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"(",
"len",
"(",
"fileinfo_cc",
".",
"Extension",
"(",
")",
")",
")",
"]",
"matched_test_suffix",
"=",
"Search",
"(",
"_TEST_FILE_SUFFIX",
",",
"fileinfo_cc",
".",
"BaseName",
"(",
")",
")",
"if",
"matched_test_suffix",
":",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"matched_test_suffix",
".",
"group",
"(",
"1",
")",
")",
"]",
"filename_cc",
"=",
"filename_cc",
".",
"replace",
"(",
"'/public/'",
",",
"'/'",
")",
"filename_cc",
"=",
"filename_cc",
".",
"replace",
"(",
"'/internal/'",
",",
"'/'",
")",
"filename_h",
"=",
"filename_h",
"[",
":",
"-",
"(",
"len",
"(",
"fileinfo_h",
".",
"Extension",
"(",
")",
")",
")",
"]",
"if",
"filename_h",
".",
"endswith",
"(",
"'-inl'",
")",
":",
"filename_h",
"=",
"filename_h",
"[",
":",
"-",
"len",
"(",
"'-inl'",
")",
"]",
"filename_h",
"=",
"filename_h",
".",
"replace",
"(",
"'/public/'",
",",
"'/'",
")",
"filename_h",
"=",
"filename_h",
".",
"replace",
"(",
"'/internal/'",
",",
"'/'",
")",
"files_belong_to_same_module",
"=",
"filename_cc",
".",
"endswith",
"(",
"filename_h",
")",
"common_path",
"=",
"''",
"if",
"files_belong_to_same_module",
":",
"common_path",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"filename_h",
")",
"]",
"return",
"files_belong_to_same_module",
",",
"common_path"
] | https://github.com/nlohmann/json/blob/eb2182414749825be086c825edb5229e5c28503d/third_party/cpplint/cpplint.py#L5967-L6022 |
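A worked call matching the docstring's own example (running it requires the rest of cpplint for the `FileInfo` and extension helpers):

```python
belong, prefix = FilesBelongToSameModule(
    '/absolute/path/to/base/sysinfo.cc', 'base/sysinfo.h')
# belong == True; prefix == '/absolute/path/to/' — the extra prefix a caller
# would prepend to open the header, exactly as the docstring describes.
```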
|
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | scripts/update_onnx_weight.py | python | update_onnx_initializer | (onnx_file, ckpt_file, output_file) | Update onnx initializer. | Update onnx initializer. | [
"Update",
"onnx",
"initializer",
"."
] | def update_onnx_initializer(onnx_file, ckpt_file, output_file):
"Update onnx initializer."
with open(onnx_file, 'rb') as f:
data = f.read()
model = onnx_pb.ModelProto()
model.ParseFromString(data)
initializer = model.graph.initializer
param_dict = load_checkpoint(ckpt_file)
for i, _ in enumerate(initializer):
item = initializer[i]
if not item.name in param_dict:
print(f"Warning: Can not find '{item.name}' in checkpoint parameters dictionary")
continue
weight = param_dict[item.name].data.asnumpy()
bin_data = weight.tobytes()
if len(item.raw_data) != len(bin_data):
print(f"Warning: Size of weight from checkpoint is different from original size, ignore it")
continue
item.raw_data = bin_data
pb_msg = model.SerializeToString()
with open(output_file, 'wb') as f:
f.write(pb_msg)
print(f'Graph name: {model.graph.name}')
print(f'Initializer length: {len(initializer)}')
print(f'Checkpoint dict length: {len(param_dict)}')
print(f'The new weights have been written to file {output_file} successfully') | [
"def",
"update_onnx_initializer",
"(",
"onnx_file",
",",
"ckpt_file",
",",
"output_file",
")",
":",
"with",
"open",
"(",
"onnx_file",
",",
"'rb'",
")",
"as",
"f",
":",
"data",
"=",
"f",
".",
"read",
"(",
")",
"model",
"=",
"onnx_pb",
".",
"ModelProto",
"(",
")",
"model",
".",
"ParseFromString",
"(",
"data",
")",
"initializer",
"=",
"model",
".",
"graph",
".",
"initializer",
"param_dict",
"=",
"load_checkpoint",
"(",
"ckpt_file",
")",
"for",
"i",
",",
"_",
"in",
"enumerate",
"(",
"initializer",
")",
":",
"item",
"=",
"initializer",
"[",
"i",
"]",
"if",
"not",
"item",
".",
"name",
"in",
"param_dict",
":",
"print",
"(",
"f\"Warning: Can not find '{item.name}' in checkpoint parameters dictionary\"",
")",
"continue",
"weight",
"=",
"param_dict",
"[",
"item",
".",
"name",
"]",
".",
"data",
".",
"asnumpy",
"(",
")",
"bin_data",
"=",
"weight",
".",
"tobytes",
"(",
")",
"if",
"len",
"(",
"item",
".",
"raw_data",
")",
"!=",
"len",
"(",
"bin_data",
")",
":",
"print",
"(",
"f\"Warning: Size of weight from checkpoint is different from original size, ignore it\"",
")",
"continue",
"item",
".",
"raw_data",
"=",
"bin_data",
"pb_msg",
"=",
"model",
".",
"SerializeToString",
"(",
")",
"with",
"open",
"(",
"output_file",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"pb_msg",
")",
"print",
"(",
"f'Graph name: {model.graph.name}'",
")",
"print",
"(",
"f'Initializer length: {len(initializer)}'",
")",
"print",
"(",
"f'Checkpoint dict length: {len(param_dict)}'",
")",
"print",
"(",
"f'The new weights have been written to file {output_file} successfully'",
")"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/scripts/update_onnx_weight.py#L28-L56 |
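
For context, the initializers being rewritten are the weight tensors embedded in the ONNX graph. A small inspection sketch using the official `onnx` package (the model path is a placeholder):

```python
import onnx

model = onnx.load("net.onnx")  # placeholder path
for init in model.graph.initializer:
    # Replacing a weight means swapping raw_data for a byte string
    # of exactly the same length, as the record above enforces.
    print(init.name, list(init.dims), len(init.raw_data))
```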
||
CRYTEK/CRYENGINE | 232227c59a220cbbd311576f0fbeba7bb53b2a8c | Code/Tools/waf-1.7.13/crywaflib/msvs.py | python | msvs_generator.init | (self) | Some data that needs to be present | Some data that needs to be present | [
"Some",
"data",
"that",
"needs",
"to",
"be",
"present"
] | def init(self):
"""
Some data that needs to be present
"""
host = Utils.unversioned_sys_platform()
if host == 'linux' or host == 'darwin':
Logs.warn('Skipping MSVS project generation as host platform is not Windows')
return
# Remove unsupported MSBUILD platforms from list
strip_unsupported_msbuild_platforms(self)
# Detect the most recent nsight tegra version installed if any
detect_nsight_tegra_vs_plugin_version(self)
if not getattr(self, 'configurations', None):
build_configurations = self.get_supported_configurations()
self.configurations = []
for spec in self.loaded_specs():
if not is_valid_spec(self, spec):
continue
for conf in build_configurations:
if not is_valid_configuration(self ,spec, conf):
continue
solution_conf_name = '[' + self.convert_waf_spec_to_vs_spec(spec) + '] ' + conf
solution_conf_name_vs = self.convert_waf_configuration_to_vs_configuration( solution_conf_name )
self.configurations.append(solution_conf_name_vs)
if not getattr(self, 'platforms', None):
self.platforms = []
for platform in self.get_supported_platforms():
self.platforms.append(self.convert_waf_platform_to_vs_platform(platform))
if not getattr(self, 'all_projects', None):
self.all_projects = []
if not getattr(self, 'project_extension', None):
self.project_extension = '.vcxproj'
if not getattr(self, 'projects_dir', None):
self.projects_dir = self.srcnode.make_node('.depproj')
self.projects_dir.mkdir()
# bind the classes to the object, so that subclass can provide custom generators
if not getattr(self, 'vsnode_vsdir', None):
self.vsnode_vsdir = vsnode_vsdir
if not getattr(self, 'vsnode_target', None):
self.vsnode_target = vsnode_target
if not getattr(self, 'vsnode_android_package_target', None):
self.vsnode_android_package_target = vsnode_android_package_target
if not getattr(self, 'vsnode_build_all', None):
self.vsnode_build_all = vsnode_build_all
if not getattr(self, 'vsnode_install_all', None):
self.vsnode_install_all = vsnode_install_all
if not getattr(self, 'vsnode_project_view', None):
self.vsnode_project_view = vsnode_project_view
self.numver = '12.00'
self.vsver = '2012'
# Note: only vsver is relevant for Visual Studio version selector
# We select from the highest Visual C++ version in use in the current configuration
max_msvc_version = 0
for env in self.all_envs.values():
msvc_version = getattr(env, 'MSVC_VERSION', '')
if isinstance(msvc_version, basestring):
if float(msvc_version) > max_msvc_version:
max_msvc_version = float(msvc_version)
self.vsver = msvc_version | [
"def",
"init",
"(",
"self",
")",
":",
"host",
"=",
"Utils",
".",
"unversioned_sys_platform",
"(",
")",
"if",
"host",
"==",
"'linux'",
"or",
"host",
"==",
"'darwin'",
":",
"Logs",
".",
"warn",
"(",
"'Skipping MSVS project generation as host platform is not Windows'",
")",
"return",
"# Remove unsupported MSBUILD platforms from list",
"strip_unsupported_msbuild_platforms",
"(",
"self",
")",
"# Detect the most recent nsight tegra version installed if any",
"detect_nsight_tegra_vs_plugin_version",
"(",
"self",
")",
"if",
"not",
"getattr",
"(",
"self",
",",
"'configurations'",
",",
"None",
")",
":",
"build_configurations",
"=",
"self",
".",
"get_supported_configurations",
"(",
")",
"self",
".",
"configurations",
"=",
"[",
"]",
"for",
"spec",
"in",
"self",
".",
"loaded_specs",
"(",
")",
":",
"if",
"not",
"is_valid_spec",
"(",
"self",
",",
"spec",
")",
":",
"continue",
"for",
"conf",
"in",
"build_configurations",
":",
"if",
"not",
"is_valid_configuration",
"(",
"self",
",",
"spec",
",",
"conf",
")",
":",
"continue",
"solution_conf_name",
"=",
"'['",
"+",
"self",
".",
"convert_waf_spec_to_vs_spec",
"(",
"spec",
")",
"+",
"'] '",
"+",
"conf",
"solution_conf_name_vs",
"=",
"self",
".",
"convert_waf_configuration_to_vs_configuration",
"(",
"solution_conf_name",
")",
"self",
".",
"configurations",
".",
"append",
"(",
"solution_conf_name_vs",
")",
"if",
"not",
"getattr",
"(",
"self",
",",
"'platforms'",
",",
"None",
")",
":",
"self",
".",
"platforms",
"=",
"[",
"]",
"for",
"platform",
"in",
"self",
".",
"get_supported_platforms",
"(",
")",
":",
"self",
".",
"platforms",
".",
"append",
"(",
"self",
".",
"convert_waf_platform_to_vs_platform",
"(",
"platform",
")",
")",
"if",
"not",
"getattr",
"(",
"self",
",",
"'all_projects'",
",",
"None",
")",
":",
"self",
".",
"all_projects",
"=",
"[",
"]",
"if",
"not",
"getattr",
"(",
"self",
",",
"'project_extension'",
",",
"None",
")",
":",
"self",
".",
"project_extension",
"=",
"'.vcxproj'",
"if",
"not",
"getattr",
"(",
"self",
",",
"'projects_dir'",
",",
"None",
")",
":",
"self",
".",
"projects_dir",
"=",
"self",
".",
"srcnode",
".",
"make_node",
"(",
"'.depproj'",
")",
"self",
".",
"projects_dir",
".",
"mkdir",
"(",
")",
"# bind the classes to the object, so that subclass can provide custom generators",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_vsdir'",
",",
"None",
")",
":",
"self",
".",
"vsnode_vsdir",
"=",
"vsnode_vsdir",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_target'",
",",
"None",
")",
":",
"self",
".",
"vsnode_target",
"=",
"vsnode_target",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_android_package_target'",
",",
"None",
")",
":",
"self",
".",
"vsnode_android_package_target",
"=",
"vsnode_android_package_target",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_build_all'",
",",
"None",
")",
":",
"self",
".",
"vsnode_build_all",
"=",
"vsnode_build_all",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_install_all'",
",",
"None",
")",
":",
"self",
".",
"vsnode_install_all",
"=",
"vsnode_install_all",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_project_view'",
",",
"None",
")",
":",
"self",
".",
"vsnode_project_view",
"=",
"vsnode_project_view",
"self",
".",
"numver",
"=",
"'12.00'",
"self",
".",
"vsver",
"=",
"'2012'",
"# Note: only vsver is relevant for Visual Studio version selector",
"# We select from the highest Visual C++ version in use in the current configuration",
"max_msvc_version",
"=",
"0",
"for",
"env",
"in",
"self",
".",
"all_envs",
".",
"values",
"(",
")",
":",
"msvc_version",
"=",
"getattr",
"(",
"env",
",",
"'MSVC_VERSION'",
",",
"''",
")",
"if",
"isinstance",
"(",
"msvc_version",
",",
"basestring",
")",
":",
"if",
"float",
"(",
"msvc_version",
")",
">",
"max_msvc_version",
":",
"max_msvc_version",
"=",
"float",
"(",
"msvc_version",
")",
"self",
".",
"vsver",
"=",
"msvc_version"
] | https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Code/Tools/waf-1.7.13/crywaflib/msvs.py#L1631-L1696 |
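
The repeated `getattr(self, name, None)` checks above implement a lazy-default pattern: any attribute a subclass pre-sets survives `init`. A minimal, hypothetical illustration of the idiom (class names invented for the example):

```python
class Generator:
    def init(self):
        # Only fill in attributes the subclass has not already provided.
        if not getattr(self, 'project_extension', None):
            self.project_extension = '.vcxproj'
        if not getattr(self, 'configurations', None):
            self.configurations = []

class CustomGenerator(Generator):
    project_extension = '.androidproj'  # subclass override survives init()

g = CustomGenerator()
g.init()
print(g.project_extension)  # .androidproj
```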
||
kismetwireless/kismet | a7c0dc270c960fb1f58bd9cec4601c201885fd4e | capture_sdr_rtl433/KismetCaptureRtl433/kismetexternal/__init__.py | python | Datasource.send_datasource_error_report | (self, seqno=0, message=None) | When acting as a Kismet datasource, send a source error. This can be in response
to a specific command, or a runtime failure.
:param seqno: Command which failed, or 0
:param message: Optional user message
:return: None | When acting as a Kismet datasource, send a source error. This can be in response
to a specific command, or a runtime failure. | [
"When",
"acting",
"as",
"a",
"Kismet",
"datasource",
"send",
"a",
"source",
"error",
".",
"This",
"can",
"be",
"in",
"response",
"to",
"a",
"specific",
"command",
"or",
"a",
"runtime",
"failure",
"."
] | def send_datasource_error_report(self, seqno=0, message=None):
"""
When acting as a Kismet datasource, send a source error. This can be in response
to a specific command, or a runtime failure.
:param seqno: Command which failed, or 0
:param message: Optional user message
:return: None
"""
report = datasource_pb2.ErrorReport()
report.success.success = False
report.success.seqno = seqno
if message is not None:
report.message.msgtext = message
report.message.msgtype = self.MSG_ERROR
self.write_ext_packet("KDSERROR", report) | [
"def",
"send_datasource_error_report",
"(",
"self",
",",
"seqno",
"=",
"0",
",",
"message",
"=",
"None",
")",
":",
"report",
"=",
"datasource_pb2",
".",
"ErrorReport",
"(",
")",
"report",
".",
"success",
".",
"success",
"=",
"False",
"report",
".",
"success",
".",
"seqno",
"=",
"seqno",
"if",
"message",
"is",
"not",
"None",
":",
"report",
".",
"message",
".",
"msgtext",
"=",
"message",
"report",
".",
"message",
".",
"msgtype",
"=",
"self",
".",
"MSG_ERROR",
"self",
".",
"write_ext_packet",
"(",
"\"KDSERROR\"",
",",
"report",
")"
] | https://github.com/kismetwireless/kismet/blob/a7c0dc270c960fb1f58bd9cec4601c201885fd4e/capture_sdr_rtl433/KismetCaptureRtl433/kismetexternal/__init__.py#L978-L998 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/searchbase.py | python | SearchDialogBase.make_button | (self, label, command, isdef=0) | return b | Return command button gridded in command frame. | Return command button gridded in command frame. | [
"Return",
"command",
"button",
"gridded",
"in",
"command",
"frame",
"."
] | def make_button(self, label, command, isdef=0):
"Return command button gridded in command frame."
b = Button(self.buttonframe,
text=label, command=command,
default=isdef and "active" or "normal")
cols,rows=self.buttonframe.grid_size()
b.grid(pady=1,row=rows,column=0,sticky="ew")
self.buttonframe.grid(rowspan=rows+1)
return b | [
"def",
"make_button",
"(",
"self",
",",
"label",
",",
"command",
",",
"isdef",
"=",
"0",
")",
":",
"b",
"=",
"Button",
"(",
"self",
".",
"buttonframe",
",",
"text",
"=",
"label",
",",
"command",
"=",
"command",
",",
"default",
"=",
"isdef",
"and",
"\"active\"",
"or",
"\"normal\"",
")",
"cols",
",",
"rows",
"=",
"self",
".",
"buttonframe",
".",
"grid_size",
"(",
")",
"b",
".",
"grid",
"(",
"pady",
"=",
"1",
",",
"row",
"=",
"rows",
",",
"column",
"=",
"0",
",",
"sticky",
"=",
"\"ew\"",
")",
"self",
".",
"buttonframe",
".",
"grid",
"(",
"rowspan",
"=",
"rows",
"+",
"1",
")",
"return",
"b"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/searchbase.py#L162-L170 |
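
A runnable sketch of the same grid calls using the Python 3 `tkinter` module (the record above targets Python 2's `Tkinter`):

```python
import tkinter as tk

root = tk.Tk()
buttonframe = tk.Frame(root)
buttonframe.pack()

# Grid a default-highlighted button into the frame, as make_button does.
b = tk.Button(buttonframe, text="Find", default="active")
cols, rows = buttonframe.grid_size()
b.grid(pady=1, row=rows, column=0, sticky="ew")

root.mainloop()
```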
|
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/symbol/numpy/_symbol.py | python | ldexp | (x1, x2, out=None, **kwargs) | return _ufunc_helper(x1, x2, _npi.ldexp, _np.ldexp, _npi.ldexp_scalar, _npi.rldexp_scalar, out) | Returns x1 * 2**x2, element-wise.
The mantissas `x1` and twos exponents `x2` are used to construct
floating point numbers ``x1 * 2**x2``.
Parameters
----------
x1 : _Symbol
Array of multipliers.
x2 : _Symbol
Array of twos exponents.
out : _Symbol or None
Dummy parameter to keep the consistency with the ndarray counterpart.
Returns
-------
y : _Symbol
The result of ``x1 * 2**x2``.
Notes
-----
Complex dtypes are not supported, they will raise a TypeError.
Unlike numpy, we allow x2 to be a float as well as an int.
`ldexp` is useful as the inverse of `frexp`, if used by itself it is
more clear to simply use the expression ``x1 * 2**x2``. | Returns x1 * 2**x2, element-wise.
The mantissas `x1` and twos exponents `x2` are used to construct
floating point numbers ``x1 * 2**x2``. | [
"Returns",
"x1",
"*",
"2",
"**",
"x2",
"element",
"-",
"wise",
".",
"The",
"mantissas",
"x1",
"and",
"twos",
"exponents",
"x2",
"are",
"used",
"to",
"construct",
"floating",
"point",
"numbers",
"x1",
"*",
"2",
"**",
"x2",
"."
] | def ldexp(x1, x2, out=None, **kwargs):
"""
Returns x1 * 2**x2, element-wise.
The mantissas `x1` and twos exponents `x2` are used to construct
floating point numbers ``x1 * 2**x2``.
Parameters
----------
x1 : _Symbol
Array of multipliers.
x2 : _Symbol
Array of twos exponents.
out : _Symbol or None
Dummy parameter to keep the consistency with the ndarray counterpart.
Returns
-------
y : _Symbol
The result of ``x1 * 2**x2``.
Notes
-----
Complex dtypes are not supported, they will raise a TypeError.
Different from numpy, we allow x2 to be float besides int.
`ldexp` is useful as the inverse of `frexp`, if used by itself it is
more clear to simply use the expression ``x1 * 2**x2``.
"""
return _ufunc_helper(x1, x2, _npi.ldexp, _np.ldexp, _npi.ldexp_scalar, _npi.rldexp_scalar, out) | [
"def",
"ldexp",
"(",
"x1",
",",
"x2",
",",
"out",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_ufunc_helper",
"(",
"x1",
",",
"x2",
",",
"_npi",
".",
"ldexp",
",",
"_np",
".",
"ldexp",
",",
"_npi",
".",
"ldexp_scalar",
",",
"_npi",
".",
"rldexp_scalar",
",",
"out",
")"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/numpy/_symbol.py#L6069-L6096 |
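
A quick worked example of the ldexp identity in plain NumPy (eager arrays, not the mxnet `_Symbol` graph API documented above):

```python
import numpy as np

x1 = np.array([5.0, 5.0, 5.0, 5.0])
x2 = np.arange(4)  # integer exponents 0..3

# ldexp(x1, x2) == x1 * 2**x2, element-wise.
print(np.ldexp(x1, x2))  # [ 5. 10. 20. 40.]
print(x1 * 2.0 ** x2)    # [ 5. 10. 20. 40.]
```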
|
xiaolonw/caffe-video_triplet | c39ea1ad6e937ccf7deba4510b7e555165abf05f | scripts/cpp_lint.py | python | _SetCountingStyle | (level) | Sets the module's counting options. | Sets the module's counting options. | [
"Sets",
"the",
"module",
"s",
"counting",
"options",
"."
] | def _SetCountingStyle(level):
"""Sets the module's counting options."""
_cpplint_state.SetCountingStyle(level) | [
"def",
"_SetCountingStyle",
"(",
"level",
")",
":",
"_cpplint_state",
".",
"SetCountingStyle",
"(",
"level",
")"
] | https://github.com/xiaolonw/caffe-video_triplet/blob/c39ea1ad6e937ccf7deba4510b7e555165abf05f/scripts/cpp_lint.py#L787-L789 |
||
devsisters/libquic | 8954789a056d8e7d5fcb6452fd1572ca57eb5c4e | src/third_party/protobuf/python/mox.py | python | IsA.equals | (self, rhs) | Check to see if the RHS is an instance of class_name.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool | Check to see if the RHS is an instance of class_name. | [
"Check",
"to",
"see",
"if",
"the",
"RHS",
"is",
"an",
"instance",
"of",
"class_name",
"."
] | def equals(self, rhs):
"""Check to see if the RHS is an instance of class_name.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool
"""
try:
return isinstance(rhs, self._class_name)
except TypeError:
# Check raw types if there was a type error. This is helpful for
# things like cStringIO.StringIO.
return type(rhs) == type(self._class_name) | [
"def",
"equals",
"(",
"self",
",",
"rhs",
")",
":",
"try",
":",
"return",
"isinstance",
"(",
"rhs",
",",
"self",
".",
"_class_name",
")",
"except",
"TypeError",
":",
"# Check raw types if there was a type error. This is helpful for",
"# things like cStringIO.StringIO.",
"return",
"type",
"(",
"rhs",
")",
"==",
"type",
"(",
"self",
".",
"_class_name",
")"
] | https://github.com/devsisters/libquic/blob/8954789a056d8e7d5fcb6452fd1572ca57eb5c4e/src/third_party/protobuf/python/mox.py#L807-L823 |
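
The try/except around `isinstance` exists because some legacy pseudo-types (for example `cStringIO.StringIO` in Python 2) are not valid second arguments to `isinstance`. A self-contained mini version exercising both branches:

```python
class IsA:
    """Simplified stand-in for mox's IsA comparator."""
    def __init__(self, class_name):
        self._class_name = class_name

    def equals(self, rhs):
        try:
            return isinstance(rhs, self._class_name)
        except TypeError:
            # Fall back to comparing raw types when isinstance() refuses.
            return type(rhs) == type(self._class_name)

print(IsA(dict).equals({}))  # True: {} is an instance of dict
print(IsA(3).equals(4))      # True via the fallback: both are ints
```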
||
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/function.py | python | _DelayedRewriteGradientFunctions._construct_forward_backward | (self, num_doutputs) | Constructs a pair of forward and backward functions.
Args:
num_doutputs: The constructed backprop function will take output gradients
for the first `num_doutputs` outputs of the forward function. Defaults
to the number of outputs for the inference function, but when
higher-order gradients are computed this will increase to include side
outputs.
Returns:
A pair of (forward_function, backward_function):
forward_function: A re-generated inference function (an
_EagerDefinedFunction) to account for new side outputs, if any extra
were required when building the backward pass.
backward_function: A ConcreteFunction that takes `num_doutputs`
arguments and returns gradients with respect to inputs of the forward
function. | Constructs a pair of forward and backward functions. | [
"Constructs",
"a",
"pair",
"of",
"forward",
"and",
"backward",
"functions",
"."
] | def _construct_forward_backward(self, num_doutputs):
"""Constructs a pair of forward and backward functions.
Args:
num_doutputs: The constructed backprop function will take output gradients
for the first `num_doutputs` outputs of the forward function. Defaults
to the number of outputs for the inference function, but when
higher-order gradients are computed this will increase to include side
outputs.
Returns:
A pair of (forward_function, backward_function):
forward_function: A re-generated inference function (an
_EagerDefinedFunction) to account for new side outputs, if any extra
were required when building the backward pass.
backward_function: A ConcreteFunction that Takes `num_doutputs`
arguments and returns gradients with respect to inputs of the forward
function.
"""
trainable_outputs = [
output for output in self._func_graph.outputs[:num_doutputs]
if gradients_util.IsTrainable(output)]
signature = []
for t in trainable_outputs:
signature.append(
tensor_spec.TensorSpec(*default_gradient.shape_and_dtype(t)))
def _backprop_function(*grad_ys):
return gradients_util._GradientsHelper( # pylint: disable=protected-access
trainable_outputs,
self._func_graph.inputs,
grad_ys=grad_ys,
src_graph=self._func_graph)
with self._func_graph.as_default():
backwards_graph = func_graph_module.FuncGraph(
_backward_name(self._func_graph.name))
func_graph_module.func_graph_from_py_func(
name=backwards_graph.name,
python_func=_backprop_function,
args=[], kwargs={},
signature=signature,
func_graph=backwards_graph)
backwards_graph_captures = backwards_graph.external_captures
captures_from_forward = [
c for c in backwards_graph_captures if
not isinstance(c, ops.EagerTensor) and c.graph is self._func_graph]
forward_function_name = _forward_name(self._func_graph.name)
existing_outputs = object_identity.ObjectIdentitySet(
self._func_graph.outputs)
for capture in captures_from_forward:
if capture not in existing_outputs:
existing_outputs.add(capture)
self._func_graph.outputs.append(capture)
backward_function_attr = _parse_func_attrs(
{FORWARD_FUNCTION_ATTRIBUTE_NAME: forward_function_name})
backward_function_attr.update(self._attrs)
backward_function = ConcreteFunction(
backwards_graph, attrs=backward_function_attr)
forward_function_attr = _parse_func_attrs({
BACKWARD_FUNCTION_ATTRIBUTE_NAME:
backward_function.name})
forward_function_attr.update(self._attrs)
forward_function = _EagerDefinedFunction(
forward_function_name, self._func_graph, self._func_graph.inputs,
self._func_graph.outputs, forward_function_attr)
return forward_function, backward_function | [
"def",
"_construct_forward_backward",
"(",
"self",
",",
"num_doutputs",
")",
":",
"trainable_outputs",
"=",
"[",
"output",
"for",
"output",
"in",
"self",
".",
"_func_graph",
".",
"outputs",
"[",
":",
"num_doutputs",
"]",
"if",
"gradients_util",
".",
"IsTrainable",
"(",
"output",
")",
"]",
"signature",
"=",
"[",
"]",
"for",
"t",
"in",
"trainable_outputs",
":",
"signature",
".",
"append",
"(",
"tensor_spec",
".",
"TensorSpec",
"(",
"*",
"default_gradient",
".",
"shape_and_dtype",
"(",
"t",
")",
")",
")",
"def",
"_backprop_function",
"(",
"*",
"grad_ys",
")",
":",
"return",
"gradients_util",
".",
"_GradientsHelper",
"(",
"# pylint: disable=protected-access",
"trainable_outputs",
",",
"self",
".",
"_func_graph",
".",
"inputs",
",",
"grad_ys",
"=",
"grad_ys",
",",
"src_graph",
"=",
"self",
".",
"_func_graph",
")",
"with",
"self",
".",
"_func_graph",
".",
"as_default",
"(",
")",
":",
"backwards_graph",
"=",
"func_graph_module",
".",
"FuncGraph",
"(",
"_backward_name",
"(",
"self",
".",
"_func_graph",
".",
"name",
")",
")",
"func_graph_module",
".",
"func_graph_from_py_func",
"(",
"name",
"=",
"backwards_graph",
".",
"name",
",",
"python_func",
"=",
"_backprop_function",
",",
"args",
"=",
"[",
"]",
",",
"kwargs",
"=",
"{",
"}",
",",
"signature",
"=",
"signature",
",",
"func_graph",
"=",
"backwards_graph",
")",
"backwards_graph_captures",
"=",
"backwards_graph",
".",
"external_captures",
"captures_from_forward",
"=",
"[",
"c",
"for",
"c",
"in",
"backwards_graph_captures",
"if",
"not",
"isinstance",
"(",
"c",
",",
"ops",
".",
"EagerTensor",
")",
"and",
"c",
".",
"graph",
"is",
"self",
".",
"_func_graph",
"]",
"forward_function_name",
"=",
"_forward_name",
"(",
"self",
".",
"_func_graph",
".",
"name",
")",
"existing_outputs",
"=",
"object_identity",
".",
"ObjectIdentitySet",
"(",
"self",
".",
"_func_graph",
".",
"outputs",
")",
"for",
"capture",
"in",
"captures_from_forward",
":",
"if",
"capture",
"not",
"in",
"existing_outputs",
":",
"existing_outputs",
".",
"add",
"(",
"capture",
")",
"self",
".",
"_func_graph",
".",
"outputs",
".",
"append",
"(",
"capture",
")",
"backward_function_attr",
"=",
"_parse_func_attrs",
"(",
"{",
"FORWARD_FUNCTION_ATTRIBUTE_NAME",
":",
"forward_function_name",
"}",
")",
"backward_function_attr",
".",
"update",
"(",
"self",
".",
"_attrs",
")",
"backward_function",
"=",
"ConcreteFunction",
"(",
"backwards_graph",
",",
"attrs",
"=",
"backward_function_attr",
")",
"forward_function_attr",
"=",
"_parse_func_attrs",
"(",
"{",
"BACKWARD_FUNCTION_ATTRIBUTE_NAME",
":",
"backward_function",
".",
"name",
"}",
")",
"forward_function_attr",
".",
"update",
"(",
"self",
".",
"_attrs",
")",
"forward_function",
"=",
"_EagerDefinedFunction",
"(",
"forward_function_name",
",",
"self",
".",
"_func_graph",
",",
"self",
".",
"_func_graph",
".",
"inputs",
",",
"self",
".",
"_func_graph",
".",
"outputs",
",",
"forward_function_attr",
")",
"return",
"forward_function",
",",
"backward_function"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/function.py#L586-L657 |
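
The record above wires an explicit backward graph to a forward graph inside TensorFlow's function machinery. The same forward/backward pairing surfaces to users as `tf.GradientTape`; a small eager-mode sketch of that relationship (TF 2.x public API, a conceptual parallel rather than this internal code path):

```python
import tensorflow as tf

x = tf.constant([1.0, 2.0, 3.0])

with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.reduce_sum(x * x)  # forward pass

# The backward function maps output gradients to input gradients: dy/dx = 2x.
print(tape.gradient(y, x))  # tf.Tensor([2. 4. 6.], shape=(3,), dtype=float32)
```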
||
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/pystache/pystache/renderer.py | python | Renderer._make_resolve_context | (self) | return resolve_context | Return the resolve_context function to pass to RenderEngine.__init__(). | Return the resolve_context function to pass to RenderEngine.__init__(). | [
"Return",
"the",
"resolve_context",
"function",
"to",
"pass",
"to",
"RenderEngine",
".",
"__init__",
"()",
"."
] | def _make_resolve_context(self):
"""
Return the resolve_context function to pass to RenderEngine.__init__().
"""
if self._is_missing_tags_strict():
return context_get
# Otherwise, ignore missing tags.
def resolve_context(stack, name):
try:
return context_get(stack, name)
except KeyNotFoundError:
return u''
return resolve_context | [
"def",
"_make_resolve_context",
"(",
"self",
")",
":",
"if",
"self",
".",
"_is_missing_tags_strict",
"(",
")",
":",
"return",
"context_get",
"# Otherwise, ignore missing tags.",
"def",
"resolve_context",
"(",
"stack",
",",
"name",
")",
":",
"try",
":",
"return",
"context_get",
"(",
"stack",
",",
"name",
")",
"except",
"KeyNotFoundError",
":",
"return",
"u''",
"return",
"resolve_context"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/pystache/pystache/renderer.py#L306-L321 |
|
KratosMultiphysics/Kratos | 0000833054ed0503424eb28205d6508d9ca6cbbc | applications/ContactStructuralMechanicsApplication/python_scripts/mpc_contact_process.py | python | MPCContactProcess._get_condition_name | (self) | return condition_name | This method returns the condition name
Keyword arguments:
self -- It signifies an instance of a class. | This method returns the condition name | [
"This",
"method",
"returns",
"the",
"condition",
"name"
] | def _get_condition_name(self):
""" This method returns the condition name
Keyword arguments:
self -- It signifies an instance of a class.
"""
# We define the condition name to be used
condition_name = "MPCMortarContact"
return condition_name | [
"def",
"_get_condition_name",
"(",
"self",
")",
":",
"# We define the condition name to be used",
"condition_name",
"=",
"\"MPCMortarContact\"",
"return",
"condition_name"
] | https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/ContactStructuralMechanicsApplication/python_scripts/mpc_contact_process.py#L313-L323 |
|
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/external/coremltools_wrap/coremltools/coremltools/models/neural_network/builder.py | python | NeuralNetworkBuilder.add_reduce_l2 | (
self, name, input_name, output_name, axes=None, keepdims=True, reduce_all=False
) | return spec_layer | Add a reduce_l2 layer to the model that reduces the input tensor
using ``l2_normalization(elements across given dimensions)``.
Refer to the **ReduceL2LayerParams** message in specification
(NeuralNetwork.proto) for more details.
Parameters
----------
name: str
The name of this layer.
input_name: str
The input blob name of this layer.
output_name: str
The output blob name of this layer.
axes: list of int or tuple of int, optional
List of dimensions for the reduce operations.
Each should be in range [-rank(input), rank(input)), default: None (reduce_all)
keepdims: bool, optional
Whether or not to retain the reduced dimensions with length 1, default: true.
reduce_all: bool, optional
Whether or not to reduce on all axes, default: false.
See Also
--------
add_reduce_l1, add_reduce_sum, add_reduce_min, add_reduce_max, add_reduce_prod
add_reduce_mean, add_reduce_logsum, add_reduce_logsumexp, add_reduce_sumsquare | Add a reduce_l2 layer to the model that reduces the input tensor
using ``l2_normalization(elements across given dimensions)``.
Refer to the **ReduceL2LayerParams** message in specification
(NeuralNetwork.proto) for more details. | [
"Add",
"a",
"reduce_l2",
"layer",
"to",
"the",
"model",
"that",
"reduces",
"the",
"input",
"tensor",
"using",
"l2_normalization",
"(",
"elements",
"across",
"given",
"dimensions",
")",
".",
"Refer",
"to",
"the",
"**",
"ReduceL2LayerParams",
"**",
"message",
"in",
"specification",
"(",
"NeuralNetwork",
".",
"proto",
")",
"for",
"more",
"details",
"."
] | def add_reduce_l2(
        self, name, input_name, output_name, axes=None, keepdims=True, reduce_all=False
):
    """
    Add a reduce_l2 layer to the model that reduces the input tensor
    using ``l2_normalization(elements across given dimensions)``.
    Refer to the **ReduceL2LayerParams** message in specification
    (NeuralNetwork.proto) for more details.

    Parameters
    ----------
    name: str
        The name of this layer.
    input_name: str
        The input blob name of this layer.
    output_name: str
        The output blob name of this layer.
    axes: list of int or tuple of int, optional
        List of dimensions for the reduce operations.
        Each should be in range [-rank(input), rank(input)), default: None (reduce_all)
    keepdims: bool, optional
        Whether or not to retain the reduced dimensions with length 1, default: true.
    reduce_all: bool, optional
        Whether or not to reduce on all axes, default: false.

    See Also
    --------
    add_reduce_l1, add_reduce_sum, add_reduce_min, add_reduce_max, add_reduce_prod
    add_reduce_mean, add_reduce_logsum, add_reduce_logsumexp, add_reduce_sumsquare
    """
    spec_layer = self._add_generic_layer(name, [input_name], [output_name])
    spec_layer_params = spec_layer.reduceL2

    if axes is not None and len(axes) != 0:
        spec_layer_params.axes.extend(map(int, axes))
    else:
        reduce_all = True

    spec_layer_params.keepDims = keepdims
    spec_layer_params.reduceAll = reduce_all

    self._set_rank_for_reduce_op(
        input_name, output_name, axes, keepdims, reduce_all
    )
    return spec_layer | [
"def",
"add_reduce_l2",
"(",
"self",
",",
"name",
",",
"input_name",
",",
"output_name",
",",
"axes",
"=",
"None",
",",
"keepdims",
"=",
"True",
",",
"reduce_all",
"=",
"False",
")",
":",
"spec_layer",
"=",
"self",
".",
"_add_generic_layer",
"(",
"name",
",",
"[",
"input_name",
"]",
",",
"[",
"output_name",
"]",
")",
"spec_layer_params",
"=",
"spec_layer",
".",
"reduceL2",
"if",
"axes",
"is",
"not",
"None",
"and",
"len",
"(",
"axes",
")",
"!=",
"0",
":",
"spec_layer_params",
".",
"axes",
".",
"extend",
"(",
"map",
"(",
"int",
",",
"axes",
")",
")",
"else",
":",
"reduce_all",
"=",
"True",
"spec_layer_params",
".",
"keepDims",
"=",
"keepdims",
"spec_layer_params",
".",
"reduceAll",
"=",
"reduce_all",
"self",
".",
"_set_rank_for_reduce_op",
"(",
"input_name",
",",
"output_name",
",",
"axes",
",",
"keepdims",
",",
"reduce_all",
")",
"return",
"spec_layer"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/coremltools/models/neural_network/builder.py#L7882-L7927 |
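
Independent of the Core ML plumbing, the layer's semantics are an L2 norm over the chosen axes. A NumPy sketch of what the layer computes:

```python
import numpy as np

x = np.arange(6, dtype=np.float32).reshape(2, 3)

# reduce_l2 over axis 1 with keepdims=True:
out = np.sqrt(np.sum(x * x, axis=1, keepdims=True))
print(out)  # [[2.236...], [7.071...]]
```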
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/plat-mac/pimp.py | python | _run | (mode, verbose, force, args, prefargs, watcher) | Engine for the main program | Engine for the main program | [
"Engine",
"for",
"the",
"main",
"program"
] | def _run(mode, verbose, force, args, prefargs, watcher):
"""Engine for the main program"""
prefs = PimpPreferences(**prefargs)
if watcher:
prefs.setWatcher(watcher)
rv = prefs.check()
if rv:
sys.stdout.write(rv)
db = PimpDatabase(prefs)
db.appendURL(prefs.pimpDatabase)
if mode == 'dump':
db.dump(sys.stdout)
elif mode =='list':
if not args:
args = db.listnames()
print "%-20.20s\t%s" % ("Package", "Description")
print
for pkgname in args:
pkg = db.find(pkgname)
if pkg:
description = pkg.shortdescription()
pkgname = pkg.fullname()
else:
description = 'Error: no such package'
print "%-20.20s\t%s" % (pkgname, description)
if verbose:
print "\tHome page:\t", pkg.homepage()
try:
print "\tDownload URL:\t", pkg.downloadURL()
except KeyError:
pass
description = pkg.description()
description = '\n\t\t\t\t\t'.join(description.splitlines())
print "\tDescription:\t%s" % description
elif mode =='status':
if not args:
args = db.listnames()
print "%-20.20s\t%s\t%s" % ("Package", "Installed", "Message")
print
for pkgname in args:
pkg = db.find(pkgname)
if pkg:
status, msg = pkg.installed()
pkgname = pkg.fullname()
else:
status = 'error'
msg = 'No such package'
print "%-20.20s\t%-9.9s\t%s" % (pkgname, status, msg)
if verbose and status == "no":
prereq = pkg.prerequisites()
for pkg, msg in prereq:
if not pkg:
pkg = ''
else:
pkg = pkg.fullname()
print "%-20.20s\tRequirement: %s %s" % ("", pkg, msg)
elif mode == 'install':
if not args:
print 'Please specify packages to install'
sys.exit(1)
inst = PimpInstaller(db)
for pkgname in args:
pkg = db.find(pkgname)
if not pkg:
print '%s: No such package' % pkgname
continue
list, messages = inst.prepareInstall(pkg, force)
if messages and not force:
print "%s: Not installed:" % pkgname
for m in messages:
print "\t", m
else:
if verbose:
output = sys.stdout
else:
output = None
messages = inst.install(list, output)
if messages:
print "%s: Not installed:" % pkgname
for m in messages:
print "\t", m | [
"def",
"_run",
"(",
"mode",
",",
"verbose",
",",
"force",
",",
"args",
",",
"prefargs",
",",
"watcher",
")",
":",
"prefs",
"=",
"PimpPreferences",
"(",
"*",
"*",
"prefargs",
")",
"if",
"watcher",
":",
"prefs",
".",
"setWatcher",
"(",
"watcher",
")",
"rv",
"=",
"prefs",
".",
"check",
"(",
")",
"if",
"rv",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"rv",
")",
"db",
"=",
"PimpDatabase",
"(",
"prefs",
")",
"db",
".",
"appendURL",
"(",
"prefs",
".",
"pimpDatabase",
")",
"if",
"mode",
"==",
"'dump'",
":",
"db",
".",
"dump",
"(",
"sys",
".",
"stdout",
")",
"elif",
"mode",
"==",
"'list'",
":",
"if",
"not",
"args",
":",
"args",
"=",
"db",
".",
"listnames",
"(",
")",
"print",
"\"%-20.20s\\t%s\"",
"%",
"(",
"\"Package\"",
",",
"\"Description\"",
")",
"print",
"for",
"pkgname",
"in",
"args",
":",
"pkg",
"=",
"db",
".",
"find",
"(",
"pkgname",
")",
"if",
"pkg",
":",
"description",
"=",
"pkg",
".",
"shortdescription",
"(",
")",
"pkgname",
"=",
"pkg",
".",
"fullname",
"(",
")",
"else",
":",
"description",
"=",
"'Error: no such package'",
"print",
"\"%-20.20s\\t%s\"",
"%",
"(",
"pkgname",
",",
"description",
")",
"if",
"verbose",
":",
"print",
"\"\\tHome page:\\t\"",
",",
"pkg",
".",
"homepage",
"(",
")",
"try",
":",
"print",
"\"\\tDownload URL:\\t\"",
",",
"pkg",
".",
"downloadURL",
"(",
")",
"except",
"KeyError",
":",
"pass",
"description",
"=",
"pkg",
".",
"description",
"(",
")",
"description",
"=",
"'\\n\\t\\t\\t\\t\\t'",
".",
"join",
"(",
"description",
".",
"splitlines",
"(",
")",
")",
"print",
"\"\\tDescription:\\t%s\"",
"%",
"description",
"elif",
"mode",
"==",
"'status'",
":",
"if",
"not",
"args",
":",
"args",
"=",
"db",
".",
"listnames",
"(",
")",
"print",
"\"%-20.20s\\t%s\\t%s\"",
"%",
"(",
"\"Package\"",
",",
"\"Installed\"",
",",
"\"Message\"",
")",
"print",
"for",
"pkgname",
"in",
"args",
":",
"pkg",
"=",
"db",
".",
"find",
"(",
"pkgname",
")",
"if",
"pkg",
":",
"status",
",",
"msg",
"=",
"pkg",
".",
"installed",
"(",
")",
"pkgname",
"=",
"pkg",
".",
"fullname",
"(",
")",
"else",
":",
"status",
"=",
"'error'",
"msg",
"=",
"'No such package'",
"print",
"\"%-20.20s\\t%-9.9s\\t%s\"",
"%",
"(",
"pkgname",
",",
"status",
",",
"msg",
")",
"if",
"verbose",
"and",
"status",
"==",
"\"no\"",
":",
"prereq",
"=",
"pkg",
".",
"prerequisites",
"(",
")",
"for",
"pkg",
",",
"msg",
"in",
"prereq",
":",
"if",
"not",
"pkg",
":",
"pkg",
"=",
"''",
"else",
":",
"pkg",
"=",
"pkg",
".",
"fullname",
"(",
")",
"print",
"\"%-20.20s\\tRequirement: %s %s\"",
"%",
"(",
"\"\"",
",",
"pkg",
",",
"msg",
")",
"elif",
"mode",
"==",
"'install'",
":",
"if",
"not",
"args",
":",
"print",
"'Please specify packages to install'",
"sys",
".",
"exit",
"(",
"1",
")",
"inst",
"=",
"PimpInstaller",
"(",
"db",
")",
"for",
"pkgname",
"in",
"args",
":",
"pkg",
"=",
"db",
".",
"find",
"(",
"pkgname",
")",
"if",
"not",
"pkg",
":",
"print",
"'%s: No such package'",
"%",
"pkgname",
"continue",
"list",
",",
"messages",
"=",
"inst",
".",
"prepareInstall",
"(",
"pkg",
",",
"force",
")",
"if",
"messages",
"and",
"not",
"force",
":",
"print",
"\"%s: Not installed:\"",
"%",
"pkgname",
"for",
"m",
"in",
"messages",
":",
"print",
"\"\\t\"",
",",
"m",
"else",
":",
"if",
"verbose",
":",
"output",
"=",
"sys",
".",
"stdout",
"else",
":",
"output",
"=",
"None",
"messages",
"=",
"inst",
".",
"install",
"(",
"list",
",",
"output",
")",
"if",
"messages",
":",
"print",
"\"%s: Not installed:\"",
"%",
"pkgname",
"for",
"m",
"in",
"messages",
":",
"print",
"\"\\t\"",
",",
"m"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/plat-mac/pimp.py#L1011-L1093 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/lmbr_aws/cleanup_utils/cleanup_glue_utils.py | python | __glue_crawler_exists | (cleaner, glue_crawler_name) | | return True | Verifies if a crawler exists. This should be replaced once glue supports Waiter objects for crawlers.
:param cleaner: A Cleaner object from the main cleanup.py script
:param glue_crawler_name: Can be retrieved from the boto3 get_crawlers() with response['Crawlers']['Name']
:return: True if the crawler exists, False if it doesn't exist or an error occurs. | Verifies if a crawler exists. This should be replaced once glue supports Waiter objects for crawlers.
:param cleaner: A Cleaner object from the main cleanup.py script
:param glue_crawler_name: Can be retrieved from the boto3 get_crawlers() with response['Crawlers']['Name']
:return: True if the crawler exists, False if it doesn't exist or an error occurs. | [
"Verifies",
"if",
"a",
"crawler",
"exists",
".",
"This",
"is",
"should",
"be",
"replaced",
"once",
"glue",
"supports",
"Waiter",
"objects",
"for",
"crawlers",
".",
":",
"param",
"cleaner",
":",
"A",
"Cleaner",
"object",
"from",
"the",
"main",
"cleanup",
".",
"py",
"script",
":",
"param",
"glue_crawler_name",
":",
"Can",
"be",
"retrieved",
"from",
"the",
"boto3",
"get_crawlers",
"()",
"with",
"response",
"[",
"Crawlers",
"]",
"[",
"Name",
"]",
":",
"return",
":",
"True",
"if",
"the",
"crawler",
"exists",
"False",
"if",
"it",
"doesn",
"t",
"exist",
"or",
"an",
"error",
"occurs",
"."
] | def __glue_crawler_exists(cleaner, glue_crawler_name):
"""
Verifies if a crawler exists. This is should be replaced once glue supports Waiter objects for crawlers.
:param cleaner: A Cleaner object from the main cleanup.py script
:param glue_crawler_name: Can be retrieved from the boto3 get_crawlers() with response['Crawlers']['Name']
:return: True if the crawler exists, False if it doesn't exist or an error occurs.
"""
try:
cleaner.glue.get_crawler(Name=glue_crawler_name)
except cleaner.glue.exceptions.EntityNotFoundException:
return False
except ClientError as err:
print(" ERROR: Unexpected error occurred when checking if crawler {0} exists due to {1}"
.format(glue_crawler_name, exception_utils.message(err)))
return False
return True | [
"def",
"__glue_crawler_exists",
"(",
"cleaner",
",",
"glue_crawler_name",
")",
":",
"try",
":",
"cleaner",
".",
"glue",
".",
"get_crawler",
"(",
"Name",
"=",
"glue_crawler_name",
")",
"except",
"cleaner",
".",
"glue",
".",
"exceptions",
".",
"EntityNotFoundException",
":",
"return",
"False",
"except",
"ClientError",
"as",
"err",
":",
"print",
"(",
"\" ERROR: Unexpected error occurred when checking if crawler {0} exists due to {1}\"",
".",
"format",
"(",
"glue_crawler_name",
",",
"exception_utils",
".",
"message",
"(",
"err",
")",
")",
")",
"return",
"False",
"return",
"True"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/lmbr_aws/cleanup_utils/cleanup_glue_utils.py#L20-L35 |
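
Since Glue lacked waiter support when this was written, callers typically emulate one by polling. A hedged boto3 sketch of such a poll loop (standard `get_crawler` / `EntityNotFoundException` calls; the crawler name is a placeholder):

```python
import time

import boto3

glue = boto3.client("glue")

def wait_for_crawler_deletion(name, delay=5, max_attempts=20):
    # Poll get_crawler until it raises EntityNotFoundException,
    # emulating the missing boto3 Waiter.
    for _ in range(max_attempts):
        try:
            glue.get_crawler(Name=name)
        except glue.exceptions.EntityNotFoundException:
            return True
        time.sleep(delay)
    return False

print(wait_for_crawler_deletion("my-crawler"))  # placeholder crawler name
```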
|
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | python/mxnet/contrib/onnx/mx2onnx/_op_translations.py | python | create_helper_trans_node | (op_name, input_node, node_name) | return trans_node | create extra transpose node for dot operator | create extra transpose node for dot operator | [
"create",
"extra",
"transpose",
"node",
"for",
"dot",
"operator"
] | def create_helper_trans_node(op_name, input_node, node_name):
"""create extra transpose node for dot operator"""
node_name = op_name + "_" + node_name
trans_node = onnx.helper.make_node(
'Transpose',
inputs=[input_node],
outputs=[node_name],
name=node_name
)
return trans_node | [
"def",
"create_helper_trans_node",
"(",
"op_name",
",",
"input_node",
",",
"node_name",
")",
":",
"node_name",
"=",
"op_name",
"+",
"\"_\"",
"+",
"node_name",
"trans_node",
"=",
"onnx",
".",
"helper",
".",
"make_node",
"(",
"'Transpose'",
",",
"inputs",
"=",
"[",
"input_node",
"]",
",",
"outputs",
"=",
"[",
"node_name",
"]",
",",
"name",
"=",
"node_name",
")",
"return",
"trans_node"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py#L500-L509 |
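
For reference, `onnx.helper.make_node` is the standard way to emit graph nodes. A standalone sketch that also sets the Transpose `perm` attribute explicitly (the record above omits it, so the default axis reversal applies):

```python
from onnx import helper

node = helper.make_node(
    'Transpose',
    inputs=['fc_weight'],
    outputs=['fc_weight_T'],
    name='fc_weight_T',
    perm=[1, 0],  # explicit permutation; default reverses all axes
)
print(node)
```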
|
SoarGroup/Soar | a1c5e249499137a27da60533c72969eef3b8ab6b | scons/scons-local-4.1.0/SCons/Node/FS.py | python | File.release_target_info | (self) | Called just after this node has been marked
up-to-date or was built completely.
This is where we try to release as many target node infos
as possible for clean builds and update runs, in order
to minimize the overall memory consumption.
We'd like to remove a lot more attributes like self.sources
and self.sources_set, but they might get used
in a later build step. For example, during configuration
the source files for a built E{*}.o file are used to figure out
which linker to use for the resulting Program (gcc vs. g++)!
That's why we check for the 'keep_targetinfo' attribute,
config Nodes and the Interactive mode just don't allow
an early release of most variables.
In the same manner, we can't simply remove the self.attributes
here. The smart linking relies on the shared flag, and some
parts of the java Tool use it to transport information
about nodes...
@see: built() and Node.release_target_info() | Called just after this node has been marked
up-to-date or was built completely. | [
"Called",
"just",
"after",
"this",
"node",
"has",
"been",
"marked",
"up",
"-",
"to",
"-",
"date",
"or",
"was",
"built",
"completely",
"."
] | def release_target_info(self):
"""Called just after this node has been marked
up-to-date or was built completely.
This is where we try to release as many target node infos
as possible for clean builds and update runs, in order
to minimize the overall memory consumption.
We'd like to remove a lot more attributes like self.sources
and self.sources_set, but they might get used
in a next build step. For example, during configuration
the source files for a built E{*}.o file are used to figure out
which linker to use for the resulting Program (gcc vs. g++)!
That's why we check for the 'keep_targetinfo' attribute,
config Nodes and the Interactive mode just don't allow
an early release of most variables.
In the same manner, we can't simply remove the self.attributes
here. The smart linking relies on the shared flag, and some
parts of the java Tool use it to transport information
about nodes...
@see: built() and Node.release_target_info()
"""
if self.released_target_info or SCons.Node.interactive:
return
if not hasattr(self.attributes, 'keep_targetinfo'):
# Cache some required values, before releasing
# stuff like env, executor and builder...
self.changed(allowcache=True)
self.get_contents_sig()
self.get_build_env()
# Now purge unneeded stuff to free memory...
self.executor = None
self._memo.pop('rfile', None)
self.prerequisites = None
# Cleanup lists, but only if they're empty
if not len(self.ignore_set):
self.ignore_set = None
if not len(self.implicit_set):
self.implicit_set = None
if not len(self.depends_set):
self.depends_set = None
if not len(self.ignore):
self.ignore = None
if not len(self.depends):
self.depends = None
# Mark this node as done, we only have to release
# the memory once...
self.released_target_info = True | [
"def",
"release_target_info",
"(",
"self",
")",
":",
"if",
"self",
".",
"released_target_info",
"or",
"SCons",
".",
"Node",
".",
"interactive",
":",
"return",
"if",
"not",
"hasattr",
"(",
"self",
".",
"attributes",
",",
"'keep_targetinfo'",
")",
":",
"# Cache some required values, before releasing",
"# stuff like env, executor and builder...",
"self",
".",
"changed",
"(",
"allowcache",
"=",
"True",
")",
"self",
".",
"get_contents_sig",
"(",
")",
"self",
".",
"get_build_env",
"(",
")",
"# Now purge unneeded stuff to free memory...",
"self",
".",
"executor",
"=",
"None",
"self",
".",
"_memo",
".",
"pop",
"(",
"'rfile'",
",",
"None",
")",
"self",
".",
"prerequisites",
"=",
"None",
"# Cleanup lists, but only if they're empty",
"if",
"not",
"len",
"(",
"self",
".",
"ignore_set",
")",
":",
"self",
".",
"ignore_set",
"=",
"None",
"if",
"not",
"len",
"(",
"self",
".",
"implicit_set",
")",
":",
"self",
".",
"implicit_set",
"=",
"None",
"if",
"not",
"len",
"(",
"self",
".",
"depends_set",
")",
":",
"self",
".",
"depends_set",
"=",
"None",
"if",
"not",
"len",
"(",
"self",
".",
"ignore",
")",
":",
"self",
".",
"ignore",
"=",
"None",
"if",
"not",
"len",
"(",
"self",
".",
"depends",
")",
":",
"self",
".",
"depends",
"=",
"None",
"# Mark this node as done, we only have to release",
"# the memory once...",
"self",
".",
"released_target_info",
"=",
"True"
] | https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Node/FS.py#L3010-L3060 |
||
DLR-SC/tigl | d1c5901e948e33d10b1f9659ff3e22c4717b455f | thirdparty/nsiqcppstyle/nsiqcppstyle_state.py | python | _NsiqCppStyleState.ResetErrorCount | (self) | Sets the module's error statistic back to zero. | Sets the module's error statistic back to zero. | [
"Sets",
"the",
"module",
"s",
"error",
"statistic",
"back",
"to",
"zero",
"."
] | def ResetErrorCount(self):
"""Sets the module's error statistic back to zero."""
self.error_count = 0
self.errorPerChecker = {}
self.errorPerFile = {} | [
"def",
"ResetErrorCount",
"(",
"self",
")",
":",
"self",
".",
"error_count",
"=",
"0",
"self",
".",
"errorPerChecker",
"=",
"{",
"}",
"self",
".",
"errorPerFile",
"=",
"{",
"}"
] | https://github.com/DLR-SC/tigl/blob/d1c5901e948e33d10b1f9659ff3e22c4717b455f/thirdparty/nsiqcppstyle/nsiqcppstyle_state.py#L59-L63 |
||
commaai/openpilot | 4416c21b1e738ab7d04147c5ae52b5135e0cdb40 | pyextra/acados_template/acados_ocp.py | python | AcadosOcpDims.nphi | (self) | return self.__nphi | :math:`n_{\phi}` - number of convex-over-nonlinear constraints.
Type: int; default: 0 | :math:`n_{\phi}` - number of convex-over-nonlinear constraints.
Type: int; default: 0 | [
":",
"math",
":",
"n_",
"{",
"\\",
"phi",
"}",
"-",
"number",
"of",
"convex",
"-",
"over",
"-",
"nonlinear",
"constraints",
".",
"Type",
":",
"int",
";",
"default",
":",
"0"
] | def nphi(self):
""":math:`n_{\phi}` - number of convex-over-nonlinear constraints.
Type: int; default: 0"""
return self.__nphi | [
"def",
"nphi",
"(",
"self",
")",
":",
"return",
"self",
".",
"__nphi"
] | https://github.com/commaai/openpilot/blob/4416c21b1e738ab7d04147c5ae52b5135e0cdb40/pyextra/acados_template/acados_ocp.py#L146-L149 |
|
PX4/PX4-Autopilot | 0b9f60a0370be53d683352c63fd92db3d6586e18 | Tools/mavlink_px4.py | python | MAVLink.file_transfer_dir_list_send | (self, transfer_uid, dir_path, flags) | return self.send(self.file_transfer_dir_list_encode(transfer_uid, dir_path, flags)) | Get directory listing
transfer_uid : Unique transfer ID (uint64_t)
dir_path : Directory path to list (char)
flags : RESERVED (uint8_t) | Get directory listing | [
"Get",
"directory",
"listing"
] | def file_transfer_dir_list_send(self, transfer_uid, dir_path, flags):
    '''
    Get directory listing

    transfer_uid      : Unique transfer ID (uint64_t)
    dir_path          : Directory path to list (char)
    flags             : RESERVED (uint8_t)

    '''
    return self.send(self.file_transfer_dir_list_encode(transfer_uid, dir_path, flags)) | [
"def",
"file_transfer_dir_list_send",
"(",
"self",
",",
"transfer_uid",
",",
"dir_path",
",",
"flags",
")",
":",
"return",
"self",
".",
"send",
"(",
"self",
".",
"file_transfer_dir_list_encode",
"(",
"transfer_uid",
",",
"dir_path",
",",
"flags",
")",
")"
] | https://github.com/PX4/PX4-Autopilot/blob/0b9f60a0370be53d683352c63fd92db3d6586e18/Tools/mavlink_px4.py#L4954-L4963 |
|
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/FS.py | python | FileNodeInfo.__getstate__ | (self) | return state | Return all fields that shall be pickled. Walk the slots in the class
hierarchy and add those to the state dictionary. If a '__dict__' slot is
available, copy all entries to the dictionary. Also include the version
id, which is fixed for all instances of a class. | Return all fields that shall be pickled. Walk the slots in the class
hierarchy and add those to the state dictionary. If a '__dict__' slot is
available, copy all entries to the dictionary. Also include the version
id, which is fixed for all instances of a class. | [
"Return",
"all",
"fields",
"that",
"shall",
"be",
"pickled",
".",
"Walk",
"the",
"slots",
"in",
"the",
"class",
"hierarchy",
"and",
"add",
"those",
"to",
"the",
"state",
"dictionary",
".",
"If",
"a",
"__dict__",
"slot",
"is",
"available",
"copy",
"all",
"entries",
"to",
"the",
"dictionary",
".",
"Also",
"include",
"the",
"version",
"id",
"which",
"is",
"fixed",
"for",
"all",
"instances",
"of",
"a",
"class",
"."
] | def __getstate__(self):
"""
Return all fields that shall be pickled. Walk the slots in the class
hierarchy and add those to the state dictionary. If a '__dict__' slot is
available, copy all entries to the dictionary. Also include the version
id, which is fixed for all instances of a class.
"""
state = getattr(self, '__dict__', {}).copy()
for obj in type(self).mro():
for name in getattr(obj, '__slots__', ()):
if hasattr(self, name):
state[name] = getattr(self, name)
state['_version_id'] = self.current_version_id
try:
del state['__weakref__']
except KeyError:
pass
return state | [
"def",
"__getstate__",
"(",
"self",
")",
":",
"state",
"=",
"getattr",
"(",
"self",
",",
"'__dict__'",
",",
"{",
"}",
")",
".",
"copy",
"(",
")",
"for",
"obj",
"in",
"type",
"(",
"self",
")",
".",
"mro",
"(",
")",
":",
"for",
"name",
"in",
"getattr",
"(",
"obj",
",",
"'__slots__'",
",",
"(",
")",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"name",
")",
":",
"state",
"[",
"name",
"]",
"=",
"getattr",
"(",
"self",
",",
"name",
")",
"state",
"[",
"'_version_id'",
"]",
"=",
"self",
".",
"current_version_id",
"try",
":",
"del",
"state",
"[",
"'__weakref__'",
"]",
"except",
"KeyError",
":",
"pass",
"return",
"state"
] | https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Node/FS.py#L2460-L2479 |
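
The docstring's slot walk is the interesting part: `__slots__` attributes never appear in `__dict__`, so a pickling helper must gather them from every class in the MRO. A self-contained sketch of that walk:

```python
class Base:
    __slots__ = ('a',)

class Child(Base):
    __slots__ = ('b',)

    def __getstate__(self):
        state = {}
        # Collect slot values from the whole class hierarchy.
        for klass in type(self).mro():
            for name in getattr(klass, '__slots__', ()):
                if hasattr(self, name):
                    state[name] = getattr(self, name)
        return state

c = Child()
c.a, c.b = 1, 2
print(c.__getstate__())  # {'b': 2, 'a': 1}
```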
|
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/shutil.py | python | copytree | (src, dst, symlinks=False, ignore=None) | Recursively copy a directory tree using copy2().
The destination directory must not already exist.
If exception(s) occur, an Error is raised with a list of reasons.
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied.
The optional ignore argument is a callable. If given, it
is called with the `src` parameter, which is the directory
being visited by copytree(), and `names` which is the list of
`src` contents, as returned by os.listdir():
callable(src, names) -> ignored_names
Since copytree() is called recursively, the callable will be
called once for each directory that is copied. It returns a
list of names relative to the `src` directory that should
not be copied.
XXX Consider this example code rather than the ultimate tool. | Recursively copy a directory tree using copy2(). | [
"Recursively",
"copy",
"a",
"directory",
"tree",
"using",
"copy2",
"()",
"."
] | def copytree(src, dst, symlinks=False, ignore=None):
"""Recursively copy a directory tree using copy2().
The destination directory must not already exist.
If exception(s) occur, an Error is raised with a list of reasons.
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied.
The optional ignore argument is a callable. If given, it
is called with the `src` parameter, which is the directory
being visited by copytree(), and `names` which is the list of
`src` contents, as returned by os.listdir():
callable(src, names) -> ignored_names
Since copytree() is called recursively, the callable will be
called once for each directory that is copied. It returns a
list of names relative to the `src` directory that should
not be copied.
XXX Consider this example code rather than the ultimate tool.
"""
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
os.makedirs(dst)
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore)
else:
copy2(srcname, dstname)
# XXX What about devices, sockets etc.?
except (IOError, os.error), why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error, err:
errors.extend(err.args[0])
try:
copystat(src, dst)
except OSError, why:
if WindowsError is not None and isinstance(why, WindowsError):
# Copying file access times may fail on Windows
pass
else:
errors.extend((src, dst, str(why)))
if errors:
raise Error, errors | [
"def",
"copytree",
"(",
"src",
",",
"dst",
",",
"symlinks",
"=",
"False",
",",
"ignore",
"=",
"None",
")",
":",
"names",
"=",
"os",
".",
"listdir",
"(",
"src",
")",
"if",
"ignore",
"is",
"not",
"None",
":",
"ignored_names",
"=",
"ignore",
"(",
"src",
",",
"names",
")",
"else",
":",
"ignored_names",
"=",
"set",
"(",
")",
"os",
".",
"makedirs",
"(",
"dst",
")",
"errors",
"=",
"[",
"]",
"for",
"name",
"in",
"names",
":",
"if",
"name",
"in",
"ignored_names",
":",
"continue",
"srcname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"src",
",",
"name",
")",
"dstname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dst",
",",
"name",
")",
"try",
":",
"if",
"symlinks",
"and",
"os",
".",
"path",
".",
"islink",
"(",
"srcname",
")",
":",
"linkto",
"=",
"os",
".",
"readlink",
"(",
"srcname",
")",
"os",
".",
"symlink",
"(",
"linkto",
",",
"dstname",
")",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"srcname",
")",
":",
"copytree",
"(",
"srcname",
",",
"dstname",
",",
"symlinks",
",",
"ignore",
")",
"else",
":",
"copy2",
"(",
"srcname",
",",
"dstname",
")",
"# XXX What about devices, sockets etc.?",
"except",
"(",
"IOError",
",",
"os",
".",
"error",
")",
",",
"why",
":",
"errors",
".",
"append",
"(",
"(",
"srcname",
",",
"dstname",
",",
"str",
"(",
"why",
")",
")",
")",
"# catch the Error from the recursive copytree so that we can",
"# continue with other files",
"except",
"Error",
",",
"err",
":",
"errors",
".",
"extend",
"(",
"err",
".",
"args",
"[",
"0",
"]",
")",
"try",
":",
"copystat",
"(",
"src",
",",
"dst",
")",
"except",
"OSError",
",",
"why",
":",
"if",
"WindowsError",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"why",
",",
"WindowsError",
")",
":",
"# Copying file access times may fail on Windows",
"pass",
"else",
":",
"errors",
".",
"extend",
"(",
"(",
"src",
",",
"dst",
",",
"str",
"(",
"why",
")",
")",
")",
"if",
"errors",
":",
"raise",
"Error",
",",
"errors"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/shutil.py#L114-L177 |
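
A usage sketch of the same API in today's standard library (Python 3 `shutil`, where `ignore_patterns` builds the ignore callable described above; paths are placeholders):

```python
import shutil

# Copy a tree, skipping compiled files and VCS metadata.
shutil.copytree(
    "project",         # placeholder source directory
    "project_backup",  # must not already exist
    ignore=shutil.ignore_patterns("*.pyc", ".git"),
)
```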
||
metashell/metashell | f4177e4854ea00c8dbc722cadab26ef413d798ea | 3rd/templight/clang/bindings/python/clang/cindex.py | python | Type.kind | (self) | return TypeKind.from_id(self._kind_id) | Return the kind of this type. | Return the kind of this type. | [
"Return",
"the",
"kind",
"of",
"this",
"type",
"."
] | def kind(self):
"""Return the kind of this type."""
return TypeKind.from_id(self._kind_id) | [
"def",
"kind",
"(",
"self",
")",
":",
"return",
"TypeKind",
".",
"from_id",
"(",
"self",
".",
"_kind_id",
")"
] | https://github.com/metashell/metashell/blob/f4177e4854ea00c8dbc722cadab26ef413d798ea/3rd/templight/clang/bindings/python/clang/cindex.py#L2187-L2189 |
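
A hedged usage sketch with the `clang.cindex` bindings (assumes libclang is available and `sample.c` exists; the cursor walk shown is the bindings' standard traversal):

```python
import clang.cindex as ci

index = ci.Index.create()
tu = index.parse("sample.c")  # placeholder translation unit

for cursor in tu.cursor.walk_preorder():
    if cursor.kind == ci.CursorKind.VAR_DECL:
        # Each cursor's type reports its TypeKind, e.g. INT or POINTER.
        print(cursor.spelling, cursor.type.kind)
```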
|
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/ops/array_ops.py | python | size | (input, name=None, out_type=dtypes.int32) | return size_internal(input, name, optimize=True, out_type=out_type) | Returns the size of a tensor.
This operation returns an integer representing the number of elements in
`input`.
For example:
```python
t = tf.constant([[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]])
tf.size(t) # 12
```
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to tf.int32.
Returns:
A `Tensor` of type `out_type`. Defaults to tf.int32. | Returns the size of a tensor. | [
"Returns",
"the",
"size",
"of",
"a",
"tensor",
"."
] | def size(input, name=None, out_type=dtypes.int32):
# pylint: disable=redefined-builtin
"""Returns the size of a tensor.
This operation returns an integer representing the number of elements in
`input`.
For example:
```python
t = tf.constant([[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]])
tf.size(t) # 12
```
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to tf.int32.
Returns:
A `Tensor` of type `out_type`. Defaults to tf.int32.
"""
return size_internal(input, name, optimize=True, out_type=out_type) | [
"def",
"size",
"(",
"input",
",",
"name",
"=",
"None",
",",
"out_type",
"=",
"dtypes",
".",
"int32",
")",
":",
"# pylint: disable=redefined-builtin",
"return",
"size_internal",
"(",
"input",
",",
"name",
",",
"optimize",
"=",
"True",
",",
"out_type",
"=",
"out_type",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/ops/array_ops.py#L308-L331 |
|
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tkinter.py | python | Checkbutton.deselect | (self) | Put the button in off-state. | Put the button in off-state. | [
"Put",
"the",
"button",
"in",
"off",
"-",
"state",
"."
] | def deselect(self):
"""Put the button in off-state."""
self.tk.call(self._w, 'deselect') | [
"def",
"deselect",
"(",
"self",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'deselect'",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tkinter.py#L2418-L2420 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_controls.py | python | TreeEvent.GetPoint | (*args, **kwargs) | return _controls_.TreeEvent_GetPoint(*args, **kwargs) | GetPoint(self) -> Point | GetPoint(self) -> Point | [
"GetPoint",
"(",
"self",
")",
"-",
">",
"Point"
] | def GetPoint(*args, **kwargs):
"""GetPoint(self) -> Point"""
return _controls_.TreeEvent_GetPoint(*args, **kwargs) | [
"def",
"GetPoint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"TreeEvent_GetPoint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L5131-L5133 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/distlib/database.py | python | InstalledDistribution.exports | (self) | return result | Return the information exported by this distribution.
:return: A dictionary of exports, mapping an export category to a dict
of :class:`ExportEntry` instances describing the individual
export entries, and keyed by name. | Return the information exported by this distribution.
:return: A dictionary of exports, mapping an export category to a dict
of :class:`ExportEntry` instances describing the individual
export entries, and keyed by name. | [
"Return",
"the",
"information",
"exported",
"by",
"this",
"distribution",
".",
":",
"return",
":",
"A",
"dictionary",
"of",
"exports",
"mapping",
"an",
"export",
"category",
"to",
"a",
"dict",
"of",
":",
"class",
":",
"ExportEntry",
"instances",
"describing",
"the",
"individual",
"export",
"entries",
"and",
"keyed",
"by",
"name",
"."
] | def exports(self):
"""
Return the information exported by this distribution.
:return: A dictionary of exports, mapping an export category to a dict
of :class:`ExportEntry` instances describing the individual
export entries, and keyed by name.
"""
result = {}
r = self.get_distinfo_resource(EXPORTS_FILENAME)
if r:
result = self.read_exports()
return result | [
"def",
"exports",
"(",
"self",
")",
":",
"result",
"=",
"{",
"}",
"r",
"=",
"self",
".",
"get_distinfo_resource",
"(",
"EXPORTS_FILENAME",
")",
"if",
"r",
":",
"result",
"=",
"self",
".",
"read_exports",
"(",
")",
"return",
"result"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/distlib/database.py#L604-L615 |
|
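A short usage sketch of the `exports` property documented above; the distribution name is an assumption, any installed project would do:

```python
from distlib.database import DistributionPath

# Locate an installed distribution and list its export categories.
dist = DistributionPath().get_distribution("pip")  # "pip" is illustrative
if dist is not None:
    for category, entries in dist.exports.items():
        print(category, "->", sorted(entries))
```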
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/calendar.py | python | Calendar.yeardayscalendar | (self, year, width=3) | return [months[i:i+width] for i in range(0, len(months), width) ] | Return the data for the specified year ready for formatting (similar to
yeardatescalendar()). Entries in the week lists are day numbers.
Day numbers outside this month are zero. | Return the data for the specified year ready for formatting (similar to
yeardatescalendar()). Entries in the week lists are day numbers.
Day numbers outside this month are zero. | [
"Return",
"the",
"data",
"for",
"the",
"specified",
"year",
"ready",
"for",
"formatting",
"(",
"similar",
"to",
"yeardatescalendar",
"()",
")",
".",
"Entries",
"in",
"the",
"week",
"lists",
"are",
"day",
"numbers",
".",
"Day",
"numbers",
"outside",
"this",
"month",
"are",
"zero",
"."
] | def yeardayscalendar(self, year, width=3):
"""
Return the data for the specified year ready for formatting (similar to
yeardatescalendar()). Entries in the week lists are day numbers.
Day numbers outside this month are zero.
"""
months = [
self.monthdayscalendar(year, i)
for i in range(January, January+12)
]
return [months[i:i+width] for i in range(0, len(months), width) ] | [
"def",
"yeardayscalendar",
"(",
"self",
",",
"year",
",",
"width",
"=",
"3",
")",
":",
"months",
"=",
"[",
"self",
".",
"monthdayscalendar",
"(",
"year",
",",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"January",
",",
"January",
"+",
"12",
")",
"]",
"return",
"[",
"months",
"[",
"i",
":",
"i",
"+",
"width",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"months",
")",
",",
"width",
")",
"]"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/calendar.py#L246-L256 |
|
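To make the nested structure returned by `yeardayscalendar` concrete, a quick sketch (the year and indices are illustrative):

```python
import calendar

cal = calendar.Calendar(firstweekday=0)     # weeks start on Monday
year = cal.yeardayscalendar(2024, width=3)  # rows of 3 months each

# year[row][month][week] -> list of 7 day numbers; 0 marks padding days
january = year[0][0]
print(january[0])  # first week of January 2024: [1, 2, 3, 4, 5, 6, 7]
```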
twhui/LiteFlowNet | 00925aebf2db9ac50f4b1666f718688b10dd10d1 | scripts/cpp_lint.py | python | ReplaceAll | (pattern, rep, s) | return _regexp_compile_cache[pattern].sub(rep, s) | Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements) | Replaces instances of pattern in a string with a replacement. | [
"Replaces",
"instances",
"of",
"pattern",
"in",
"a",
"string",
"with",
"a",
"replacement",
"."
] | def ReplaceAll(pattern, rep, s):
"""Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements)
"""
if pattern not in _regexp_compile_cache:
_regexp_compile_cache[pattern] = sre_compile.compile(pattern)
return _regexp_compile_cache[pattern].sub(rep, s) | [
"def",
"ReplaceAll",
"(",
"pattern",
",",
"rep",
",",
"s",
")",
":",
"if",
"pattern",
"not",
"in",
"_regexp_compile_cache",
":",
"_regexp_compile_cache",
"[",
"pattern",
"]",
"=",
"sre_compile",
".",
"compile",
"(",
"pattern",
")",
"return",
"_regexp_compile_cache",
"[",
"pattern",
"]",
".",
"sub",
"(",
"rep",
",",
"s",
")"
] | https://github.com/twhui/LiteFlowNet/blob/00925aebf2db9ac50f4b1666f718688b10dd10d1/scripts/cpp_lint.py#L525-L540 |
|
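The helper above is a memoized `re.sub`; the same pattern as a self-contained sketch using the public `re` API instead of `sre_compile` (the cache and function names here are mine, not cpplint's):

```python
import re

_pattern_cache = {}  # pattern string -> compiled regex

def replace_all(pattern, rep, s):
    # Compile each pattern once and reuse the compiled object on later calls.
    if pattern not in _pattern_cache:
        _pattern_cache[pattern] = re.compile(pattern)
    return _pattern_cache[pattern].sub(rep, s)

print(replace_all(r"\s+", " ", "too   many   spaces"))  # -> "too many spaces"
```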
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/bayesflow/python/ops/csiszar_divergence_impl.py | python | arithmetic_geometric | (logu, self_normalized=False, name=None) | The Arithmetic-Geometric Csiszar-function in log-space.
A Csiszar-function is a member of,
```none
F = { f:R_+ to R : f convex }.
```
When `self_normalized = True` the Arithmetic-Geometric Csiszar-function is:
```none
f(u) = (1 + u) log( (1 + u) / sqrt(u) ) - (1 + u) log(2)
```
When `self_normalized = False` the `(1 + u) log(2)` term is omitted.
Observe that as an f-Divergence, this Csiszar-function implies:
```none
D_f[p, q] = KL[m, p] + KL[m, q]
m(x) = 0.5 p(x) + 0.5 q(x)
```
In a sense, this divergence is the "reverse" of the Jensen-Shannon
f-Divergence.
This Csiszar-function induces a symmetric f-Divergence, i.e.,
`D_f[p, q] = D_f[q, p]`.
Warning: this function makes non-log-space calculations and may therefore be
numerically unstable for `|logu| >> 0`.
Args:
logu: Floating-type `Tensor` representing `log(u)` from above.
self_normalized: Python `bool` indicating whether `f'(u=1)=0`. When
`f'(u=1)=0` the implied Csiszar f-Divergence remains non-negative even
when `p, q` are unnormalized measures.
name: Python `str` name prefixed to Ops created by this function.
Returns:
arithmetic_geometric_of_u: Floating-type `Tensor` of the
Csiszar-function evaluated at `u = exp(logu)`. | The Arithmetic-Geometric Csiszar-function in log-space. | [
"The",
"Arithmetic",
"-",
"Geometric",
"Csiszar",
"-",
"function",
"in",
"log",
"-",
"space",
"."
] | def arithmetic_geometric(logu, self_normalized=False, name=None):
"""The Arithmetic-Geometric Csiszar-function in log-space.
A Csiszar-function is a member of,
```none
F = { f:R_+ to R : f convex }.
```
When `self_normalized = True` the Arithmetic-Geometric Csiszar-function is:
```none
f(u) = (1 + u) log( (1 + u) / sqrt(u) ) - (1 + u) log(2)
```
When `self_normalized = False` the `(1 + u) log(2)` term is omitted.
Observe that as an f-Divergence, this Csiszar-function implies:
```none
D_f[p, q] = KL[m, p] + KL[m, q]
m(x) = 0.5 p(x) + 0.5 q(x)
```
In a sense, this divergence is the "reverse" of the Jensen-Shannon
f-Divergence.
This Csiszar-function induces a symmetric f-Divergence, i.e.,
`D_f[p, q] = D_f[q, p]`.
Warning: this function makes non-log-space calculations and may therefore be
numerically unstable for `|logu| >> 0`.
Args:
logu: Floating-type `Tensor` representing `log(u)` from above.
self_normalized: Python `bool` indicating whether `f'(u=1)=0`. When
`f'(u=1)=0` the implied Csiszar f-Divergence remains non-negative even
when `p, q` are unnormalized measures.
name: Python `str` name prefixed to Ops created by this function.
Returns:
arithmetic_geometric_of_u: Floating-type `Tensor` of the
Csiszar-function evaluated at `u = exp(logu)`.
"""
with ops.name_scope(name, "arithmetic_geometric", [logu]):
logu = ops.convert_to_tensor(logu, name="logu")
y = nn_ops.softplus(logu) - 0.5 * logu
if self_normalized:
y -= np.log(2.).astype(logu.dtype.as_numpy_dtype)
return (1. + math_ops.exp(logu)) * y | [
"def",
"arithmetic_geometric",
"(",
"logu",
",",
"self_normalized",
"=",
"False",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"arithmetic_geometric\"",
",",
"[",
"logu",
"]",
")",
":",
"logu",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"logu",
",",
"name",
"=",
"\"logu\"",
")",
"y",
"=",
"nn_ops",
".",
"softplus",
"(",
"logu",
")",
"-",
"0.5",
"*",
"logu",
"if",
"self_normalized",
":",
"y",
"-=",
"np",
".",
"log",
"(",
"2.",
")",
".",
"astype",
"(",
"logu",
".",
"dtype",
".",
"as_numpy_dtype",
")",
"return",
"(",
"1.",
"+",
"math_ops",
".",
"exp",
"(",
"logu",
")",
")",
"*",
"y"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/bayesflow/python/ops/csiszar_divergence_impl.py#L274-L324 |
||
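A quick check that the body above (`y = softplus(logu) - 0.5 * logu`, returning `(1 + exp(logu)) * y`) matches the documented `f(u)`: with $\ell = \log u$,

$$(1+u)\log\frac{1+u}{\sqrt{u}} = (1+e^{\ell})\Bigl(\log(1+e^{\ell}) - \tfrac{1}{2}\ell\Bigr) = (1+e^{\ell})\Bigl(\operatorname{softplus}(\ell) - \tfrac{1}{2}\ell\Bigr),$$

since $\operatorname{softplus}(\ell) = \log(1+e^{\ell}) = \log(1+u)$. The `self_normalized` branch then subtracts $\log 2$ from `y` before the final multiply, contributing the documented $-(1+u)\log 2$ term.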
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_core.py | python | PyImageHandler._SetSelf | (*args, **kwargs) | return _core_.PyImageHandler__SetSelf(*args, **kwargs) | _SetSelf(self, PyObject self) | _SetSelf(self, PyObject self) | [
"_SetSelf",
"(",
"self",
"PyObject",
"self",
")"
] | def _SetSelf(*args, **kwargs):
"""_SetSelf(self, PyObject self)"""
return _core_.PyImageHandler__SetSelf(*args, **kwargs) | [
"def",
"_SetSelf",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"PyImageHandler__SetSelf",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L2734-L2736 |
|
turi-code/SFrame | 796b9bdfb2fa1b881d82080754643c7e68629cd2 | oss_src/unity/python/sframe/meta/asttools/visitors/print_visitor.py | python | print_ast | (ast, indent=' ', initlevel=0, newline='\n', file=sys.stdout) | Pretty print an ast node.
:param ast: the ast to print.
:param indent: how far to indent a newline.
:param initlevel: starting indent level
:param newline: The newline character.
:param file: file object to print to
To print a short ast you may want to use::
node = ast.parse(source)
print_ast(node, indent='', newline='') | Pretty print an ast node.
:param ast: the ast to print.
:param indent: how far to indent a newline.
:param initlevel: starting indent level
:param newline: The newline character.
:param file: file object to print to
To print a short ast you may want to use::
node = ast.parse(source)
print_ast(node, indent='', newline='') | [
"Pretty",
"print",
"an",
"ast",
"node",
".",
":",
"param",
"ast",
":",
"the",
"ast",
"to",
"print",
".",
":",
"param",
"indent",
":",
"how",
"far",
"to",
"indent",
"a",
"newline",
".",
":",
"param",
"initlevel",
":",
"starting",
"indent",
"level",
":",
"param",
"newline",
":",
"The",
"newline",
"character",
".",
":",
"param",
"file",
":",
"file",
"object",
"to",
"print",
"to",
"To",
"print",
"a",
"short",
"ast",
"you",
"may",
"want",
"to",
"use",
"::",
"node",
"=",
"ast",
".",
"parse",
"(",
"source",
")",
"print_ast",
"(",
"node",
"indent",
"=",
"newline",
"=",
")"
] | def print_ast(ast, indent=' ', initlevel=0, newline='\n', file=sys.stdout):
'''
Pretty print an ast node.
:param ast: the ast to print.
:param indent: how far to indent a newline.
:param initlevel: starting indent level
:param newline: The newline character.
:param file: file object to print to
To print a short ast you may want to use::
node = ast.parse(source)
print_ast(node, indent='', newline='')
'''
visitor = ASTPrinter(indent=indent, level=initlevel, newline=newline)
visitor.visit(ast)
visitor.dump(file=file) | [
"def",
"print_ast",
"(",
"ast",
",",
"indent",
"=",
"' '",
",",
"initlevel",
"=",
"0",
",",
"newline",
"=",
"'\\n'",
",",
"file",
"=",
"sys",
".",
"stdout",
")",
":",
"visitor",
"=",
"ASTPrinter",
"(",
"indent",
"=",
"indent",
",",
"level",
"=",
"initlevel",
",",
"newline",
"=",
"newline",
")",
"visitor",
".",
"visit",
"(",
"ast",
")",
"visitor",
".",
"dump",
"(",
"file",
"=",
"file",
")"
] | https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/meta/asttools/visitors/print_visitor.py#L159-L178 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/s3transfer/manager.py | python | TransferManager.shutdown | (self, cancel=False, cancel_msg='') | Shutdown the TransferManager
It will wait till all transfers complete before it completely shuts
down.
:type cancel: boolean
:param cancel: If True, calls TransferFuture.cancel() for
all in-progress transfers. This is useful if you want the
shutdown to happen quicker.
:type cancel_msg: str
:param cancel_msg: The message to specify if canceling all in-progress
transfers. | Shutdown the TransferManager | [
"Shutdown",
"the",
"TransferManager"
] | def shutdown(self, cancel=False, cancel_msg=''):
"""Shutdown the TransferManager
It will wait till all transfers complete before it completely shuts
down.
:type cancel: boolean
:param cancel: If True, calls TransferFuture.cancel() for
all in-progress transfers. This is useful if you want the
shutdown to happen quicker.
:type cancel_msg: str
:param cancel_msg: The message to specify if canceling all in-progress
transfers.
"""
self._shutdown(cancel, cancel, cancel_msg) | [
"def",
"shutdown",
"(",
"self",
",",
"cancel",
"=",
"False",
",",
"cancel_msg",
"=",
"''",
")",
":",
"self",
".",
"_shutdown",
"(",
"cancel",
",",
"cancel",
",",
"cancel_msg",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/s3transfer/manager.py#L541-L556 |
||
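A hedged usage sketch of the shutdown contract described above; the bucket, key, and file names are placeholders:

```python
import boto3
from s3transfer.manager import TransferManager

manager = TransferManager(boto3.client("s3"))
try:
    manager.upload("report.bin", "my-bucket", "backups/report.bin")
finally:
    # Blocks until in-flight transfers finish; shutdown(cancel=True,
    # cancel_msg="...") would call TransferFuture.cancel() on them instead.
    manager.shutdown()
```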
OAID/Caffe-HRT | aae71e498ab842c6f92bcc23fc668423615a4d65 | scripts/cpp_lint.py | python | _FunctionState.Count | (self) | Count line in current function body. | Count line in current function body. | [
"Count",
"line",
"in",
"current",
"function",
"body",
"."
] | def Count(self):
"""Count line in current function body."""
if self.in_a_function:
self.lines_in_function += 1 | [
"def",
"Count",
"(",
"self",
")",
":",
"if",
"self",
".",
"in_a_function",
":",
"self",
".",
"lines_in_function",
"+=",
"1"
] | https://github.com/OAID/Caffe-HRT/blob/aae71e498ab842c6f92bcc23fc668423615a4d65/scripts/cpp_lint.py#L831-L834 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/richtext.py | python | RichTextCtrl.ApplyItalicToSelection | (*args, **kwargs) | return _richtext.RichTextCtrl_ApplyItalicToSelection(*args, **kwargs) | ApplyItalicToSelection(self) -> bool
Apply italic to the selection | ApplyItalicToSelection(self) -> bool | [
"ApplyItalicToSelection",
"(",
"self",
")",
"-",
">",
"bool"
] | def ApplyItalicToSelection(*args, **kwargs):
"""
ApplyItalicToSelection(self) -> bool
Apply italic to the selection
"""
return _richtext.RichTextCtrl_ApplyItalicToSelection(*args, **kwargs) | [
"def",
"ApplyItalicToSelection",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextCtrl_ApplyItalicToSelection",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/richtext.py#L3963-L3969 |
|
facebookincubator/BOLT | 88c70afe9d388ad430cc150cc158641701397f70 | mlir/utils/spirv/gen_spirv_dialect.py | python | get_spirv_doc_from_html_spec | (url, settings) | return doc | Extracts instruction documentation from SPIR-V HTML spec.
Returns:
- A dict mapping from instruction opcode to documentation. | Extracts instruction documentation from SPIR-V HTML spec. | [
"Extracts",
"instruction",
"documentation",
"from",
"SPIR",
"-",
"V",
"HTML",
"spec",
"."
] | def get_spirv_doc_from_html_spec(url, settings):
"""Extracts instruction documentation from SPIR-V HTML spec.
Returns:
- A dict mapping from instruction opcode to documentation.
"""
if url is None:
url = SPIRV_HTML_SPEC_URL
response = requests.get(url)
spec = response.content
from bs4 import BeautifulSoup
spirv = BeautifulSoup(spec, 'html.parser')
doc = {}
if settings.gen_ocl_ops:
section_anchor = spirv.find('h2', {'id': '_a_id_binary_a_binary_form'})
for section in section_anchor.parent.find_all('div', {'class': 'sect2'}):
for table in section.find_all('table'):
inst_html = table.tbody.tr.td
opname = inst_html.a['id']
# Ignore the first line, which is just the opname.
doc[opname] = inst_html.text.split('\n', 1)[1].strip()
else:
section_anchor = spirv.find('h3', {'id': '_a_id_instructions_a_instructions'})
for section in section_anchor.parent.find_all('div', {'class': 'sect3'}):
for table in section.find_all('table'):
inst_html = table.tbody.tr.td.p
opname = inst_html.a['id']
# Ignore the first line, which is just the opname.
doc[opname] = inst_html.text.split('\n', 1)[1].strip()
return doc | [
"def",
"get_spirv_doc_from_html_spec",
"(",
"url",
",",
"settings",
")",
":",
"if",
"url",
"is",
"None",
":",
"url",
"=",
"SPIRV_HTML_SPEC_URL",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"spec",
"=",
"response",
".",
"content",
"from",
"bs4",
"import",
"BeautifulSoup",
"spirv",
"=",
"BeautifulSoup",
"(",
"spec",
",",
"'html.parser'",
")",
"doc",
"=",
"{",
"}",
"if",
"settings",
".",
"gen_ocl_ops",
":",
"section_anchor",
"=",
"spirv",
".",
"find",
"(",
"'h2'",
",",
"{",
"'id'",
":",
"'_a_id_binary_a_binary_form'",
"}",
")",
"for",
"section",
"in",
"section_anchor",
".",
"parent",
".",
"find_all",
"(",
"'div'",
",",
"{",
"'class'",
":",
"'sect2'",
"}",
")",
":",
"for",
"table",
"in",
"section",
".",
"find_all",
"(",
"'table'",
")",
":",
"inst_html",
"=",
"table",
".",
"tbody",
".",
"tr",
".",
"td",
"opname",
"=",
"inst_html",
".",
"a",
"[",
"'id'",
"]",
"# Ignore the first line, which is just the opname.",
"doc",
"[",
"opname",
"]",
"=",
"inst_html",
".",
"text",
".",
"split",
"(",
"'\\n'",
",",
"1",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"else",
":",
"section_anchor",
"=",
"spirv",
".",
"find",
"(",
"'h3'",
",",
"{",
"'id'",
":",
"'_a_id_instructions_a_instructions'",
"}",
")",
"for",
"section",
"in",
"section_anchor",
".",
"parent",
".",
"find_all",
"(",
"'div'",
",",
"{",
"'class'",
":",
"'sect3'",
"}",
")",
":",
"for",
"table",
"in",
"section",
".",
"find_all",
"(",
"'table'",
")",
":",
"inst_html",
"=",
"table",
".",
"tbody",
".",
"tr",
".",
"td",
".",
"p",
"opname",
"=",
"inst_html",
".",
"a",
"[",
"'id'",
"]",
"# Ignore the first line, which is just the opname.",
"doc",
"[",
"opname",
"]",
"=",
"inst_html",
".",
"text",
".",
"split",
"(",
"'\\n'",
",",
"1",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"return",
"doc"
] | https://github.com/facebookincubator/BOLT/blob/88c70afe9d388ad430cc150cc158641701397f70/mlir/utils/spirv/gen_spirv_dialect.py#L36-L70 |
|
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | python/mxnet/contrib/quantization.py | python | _load_sym | (sym, logger=logging) | | Given a str as a path to the symbol .json file or a symbol, returns a Symbol object. | Given a str as a path to the symbol .json file or a symbol, returns a Symbol object. | [
"Given",
"a",
"str",
"as",
"a",
"path",
"the",
"symbol",
".",
"json",
"file",
"or",
"a",
"symbol",
"returns",
"a",
"Symbol",
"object",
"."
] | def _load_sym(sym, logger=logging):
"""Given a str as a path the symbol .json file or a symbol, returns a Symbol object."""
if isinstance(sym, str): # sym is a symbol file path
cur_path = os.path.dirname(os.path.realpath(__file__))
symbol_file_path = os.path.join(cur_path, sym)
logger.info('Loading symbol from file %s' % symbol_file_path)
return sym_load(symbol_file_path)
elif isinstance(sym, Symbol):
return sym
else:
raise ValueError('_load_sym only accepts Symbol or path to the symbol file,'
' while received type %s' % str(type(sym))) | [
"def",
"_load_sym",
"(",
"sym",
",",
"logger",
"=",
"logging",
")",
":",
"if",
"isinstance",
"(",
"sym",
",",
"str",
")",
":",
"# sym is a symbol file path",
"cur_path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
"symbol_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cur_path",
",",
"sym",
")",
"logger",
".",
"info",
"(",
"'Loading symbol from file %s'",
"%",
"symbol_file_path",
")",
"return",
"sym_load",
"(",
"symbol_file_path",
")",
"elif",
"isinstance",
"(",
"sym",
",",
"Symbol",
")",
":",
"return",
"sym",
"else",
":",
"raise",
"ValueError",
"(",
"'_load_sym only accepts Symbol or path to the symbol file,'",
"' while received type %s'",
"%",
"str",
"(",
"type",
"(",
"sym",
")",
")",
")"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/contrib/quantization.py#L385-L396 |
||
openvinotoolkit/openvino | dedcbeafa8b84cccdc55ca64b8da516682b381c7 | .github/github_org_control/github_api.py | python | GithubOrgApi.get_valid_github_users | (self, emails) | return valid_users | Gets valid GitHub users by email and prints status | Gets valid GitHub users by email and prints status | [
"Gets",
"valid",
"GitHub",
"users",
"by",
"email",
"and",
"prints",
"status"
] | def get_valid_github_users(self, emails):
"""Gets valid GitHub users by email and prints status"""
valid_users = set()
wrong_emails = set()
no_account_emails = set()
no_account_names = set()
print(f"\nGitHub users from {len(emails)} invite emails (email - status):")
for email in emails:
if not is_intel_email(email):
print(f"{email} - Non Intel email")
wrong_emails.add(email)
continue
# You can make up to 30 requests per minute; https://developer.github.com/v3/search/
time.sleep(2)
user = self.get_github_user_by_email(email)
if not user:
print(f"{email} - No valid GitHub account")
no_account_emails.add(email)
continue
if user.email and user.email.lower() == email:
if is_valid_name(user.name):
print(f"{email} - OK")
valid_users.add(user)
else:
print(f"{email} - No valid name in GitHub account: ", end="")
print_users(user)
no_account_names.add(email)
else:
print(f"{email} - Non public or wrong email in GitHub account: ", end="")
print_users(user)
no_account_emails.add(email)
print("\nValid users:")
print_users(valid_users)
print(f"\nWrong emails {len(wrong_emails)}:", "; ".join(wrong_emails))
print(
f"\nIntel emails - No valid GitHub account {len(no_account_emails)}:",
"; ".join(no_account_emails),
)
print(
f"\nIntel emails - No valid name in GitHub account {len(no_account_names)}:",
"; ".join(no_account_names),
)
return valid_users | [
"def",
"get_valid_github_users",
"(",
"self",
",",
"emails",
")",
":",
"valid_users",
"=",
"set",
"(",
")",
"wrong_emails",
"=",
"set",
"(",
")",
"no_account_emails",
"=",
"set",
"(",
")",
"no_account_names",
"=",
"set",
"(",
")",
"print",
"(",
"f\"\\nGitHub users from {len(emails)} invite emails (email - status):\"",
")",
"for",
"email",
"in",
"emails",
":",
"if",
"not",
"is_intel_email",
"(",
"email",
")",
":",
"print",
"(",
"f\"{email} - Non Intel email\"",
")",
"wrong_emails",
".",
"add",
"(",
"email",
")",
"continue",
"# You can make up to 30 requests per minute; https://developer.github.com/v3/search/",
"time",
".",
"sleep",
"(",
"2",
")",
"user",
"=",
"self",
".",
"get_github_user_by_email",
"(",
"email",
")",
"if",
"not",
"user",
":",
"print",
"(",
"f\"{email} - No valid GitHub account\"",
")",
"no_account_emails",
".",
"add",
"(",
"email",
")",
"continue",
"if",
"user",
".",
"email",
"and",
"user",
".",
"email",
".",
"lower",
"(",
")",
"==",
"email",
":",
"if",
"is_valid_name",
"(",
"user",
".",
"name",
")",
":",
"print",
"(",
"f\"{email} - OK\"",
")",
"valid_users",
".",
"add",
"(",
"user",
")",
"else",
":",
"print",
"(",
"f\"{email} - No valid name in GitHub account: \"",
",",
"end",
"=",
"\"\"",
")",
"print_users",
"(",
"user",
")",
"no_account_names",
".",
"add",
"(",
"email",
")",
"else",
":",
"print",
"(",
"f\"{email} - Non public or wrong email in GitHub account: \"",
",",
"end",
"=",
"\"\"",
")",
"print_users",
"(",
"user",
")",
"no_account_emails",
".",
"add",
"(",
"email",
")",
"print",
"(",
"\"\\nValid users:\"",
")",
"print_users",
"(",
"valid_users",
")",
"print",
"(",
"f\"\\nWrong emails {len(wrong_emails)}:\"",
",",
"\"; \"",
".",
"join",
"(",
"wrong_emails",
")",
")",
"print",
"(",
"f\"\\nIntel emails - No valid GitHub account {len(no_account_emails)}:\"",
",",
"\"; \"",
".",
"join",
"(",
"no_account_emails",
")",
",",
")",
"print",
"(",
"f\"\\nIntel emails - No valid name in GitHub account {len(no_account_names)}:\"",
",",
"\"; \"",
".",
"join",
"(",
"no_account_names",
")",
",",
")",
"return",
"valid_users"
] | https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/.github/github_org_control/github_api.py#L267-L316 |
|
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Source/ThirdParty/CEF3/cef_source/tools/gn_args.py | python | GetMergedArgs | (build_args) | return MergeDicts(dict, required) | Return merged GN args. | Return merged GN args. | [
"Return",
"merged",
"GN",
"args",
"."
] | def GetMergedArgs(build_args):
"""
Return merged GN args.
"""
dict = MergeDicts(GetRecommendedDefaultArgs(), GetGNEnvArgs(), build_args)
# Verify that the user is not trying to override required args.
required = GetRequiredArgs()
for key in required.keys():
if key in dict:
assert dict[key] == required[key], \
"%s=%s is required" % (key, GetValueString(required[key]))
return MergeDicts(dict, required) | [
"def",
"GetMergedArgs",
"(",
"build_args",
")",
":",
"dict",
"=",
"MergeDicts",
"(",
"GetRecommendedDefaultArgs",
"(",
")",
",",
"GetGNEnvArgs",
"(",
")",
",",
"build_args",
")",
"# Verify that the user is not trying to override required args.",
"required",
"=",
"GetRequiredArgs",
"(",
")",
"for",
"key",
"in",
"required",
".",
"keys",
"(",
")",
":",
"if",
"key",
"in",
"dict",
":",
"assert",
"dict",
"[",
"key",
"]",
"==",
"required",
"[",
"key",
"]",
",",
"\"%s=%s is required\"",
"%",
"(",
"key",
",",
"GetValueString",
"(",
"required",
"[",
"key",
"]",
")",
")",
"return",
"MergeDicts",
"(",
"dict",
",",
"required",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/cef_source/tools/gn_args.py#L246-L259 |
|
Evolving-AI-Lab/fooling | 66f097dd6bd2eb6794ade3e187a7adfdf1887688 | caffe/scripts/cpp_lint.py | python | CheckAccess | (filename, clean_lines, linenum, nesting_state, error) | Checks for improper use of DISALLOW* macros.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A _NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found. | Checks for improper use of DISALLOW* macros. | [
"Checks",
"for",
"improper",
"use",
"of",
"DISALLOW",
"*",
"macros",
"."
] | def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
"""Checks for improper use of DISALLOW* macros.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A _NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum] # get rid of comments and strings
matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|'
r'DISALLOW_EVIL_CONSTRUCTORS|'
r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line)
if not matched:
return
if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
if nesting_state.stack[-1].access != 'private':
error(filename, linenum, 'readability/constructors', 3,
'%s must be in the private: section' % matched.group(1))
else:
# Found DISALLOW* macro outside a class declaration, or perhaps it
# was used inside a function when it should have been part of the
# class declaration. We could issue a warning here, but it
# probably resulted in a compiler error already.
pass | [
"def",
"CheckAccess",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# get rid of comments and strings",
"matched",
"=",
"Match",
"(",
"(",
"r'\\s*(DISALLOW_COPY_AND_ASSIGN|'",
"r'DISALLOW_EVIL_CONSTRUCTORS|'",
"r'DISALLOW_IMPLICIT_CONSTRUCTORS)'",
")",
",",
"line",
")",
"if",
"not",
"matched",
":",
"return",
"if",
"nesting_state",
".",
"stack",
"and",
"isinstance",
"(",
"nesting_state",
".",
"stack",
"[",
"-",
"1",
"]",
",",
"_ClassInfo",
")",
":",
"if",
"nesting_state",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"access",
"!=",
"'private'",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/constructors'",
",",
"3",
",",
"'%s must be in the private: section'",
"%",
"matched",
".",
"group",
"(",
"1",
")",
")",
"else",
":",
"# Found DISALLOW* macro outside a class declaration, or perhaps it",
"# was used inside a function when it should have been part of the",
"# class declaration. We could issue a warning here, but it",
"# probably resulted in a compiler error already.",
"pass"
] | https://github.com/Evolving-AI-Lab/fooling/blob/66f097dd6bd2eb6794ade3e187a7adfdf1887688/caffe/scripts/cpp_lint.py#L2416-L2444 |
||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | Framework/PythonInterface/plugins/algorithms/DNSMergeRuns.py | python | DNSMergeRuns.category | (self) | return 'Workflow\\MLZ\\DNS' | Returns category | Returns category | [
"Returns",
"category"
] | def category(self):
"""
Returns category
"""
return 'Workflow\\MLZ\\DNS' | [
"def",
"category",
"(",
"self",
")",
":",
"return",
"'Workflow\\\\MLZ\\\\DNS'"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/DNSMergeRuns.py#L32-L36 |
|
stan-dev/math | 5fd79f89933269a4ca4d8dd1fde2a36d53d4768c | lib/boost_1.75.0/tools/build/src/build/targets.py | python | BasicTarget.generate_dependency_properties | (self, properties, ps) | return (result_properties, usage_requirements) | Takes a target reference, which might be either target id
or a dependency property, and generates that target using
'property_set' as build request.
Returns a tuple (result, usage_requirements). | Takes a target reference, which might be either target id
or a dependency property, and generates that target using
'property_set' as build request. | [
"Takes",
"a",
"target",
"reference",
"which",
"might",
"be",
"either",
"target",
"id",
"or",
"a",
"dependency",
"property",
"and",
"generates",
"that",
"target",
"using",
"property_set",
"as",
"build",
"request",
"."
] | def generate_dependency_properties(self, properties, ps):
""" Takes a target reference, which might be either target id
or a dependency property, and generates that target using
'property_set' as build request.
Returns a tuple (result, usage_requirements).
"""
assert is_iterable_typed(properties, property.Property)
assert isinstance(ps, property_set.PropertySet)
result_properties = []
usage_requirements = []
for p in properties:
result = generate_from_reference(p.value, self.project_, ps)
for t in result.targets():
result_properties.append(property.Property(p.feature, t))
usage_requirements += result.usage_requirements().all()
return (result_properties, usage_requirements) | [
"def",
"generate_dependency_properties",
"(",
"self",
",",
"properties",
",",
"ps",
")",
":",
"assert",
"is_iterable_typed",
"(",
"properties",
",",
"property",
".",
"Property",
")",
"assert",
"isinstance",
"(",
"ps",
",",
"property_set",
".",
"PropertySet",
")",
"result_properties",
"=",
"[",
"]",
"usage_requirements",
"=",
"[",
"]",
"for",
"p",
"in",
"properties",
":",
"result",
"=",
"generate_from_reference",
"(",
"p",
".",
"value",
",",
"self",
".",
"project_",
",",
"ps",
")",
"for",
"t",
"in",
"result",
".",
"targets",
"(",
")",
":",
"result_properties",
".",
"append",
"(",
"property",
".",
"Property",
"(",
"p",
".",
"feature",
",",
"t",
")",
")",
"usage_requirements",
"+=",
"result",
".",
"usage_requirements",
"(",
")",
".",
"all",
"(",
")",
"return",
"(",
"result_properties",
",",
"usage_requirements",
")"
] | https://github.com/stan-dev/math/blob/5fd79f89933269a4ca4d8dd1fde2a36d53d4768c/lib/boost_1.75.0/tools/build/src/build/targets.py#L1147-L1167 |
|
nileshkulkarni/csm | 0e6e0e7d4f725fd36f2414c0be4b9d83197aa1fc | csm/utils/transformations.py | python | _import_module | (name, package=None, warn=True, prefix='_py_', ignore='_') | Try import all public attributes from module into global namespace.
Existing attributes with name clashes are renamed with prefix.
Attributes starting with underscore are ignored by default.
Return True on successful import. | Try import all public attributes from module into global namespace. | [
"Try",
"import",
"all",
"public",
"attributes",
"from",
"module",
"into",
"global",
"namespace",
"."
] | def _import_module(name, package=None, warn=True, prefix='_py_', ignore='_'):
"""Try import all public attributes from module into global namespace.
Existing attributes with name clashes are renamed with prefix.
Attributes starting with underscore are ignored by default.
Return True on successful import.
"""
import warnings
from importlib import import_module
try:
if not package:
module = import_module(name)
else:
module = import_module('.' + name, package=package)
except ImportError:
if warn:
warnings.warn("failed to import module %s" % name)
else:
for attr in dir(module):
if ignore and attr.startswith(ignore):
continue
if prefix:
if attr in globals():
globals()[prefix + attr] = globals()[attr]
elif warn:
warnings.warn("no Python implementation of " + attr)
globals()[attr] = getattr(module, attr)
return True | [
"def",
"_import_module",
"(",
"name",
",",
"package",
"=",
"None",
",",
"warn",
"=",
"True",
",",
"prefix",
"=",
"'_py_'",
",",
"ignore",
"=",
"'_'",
")",
":",
"import",
"warnings",
"from",
"importlib",
"import",
"import_module",
"try",
":",
"if",
"not",
"package",
":",
"module",
"=",
"import_module",
"(",
"name",
")",
"else",
":",
"module",
"=",
"import_module",
"(",
"'.'",
"+",
"name",
",",
"package",
"=",
"package",
")",
"except",
"ImportError",
":",
"if",
"warn",
":",
"warnings",
".",
"warn",
"(",
"\"failed to import module %s\"",
"%",
"name",
")",
"else",
":",
"for",
"attr",
"in",
"dir",
"(",
"module",
")",
":",
"if",
"ignore",
"and",
"attr",
".",
"startswith",
"(",
"ignore",
")",
":",
"continue",
"if",
"prefix",
":",
"if",
"attr",
"in",
"globals",
"(",
")",
":",
"globals",
"(",
")",
"[",
"prefix",
"+",
"attr",
"]",
"=",
"globals",
"(",
")",
"[",
"attr",
"]",
"elif",
"warn",
":",
"warnings",
".",
"warn",
"(",
"\"no Python implementation of \"",
"+",
"attr",
")",
"globals",
"(",
")",
"[",
"attr",
"]",
"=",
"getattr",
"(",
"module",
",",
"attr",
")",
"return",
"True"
] | https://github.com/nileshkulkarni/csm/blob/0e6e0e7d4f725fd36f2414c0be4b9d83197aa1fc/csm/utils/transformations.py#L1893-L1922 |
||
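`transformations.py` calls this helper at the bottom of the module, roughly as below, to prefer a compiled extension (the `warn=False` is my addition for a quiet fallback):

```python
# Prefer a compiled implementation when present; on success its public
# names shadow the pure-Python definitions, which stay reachable under
# the _py_ prefix set up by the helper.
_import_module('_transformations', warn=False)
```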
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/urllib2.py | python | AbstractHTTPHandler.do_open | (self, http_class, req) | return resp | Return an addinfourl object for the request, using http_class.
http_class must implement the HTTPConnection API from httplib.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code | Return an addinfourl object for the request, using http_class. | [
"Return",
"an",
"addinfourl",
"object",
"for",
"the",
"request",
"using",
"http_class",
"."
] | def do_open(self, http_class, req):
"""Return an addinfourl object for the request, using http_class.
http_class must implement the HTTPConnection API from httplib.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code
"""
host = req.get_host()
if not host:
raise URLError('no host given')
h = http_class(host, timeout=req.timeout) # will parse host:port
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
headers.update(dict((k, v) for k, v in req.headers.items()
if k not in headers))
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = dict(
(name.title(), val) for name, val in headers.items())
if req._tunnel_host:
tunnel_headers = {}
proxy_auth_hdr = "Proxy-Authorization"
if proxy_auth_hdr in headers:
tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
# Proxy-Authorization should not be sent to origin
# server.
del headers[proxy_auth_hdr]
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
h.request(req.get_method(), req.get_selector(), req.data, headers)
except socket.error, err: # XXX what error?
h.close()
raise URLError(err)
else:
try:
r = h.getresponse(buffering=True)
except TypeError: # buffering kw not supported
r = h.getresponse()
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
# Wrap the HTTPResponse object in socket's file object adapter
# for Windows. That adapter calls recv(), so delegate recv()
# to read(). This weird wrapping allows the returned object to
# have readline() and readlines() methods.
# XXX It might be better to extract the read buffering code
# out of socket._fileobject() and into a base class.
r.recv = r.read
fp = socket._fileobject(r, close=True)
resp = addinfourl(fp, r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason
return resp | [
"def",
"do_open",
"(",
"self",
",",
"http_class",
",",
"req",
")",
":",
"host",
"=",
"req",
".",
"get_host",
"(",
")",
"if",
"not",
"host",
":",
"raise",
"URLError",
"(",
"'no host given'",
")",
"h",
"=",
"http_class",
"(",
"host",
",",
"timeout",
"=",
"req",
".",
"timeout",
")",
"# will parse host:port",
"h",
".",
"set_debuglevel",
"(",
"self",
".",
"_debuglevel",
")",
"headers",
"=",
"dict",
"(",
"req",
".",
"unredirected_hdrs",
")",
"headers",
".",
"update",
"(",
"dict",
"(",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"req",
".",
"headers",
".",
"items",
"(",
")",
"if",
"k",
"not",
"in",
"headers",
")",
")",
"# We want to make an HTTP/1.1 request, but the addinfourl",
"# class isn't prepared to deal with a persistent connection.",
"# It will try to read all remaining data from the socket,",
"# which will block while the server waits for the next request.",
"# So make sure the connection gets closed after the (only)",
"# request.",
"headers",
"[",
"\"Connection\"",
"]",
"=",
"\"close\"",
"headers",
"=",
"dict",
"(",
"(",
"name",
".",
"title",
"(",
")",
",",
"val",
")",
"for",
"name",
",",
"val",
"in",
"headers",
".",
"items",
"(",
")",
")",
"if",
"req",
".",
"_tunnel_host",
":",
"tunnel_headers",
"=",
"{",
"}",
"proxy_auth_hdr",
"=",
"\"Proxy-Authorization\"",
"if",
"proxy_auth_hdr",
"in",
"headers",
":",
"tunnel_headers",
"[",
"proxy_auth_hdr",
"]",
"=",
"headers",
"[",
"proxy_auth_hdr",
"]",
"# Proxy-Authorization should not be sent to origin",
"# server.",
"del",
"headers",
"[",
"proxy_auth_hdr",
"]",
"h",
".",
"set_tunnel",
"(",
"req",
".",
"_tunnel_host",
",",
"headers",
"=",
"tunnel_headers",
")",
"try",
":",
"h",
".",
"request",
"(",
"req",
".",
"get_method",
"(",
")",
",",
"req",
".",
"get_selector",
"(",
")",
",",
"req",
".",
"data",
",",
"headers",
")",
"except",
"socket",
".",
"error",
",",
"err",
":",
"# XXX what error?",
"h",
".",
"close",
"(",
")",
"raise",
"URLError",
"(",
"err",
")",
"else",
":",
"try",
":",
"r",
"=",
"h",
".",
"getresponse",
"(",
"buffering",
"=",
"True",
")",
"except",
"TypeError",
":",
"# buffering kw not supported",
"r",
"=",
"h",
".",
"getresponse",
"(",
")",
"# Pick apart the HTTPResponse object to get the addinfourl",
"# object initialized properly.",
"# Wrap the HTTPResponse object in socket's file object adapter",
"# for Windows. That adapter calls recv(), so delegate recv()",
"# to read(). This weird wrapping allows the returned object to",
"# have readline() and readlines() methods.",
"# XXX It might be better to extract the read buffering code",
"# out of socket._fileobject() and into a base class.",
"r",
".",
"recv",
"=",
"r",
".",
"read",
"fp",
"=",
"socket",
".",
"_fileobject",
"(",
"r",
",",
"close",
"=",
"True",
")",
"resp",
"=",
"addinfourl",
"(",
"fp",
",",
"r",
".",
"msg",
",",
"req",
".",
"get_full_url",
"(",
")",
")",
"resp",
".",
"code",
"=",
"r",
".",
"status",
"resp",
".",
"msg",
"=",
"r",
".",
"reason",
"return",
"resp"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/urllib2.py#L1139-L1208 |
|
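`do_open` is the Python 2 plumbing behind `urllib2.urlopen`; the nearest Python 3 spelling of the same handler chain, for reference:

```python
from urllib.request import build_opener, HTTPHandler

# build_opener wires HTTPHandler into a chain whose http_open call ends
# in do_open(), mirroring the mined Python 2 code path.
opener = build_opener(HTTPHandler(debuglevel=1))
with opener.open("http://example.com/", timeout=5) as resp:
    print(resp.status, resp.geturl())
    body = resp.read()
```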
intel-iot-devkit/how-to-code-samples | b4ea616f36bbfa2e042beb1698f968cfd651d79f | watering-system/python/iot_watering_system/runner.py | python | Runner.serve_index | (self) | return output | Serve the 'index.html' file. | Serve the 'index.html' file. | [
"Serve",
"the",
"index",
".",
"html",
"file",
"."
] | def serve_index(self):
"""
Serve the 'index.html' file.
"""
output = template("index", moisture=self.moisture)
return output | [
"def",
"serve_index",
"(",
"self",
")",
":",
"output",
"=",
"template",
"(",
"\"index\"",
",",
"moisture",
"=",
"self",
".",
"moisture",
")",
"return",
"output"
] | https://github.com/intel-iot-devkit/how-to-code-samples/blob/b4ea616f36bbfa2e042beb1698f968cfd651d79f/watering-system/python/iot_watering_system/runner.py#L150-L157 |
|
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | scripts/Inelastic/Direct/RunDescriptor.py | python | RunDescriptor.get_ws_clone | (self,clone_name='ws_clone') | return mtd[clone_name] | Get unbounded clone of existing Run workspace | Get unbounded clone of existing Run workspace | [
"Get",
"unbounded",
"clone",
"of",
"existing",
"Run",
"workspace"
] | def get_ws_clone(self,clone_name='ws_clone'):
"""Get unbounded clone of existing Run workspace"""
ws = self.get_workspace()
CloneWorkspace(InputWorkspace=ws,OutputWorkspace=clone_name)
mon_ws_name = ws.name() + '_monitors'
if mon_ws_name in mtd:
cl_mon_name = clone_name + '_monitors'
CloneWorkspace(InputWorkspace=mon_ws_name,OutputWorkspace=cl_mon_name)
return mtd[clone_name] | [
"def",
"get_ws_clone",
"(",
"self",
",",
"clone_name",
"=",
"'ws_clone'",
")",
":",
"ws",
"=",
"self",
".",
"get_workspace",
"(",
")",
"CloneWorkspace",
"(",
"InputWorkspace",
"=",
"ws",
",",
"OutputWorkspace",
"=",
"clone_name",
")",
"mon_ws_name",
"=",
"ws",
".",
"name",
"(",
")",
"+",
"'_monitors'",
"if",
"mon_ws_name",
"in",
"mtd",
":",
"cl_mon_name",
"=",
"clone_name",
"+",
"'_monitors'",
"CloneWorkspace",
"(",
"InputWorkspace",
"=",
"mon_ws_name",
",",
"OutputWorkspace",
"=",
"cl_mon_name",
")",
"return",
"mtd",
"[",
"clone_name",
"]"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/Inelastic/Direct/RunDescriptor.py#L822-L831 |
|
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/base64.py | python | standard_b64encode | (s) | return b64encode(s) | Encode a string using the standard Base64 alphabet.
s is the string to encode. The encoded string is returned. | Encode a string using the standard Base64 alphabet. | [
"Encode",
"a",
"string",
"using",
"the",
"standard",
"Base64",
"alphabet",
"."
] | def standard_b64encode(s):
"""Encode a string using the standard Base64 alphabet.
s is the string to encode. The encoded string is returned.
"""
return b64encode(s) | [
"def",
"standard_b64encode",
"(",
"s",
")",
":",
"return",
"b64encode",
"(",
"s",
")"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/base64.py#L79-L84 |
|
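A round trip with the documented function; in Python 3 the argument must be bytes:

```python
import base64

encoded = base64.standard_b64encode(b"data to encode")
print(encoded)                             # b'ZGF0YSB0byBlbmNvZGU='
print(base64.standard_b64decode(encoded))  # b'data to encode'
```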
qgis/QGIS | 15a77662d4bb712184f6aa60d0bd663010a76a75 | python/pyplugin_installer/version_compare.py | python | compareElements | (s1, s2) | compare two particular elements | compare two particular elements | [
"compare",
"two",
"particular",
"elements"
] | def compareElements(s1, s2):
""" compare two particular elements """
# check if the matter is easily solvable:
if s1 == s2:
return 0
# try to compare as numeric values (but only if the first character is not 0):
if s1 and s2 and s1.isnumeric() and s2.isnumeric() and s1[0] != '0' and s2[0] != '0':
if float(s1) == float(s2):
return 0
elif float(s1) > float(s2):
return 1
else:
return 2
# if the strings aren't numeric or start from 0, compare them as a strings:
# but first, set ALPHA < BETA < PREVIEW < RC < TRUNK < [NOTHING] < [ANYTHING_ELSE]
if s1 not in ['ALPHA', 'BETA', 'PREVIEW', 'RC', 'TRUNK']:
s1 = 'Z' + s1
if s2 not in ['ALPHA', 'BETA', 'PREVIEW', 'RC', 'TRUNK']:
s2 = 'Z' + s2
# the final test:
if s1 > s2:
return 1
else:
return 2 | [
"def",
"compareElements",
"(",
"s1",
",",
"s2",
")",
":",
"# check if the matter is easy solvable:",
"if",
"s1",
"==",
"s2",
":",
"return",
"0",
"# try to compare as numeric values (but only if the first character is not 0):",
"if",
"s1",
"and",
"s2",
"and",
"s1",
".",
"isnumeric",
"(",
")",
"and",
"s2",
".",
"isnumeric",
"(",
")",
"and",
"s1",
"[",
"0",
"]",
"!=",
"'0'",
"and",
"s2",
"[",
"0",
"]",
"!=",
"'0'",
":",
"if",
"float",
"(",
"s1",
")",
"==",
"float",
"(",
"s2",
")",
":",
"return",
"0",
"elif",
"float",
"(",
"s1",
")",
">",
"float",
"(",
"s2",
")",
":",
"return",
"1",
"else",
":",
"return",
"2",
"# if the strings aren't numeric or start from 0, compare them as a strings:",
"# but first, set ALPHA < BETA < PREVIEW < RC < TRUNK < [NOTHING] < [ANYTHING_ELSE]",
"if",
"s1",
"not",
"in",
"[",
"'ALPHA'",
",",
"'BETA'",
",",
"'PREVIEW'",
",",
"'RC'",
",",
"'TRUNK'",
"]",
":",
"s1",
"=",
"'Z'",
"+",
"s1",
"if",
"s2",
"not",
"in",
"[",
"'ALPHA'",
",",
"'BETA'",
",",
"'PREVIEW'",
",",
"'RC'",
",",
"'TRUNK'",
"]",
":",
"s2",
"=",
"'Z'",
"+",
"s2",
"# the final test:",
"if",
"s1",
">",
"s2",
":",
"return",
"1",
"else",
":",
"return",
"2"
] | https://github.com/qgis/QGIS/blob/15a77662d4bb712184f6aa60d0bd663010a76a75/python/pyplugin_installer/version_compare.py#L97-L120 |
||
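A few worked calls showing the ordering `compareElements` encodes, assuming the function above is in scope (0 = equal, 1 = first greater, 2 = second greater):

```python
print(compareElements('2', '10'))     # 2: numeric compare, 2.0 < 10.0
print(compareElements('BETA', 'RC'))  # 2: 'BETA' precedes 'RC' lexically
print(compareElements('ALPHA', ''))   # 2: pre-release markers sort before a bare release
```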
esa/pagmo | 80281d549c8f1b470e1489a5d37c8f06b2e429c0 | PyGMO/util/__init__.py | python | _hypervolume_exclusive | (
self,
p_idx=None,
r=None,
algorithm=None,
*args,
**kwargs) | return self._original_exclusive(*args) | Compute the exclusive contribution to the total hypervolume by the point at index p_idx, given a reference point and the provided hypervolume algorithm.
Type 'hv_algorithm?' for a list of available hypervolume algorithms.
USAGE:
hv.exclusive(p_idx=0, r=[5.0]*2)
hv.exclusive(p_idx=0, r=[5.0]*2, algorithm=hv_algorithm.hv2d())
* p_idx - index of the point
* r - reference point used for computation
* algorithm (optional) - hypervolume algorithm used for the computation, uses the best performing algorithm for given dimension by default | Compute the exclusive contribution to the total hypervolume by the point at index p_idx, given a reference point and the provided hypervolume algorithm.
Type 'hv_algorithm?' for a list of available hypervolume algorithms. | [
"Compute",
"the",
"exlusive",
"contribution",
"to",
"the",
"total",
"hypervolume",
"by",
"the",
"point",
"at",
"index",
"p_idx",
"given",
"a",
"reference",
"point",
"and",
"the",
"provided",
"hypervolume",
"algorithm",
".",
"Type",
"hv_algorithm?",
"for",
"a",
"list",
"of",
"available",
"hypervolume",
"algorithms",
"."
] | def _hypervolume_exclusive(
self,
p_idx=None,
r=None,
algorithm=None,
*args,
**kwargs):
"""
Compute the exclusive contribution to the total hypervolume by the point at index p_idx, given a reference point and the provided hypervolume algorithm.
Type 'hv_algorithm?' for a list of available hypervolume algorithms.
USAGE:
hv.exclusive(p_idx=0, r=[5.0]*2)
hv.exclusive(p_idx=0, r=[5.0]*2, algorithm=hv_algorithm.hv2d())
* p_idx - index of the point
* r - reference point used for computation
* algorithm (optional) - hypervolume algorithm used for the computation, uses the best performing algorithm for given dimension by default
"""
if p_idx is None:
raise TypeError(
"p_idx (non-negative integer) argument is required for computation, type 'hypervolume.exclusive?' for usage.")
if len(args) > 0 or len(kwargs) > 0:
raise TypeError(
"Incorrect combination of args/kwargs, type 'hypervolume.exclusive?' for usage.")
if not isinstance(p_idx, int) or p_idx < 0:
raise TypeError(
"individual index (p_idx) must be a non-negative integer")
r = _HypervolumeValidation.handle_refpoint(self, r)
args = []
args.append(p_idx)
args.append(r)
if algorithm:
algorithm = _HypervolumeValidation.validate_hv_algorithm(algorithm)
args.append(algorithm)
return self._original_exclusive(*args) | [
"def",
"_hypervolume_exclusive",
"(",
"self",
",",
"p_idx",
"=",
"None",
",",
"r",
"=",
"None",
",",
"algorithm",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"p_idx",
"is",
"None",
":",
"raise",
"TypeError",
"(",
"\"p_idx (non-negative integer) argument is required for computation, type 'hypervolume.exclusive?' for usage.\"",
")",
"if",
"len",
"(",
"args",
")",
">",
"0",
"or",
"len",
"(",
"kwargs",
")",
">",
"0",
":",
"raise",
"TypeError",
"(",
"\"Incorrect combination of args/kwargs, type 'hypervolume.exclusive?' for usage.\"",
")",
"if",
"not",
"isinstance",
"(",
"p_idx",
",",
"int",
")",
"or",
"p_idx",
"<",
"0",
":",
"raise",
"TypeError",
"(",
"\"individual index (p_idx) must be a non-negative integer\"",
")",
"r",
"=",
"_HypervolumeValidation",
".",
"handle_refpoint",
"(",
"self",
",",
"r",
")",
"args",
"=",
"[",
"]",
"args",
".",
"append",
"(",
"p_idx",
")",
"args",
".",
"append",
"(",
"r",
")",
"if",
"algorithm",
":",
"algorithm",
"=",
"_HypervolumeValidation",
".",
"validate_hv_algorithm",
"(",
"algorithm",
")",
"args",
".",
"append",
"(",
"algorithm",
")",
"return",
"self",
".",
"_original_exclusive",
"(",
"*",
"args",
")"
] | https://github.com/esa/pagmo/blob/80281d549c8f1b470e1489a5d37c8f06b2e429c0/PyGMO/util/__init__.py#L157-L194 |
|
zlgopen/awtk | 2c49e854a78749d9092907c027a7fba9062be549 | 3rd/mbedtls/scripts/abi_check.py | python | AbiChecker._abi_compliance_command | (self, mbed_module, output_path) | return abi_compliance_command | Build the command to run to analyze the library mbed_module.
The report will be placed in output_path. | Build the command to run to analyze the library mbed_module.
The report will be placed in output_path. | [
"Build",
"the",
"command",
"to",
"run",
"to",
"analyze",
"the",
"library",
"mbed_module",
".",
"The",
"report",
"will",
"be",
"placed",
"in",
"output_path",
"."
] | def _abi_compliance_command(self, mbed_module, output_path):
"""Build the command to run to analyze the library mbed_module.
The report will be placed in output_path."""
abi_compliance_command = [
"abi-compliance-checker",
"-l", mbed_module,
"-old", self.old_version.abi_dumps[mbed_module],
"-new", self.new_version.abi_dumps[mbed_module],
"-strict",
"-report-path", output_path,
]
if self.skip_file:
abi_compliance_command += ["-skip-symbols", self.skip_file,
"-skip-types", self.skip_file]
if self.brief:
abi_compliance_command += ["-report-format", "xml",
"-stdout"]
return abi_compliance_command | [
"def",
"_abi_compliance_command",
"(",
"self",
",",
"mbed_module",
",",
"output_path",
")",
":",
"abi_compliance_command",
"=",
"[",
"\"abi-compliance-checker\"",
",",
"\"-l\"",
",",
"mbed_module",
",",
"\"-old\"",
",",
"self",
".",
"old_version",
".",
"abi_dumps",
"[",
"mbed_module",
"]",
",",
"\"-new\"",
",",
"self",
".",
"new_version",
".",
"abi_dumps",
"[",
"mbed_module",
"]",
",",
"\"-strict\"",
",",
"\"-report-path\"",
",",
"output_path",
",",
"]",
"if",
"self",
".",
"skip_file",
":",
"abi_compliance_command",
"+=",
"[",
"\"-skip-symbols\"",
",",
"self",
".",
"skip_file",
",",
"\"-skip-types\"",
",",
"self",
".",
"skip_file",
"]",
"if",
"self",
".",
"brief",
":",
"abi_compliance_command",
"+=",
"[",
"\"-report-format\"",
",",
"\"xml\"",
",",
"\"-stdout\"",
"]",
"return",
"abi_compliance_command"
] | https://github.com/zlgopen/awtk/blob/2c49e854a78749d9092907c027a7fba9062be549/3rd/mbedtls/scripts/abi_check.py#L247-L264 |
|
linyouhappy/kongkongxiyou | 7a69b2913eb29f4be77f9a62fb90cdd72c4160f1 | cocosjs/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py | python | AvailabilityKind.get_all_kinds | () | return filter(None, AvailabilityKind._kinds) | Return all AvailabilityKind enumeration instances. | Return all AvailabilityKind enumeration instances. | [
"Return",
"all",
"AvailabilityKind",
"enumeration",
"instances",
"."
] | def get_all_kinds():
"""Return all AvailabilityKind enumeration instances."""
return filter(None, AvailabilityKind._kinds) | [
"def",
"get_all_kinds",
"(",
")",
":",
"return",
"filter",
"(",
"None",
",",
"AvailabilityKind",
".",
"_kinds",
")"
] | https://github.com/linyouhappy/kongkongxiyou/blob/7a69b2913eb29f4be77f9a62fb90cdd72c4160f1/cocosjs/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py#L480-L482 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/importlib/abc.py | python | SourceLoader.path_mtime | (self, path) | return int(self.path_stats(path)['mtime']) | Return the (int) modification time for the path (str). | Return the (int) modification time for the path (str). | [
"Return",
"the",
"(",
"int",
")",
"modification",
"time",
"for",
"the",
"path",
"(",
"str",
")",
"."
] | def path_mtime(self, path):
"""Return the (int) modification time for the path (str)."""
if self.path_stats.__func__ is SourceLoader.path_stats:
raise OSError
return int(self.path_stats(path)['mtime']) | [
"def",
"path_mtime",
"(",
"self",
",",
"path",
")",
":",
"if",
"self",
".",
"path_stats",
".",
"__func__",
"is",
"SourceLoader",
".",
"path_stats",
":",
"raise",
"OSError",
"return",
"int",
"(",
"self",
".",
"path_stats",
"(",
"path",
")",
"[",
"'mtime'",
"]",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/importlib/abc.py#L315-L319 |
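
As the record shows, the default `path_mtime` simply delegates to `path_stats` and raises `OSError` when `path_stats` was not overridden. A minimal in-memory loader (hypothetical, for illustration only) makes the delegation visible:

```python
import importlib.abc

class DictSourceLoader(importlib.abc.SourceLoader):
    """Toy loader keeping sources and mtimes in dicts (illustration only)."""
    def __init__(self, sources, mtimes):
        self.sources, self.mtimes = sources, mtimes
    def get_filename(self, fullname):
        return fullname + ".py"
    def get_data(self, path):
        return self.sources[path]
    def path_stats(self, path):
        # Overriding path_stats is enough; path_mtime picks out 'mtime' from it.
        return {"mtime": self.mtimes[path], "size": len(self.sources[path])}

loader = DictSourceLoader({"m.py": b"x = 1\n"}, {"m.py": 1_700_000_000})
print(loader.path_mtime("m.py"))  # 1700000000, via path_stats()['mtime']
```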
|
CRYTEK/CRYENGINE | 232227c59a220cbbd311576f0fbeba7bb53b2a8c | Editor/Python/windows/Lib/site-packages/pip/_vendor/distlib/metadata.py | python | _best_version | (fields) | return '2.0' | Detect the best version depending on the fields used. | Detect the best version depending on the fields used. | [
"Detect",
"the",
"best",
"version",
"depending",
"on",
"the",
"fields",
"used",
"."
] | def _best_version(fields):
"""Detect the best version depending on the fields used."""
def _has_marker(keys, markers):
for marker in markers:
if marker in keys:
return True
return False
keys = []
for key, value in fields.items():
if value in ([], 'UNKNOWN', None):
continue
keys.append(key)
possible_versions = ['1.0', '1.1', '1.2', '2.0']
# first let's try to see if a field is not part of one of the version
for key in keys:
if key not in _241_FIELDS and '1.0' in possible_versions:
possible_versions.remove('1.0')
if key not in _314_FIELDS and '1.1' in possible_versions:
possible_versions.remove('1.1')
if key not in _345_FIELDS and '1.2' in possible_versions:
possible_versions.remove('1.2')
if key not in _426_FIELDS and '2.0' in possible_versions:
possible_versions.remove('2.0')
# possible_version contains qualified versions
if len(possible_versions) == 1:
return possible_versions[0] # found !
elif len(possible_versions) == 0:
raise MetadataConflictError('Unknown metadata set')
# let's see if one unique marker is found
is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1:
raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields')
# we have the choice, 1.0, or 1.2, or 2.0
# - 1.0 has a broken Summary field but works with all tools
# - 1.1 is to avoid
# - 1.2 fixes Summary but has little adoption
# - 2.0 adds more features and is very new
if not is_1_1 and not is_1_2 and not is_2_0:
# we couldn't find any specific marker
if PKG_INFO_PREFERRED_VERSION in possible_versions:
return PKG_INFO_PREFERRED_VERSION
if is_1_1:
return '1.1'
if is_1_2:
return '1.2'
return '2.0' | [
"def",
"_best_version",
"(",
"fields",
")",
":",
"def",
"_has_marker",
"(",
"keys",
",",
"markers",
")",
":",
"for",
"marker",
"in",
"markers",
":",
"if",
"marker",
"in",
"keys",
":",
"return",
"True",
"return",
"False",
"keys",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"fields",
".",
"items",
"(",
")",
":",
"if",
"value",
"in",
"(",
"[",
"]",
",",
"'UNKNOWN'",
",",
"None",
")",
":",
"continue",
"keys",
".",
"append",
"(",
"key",
")",
"possible_versions",
"=",
"[",
"'1.0'",
",",
"'1.1'",
",",
"'1.2'",
",",
"'2.0'",
"]",
"# first let's try to see if a field is not part of one of the version",
"for",
"key",
"in",
"keys",
":",
"if",
"key",
"not",
"in",
"_241_FIELDS",
"and",
"'1.0'",
"in",
"possible_versions",
":",
"possible_versions",
".",
"remove",
"(",
"'1.0'",
")",
"if",
"key",
"not",
"in",
"_314_FIELDS",
"and",
"'1.1'",
"in",
"possible_versions",
":",
"possible_versions",
".",
"remove",
"(",
"'1.1'",
")",
"if",
"key",
"not",
"in",
"_345_FIELDS",
"and",
"'1.2'",
"in",
"possible_versions",
":",
"possible_versions",
".",
"remove",
"(",
"'1.2'",
")",
"if",
"key",
"not",
"in",
"_426_FIELDS",
"and",
"'2.0'",
"in",
"possible_versions",
":",
"possible_versions",
".",
"remove",
"(",
"'2.0'",
")",
"# possible_version contains qualified versions",
"if",
"len",
"(",
"possible_versions",
")",
"==",
"1",
":",
"return",
"possible_versions",
"[",
"0",
"]",
"# found !",
"elif",
"len",
"(",
"possible_versions",
")",
"==",
"0",
":",
"raise",
"MetadataConflictError",
"(",
"'Unknown metadata set'",
")",
"# let's see if one unique marker is found",
"is_1_1",
"=",
"'1.1'",
"in",
"possible_versions",
"and",
"_has_marker",
"(",
"keys",
",",
"_314_MARKERS",
")",
"is_1_2",
"=",
"'1.2'",
"in",
"possible_versions",
"and",
"_has_marker",
"(",
"keys",
",",
"_345_MARKERS",
")",
"is_2_0",
"=",
"'2.0'",
"in",
"possible_versions",
"and",
"_has_marker",
"(",
"keys",
",",
"_426_MARKERS",
")",
"if",
"int",
"(",
"is_1_1",
")",
"+",
"int",
"(",
"is_1_2",
")",
"+",
"int",
"(",
"is_2_0",
")",
">",
"1",
":",
"raise",
"MetadataConflictError",
"(",
"'You used incompatible 1.1/1.2/2.0 fields'",
")",
"# we have the choice, 1.0, or 1.2, or 2.0",
"# - 1.0 has a broken Summary field but works with all tools",
"# - 1.1 is to avoid",
"# - 1.2 fixes Summary but has little adoption",
"# - 2.0 adds more features and is very new",
"if",
"not",
"is_1_1",
"and",
"not",
"is_1_2",
"and",
"not",
"is_2_0",
":",
"# we couldn't find any specific marker",
"if",
"PKG_INFO_PREFERRED_VERSION",
"in",
"possible_versions",
":",
"return",
"PKG_INFO_PREFERRED_VERSION",
"if",
"is_1_1",
":",
"return",
"'1.1'",
"if",
"is_1_2",
":",
"return",
"'1.2'",
"return",
"'2.0'"
] | https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/distlib/metadata.py#L114-L168 |
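
The record's version detection first eliminates metadata versions whose field sets do not contain every key in use, then looks for version-unique marker fields. A compressed re-implementation with made-up field sets (the real `_241_FIELDS`, `_345_MARKERS`, etc. are distlib module constants not shown in this record) illustrates the elimination step:

```python
# Simplified sketch of the elimination logic; the field sets are illustrative.
FIELDS = {
    "1.0": {"Name", "Version", "Summary", "Author"},
    "1.2": {"Name", "Version", "Summary", "Author", "Requires-Python", "Project-URL"},
}
MARKERS_12 = {"Requires-Python", "Project-URL"}  # fields unique to 1.2

def best_version(fields):
    keys = {k for k, v in fields.items() if v not in ([], "UNKNOWN", None)}
    possible = [v for v in ("1.0", "1.2") if keys <= FIELDS[v]]
    if len(possible) == 1:
        return possible[0]
    # several candidates remain: a marker field pins the newer version
    return "1.2" if keys & MARKERS_12 else "1.0"

print(best_version({"Name": "demo", "Version": "0.1"}))            # 1.0
print(best_version({"Name": "demo", "Requires-Python": ">=3.8"}))  # 1.2
```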
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/multiprocessing/sharedctypes.py | python | RawArray | (typecode_or_type, size_or_initializer) | Returns a ctypes array allocated from shared memory | Returns a ctypes array allocated from shared memory | [
"Returns",
"a",
"ctypes",
"array",
"allocated",
"from",
"shared",
"memory"
] | def RawArray(typecode_or_type, size_or_initializer):
'''
Returns a ctypes array allocated from shared memory
'''
type_ = typecode_to_type.get(typecode_or_type, typecode_or_type)
if isinstance(size_or_initializer, int):
type_ = type_ * size_or_initializer
obj = _new_value(type_)
ctypes.memset(ctypes.addressof(obj), 0, ctypes.sizeof(obj))
return obj
else:
type_ = type_ * len(size_or_initializer)
result = _new_value(type_)
result.__init__(*size_or_initializer)
return result | [
"def",
"RawArray",
"(",
"typecode_or_type",
",",
"size_or_initializer",
")",
":",
"type_",
"=",
"typecode_to_type",
".",
"get",
"(",
"typecode_or_type",
",",
"typecode_or_type",
")",
"if",
"isinstance",
"(",
"size_or_initializer",
",",
"int",
")",
":",
"type_",
"=",
"type_",
"*",
"size_or_initializer",
"obj",
"=",
"_new_value",
"(",
"type_",
")",
"ctypes",
".",
"memset",
"(",
"ctypes",
".",
"addressof",
"(",
"obj",
")",
",",
"0",
",",
"ctypes",
".",
"sizeof",
"(",
"obj",
")",
")",
"return",
"obj",
"else",
":",
"type_",
"=",
"type_",
"*",
"len",
"(",
"size_or_initializer",
")",
"result",
"=",
"_new_value",
"(",
"type_",
")",
"result",
".",
"__init__",
"(",
"*",
"size_or_initializer",
")",
"return",
"result"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/multiprocessing/sharedctypes.py#L54-L68 |
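
Both call shapes described in the record — an integer size (zero-filled via `memset`) and an initializer sequence — can be exercised directly from the standard library:

```python
import ctypes
from multiprocessing.sharedctypes import RawArray

a = RawArray(ctypes.c_double, 4)   # int argument: zero-initialized
print(list(a))                     # [0.0, 0.0, 0.0, 0.0]

b = RawArray('i', [1, 2, 3])       # 'i' typecode -> ctypes.c_int; length from the sequence
print(b[0], b[1], b[2], len(b))    # 1 2 3 3
```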
apache/parquet-cpp | 642da055adf009652689b20e68a198cffb857651 | build-support/cpplint.py | python | ExpectingFunctionArgs | (clean_lines, linenum) | return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
(linenum >= 2 and
(Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
clean_lines.elided[linenum - 1]) or
Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
clean_lines.elided[linenum - 2]) or
Search(r'\bstd::m?function\s*\<\s*$',
clean_lines.elided[linenum - 1])))) | Checks whether function type arguments are expected.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
Returns:
True if the line at 'linenum' is inside something that expects arguments
of function types. | Checks whether function type arguments are expected. | [
"Checks",
"whether",
"where",
"function",
"type",
"arguments",
"are",
"expected",
"."
] | def ExpectingFunctionArgs(clean_lines, linenum):
"""Checks whether where function type arguments are expected.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
Returns:
True if the line at 'linenum' is inside something that expects arguments
of function types.
"""
line = clean_lines.elided[linenum]
return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
(linenum >= 2 and
(Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
clean_lines.elided[linenum - 1]) or
Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
clean_lines.elided[linenum - 2]) or
Search(r'\bstd::m?function\s*\<\s*$',
clean_lines.elided[linenum - 1])))) | [
"def",
"ExpectingFunctionArgs",
"(",
"clean_lines",
",",
"linenum",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"return",
"(",
"Match",
"(",
"r'^\\s*MOCK_(CONST_)?METHOD\\d+(_T)?\\('",
",",
"line",
")",
"or",
"(",
"linenum",
">=",
"2",
"and",
"(",
"Match",
"(",
"r'^\\s*MOCK_(?:CONST_)?METHOD\\d+(?:_T)?\\((?:\\S+,)?\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"1",
"]",
")",
"or",
"Match",
"(",
"r'^\\s*MOCK_(?:CONST_)?METHOD\\d+(?:_T)?\\(\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"2",
"]",
")",
"or",
"Search",
"(",
"r'\\bstd::m?function\\s*\\<\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"1",
"]",
")",
")",
")",
")"
] | https://github.com/apache/parquet-cpp/blob/642da055adf009652689b20e68a198cffb857651/build-support/cpplint.py#L5441-L5460 |
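
The heart of the record is the `MOCK_METHODn` / `std::function` pattern matching. A quick standalone check of the first regex on representative gmock lines (without cpplint's `CleansedLines` machinery):

```python
import re

MOCK_RE = r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\('
print(bool(re.match(MOCK_RE, "  MOCK_METHOD2(Send, bool(int, int));")))  # True
print(bool(re.match(MOCK_RE, "  MOCK_CONST_METHOD0(size, size_t());")))  # True
print(bool(re.match(MOCK_RE, "  int Send(int a, int b);")))              # False
```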
|
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | build/android/pylib/valgrind_tools.py | python | AddressSanitizerTool.GetUtilWrapper | (self) | return self.GetTestWrapper() | Returns the wrapper for utilities, such as forwarder.
AddressSanitizer wrapper must be added to all instrumented binaries,
including forwarder and the like. This can be removed if such binaries
were built without instrumentation. | Returns the wrapper for utilities, such as forwarder. | [
"Returns",
"the",
"wrapper",
"for",
"utilities",
"such",
"as",
"forwarder",
"."
] | def GetUtilWrapper(self):
"""Returns the wrapper for utilities, such as forwarder.
AddressSanitizer wrapper must be added to all instrumented binaries,
including forwarder and the like. This can be removed if such binaries
were built without instrumentation. """
return self.GetTestWrapper() | [
"def",
"GetUtilWrapper",
"(",
"self",
")",
":",
"return",
"self",
".",
"GetTestWrapper",
"(",
")"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/build/android/pylib/valgrind_tools.py#L113-L119 |
|
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | demo/DPU-for-RNN/rnn_u25_u50lv/apps/imdb_sentiment_detection/utils/hdf5_format.py | python | load_attributes_from_hdf5_group | (group, name) | return data | Loads attributes of the specified name from the HDF5 group.
This method deals with an inherent problem
of the HDF5 file format, which is not able to store
data larger than HDF5_OBJECT_HEADER_LIMIT bytes.
Arguments:
group: A pointer to a HDF5 group.
name: A name of the attributes to load.
Returns:
data: Attributes data. | Loads attributes of the specified name from the HDF5 group. | [
"Loads",
"attributes",
"of",
"the",
"specified",
"name",
"from",
"the",
"HDF5",
"group",
"."
] | def load_attributes_from_hdf5_group(group, name):
"""Loads attributes of the specified name from the HDF5 group.
This method deals with an inherent problem
of the HDF5 file format, which is not able to store
data larger than HDF5_OBJECT_HEADER_LIMIT bytes.
Arguments:
group: A pointer to a HDF5 group.
name: A name of the attributes to load.
Returns:
data: Attributes data.
"""
if name in group.attrs:
data = [n for n in group.attrs[name]]
else:
data = []
chunk_id = 0
while '%s%d' % (name, chunk_id) in group.attrs:
data.extend(
[n for n in group.attrs['%s%d' % (name, chunk_id)]])
chunk_id += 1
return data | [
"def",
"load_attributes_from_hdf5_group",
"(",
"group",
",",
"name",
")",
":",
"if",
"name",
"in",
"group",
".",
"attrs",
":",
"data",
"=",
"[",
"n",
"for",
"n",
"in",
"group",
".",
"attrs",
"[",
"name",
"]",
"]",
"else",
":",
"data",
"=",
"[",
"]",
"chunk_id",
"=",
"0",
"while",
"'%s%d'",
"%",
"(",
"name",
",",
"chunk_id",
")",
"in",
"group",
".",
"attrs",
":",
"data",
".",
"extend",
"(",
"[",
"n",
"for",
"n",
"in",
"group",
".",
"attrs",
"[",
"'%s%d'",
"%",
"(",
"name",
",",
"chunk_id",
")",
"]",
"]",
")",
"chunk_id",
"+=",
"1",
"return",
"data"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/demo/DPU-for-RNN/rnn_u25_u50lv/apps/imdb_sentiment_detection/utils/hdf5_format.py#L824-L847 |
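
The record's reader understands two layouts: a single attribute `name`, or numbered chunks `name0`, `name1`, … written by the matching saver to dodge the HDF5 object-header limit. A small sketch of the chunked layout (assumes `h5py` and `numpy` are installed and the function above is in scope; the chunk contents here are arbitrary):

```python
import h5py
import numpy as np

with h5py.File("demo.h5", "w") as f:
    g = f.create_group("layer")
    # chunked layout: one oversized list split across numbered attributes
    g.attrs["weight_names0"] = np.array([b"w0", b"w1"])
    g.attrs["weight_names1"] = np.array([b"w2"])

with h5py.File("demo.h5", "r") as f:
    names = load_attributes_from_hdf5_group(f["layer"], "weight_names")
    print(names)  # [b'w0', b'w1', b'w2'] -- chunks read back in order
```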
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib2to3/fixer_util.py | python | Call | (func_name, args=None, prefix=None) | return node | A function call | A function call | [
"A",
"function",
"call"
] | def Call(func_name, args=None, prefix=None):
"""A function call"""
node = Node(syms.power, [func_name, ArgList(args)])
if prefix is not None:
node.prefix = prefix
return node | [
"def",
"Call",
"(",
"func_name",
",",
"args",
"=",
"None",
",",
"prefix",
"=",
"None",
")",
":",
"node",
"=",
"Node",
"(",
"syms",
".",
"power",
",",
"[",
"func_name",
",",
"ArgList",
"(",
"args",
")",
"]",
")",
"if",
"prefix",
"is",
"not",
"None",
":",
"node",
".",
"prefix",
"=",
"prefix",
"return",
"node"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib2to3/fixer_util.py#L61-L66 |
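
A quick use of the record's helper to synthesize a call node (note `lib2to3` is deprecated in recent CPython releases, so this is best run on an older interpreter):

```python
from lib2to3.fixer_util import Call, Name, String

node = Call(Name("print"), [String('"hi"')], prefix=" ")
print(repr(str(node)))  # ' print("hi")' -- the prefix lands before the call
```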
|
AojunZhou/Incremental-Network-Quantization | c7f6a609d5817d8424ce224209cf4c50f1e4de50 | python/caffe/io.py | python | Transformer.set_transpose | (self, in_, order) | Set the input channel order for e.g. RGB to BGR conversion
as needed for the reference ImageNet model.
Parameters
----------
in_ : which input to assign this channel order
order : the order to transpose the dimensions | Set the input channel order for e.g. RGB to BGR conversion
as needed for the reference ImageNet model. | [
"Set",
"the",
"input",
"channel",
"order",
"for",
"e",
".",
"g",
".",
"RGB",
"to",
"BGR",
"conversion",
"as",
"needed",
"for",
"the",
"reference",
"ImageNet",
"model",
"."
] | def set_transpose(self, in_, order):
"""
Set the input channel order for e.g. RGB to BGR conversion
as needed for the reference ImageNet model.
Parameters
----------
in_ : which input to assign this channel order
order : the order to transpose the dimensions
"""
self.__check_input(in_)
if len(order) != len(self.inputs[in_]) - 1:
raise Exception('Transpose order needs to have the same number of '
'dimensions as the input.')
self.transpose[in_] = order | [
"def",
"set_transpose",
"(",
"self",
",",
"in_",
",",
"order",
")",
":",
"self",
".",
"__check_input",
"(",
"in_",
")",
"if",
"len",
"(",
"order",
")",
"!=",
"len",
"(",
"self",
".",
"inputs",
"[",
"in_",
"]",
")",
"-",
"1",
":",
"raise",
"Exception",
"(",
"'Transpose order needs to have the same number of '",
"'dimensions as the input.'",
")",
"self",
".",
"transpose",
"[",
"in_",
"]",
"=",
"order"
] | https://github.com/AojunZhou/Incremental-Network-Quantization/blob/c7f6a609d5817d8424ce224209cf4c50f1e4de50/python/caffe/io.py#L187-L201 |
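
Typical use in Caffe's preprocessing pipeline: images arrive as H×W×C arrays while the reference models expect C×H×W blobs. A sketch, assuming `caffe` is installed and `net` is a loaded `caffe.Net` with a `'data'` input:

```python
import caffe

transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))  # H x W x C image -> C x H x W blob
# an order that doesn't cover all non-batch dims, e.g. (2, 0), would raise
```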
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/utils/cpp_extension/cpp_extension.py | python | BuildExtension._check_abi | (self) | Check ABI Compatibility. | Check ABI Compatibility. | [
"Check",
"ABI",
"Compatibility",
"."
] | def _check_abi(self):
"""
Check ABI Compatibility.
"""
if hasattr(self.compiler, 'compiler_cxx'):
compiler = self.compiler.compiler_cxx[0]
elif IS_WINDOWS:
compiler = os.environ.get('CXX', 'cl')
else:
compiler = os.environ.get('CXX', 'c++')
check_abi_compatibility(compiler)
# Warn user if VC env is activated but `DISTUTILS_USE_SDK` is not set.
if IS_WINDOWS and 'VSCMD_ARG_TGT_ARCH' in os.environ and 'DISTUTILS_USE_SDK' not in os.environ:
msg = (
'It seems that the VC environment is activated but DISTUTILS_USE_SDK is not set.'
'This may lead to multiple activations of the VC env.'
'Please run `set DISTUTILS_USE_SDK=1` and try again.')
raise UserWarning(msg) | [
"def",
"_check_abi",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
".",
"compiler",
",",
"'compiler_cxx'",
")",
":",
"compiler",
"=",
"self",
".",
"compiler",
".",
"compiler_cxx",
"[",
"0",
"]",
"elif",
"IS_WINDOWS",
":",
"compiler",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'CXX'",
",",
"'cl'",
")",
"else",
":",
"compiler",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'CXX'",
",",
"'c++'",
")",
"check_abi_compatibility",
"(",
"compiler",
")",
"# Warn user if VC env is activated but `DISTUTILS_USE_SDK` is not set.",
"if",
"IS_WINDOWS",
"and",
"'VSCMD_ARG_TGT_ARCH'",
"in",
"os",
".",
"environ",
"and",
"'DISTUTILS_USE_SDK'",
"not",
"in",
"os",
".",
"environ",
":",
"msg",
"=",
"(",
"'It seems that the VC environment is activated but DISTUTILS_USE_SDK is not set.'",
"'This may lead to multiple activations of the VC env.'",
"'Please run `set DISTUTILS_USE_SDK=1` and try again.'",
")",
"raise",
"UserWarning",
"(",
"msg",
")"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/utils/cpp_extension/cpp_extension.py#L621-L639 |
vnpy/vnpy | f50f2535ed39dd33272e0985ed40c7078e4c19f6 | vnpy/trader/gateway.py | python | BaseGateway.write_log | (self, msg: str) | Write a log event from gateway. | Write a log event from gateway. | [
"Write",
"a",
"log",
"event",
"from",
"gateway",
"."
] | def write_log(self, msg: str) -> None:
"""
Write a log event from gateway.
"""
log = LogData(msg=msg, gateway_name=self.gateway_name)
self.on_log(log) | [
"def",
"write_log",
"(",
"self",
",",
"msg",
":",
"str",
")",
"->",
"None",
":",
"log",
"=",
"LogData",
"(",
"msg",
"=",
"msg",
",",
"gateway_name",
"=",
"self",
".",
"gateway_name",
")",
"self",
".",
"on_log",
"(",
"log",
")"
] | https://github.com/vnpy/vnpy/blob/f50f2535ed39dd33272e0985ed40c7078e4c19f6/vnpy/trader/gateway.py#L156-L161 |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/encodings/hex_codec.py | python | hex_encode | (input,errors='strict') | return (output, len(input)) | Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec. | Encodes the object input and returns a tuple (output
object, length consumed). | [
"Encodes",
"the",
"object",
"input",
"and",
"returns",
"a",
"tuple",
"(",
"output",
"object",
"length",
"consumed",
")",
"."
] | def hex_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.b2a_hex(input)
return (output, len(input)) | [
"def",
"hex_encode",
"(",
"input",
",",
"errors",
"=",
"'strict'",
")",
":",
"assert",
"errors",
"==",
"'strict'",
"output",
"=",
"binascii",
".",
"b2a_hex",
"(",
"input",
")",
"return",
"(",
"output",
",",
"len",
"(",
"input",
")",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/encodings/hex_codec.py#L13-L25 |
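
Since the codec is a thin wrapper over `binascii.b2a_hex`, its behavior is easy to reproduce; in Python 3 the equivalent bytes-to-bytes transform is exposed through `codecs`:

```python
import binascii
import codecs

print(binascii.b2a_hex(b"\x00\xff"))      # b'00ff' -- what hex_encode returns as output
print(codecs.encode(b"\x00\xff", "hex"))  # b'00ff' -- the registered Python 3 spelling
```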
|
rbgirshick/caffe-fast-rcnn | 28a579eaf0668850705598b3075b8969f22226d9 | scripts/cpp_lint.py | python | CheckForCopyright | (filename, lines, error) | Logs an error if a Copyright message appears at the top of the file. | Logs an error if a Copyright message appears at the top of the file. | [
"Logs",
"an",
"error",
"if",
"a",
"Copyright",
"message",
"appears",
"at",
"the",
"top",
"of",
"the",
"file",
"."
] | def CheckForCopyright(filename, lines, error):
"""Logs an error if a Copyright message appears at the top of the file."""
# We'll check up to line 10. Don't forget there's a
# dummy line at the front.
for line in xrange(1, min(len(lines), 11)):
if _RE_COPYRIGHT.search(lines[line], re.I):
error(filename, 0, 'legal/copyright', 5,
'Copyright message found. '
'You should not include a copyright line.') | [
"def",
"CheckForCopyright",
"(",
"filename",
",",
"lines",
",",
"error",
")",
":",
"# We'll check up to line 10. Don't forget there's a",
"# dummy line at the front.",
"for",
"line",
"in",
"xrange",
"(",
"1",
",",
"min",
"(",
"len",
"(",
"lines",
")",
",",
"11",
")",
")",
":",
"if",
"_RE_COPYRIGHT",
".",
"search",
"(",
"lines",
"[",
"line",
"]",
",",
"re",
".",
"I",
")",
":",
"error",
"(",
"filename",
",",
"0",
",",
"'legal/copyright'",
",",
"5",
",",
"'Copyright message found. '",
"'You should not include a copyright line.'",
")"
] | https://github.com/rbgirshick/caffe-fast-rcnn/blob/28a579eaf0668850705598b3075b8969f22226d9/scripts/cpp_lint.py#L1372-L1381 |
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TUInt_JavaUIntToCppUInt | (*args) | return _snap.TUInt_JavaUIntToCppUInt(*args) | TUInt_JavaUIntToCppUInt(uint const & JavaUInt) -> uint
Parameters:
JavaUInt: uint const & | TUInt_JavaUIntToCppUInt(uint const & JavaUInt) -> uint | [
"TUInt_JavaUIntToCppUInt",
"(",
"uint",
"const",
"&",
"JavaUInt",
")",
"-",
">",
"uint"
] | def TUInt_JavaUIntToCppUInt(*args):
"""
TUInt_JavaUIntToCppUInt(uint const & JavaUInt) -> uint
Parameters:
JavaUInt: uint const &
"""
return _snap.TUInt_JavaUIntToCppUInt(*args) | [
"def",
"TUInt_JavaUIntToCppUInt",
"(",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TUInt_JavaUIntToCppUInt",
"(",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L13892-L13900 |
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/decimal.py | python | Decimal._log10_exp_bound | (self) | return len(num) + e - (num < "231") - 1 | Compute a lower bound for the adjusted exponent of self.log10().
In other words, find r such that self.log10() >= 10**r.
Assumes that self is finite and positive and that self != 1. | Compute a lower bound for the adjusted exponent of self.log10().
In other words, find r such that self.log10() >= 10**r.
Assumes that self is finite and positive and that self != 1. | [
"Compute",
"a",
"lower",
"bound",
"for",
"the",
"adjusted",
"exponent",
"of",
"self",
".",
"log10",
"()",
".",
"In",
"other",
"words",
"find",
"r",
"such",
"that",
"self",
".",
"log10",
"()",
">",
"=",
"10",
"**",
"r",
".",
"Assumes",
"that",
"self",
"is",
"finite",
"and",
"positive",
"and",
"that",
"self",
"!",
"=",
"1",
"."
] | def _log10_exp_bound(self):
"""Compute a lower bound for the adjusted exponent of self.log10().
In other words, find r such that self.log10() >= 10**r.
Assumes that self is finite and positive and that self != 1.
"""
# For x >= 10 or x < 0.1 we only need a bound on the integer
# part of log10(self), and this comes directly from the
# exponent of x. For 0.1 <= x <= 10 we use the inequalities
# 1-1/x <= log(x) <= x-1. If x > 1 we have |log10(x)| >
# (1-1/x)/2.31 > 0. If x < 1 then |log10(x)| > (1-x)/2.31 > 0
adj = self._exp + len(self._int) - 1
if adj >= 1:
# self >= 10
return len(str(adj))-1
if adj <= -2:
# self < 0.1
return len(str(-1-adj))-1
op = _WorkRep(self)
c, e = op.int, op.exp
if adj == 0:
# 1 < self < 10
num = str(c-10**-e)
den = str(231*c)
return len(num) - len(den) - (num < den) + 2
# adj == -1, 0.1 <= self < 1
num = str(10**-e-c)
return len(num) + e - (num < "231") - 1 | [
"def",
"_log10_exp_bound",
"(",
"self",
")",
":",
"# For x >= 10 or x < 0.1 we only need a bound on the integer",
"# part of log10(self), and this comes directly from the",
"# exponent of x. For 0.1 <= x <= 10 we use the inequalities",
"# 1-1/x <= log(x) <= x-1. If x > 1 we have |log10(x)| >",
"# (1-1/x)/2.31 > 0. If x < 1 then |log10(x)| > (1-x)/2.31 > 0",
"adj",
"=",
"self",
".",
"_exp",
"+",
"len",
"(",
"self",
".",
"_int",
")",
"-",
"1",
"if",
"adj",
">=",
"1",
":",
"# self >= 10",
"return",
"len",
"(",
"str",
"(",
"adj",
")",
")",
"-",
"1",
"if",
"adj",
"<=",
"-",
"2",
":",
"# self < 0.1",
"return",
"len",
"(",
"str",
"(",
"-",
"1",
"-",
"adj",
")",
")",
"-",
"1",
"op",
"=",
"_WorkRep",
"(",
"self",
")",
"c",
",",
"e",
"=",
"op",
".",
"int",
",",
"op",
".",
"exp",
"if",
"adj",
"==",
"0",
":",
"# 1 < self < 10",
"num",
"=",
"str",
"(",
"c",
"-",
"10",
"**",
"-",
"e",
")",
"den",
"=",
"str",
"(",
"231",
"*",
"c",
")",
"return",
"len",
"(",
"num",
")",
"-",
"len",
"(",
"den",
")",
"-",
"(",
"num",
"<",
"den",
")",
"+",
"2",
"# adj == -1, 0.1 <= self < 1",
"num",
"=",
"str",
"(",
"10",
"**",
"-",
"e",
"-",
"c",
")",
"return",
"len",
"(",
"num",
")",
"+",
"e",
"-",
"(",
"num",
"<",
"\"231\"",
")",
"-",
"1"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/decimal.py#L3138-L3166 |
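
The documented contract — the returned `r` satisfies `|log10(self)| >= 10**r` — can be spot-checked against the pure-Python decimal implementation, where this private helper lives (`_pydecimal` in CPython 3; the C `decimal` module does not expose it):

```python
from _pydecimal import Decimal  # pure-Python implementation with the private helper

for s in ("2", "0.5", "1234", "0.00007"):
    d = Decimal(s)
    r = d._log10_exp_bound()
    assert abs(d.log10()) >= Decimal(10) ** r, (s, r)
    print(s, "->", r)  # e.g. "2" -> -1, since log10(2) ~ 0.301 >= 0.1
```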
|
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/contrib/learn/python/learn/estimators/tensor_signature.py | python | create_signatures | (tensors) | return TensorSignature(tensors) | Creates TensorSignature objects for given tensors.
Args:
tensors: Dict of `Tensor` objects or single `Tensor`.
Returns:
Dict of `TensorSignature` objects or single `TensorSignature`. | Creates TensorSignature objects for given tensors. | [
"Creates",
"TensorSignature",
"objects",
"for",
"given",
"tensors",
"."
] | def create_signatures(tensors):
"""Creates TensorSignature objects for given tensors.
Args:
tensors: Dict of `Tensor` objects or single `Tensor`.
Returns:
Dict of `TensorSignature` objects or single `TensorSignature`.
"""
if isinstance(tensors, dict):
return {
key: TensorSignature(tensors[key]) for key in tensors}
if tensors is None:
return None
return TensorSignature(tensors) | [
"def",
"create_signatures",
"(",
"tensors",
")",
":",
"if",
"isinstance",
"(",
"tensors",
",",
"dict",
")",
":",
"return",
"{",
"key",
":",
"TensorSignature",
"(",
"tensors",
"[",
"key",
"]",
")",
"for",
"key",
"in",
"tensors",
"}",
"if",
"tensors",
"is",
"None",
":",
"return",
"None",
"return",
"TensorSignature",
"(",
"tensors",
")"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/learn/python/learn/estimators/tensor_signature.py#L123-L137 |
|
Illumina/hap.py | 84011695b2ff2406c16a335106db6831fb67fdfe | src/python/Somatic/__init__.py | python | PiscesHCCSNVFeatures.collect | (self, vcfname, tag) | return GenericFeatures.collectFeatures(vcfname, tag, features, processor=StrelkaAdmixSNVFeatures.processValue) | Return a data frame with features collected from the given VCF, tagged by given type | Return a data frame with features collected from the given VCF, tagged by given type | [
"Return",
"a",
"data",
"frame",
"with",
"features",
"collected",
"from",
"the",
"given",
"VCF",
"tagged",
"by",
"given",
"type"
] | def collect(self, vcfname, tag):
""" Return a data frame with features collected from the given VCF, tagged by given type """
if tag not in ["TP", "FN"]:
return extractPiscesSNVFeatures(vcfname, tag, self.chr_depth)
else:
features = ["CHROM", "POS", "REF", "ALT", "QUAL",
"I.MapQrange", "I.somatic", "I.filtered", "S.1.VT",
"I.T_ALT_RATE", "I.DP_normal", "I.DP_tumor", "I.tag", "I.count"]
return GenericFeatures.collectFeatures(vcfname, tag, features, processor=StrelkaAdmixSNVFeatures.processValue) | [
"def",
"collect",
"(",
"self",
",",
"vcfname",
",",
"tag",
")",
":",
"if",
"tag",
"not",
"in",
"[",
"\"TP\"",
",",
"\"FN\"",
"]",
":",
"return",
"extractPiscesSNVFeatures",
"(",
"vcfname",
",",
"tag",
",",
"self",
".",
"chr_depth",
")",
"else",
":",
"features",
"=",
"[",
"\"CHROM\"",
",",
"\"POS\"",
",",
"\"REF\"",
",",
"\"ALT\"",
",",
"\"QUAL\"",
",",
"\"I.MapQrange\"",
",",
"\"I.somatic\"",
",",
"\"I.filtered\"",
",",
"\"S.1.VT\"",
",",
"\"I.T_ALT_RATE\"",
",",
"\"I.DP_normal\"",
",",
"\"I.DP_tumor\"",
",",
"\"I.tag\"",
",",
"\"I.count\"",
"]",
"return",
"GenericFeatures",
".",
"collectFeatures",
"(",
"vcfname",
",",
"tag",
",",
"features",
",",
"processor",
"=",
"StrelkaAdmixSNVFeatures",
".",
"processValue",
")"
] | https://github.com/Illumina/hap.py/blob/84011695b2ff2406c16a335106db6831fb67fdfe/src/python/Somatic/__init__.py#L235-L243 |
|
fritzsedlazeck/Sniffles | 82d885e5a74b526e18dd87b52554aea8139dd3aa | src/sniffles/leadprov.py | python | CIGAR_listreadstart_fwd | (ops) | Position in query read where CIGAR alignment starts (i.e. taking into account start clipping) | Position in query read where CIGAR alignment starts (i.e. taking into account start clipping) | [
"Position",
"in",
"query",
"read",
"where",
"CIGAR",
"alignment",
"starts",
"(",
"i",
".",
"e",
".",
"taking",
"into",
"account",
"start",
"clipping",
")"
] | def CIGAR_listreadstart_fwd(ops):
#TODO: Obsolete (see CIGAR_analyze)
"""
Position in query read where CIGAR alignment starts (i.e. taking into account start clipping)
"""
op,oplen=ops[0]
op2,op2len=ops[1]
if op=="H" or op=="S":
assert(op2!="H" and op2!="S")
return oplen
else:
return 0 | [
"def",
"CIGAR_listreadstart_fwd",
"(",
"ops",
")",
":",
"#TODO: Obsolete (see CIGAR_analyze)",
"op",
",",
"oplen",
"=",
"ops",
"[",
"0",
"]",
"op2",
",",
"op2len",
"=",
"ops",
"[",
"1",
"]",
"if",
"op",
"==",
"\"H\"",
"or",
"op",
"==",
"\"S\"",
":",
"assert",
"(",
"op2",
"!=",
"\"H\"",
"and",
"op2",
"!=",
"\"S\"",
")",
"return",
"oplen",
"else",
":",
"return",
"0"
] | https://github.com/fritzsedlazeck/Sniffles/blob/82d885e5a74b526e18dd87b52554aea8139dd3aa/src/sniffles/leadprov.py#L111-L122 |
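
With `ops` as the usual `(op, length)` pairs parsed from a CIGAR string, a leading hard/soft clip shifts where the alignment starts in the read; assuming the function above is importable from `sniffles.leadprov`:

```python
# '5S20M' (5 soft-clipped bases, then 20 aligned) vs. no leading clip
print(CIGAR_listreadstart_fwd([("S", 5), ("M", 20)]))  # 5
print(CIGAR_listreadstart_fwd([("M", 20), ("S", 5)]))  # 0
```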
domino-team/openwrt-cc | 8b181297c34d14d3ca521cc9f31430d561dbc688 | package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/common.py | python | GetFlavor | (params) | return 'linux' | Returns |params.flavor| if it's set, the system's default flavor else. | Returns |params.flavor| if it's set, the system's default flavor else. | [
"Returns",
"|params",
".",
"flavor|",
"if",
"it",
"s",
"set",
"the",
"system",
"s",
"default",
"flavor",
"else",
"."
] | def GetFlavor(params):
"""Returns |params.flavor| if it's set, the system's default flavor else."""
flavors = {
'cygwin': 'win',
'win32': 'win',
'darwin': 'mac',
}
if 'flavor' in params:
return params['flavor']
if sys.platform in flavors:
return flavors[sys.platform]
if sys.platform.startswith('sunos'):
return 'solaris'
if sys.platform.startswith('freebsd'):
return 'freebsd'
if sys.platform.startswith('openbsd'):
return 'openbsd'
if sys.platform.startswith('aix'):
return 'aix'
return 'linux' | [
"def",
"GetFlavor",
"(",
"params",
")",
":",
"flavors",
"=",
"{",
"'cygwin'",
":",
"'win'",
",",
"'win32'",
":",
"'win'",
",",
"'darwin'",
":",
"'mac'",
",",
"}",
"if",
"'flavor'",
"in",
"params",
":",
"return",
"params",
"[",
"'flavor'",
"]",
"if",
"sys",
".",
"platform",
"in",
"flavors",
":",
"return",
"flavors",
"[",
"sys",
".",
"platform",
"]",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'sunos'",
")",
":",
"return",
"'solaris'",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'freebsd'",
")",
":",
"return",
"'freebsd'",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'openbsd'",
")",
":",
"return",
"'openbsd'",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'aix'",
")",
":",
"return",
"'aix'",
"return",
"'linux'"
] | https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/common.py#L410-L431 |
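
The lookup order in the record — explicit `flavor` parameter first, then `sys.platform` — means a caller can always force a flavor; assuming `GetFlavor` is imported from gyp's `common` module:

```python
print(GetFlavor({'flavor': 'win'}))  # 'win': an explicit flavor short-circuits detection
print(GetFlavor({}))                 # host-dependent: 'linux', 'mac', 'freebsd', ...
```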
|
gnuradio/gnuradio | 09c3c4fa4bfb1a02caac74cb5334dfe065391e3b | tools/clang_format.py | python | Repo.get_candidates | (self, candidates) | return valid_files | Get the set of candidate files to check by querying the repository
Returns the full path to the file for clang-format to consume. | Get the set of candidate files to check by querying the repository | [
"Get",
"the",
"set",
"of",
"candidate",
"files",
"to",
"check",
"by",
"querying",
"the",
"repository"
] | def get_candidates(self, candidates):
"""Get the set of candidate files to check by querying the repository
Returns the full path to the file for clang-format to consume.
"""
if candidates is not None and len(candidates) > 0:
candidates = [self._get_local_dir(f) for f in candidates]
valid_files = list(
set(candidates).intersection(self.get_candidate_files()))
else:
valid_files = list(self.get_candidate_files())
# Get the full file name here
valid_files = [
os.path.normpath(os.path.join(self.root, f)) for f in valid_files
]
return valid_files | [
"def",
"get_candidates",
"(",
"self",
",",
"candidates",
")",
":",
"if",
"candidates",
"is",
"not",
"None",
"and",
"len",
"(",
"candidates",
")",
">",
"0",
":",
"candidates",
"=",
"[",
"self",
".",
"_get_local_dir",
"(",
"f",
")",
"for",
"f",
"in",
"candidates",
"]",
"valid_files",
"=",
"list",
"(",
"set",
"(",
"candidates",
")",
".",
"intersection",
"(",
"self",
".",
"get_candidate_files",
"(",
")",
")",
")",
"else",
":",
"valid_files",
"=",
"list",
"(",
"self",
".",
"get_candidate_files",
"(",
")",
")",
"# Get the full file name here",
"valid_files",
"=",
"[",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"root",
",",
"f",
")",
")",
"for",
"f",
"in",
"valid_files",
"]",
"return",
"valid_files"
] | https://github.com/gnuradio/gnuradio/blob/09c3c4fa4bfb1a02caac74cb5334dfe065391e3b/tools/clang_format.py#L334-L351 |
|
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/ops/composite/math_ops.py | python | _max | (*args) | return max(*args) | Returns the maximum value. | Returns the maximum value. | [
"Returns",
"the",
"maximum",
"value",
"."
] | def _max(*args):
"""Returns the maximum value."""
return max(*args) | [
"def",
"_max",
"(",
"*",
"args",
")",
":",
"return",
"max",
"(",
"*",
"args",
")"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/composite/math_ops.py#L683-L685 |
|
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | src/icebox/icebox_py/__init__.py | python | Symbols.string | (self, ptr) | return libicebox.symbols_string(self.proc, ptr) | Convert process virtual memory address to symbol string. | Convert process virtual memory address to symbol string. | [
"Convert",
"process",
"virtual",
"memory",
"address",
"to",
"symbol",
"string",
"."
] | def string(self, ptr):
"""Convert process virtual memory address to symbol string."""
return libicebox.symbols_string(self.proc, ptr) | [
"def",
"string",
"(",
"self",
",",
"ptr",
")",
":",
"return",
"libicebox",
".",
"symbols_string",
"(",
"self",
".",
"proc",
",",
"ptr",
")"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/src/icebox/icebox_py/__init__.py#L140-L142 |
|
koying/SPMC | beca52667112f2661204ebb42406115825512491 | tools/EventClients/lib/python/xbmcclient.py | python | XBMCClient.send_action | (self, actionmessage="", actiontype=ACTION_EXECBUILTIN) | return packet.send(self.sock, self.addr, self.uid) | Keyword arguments:
actionmessage -- the ActionString
actiontype -- The ActionType the ActionString should be sent to. | Keyword arguments:
actionmessage -- the ActionString
actiontype -- The ActionType the ActionString should be sent to. | [
"Keyword",
"arguments",
":",
"actionmessage",
"--",
"the",
"ActionString",
"actiontype",
"--",
"The",
"ActionType",
"the",
"ActionString",
"should",
"be",
"sent",
"to",
"."
] | def send_action(self, actionmessage="", actiontype=ACTION_EXECBUILTIN):
"""
Keyword arguments:
actionmessage -- the ActionString
actiontype -- The ActionType the ActionString should be sent to.
"""
packet = PacketACTION(actionmessage, actiontype)
return packet.send(self.sock, self.addr, self.uid) | [
"def",
"send_action",
"(",
"self",
",",
"actionmessage",
"=",
"\"\"",
",",
"actiontype",
"=",
"ACTION_EXECBUILTIN",
")",
":",
"packet",
"=",
"PacketACTION",
"(",
"actionmessage",
",",
"actiontype",
")",
"return",
"packet",
".",
"send",
"(",
"self",
".",
"sock",
",",
"self",
".",
"addr",
",",
"self",
".",
"uid",
")"
] | https://github.com/koying/SPMC/blob/beca52667112f2661204ebb42406115825512491/tools/EventClients/lib/python/xbmcclient.py#L621-L628 |
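
A hedged usage sketch: with an `XBMCClient` from the same module connected to a running Kodi/XBMC event server, `send_action` defaults to `ACTION_EXECBUILTIN`, so a built-in command string can be pushed directly (the connect/close flow below assumes the module's usual client API):

```python
client = XBMCClient("demo-client")
client.connect()  # assumed default: local event server on port 9777
client.send_action("Notification(Hello, sent via EventClient)")
client.close()
```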
|
msracver/Deep-Image-Analogy | 632b9287b42552e32dad64922967c8c9ec7fc4d3 | scripts/cpp_lint.py | python | _GetTextInside | (text, start_pattern) | return text[start_position:position - 1] | r"""Retrieves all the text between matching open and close parentheses.
Given a string of lines and a regular expression string, retrieve all the text
following the expression and between opening punctuation symbols like
(, [, or {, and the matching close-punctuation symbol. This properly handles nested
occurrences of the punctuations, so for the text like
printf(a(), b(c()));
a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
start_pattern must match string having an open punctuation symbol at the end.
Args:
text: The lines to extract text. Its comments and strings must be elided.
It can be single line and can span multiple lines.
start_pattern: The regexp string indicating where to start extracting
the text.
Returns:
The extracted text.
None if either the opening string or ending punctuation could not be found. | r"""Retrieves all the text between matching open and close parentheses. | [
"r",
"Retrieves",
"all",
"the",
"text",
"between",
"matching",
"open",
"and",
"close",
"parentheses",
"."
] | def _GetTextInside(text, start_pattern):
r"""Retrieves all the text between matching open and close parentheses.
Given a string of lines and a regular expression string, retrieve all the text
following the expression and between opening punctuation symbols like
(, [, or {, and the matching close-punctuation symbol. This properly handles nested
occurrences of the punctuations, so for the text like
printf(a(), b(c()));
a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
start_pattern must match string having an open punctuation symbol at the end.
Args:
text: The lines to extract text. Its comments and strings must be elided.
It can be single line and can span multiple lines.
start_pattern: The regexp string indicating where to start extracting
the text.
Returns:
The extracted text.
None if either the opening string or ending punctuation could not be found.
"""
# TODO(sugawarayu): Audit cpplint.py to see what places could be profitably
# rewritten to use _GetTextInside (and use inferior regexp matching today).
# Give opening punctuations to get the matching close-punctuations.
matching_punctuation = {'(': ')', '{': '}', '[': ']'}
closing_punctuation = set(matching_punctuation.itervalues())
# Find the position to start extracting text.
match = re.search(start_pattern, text, re.M)
if not match: # start_pattern not found in text.
return None
start_position = match.end(0)
assert start_position > 0, (
'start_pattern must ends with an opening punctuation.')
assert text[start_position - 1] in matching_punctuation, (
'start_pattern must ends with an opening punctuation.')
# Stack of closing punctuations we expect to have in text after position.
punctuation_stack = [matching_punctuation[text[start_position - 1]]]
position = start_position
while punctuation_stack and position < len(text):
if text[position] == punctuation_stack[-1]:
punctuation_stack.pop()
elif text[position] in closing_punctuation:
# A closing punctuation without matching opening punctuations.
return None
elif text[position] in matching_punctuation:
punctuation_stack.append(matching_punctuation[text[position]])
position += 1
if punctuation_stack:
# Opening punctuations left without matching close-punctuations.
return None
# punctuations match.
return text[start_position:position - 1] | [
"def",
"_GetTextInside",
"(",
"text",
",",
"start_pattern",
")",
":",
"# TODO(sugawarayu): Audit cpplint.py to see what places could be profitably",
"# rewritten to use _GetTextInside (and use inferior regexp matching today).",
"# Give opening punctuations to get the matching close-punctuations.",
"matching_punctuation",
"=",
"{",
"'('",
":",
"')'",
",",
"'{'",
":",
"'}'",
",",
"'['",
":",
"']'",
"}",
"closing_punctuation",
"=",
"set",
"(",
"matching_punctuation",
".",
"itervalues",
"(",
")",
")",
"# Find the position to start extracting text.",
"match",
"=",
"re",
".",
"search",
"(",
"start_pattern",
",",
"text",
",",
"re",
".",
"M",
")",
"if",
"not",
"match",
":",
"# start_pattern not found in text.",
"return",
"None",
"start_position",
"=",
"match",
".",
"end",
"(",
"0",
")",
"assert",
"start_position",
">",
"0",
",",
"(",
"'start_pattern must ends with an opening punctuation.'",
")",
"assert",
"text",
"[",
"start_position",
"-",
"1",
"]",
"in",
"matching_punctuation",
",",
"(",
"'start_pattern must ends with an opening punctuation.'",
")",
"# Stack of closing punctuations we expect to have in text after position.",
"punctuation_stack",
"=",
"[",
"matching_punctuation",
"[",
"text",
"[",
"start_position",
"-",
"1",
"]",
"]",
"]",
"position",
"=",
"start_position",
"while",
"punctuation_stack",
"and",
"position",
"<",
"len",
"(",
"text",
")",
":",
"if",
"text",
"[",
"position",
"]",
"==",
"punctuation_stack",
"[",
"-",
"1",
"]",
":",
"punctuation_stack",
".",
"pop",
"(",
")",
"elif",
"text",
"[",
"position",
"]",
"in",
"closing_punctuation",
":",
"# A closing punctuation without matching opening punctuations.",
"return",
"None",
"elif",
"text",
"[",
"position",
"]",
"in",
"matching_punctuation",
":",
"punctuation_stack",
".",
"append",
"(",
"matching_punctuation",
"[",
"text",
"[",
"position",
"]",
"]",
")",
"position",
"+=",
"1",
"if",
"punctuation_stack",
":",
"# Opening punctuations left without matching close-punctuations.",
"return",
"None",
"# punctuations match.",
"return",
"text",
"[",
"start_position",
":",
"position",
"-",
"1",
"]"
] | https://github.com/msracver/Deep-Image-Analogy/blob/632b9287b42552e32dad64922967c8c9ec7fc4d3/scripts/cpp_lint.py#L3752-L3805 |
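
The docstring's own example can be run directly once the helper is in scope (note the source above uses `itervalues`, so it targets Python 2); nesting is respected rather than stopping at the first `)`:

```python
text = "printf(a(), b(c()));"
print(_GetTextInside(text, r'printf\('))  # a(), b(c())
print(_GetTextInside("f(g(x)", r'f\('))   # None -- unbalanced, no matching ')'
```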
|
kamyu104/LeetCode-Solutions | 77605708a927ea3b85aee5a479db733938c7c211 | Python/lonely-pixel-ii.py | python | Solution2.findBlackPixel | (self, picture, N) | return sum(N * zip(row, cols).count(('B', N)) \
for row, cnt in lookup.iteritems() \
if cnt == N == row.count('B')) | :type picture: List[List[str]]
:type N: int
:rtype: int | :type picture: List[List[str]]
:type N: int
:rtype: int | [
":",
"type",
"picture",
":",
"List",
"[",
"List",
"[",
"str",
"]]",
":",
"type",
"N",
":",
"int",
":",
"rtype",
":",
"int"
] | def findBlackPixel(self, picture, N):
"""
:type picture: List[List[str]]
:type N: int
:rtype: int
"""
lookup = collections.Counter(map(tuple, picture))
cols = [col.count('B') for col in zip(*picture)]
return sum(N * zip(row, cols).count(('B', N)) \
for row, cnt in lookup.iteritems() \
if cnt == N == row.count('B')) | [
"def",
"findBlackPixel",
"(",
"self",
",",
"picture",
",",
"N",
")",
":",
"lookup",
"=",
"collections",
".",
"Counter",
"(",
"map",
"(",
"tuple",
",",
"picture",
")",
")",
"cols",
"=",
"[",
"col",
".",
"count",
"(",
"'B'",
")",
"for",
"col",
"in",
"zip",
"(",
"*",
"picture",
")",
"]",
"return",
"sum",
"(",
"N",
"*",
"zip",
"(",
"row",
",",
"cols",
")",
".",
"count",
"(",
"(",
"'B'",
",",
"N",
")",
")",
"for",
"row",
",",
"cnt",
"in",
"lookup",
".",
"iteritems",
"(",
")",
"if",
"cnt",
"==",
"N",
"==",
"row",
".",
"count",
"(",
"'B'",
")",
")"
] | https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/lonely-pixel-ii.py#L32-L42 |
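
Worked example (the classic LeetCode 533 case): with N = 3, the identical 'B' rows repeat exactly N times, and two of their black columns also hold exactly N black pixels, contributing 2 columns × 3 rows = 6. The record's variant uses `iteritems`, so it runs on Python 2:

```python
picture = [["W", "B", "W", "B", "B", "W"],
           ["W", "B", "W", "B", "B", "W"],
           ["W", "B", "W", "B", "B", "W"],
           ["W", "W", "B", "W", "B", "W"]]
print(Solution2().findBlackPixel(picture, 3))  # 6: columns 1 and 3, three rows each
```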
|
facebookresearch/faiss | eb8781557f556505ca93f6f21fff932e17f0d9e0 | contrib/client_server.py | python | run_index_server | (index: faiss.Index, port: int, v6: bool = False) | | | serve requests for that index forever | serve requests for that index forever | [
"serve",
"requests",
"for",
"that",
"index",
"forerver"
] | def run_index_server(index: faiss.Index, port: int, v6: bool = False):
""" serve requests for that index forerver """
rpc.run_server(
lambda s: SearchServer(s, index),
port, v6=v6) | [
"def",
"run_index_server",
"(",
"index",
":",
"faiss",
".",
"Index",
",",
"port",
":",
"int",
",",
"v6",
":",
"bool",
"=",
"False",
")",
":",
"rpc",
".",
"run_server",
"(",
"lambda",
"s",
":",
"SearchServer",
"(",
"s",
",",
"index",
")",
",",
"port",
",",
"v6",
"=",
"v6",
")"
] | https://github.com/facebookresearch/faiss/blob/eb8781557f556505ca93f6f21fff932e17f0d9e0/contrib/client_server.py#L37-L41 |
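
A hedged end-to-end sketch: build any Faiss index, then hand it to the record's helper, which blocks and serves search RPCs on the given port (import path inferred from the `contrib/client_server.py` location above):

```python
import numpy as np
import faiss
from faiss.contrib.client_server import run_index_server

index = faiss.IndexFlatL2(64)
index.add(np.random.rand(1000, 64).astype("float32"))
run_index_server(index, 12010)  # blocks; pair with the module's client on port 12010
```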
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_windows.py | python | PageSetupDialogData.EnableOrientation | (*args, **kwargs) | return _windows_.PageSetupDialogData_EnableOrientation(*args, **kwargs) | EnableOrientation(self, bool flag) | EnableOrientation(self, bool flag) | [
"EnableOrientation",
"(",
"self",
"bool",
"flag",
")"
] | def EnableOrientation(*args, **kwargs):
"""EnableOrientation(self, bool flag)"""
return _windows_.PageSetupDialogData_EnableOrientation(*args, **kwargs) | [
"def",
"EnableOrientation",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"PageSetupDialogData_EnableOrientation",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L4882-L4884 |
|
lukasmonk/lucaschess | 13e2e5cb13b38a720ccf897af649054a64bcb914 | Code/Analisis.py | python | AnalizaPartida.grabaFNS | (self, fichero, fen) | | Saves each FEN found to the file "fichero" | Saves each FEN found to the file "fichero" | [
"Graba",
"cada",
"fen",
"encontrado",
"en",
"el",
"fichero",
"fichero"
] | def grabaFNS(self, fichero, fen):
"""
Saves each FEN found to the file "fichero"
"""
if not fichero:
return
f = open(fichero, "ab")
f.write("%s\r\n" % fen)
f.close()
self.procesador.entrenamientos.menu = None | [
"def",
"grabaFNS",
"(",
"self",
",",
"fichero",
",",
"fen",
")",
":",
"if",
"not",
"fichero",
":",
"return",
"f",
"=",
"open",
"(",
"fichero",
",",
"\"ab\"",
")",
"f",
".",
"write",
"(",
"\"%s\\r\\n\"",
"%",
"fen",
")",
"f",
".",
"close",
"(",
")",
"self",
".",
"procesador",
".",
"entrenamientos",
".",
"menu",
"=",
"None"
] | https://github.com/lukasmonk/lucaschess/blob/13e2e5cb13b38a720ccf897af649054a64bcb914/Code/Analisis.py#L135-L145 |
deeplearningais/CUV | 4e920ad1304af9de3e5f755cc2e9c5c96e06c324 | examples/mlp/weight_layer.py | python | weight_layer.backward | (self, learnrate=0.01, decay=0.0) | Backward pass, calculates the deltas of lower layer and updates the
weights.
@param learnrate how strongly the gradient influences the weights
@param decay large values result in a regularization with respect
to the squared weight value | Backward pass, calculates the deltas of lower layer and updates the
weights. | [
"Backward",
"pass",
"calculates",
"the",
"deltas",
"of",
"lower",
"layer",
"and",
"updates",
"the",
"weights",
"."
] | def backward(self, learnrate=0.01, decay=0.0):
"""Backward pass, calculates the deltas of lower layer and updates the
weights.
@param learnrate how strongly the gradient influences the weights
@param decay large values result in a regularization with respect
to the squared weight value"""
cp.prod(self.source.deltas, self.weight, self.target.deltas, 't', 'n')
h = self.source.activations.copy()
self.source.d_nonlinearity(h)
self.source.deltas *= h
h.dealloc()
batch_size = self.source.activations.shape[1]
dw = cp.prod(self.target.deltas, self.source.activations, 'n', 't')
cp.learn_step_weight_decay(self.weight, dw, learnrate / batch_size, decay)
dw.dealloc()
db = cp.sum(self.target.deltas, 1)
cp.learn_step_weight_decay(self.bias, db, learnrate / batch_size, decay)
db.dealloc() | [
"def",
"backward",
"(",
"self",
",",
"learnrate",
"=",
"0.01",
",",
"decay",
"=",
"0.0",
")",
":",
"cp",
".",
"prod",
"(",
"self",
".",
"source",
".",
"deltas",
",",
"self",
".",
"weight",
",",
"self",
".",
"target",
".",
"deltas",
",",
"'t'",
",",
"'n'",
")",
"h",
"=",
"self",
".",
"source",
".",
"activations",
".",
"copy",
"(",
")",
"self",
".",
"source",
".",
"d_nonlinearity",
"(",
"h",
")",
"self",
".",
"source",
".",
"deltas",
"*=",
"h",
"h",
".",
"dealloc",
"(",
")",
"batch_size",
"=",
"self",
".",
"source",
".",
"activations",
".",
"shape",
"[",
"1",
"]",
"dw",
"=",
"cp",
".",
"prod",
"(",
"self",
".",
"target",
".",
"deltas",
",",
"self",
".",
"source",
".",
"activations",
",",
"'n'",
",",
"'t'",
")",
"cp",
".",
"learn_step_weight_decay",
"(",
"self",
".",
"weight",
",",
"dw",
",",
"learnrate",
"/",
"batch_size",
",",
"decay",
")",
"dw",
".",
"dealloc",
"(",
")",
"db",
"=",
"cp",
".",
"sum",
"(",
"self",
".",
"target",
".",
"deltas",
",",
"1",
")",
"cp",
".",
"learn_step_weight_decay",
"(",
"self",
".",
"bias",
",",
"db",
",",
"learnrate",
"/",
"batch_size",
",",
"decay",
")",
"db",
".",
"dealloc",
"(",
")"
] | https://github.com/deeplearningais/CUV/blob/4e920ad1304af9de3e5f755cc2e9c5c96e06c324/examples/mlp/weight_layer.py#L32-L51 |
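
For orientation, the same backward pass in plain NumPy: delta propagation `delta_src = (W^T @ delta_tgt) * f'(a_src)` followed by batch-averaged gradient steps. The decay handling assumes `learn_step_weight_decay(p, g, lr, decay)` applies a `p -= lr*g + decay*p`-style shrinkage, which is an inference from the name, not something this record confirms:

```python
import numpy as np

def backward_np(W, b, src_act, src_dact, tgt_delta, lr=0.01, decay=0.0):
    """NumPy sketch of the pass above; update sign/decay form is an assumption."""
    src_delta = (W.T @ tgt_delta) * src_dact   # cp.prod(..., 't', 'n'), gated by f'
    batch = src_act.shape[1]                   # columns are samples, as in the record
    dW = tgt_delta @ src_act.T                 # cp.prod(target.deltas, source.activations, 'n', 't')
    W -= (lr / batch) * dW + decay * W         # learn_step_weight_decay analogue (assumed form)
    b -= (lr / batch) * tgt_delta.sum(axis=1)  # bias step from cp.sum(deltas, 1)
    return src_delta
```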