Dataset preview: one record per extracted Python function, with columns
nwo, sha, path, language, identifier, parameters, argument_list,
return_statement, docstring, docstring_summary, docstring_tokens (list),
function, function_tokens (list), url.

nwo: miyosuda/TensorFlowAndroidMNIST
sha: 7b5a4603d2780a8a2834575706e9001977524007
path: jni-build/jni/include/tensorflow/contrib/factorization/examples/mnist.py
language: python
identifier: run_training
parameters: ()
docstring: Train MNIST for a number of steps.
function:
```python
def run_training():
  """Train MNIST for a number of steps."""
  # Get the sets of images and labels for training, validation, and
  # test on MNIST.
  train_dir = tempfile.mkdtemp()
  data_sets = input_data.read_data_sets(train_dir, FLAGS.fake_data)
  # Tell TensorFlow that the model will be built into the default Graph.
  with tf.Graph().as_default():
    # Generate placeholders for the images and labels.
    images_placeholder, labels_placeholder = placeholder_inputs()
    # Build a Graph that computes predictions from the inference model.
    logits, clustering_loss, kmeans_training_op = inference(images_placeholder,
                                                            FLAGS.num_clusters,
                                                            FLAGS.hidden1,
                                                            FLAGS.hidden2)
    # Add to the Graph the Ops for loss calculation.
    loss = mnist.loss(logits, labels_placeholder)
    # Add to the Graph the Ops that calculate and apply gradients.
    train_op = tf.group(mnist.training(loss, FLAGS.learning_rate),
                        kmeans_training_op)
    # Add the Op to compare the logits to the labels during evaluation.
    eval_correct = mnist.evaluation(logits, labels_placeholder)
    # Add the variable initializer Op.
    init = tf.initialize_all_variables()
    # Create a session for running Ops on the Graph.
    sess = tf.Session()
    feed_dict = fill_feed_dict(data_sets.train,
                               images_placeholder,
                               labels_placeholder,
                               batch_size=5000)
    # Run the Op to initialize the variables.
    sess.run(init, feed_dict=feed_dict)
    # Start the training loop.
    max_test_prec = 0
    for step in xrange(FLAGS.max_steps):
      start_time = time.time()
      # Fill a feed dictionary with the actual set of images and labels
      # for this particular training step.
      feed_dict = fill_feed_dict(data_sets.train,
                                 images_placeholder,
                                 labels_placeholder,
                                 FLAGS.batch_size)
      # Run one step of the model.
      _, loss_value, clustering_loss_value = sess.run([train_op,
                                                       loss,
                                                       clustering_loss],
                                                      feed_dict=feed_dict)
      duration = time.time() - start_time
      if step % 100 == 0:
        # Print status to stdout.
        print('Step %d: loss = %.2f, clustering_loss = %.2f (%.3f sec)' % (
            step, loss_value, clustering_loss_value, duration))
      # Save a checkpoint and evaluate the model periodically.
      if (step + 1) % 1000 == 0 or (step + 1) == FLAGS.max_steps:
        # Evaluate against the training set.
        print('Training Data Eval:')
        do_eval(sess, eval_correct, images_placeholder, labels_placeholder,
                data_sets.train)
        # Evaluate against the validation set.
        print('Validation Data Eval:')
        do_eval(sess, eval_correct, images_placeholder, labels_placeholder,
                data_sets.validation)
        # Evaluate against the test set.
        print('Test Data Eval:')
        test_prec = do_eval(sess, eval_correct, images_placeholder,
                            labels_placeholder, data_sets.test)
        max_test_prec = max(max_test_prec, test_prec)
  return max_test_prec
```
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/factorization/examples/mnist.py#L192-L281

nwo: mickem/nscp
sha: 79f89fdbb6da63f91bc9dedb7aea202fe938f237
path: scripts/python/lib/google/protobuf/service_reflection.py
language: python
identifier: _ServiceBuilder.__init__
parameters: (self, service_descriptor)
docstring: Initializes an instance of the service class builder.
  Args:
    service_descriptor: ServiceDescriptor to use when constructing the
      service class.
function:
```python
def __init__(self, service_descriptor):
  """Initializes an instance of the service class builder.

  Args:
    service_descriptor: ServiceDescriptor to use when constructing the
      service class.
  """
  self.descriptor = service_descriptor
```
https://github.com/mickem/nscp/blob/79f89fdbb6da63f91bc9dedb7aea202fe938f237/scripts/python/lib/google/protobuf/service_reflection.py#L124-L131

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py
language: python
identifier: CharDistributionAnalysis.reset
parameters: (self)
docstring: reset analyser, clear any state
function:
```python
def reset(self):
    """reset analyser, clear any state"""
    # If this flag is set to True, detection is done and conclusion has
    # been made
    self._done = False
    self._total_chars = 0  # Total characters encountered
    # The number of characters whose frequency order is less than 512
    self._freq_chars = 0
```
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py#L121-L135

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: wx/lib/masked/maskededit.py
language: python
identifier: __test._onCheck1
parameters: (self, event)
docstring: Set required value on/off
function:
```python
def _onCheck1(self, event):
    """ Set required value on/off """
    value = event.IsChecked()
    if value:
        for control in self.editList:
            control.SetCtrlParameters(emptyInvalid=True)
            control.Refresh()
    else:
        for control in self.editList:
            control.SetCtrlParameters(emptyInvalid=False)
            control.Refresh()
    self.panel.Refresh()
```
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/masked/maskededit.py#L6637-L6648

nwo: Xilinx/Vitis-AI
sha: fc74d404563d9951b57245443c73bef389f3657f
path: tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/def_function.py
language: python
identifier: run_functions_eagerly
parameters: (run_eagerly)
docstring: Enables / disables eager execution of `tf.function`s.
  After calling `tf.config.experimental_run_functions_eagerly(True)` all
  invocations of tf.function will run eagerly instead of running through a graph
  function.
  This can be useful for debugging or profiling.
  Similarly, calling `tf.config.experimental_run_functions_eagerly(False)` will
  revert the behavior of all functions to graph functions.
  Args:
    run_eagerly: Boolean. Whether to run functions eagerly.
function:
```python
def run_functions_eagerly(run_eagerly):
  """Enables / disables eager execution of `tf.function`s.

  After calling `tf.config.experimental_run_functions_eagerly(True)` all
  invocations of tf.function will run eagerly instead of running through a graph
  function.

  This can be useful for debugging or profiling.

  Similarly, calling `tf.config.experimental_run_functions_eagerly(False)` will
  revert the behavior of all functions to graph functions.

  Args:
    run_eagerly: Boolean. Whether to run functions eagerly.
  """
  global RUN_FUNCTIONS_EAGERLY
  RUN_FUNCTIONS_EAGERLY = bool(run_eagerly)
```
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/def_function.py#L219-L235
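
The module-level flag above is what backs the public toggle. A minimal usage sketch, assuming TF 2.x where this was exposed as `tf.config.experimental_run_functions_eagerly` (renamed to `tf.config.run_functions_eagerly` in later releases):

```python
import tensorflow as tf

@tf.function
def square(x):
    return x * x  # traced into a graph unless eager execution is forced

tf.config.experimental_run_functions_eagerly(True)   # debug as plain Python
print(square(tf.constant(3.0)))                      # tf.Tensor(9.0, ...)
tf.config.experimental_run_functions_eagerly(False)  # back to graph functions
```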

nwo: catboost/catboost
sha: 167f64f237114a4d10b2b4ee42adb4569137debe
path: contrib/python/tornado/tornado-6/tornado/gen.py
language: python
identifier: sleep
parameters: (duration: float)
return_statement: return f
docstring: Return a `.Future` that resolves after the given number of seconds.
  When used with ``yield`` in a coroutine, this is a non-blocking
  analogue to `time.sleep` (which should not be used in coroutines
  because it is blocking)::
      yield gen.sleep(0.5)
  Note that calling this function on its own does nothing; you must
  wait on the `.Future` it returns (usually by yielding it).
  .. versionadded:: 4.1
function:
```python
def sleep(duration: float) -> "Future[None]":
    """Return a `.Future` that resolves after the given number of seconds.

    When used with ``yield`` in a coroutine, this is a non-blocking
    analogue to `time.sleep` (which should not be used in coroutines
    because it is blocking)::

        yield gen.sleep(0.5)

    Note that calling this function on its own does nothing; you must
    wait on the `.Future` it returns (usually by yielding it).

    .. versionadded:: 4.1
    """
    f = _create_future()
    IOLoop.current().call_later(
        duration, lambda: future_set_result_unless_cancelled(f, None)
    )
    return f
```
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/tornado/tornado-6/tornado/gen.py#L650-L668
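
A small usage sketch of `gen.sleep` (assuming Tornado 5+/6, where the returned `Future` can also be `await`ed in a native coroutine):

```python
from tornado import gen, ioloop

async def poll():
    for i in range(3):
        print("tick", i)
        await gen.sleep(0.5)  # non-blocking: the IOLoop stays responsive

ioloop.IOLoop.current().run_sync(poll)
```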

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/plotting/_matplotlib/converter.py
language: python
identifier: TimeSeries_DateFormatter._set_default_format
parameters: (self, vmin, vmax)
return_statement: return self.formatdict
docstring: Returns the default ticks spacing.
function:
```python
def _set_default_format(self, vmin, vmax):
    "Returns the default ticks spacing."
    if self.plot_obj.date_axis_info is None:
        self.plot_obj.date_axis_info = self.finder(vmin, vmax, self.freq)
    info = self.plot_obj.date_axis_info
    if self.isminor:
        format = np.compress(info["min"] & np.logical_not(info["maj"]), info)
    else:
        format = np.compress(info["maj"], info)
    self.formatdict = {x: f for (x, _, _, f) in format}
    return self.formatdict
```
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/plotting/_matplotlib/converter.py#L1065-L1077

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/windows/Lib/importlib/_bootstrap_external.py
language: python
identifier: FileFinder.path_hook
parameters: (cls, *loader_details)
return_statement: return path_hook_for_FileFinder
docstring: A class method which returns a closure to use on sys.path_hook
  which will return an instance using the specified loaders and the path
  called on the closure.
  If the path called on the closure is not a directory, ImportError is
  raised.
function:
```python
def path_hook(cls, *loader_details):
    """A class method which returns a closure to use on sys.path_hook
    which will return an instance using the specified loaders and the path
    called on the closure.

    If the path called on the closure is not a directory, ImportError is
    raised.

    """
    def path_hook_for_FileFinder(path):
        """Path hook for importlib.machinery.FileFinder."""
        if not _path_isdir(path):
            raise ImportError('only directories are supported', path=path)
        return cls(path, *loader_details)

    return path_hook_for_FileFinder
```
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/importlib/_bootstrap_external.py#L1436-L1451
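
A hedged sketch of how such a hook is registered: `FileFinder.path_hook` takes `(loader, suffixes)` pairs, and the returned closure goes onto `sys.path_hooks`:

```python
import sys
from importlib.machinery import FileFinder, SourceFileLoader

# Build a path hook that recognizes only .py source files.
hook = FileFinder.path_hook((SourceFileLoader, ['.py']))
sys.path_hooks.insert(0, hook)
sys.path_importer_cache.clear()  # forget finders built with the old hooks
```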

nwo: natanielruiz/android-yolo
sha: 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
path: jni-build/jni/include/tensorflow/contrib/learn/python/learn/dataframe/transform.py
language: python
identifier: Transform._check_output_tensors
parameters: (self, output_tensors)
docstring: Helper for `build(...)`; verifies the output of `_build_transform`.
  Args:
    output_tensors: value returned by a call to `_build_transform`.
  Raises:
    TypeError: `transform_output` is not a list.
    ValueError: `transform_output` does not match `output_names`.
function:
```python
def _check_output_tensors(self, output_tensors):
  """Helper for `build(...)`; verifies the output of `_build_transform`.

  Args:
    output_tensors: value returned by a call to `_build_transform`.

  Raises:
    TypeError: `transform_output` is not a list.
    ValueError: `transform_output` does not match `output_names`.
  """
  if not isinstance(output_tensors, self.return_type):
    raise TypeError(
        "Expected a NamedTuple of Tensors with elements %s; got %s." %
        (self.output_names, type(output_tensors).__name__))
```
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/learn/python/learn/dataframe/transform.py#L188-L201
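
The same isinstance-against-a-NamedTuple validation pattern, as a self-contained sketch (the `Outputs` type and `check_outputs` helper here are hypothetical, not from the file above):

```python
import collections

Outputs = collections.namedtuple("Outputs", ["dense", "sparse"])

def check_outputs(result, expected_type=Outputs):
    # Reject anything that is not the declared NamedTuple type.
    if not isinstance(result, expected_type):
        raise TypeError("Expected a NamedTuple with elements %s; got %s." %
                        (expected_type._fields, type(result).__name__))

check_outputs(Outputs(dense=1.0, sparse=None))  # passes silently
```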

nwo: hanpfei/chromium-net
sha: 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
path: third_party/catapult/telemetry/telemetry/internal/backends/chrome_inspector/inspector_backend.py
language: python
identifier: InspectorBackend._ConvertExceptionFromInspectorWebsocket
parameters: (self, error)
docstring: Converts an Exception from inspector_websocket.
  This method always raises a Telemetry exception. It appends debugging
  information. The exact exception raised depends on |error|.
  Args:
    error: An instance of socket.error or websocket.WebSocketException.
  Raises:
    exceptions.TimeoutException: A timeout occurred.
    exceptions.DevtoolsTargetCrashException: On any other error, the most
      likely explanation is that the devtool's target crashed.
function:
```python
def _ConvertExceptionFromInspectorWebsocket(self, error):
  """Converts an Exception from inspector_websocket.

  This method always raises a Telemetry exception. It appends debugging
  information. The exact exception raised depends on |error|.

  Args:
    error: An instance of socket.error or websocket.WebSocketException.

  Raises:
    exceptions.TimeoutException: A timeout occurred.
    exceptions.DevtoolsTargetCrashException: On any other error, the most
      likely explanation is that the devtool's target crashed.
  """
  if isinstance(error, websocket.WebSocketTimeoutException):
    new_error = exceptions.TimeoutException()
    new_error.AddDebuggingMessage(exceptions.AppCrashException(
        self.app, 'The app is probably crashed:\n'))
  else:
    new_error = exceptions.DevtoolsTargetCrashException(self.app)
  original_error_msg = 'Original exception:\n' + str(error)
  new_error.AddDebuggingMessage(original_error_msg)
  self._AddDebuggingInformation(new_error)
  raise new_error, None, sys.exc_info()[2]  # Python 2 three-argument raise.
```
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/telemetry/internal/backends/chrome_inspector/inspector_backend.py#L368-L392

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/_vendor/pyparsing.py
language: python
identifier: withAttribute
parameters: (*args, **attrDict)
return_statement: return pa
docstring: Helper to create a validating parse action to be used with start tags created
  with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
  with a required attribute value, to avoid false matches on common tags such as
  C{<TD>} or C{<DIV>}.
  Call C{withAttribute} with a series of attribute names and values. Specify the list
  of filter attributes names and values as:
    - keyword arguments, as in C{(align="right")}, or
    - as an explicit dict with C{**} operator, when an attribute name is also a Python
      reserved word, as in C{**{"class":"Customer", "align":"right"}}
    - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
  For attribute names with a namespace prefix, you must use the second form. Attribute
  names are matched insensitive to upper/lower case.
  If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
  To verify that the attribute exists, but without specifying a value, pass
  C{withAttribute.ANY_VALUE} as the value.
  (The Example section is reproduced in the function body below.)
function:
```python
def withAttribute(*args, **attrDict):
    """
    Helper to create a validating parse action to be used with start tags created
    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
    with a required attribute value, to avoid false matches on common tags such as
    C{<TD>} or C{<DIV>}.

    Call C{withAttribute} with a series of attribute names and values. Specify the list
    of filter attributes names and values as:
     - keyword arguments, as in C{(align="right")}, or
     - as an explicit dict with C{**} operator, when an attribute name is also a Python
       reserved word, as in C{**{"class":"Customer", "align":"right"}}
     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
    For attribute names with a namespace prefix, you must use the second form. Attribute
    names are matched insensitive to upper/lower case.

    If just testing for C{class} (with or without a namespace), use C{L{withClass}}.

    To verify that the attribute exists, but without specifying a value, pass
    C{withAttribute.ANY_VALUE} as the value.

    Example::
        html = '''
            <div>
            Some text
            <div type="grid">1 4 0 1 0</div>
            <div type="graph">1,3 2,3 1,1</div>
            <div>this has no type</div>
            </div>
        '''
        div,div_end = makeHTMLTags("div")

        # only match div tag having a type attribute with value "grid"
        div_grid = div().setParseAction(withAttribute(type="grid"))
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)

        # construct a match with any div tag having a type attribute, regardless of the value
        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)

    prints::
        1 4 0 1 0
        1 4 0 1 0
        1,3 2,3 1,1
    """
    if args:
        attrs = args[:]
    else:
        attrs = attrDict.items()
    attrs = [(k, v) for k, v in attrs]
    def pa(s, l, tokens):
        for attrName, attrValue in attrs:
            if attrName not in tokens:
                raise ParseException(s, l, "no matching attribute " + attrName)
            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
                raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" %
                                     (attrName, tokens[attrName], attrValue))
    return pa
```
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/_vendor/pyparsing.py#L9863-L9987

nwo: nci/drishti
sha: 89cd8b740239c5b2c8222dffd4e27432fde170a1
path: bin/assets/scripts/unet++/unet_collection/swin.py
language: python
identifier: swin_unet_2d
parameters: (input_size, n_labels, filter_num_begin, depth, stack_num_down, stack_num_up, patch_size, num_heads, window_size, num_mlp, shift_window, output_activation='sigmoid')
return_statement: return model
docstring: parameter guide (reproduced in the function body below)
function:
```python
def swin_unet_2d(input_size, n_labels, filter_num_begin, depth,
                 stack_num_down, stack_num_up, patch_size, num_heads,
                 window_size, num_mlp, shift_window,
                 output_activation='sigmoid'):
    '''
    filter_num_begin = 128     # number of channels in the first downsampling block;
                               # it is also the number of embedded dimensions
    depth = 4                  # the depth of SwinUNET; depth=4 means three down/upsampling levels and a bottom level
    stack_num_down = 2         # number of Swin Transformers per downsampling level
    stack_num_up = 2           # number of Swin Transformers per upsampling level
    patch_size = (4, 4)        # Extract 2-by-2 patches from the input image. Height and width of the patch must be equal.
    num_heads = [4, 8, 8, 8]   # number of attention heads per down/upsampling level
    window_size = [4, 2, 2, 2] # the size of attention window per down/upsampling level
    num_mlp = 512              # number of MLP nodes within the Transformer
    shift_window = True        # Apply window shifting, i.e., Swin-MSA
    '''
    inputs = Input(input_size)
    X = inputs
    # Down/upsampling backbone of the Swin-UNET.
    X = swin_unet_2d_base(X, filter_num_begin, depth,
                          stack_num_down, stack_num_up,
                          patch_size, num_heads, window_size, num_mlp,
                          shift_window=shift_window, name='swin_unet')
    # 1-by-1 convolution produces the per-pixel label map.
    output = Conv2D(filters=n_labels, kernel_size=1,
                    padding='same', activation=output_activation)(X)
    model = Model(inputs, output)
    return model
```
https://github.com/nci/drishti/blob/89cd8b740239c5b2c8222dffd4e27432fde170a1/bin/assets/scripts/unet++/unet_collection/swin.py#L161-L200
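
A hypothetical call matching the docstring's parameter guide (assumes `swin_unet_2d_base`, `Input`, `Conv2D`, and `Model` from the same keras_unet_collection-style module are importable):

```python
model = swin_unet_2d(input_size=(128, 128, 3), n_labels=2,
                     filter_num_begin=128, depth=4,
                     stack_num_down=2, stack_num_up=2,
                     patch_size=(4, 4), num_heads=[4, 8, 8, 8],
                     window_size=[4, 2, 2, 2], num_mlp=512,
                     shift_window=True, output_activation='softmax')
model.compile(optimizer='adam', loss='categorical_crossentropy')
```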

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/msw/richtext.py
language: python
identifier: RichTextPrinting.GetRichTextBufferPrinting
parameters: (*args, **kwargs)
return_statement: return _richtext.RichTextPrinting_GetRichTextBufferPrinting(*args, **kwargs)
docstring: GetRichTextBufferPrinting(self) -> RichTextBuffer
function:
```python
def GetRichTextBufferPrinting(*args, **kwargs):
    """GetRichTextBufferPrinting(self) -> RichTextBuffer"""
    return _richtext.RichTextPrinting_GetRichTextBufferPrinting(*args, **kwargs)
```
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/richtext.py#L4580-L4582

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/msw/aui.py
language: python
identifier: AuiToolBarEvent.GetItemRect
parameters: (*args, **kwargs)
return_statement: return _aui.AuiToolBarEvent_GetItemRect(*args, **kwargs)
docstring: GetItemRect(self) -> Rect
function:
```python
def GetItemRect(*args, **kwargs):
    """GetItemRect(self) -> Rect"""
    return _aui.AuiToolBarEvent_GetItemRect(*args, **kwargs)
```
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/aui.py#L1697-L1699

nwo: klzgrad/naiveproxy
sha: ed2c513637c77b18721fe428d7ed395b4d284c83
path: src/build/android/apk_operations.py
language: python
identifier: RunForBundle
parameters: (output_directory, bundle_path, bundle_apks_path, additional_apk_paths, aapt2_path, keystore_path, keystore_password, keystore_alias, package_name, command_line_flags_file, proguard_mapping_path, target_cpu, system_image_locales, default_modules)
docstring: Entry point for generated app bundle wrapper scripts.
  (Argument descriptions are reproduced in the function body below.)
function:
```python
def RunForBundle(output_directory, bundle_path, bundle_apks_path,
                 additional_apk_paths, aapt2_path, keystore_path,
                 keystore_password, keystore_alias, package_name,
                 command_line_flags_file, proguard_mapping_path, target_cpu,
                 system_image_locales, default_modules):
  """Entry point for generated app bundle wrapper scripts.

  Args:
    output_dir: Chromium output directory path.
    bundle_path: Input bundle path.
    bundle_apks_path: Output bundle .apks archive path.
    additional_apk_paths: Additional APKs to install prior to bundle install.
    aapt2_path: Aapt2 tool path.
    keystore_path: Keystore file path.
    keystore_password: Keystore password.
    keystore_alias: Signing key name alias in keystore file.
    package_name: Application's package name.
    command_line_flags_file: Optional. Name of an on-device file that will be
      used to store command-line flags for this bundle.
    proguard_mapping_path: Input path to the Proguard mapping file, used to
      deobfuscate Java stack traces.
    target_cpu: Chromium target CPU name, used by the 'gdb' command.
    system_image_locales: List of Chromium locales that should be included in
      system image APKs.
    default_modules: List of modules that are installed in addition to those
      given by the '-m' switch.
  """
  constants.SetOutputDirectory(output_directory)
  devil_chromium.Initialize(output_directory=output_directory)
  bundle_generation_info = BundleGenerationInfo(
      bundle_path=bundle_path,
      bundle_apks_path=bundle_apks_path,
      aapt2_path=aapt2_path,
      keystore_path=keystore_path,
      keystore_password=keystore_password,
      keystore_alias=keystore_alias,
      system_image_locales=system_image_locales)
  _InstallCommand.default_modules = default_modules

  parser = argparse.ArgumentParser()
  parser.set_defaults(
      package_name=package_name,
      command_line_flags_file=command_line_flags_file,
      proguard_mapping_path=proguard_mapping_path,
      target_cpu=target_cpu)
  _RunInternal(
      parser,
      output_directory=output_directory,
      additional_apk_paths=additional_apk_paths,
      bundle_generation_info=bundle_generation_info)
```
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/build/android/apk_operations.py#L1886-L1935

nwo: kevin-ssy/Optical-Flow-Guided-Feature
sha: 07d4501a29002ee7821c38c1820e4a64c1acf6e8
path: lib/caffe-action/scripts/cpp_lint.py
language: python
identifier: CleanseComments
parameters: (line)
return_statement: return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
docstring: Removes //-comments and single-line C-style /* */ comments.
  Args:
    line: A line of C++ source.
  Returns:
    The line with single-line comments removed.
function:
```python
def CleanseComments(line):
  """Removes //-comments and single-line C-style /* */ comments.

  Args:
    line: A line of C++ source.

  Returns:
    The line with single-line comments removed.
  """
  commentpos = line.find('//')
  if commentpos != -1 and not IsCppString(line[:commentpos]):
    line = line[:commentpos].rstrip()
  # get rid of /* ... */
  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
```
https://github.com/kevin-ssy/Optical-Flow-Guided-Feature/blob/07d4501a29002ee7821c38c1820e4a64c1acf6e8/lib/caffe-action/scripts/cpp_lint.py#L1167-L1180
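
`CleanseComments` leans on two module-level helpers from cpp_lint; a self-contained sketch with simplified stand-ins (the regex and `IsCppString` below are assumptions approximating the described behavior, not the originals):

```python
import re

# Simplified stand-in for the module-level pattern used above.
_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(r'/\*.*?\*/')

def IsCppString(prefix):
    # Crude stand-in: an odd number of quotes means '//' sits inside a string.
    return prefix.count('"') % 2 == 1

print(CleanseComments('int x = 1;  // set x'))   # -> 'int x = 1;'
print(CleanseComments('int y = /* two */ 2;'))   # -> 'int y =  2;'
```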

nwo: Xilinx/Vitis-AI
sha: fc74d404563d9951b57245443c73bef389f3657f
path: tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/internal/tensor_encoding/core/encoding_stage.py
language: python
identifier: _tf_style_get_params
parameters: (get_params_fn)
return_statement: return actual_get_params_fn
docstring: Method decorator for `tf_style_encoding_stage`.
function:
```python
def _tf_style_get_params(get_params_fn):
  """Method decorator for `tf_style_encoding_stage`."""
  def actual_get_params_fn(self, name=None):
    """Modified `get_params` method."""
    with tf.compat.v1.name_scope(name, self.name + GET_PARAMS_SCOPE_SUFFIX):
      return get_params_fn(self, name=name)
  return actual_get_params_fn
```
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/internal/tensor_encoding/core/encoding_stage.py#L656-L664

nwo: MythTV/mythtv
sha: d282a209cb8be85d036f85a62a8ec971b67d45f4
path: mythtv/contrib/imports/mirobridge/mirobridge.py
language: python
identifier: set_del_datatime
parameters: (rec_time)
return_statement: return rec_time.astimezone(rec_time.UTCTZ()).strftime("%Y-%m-%d %H:%M:%S")
docstring: Set the SQL datetime so that the delRecorded and delOldrecorded
  methods use UTC datetime values.
  return rec_time datetime string
function:
```python
def set_del_datatime(rec_time):
    ''' Set the SQL datetime so that the delRecorded and delOldrecorded
        methods use UTC datetime values.
        return rec_time datetime string
    '''
    return rec_time.astimezone(
        rec_time.UTCTZ()).strftime("%Y-%m-%d %H:%M:%S")
```
https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/contrib/imports/mirobridge/mirobridge.py#L728-L735
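
`UTCTZ()` is a MythTV-specific tzinfo helper; the equivalent stdlib pattern uses `datetime.timezone.utc`:

```python
from datetime import datetime, timezone

local = datetime.now().astimezone()  # timezone-aware local time
print(local.astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"))
```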

nwo: krishauser/Klampt
sha: 972cc83ea5befac3f653c1ba20f80155768ad519
path: Python/python2_version/klampt/robotsim.py
language: python
identifier: RobotModel.getComVelocity
parameters: (self)
return_statement: return _robotsim.RobotModel_getComVelocity(self)
docstring: getComVelocity(RobotModel self)
  Returns the 3D velocity of the center of mass at the current config / velocity.
function:
```python
def getComVelocity(self):
    """
    getComVelocity(RobotModel self)

    Returns the 3D velocity of the center of mass at the current config / velocity.
    """
    return _robotsim.RobotModel_getComVelocity(self)
```
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/robotsim.py#L4830-L4839

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/tix.py
language: python
identifier: CheckList.setstatus
parameters: (self, entrypath, mode='on')
docstring: Sets the status of entryPath to be status. A bitmap will be
  displayed next to the entry its status is on, off or default.
function:
```python
def setstatus(self, entrypath, mode='on'):
    '''Sets the status of entryPath to be status. A bitmap will be
    displayed next to the entry its status is on, off or default.'''
    self.tk.call(self._w, 'setstatus', entrypath, mode)
```
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/tix.py#L1591-L1594

nwo: envoyproxy/envoy-wasm
sha: ab5d9381fdf92a1efa0b87cff80036b5b3e81198
path: tools/protodoc/protodoc.py
language: python
identifier: FileCrossRefLabel
parameters: (msg_name)
return_statement: return 'envoy_api_file_%s' % msg_name
docstring: File cross reference label.
function:
```python
def FileCrossRefLabel(msg_name):
  """File cross reference label."""
  return 'envoy_api_file_%s' % msg_name
```
https://github.com/envoyproxy/envoy-wasm/blob/ab5d9381fdf92a1efa0b87cff80036b5b3e81198/tools/protodoc/protodoc.py#L381-L383

nwo: mantidproject/mantid
sha: 03deeb89254ec4289edb8771e0188c2090a02f32
path: Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py
language: python
identifier: _reportPeakDiagnostics
parameters: (reportWS, peakIntensityWS, diagnosticsWS)
return_statement: return _reportDiagnostics(reportWS, peakIntensityWS, diagnosticsWS, 'ElasticIntensity', 'IntensityDiagnosed')
docstring: Return masked spectrum numbers and add elastic peak diagnostics information to a report workspace.
function:
```python
def _reportPeakDiagnostics(reportWS, peakIntensityWS, diagnosticsWS):
    """Return masked spectrum numbers and add elastic peak diagnostics information to a report workspace."""
    return _reportDiagnostics(reportWS, peakIntensityWS, diagnosticsWS, 'ElasticIntensity', 'IntensityDiagnosed')
```
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py#L286-L288

nwo: deepmind/open_spiel
sha: 4ca53bea32bb2875c7385d215424048ae92f78c8
path: open_spiel/python/algorithms/fictitious_play.py
language: python
identifier: XFPSolver.sample_episodes
parameters: (self, policies, num)
return_statement: return totals / num
docstring: Samples episodes and averages their returns.
  Args:
    policies: A list of policies representing the policies executed by each
      player.
    num: Number of episodes to execute to estimate average return of policies.
  Returns:
    Average episode return over num episodes.
function:
```python
def sample_episodes(self, policies, num):
  """Samples episodes and averages their returns.

  Args:
    policies: A list of policies representing the policies executed by each
      player.
    num: Number of episodes to execute to estimate average return of policies.

  Returns:
    Average episode return over num episodes.
  """
  totals = np.zeros(self._num_players)
  for _ in range(num):
    totals += self.sample_episode(self._game.new_initial_state(), policies)
  return totals / num
```
https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/algorithms/fictitious_play.py#L281-L296
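
The accumulation above reduces to a running per-player sum divided once at the end; a toy sketch with made-up two-player returns:

```python
import numpy as np

totals = np.zeros(2)
for returns in ([1.0, -1.0], [0.0, 0.0], [1.0, -1.0]):
    totals += np.asarray(returns)  # elementwise per-player accumulation
print(totals / 3)  # -> [ 0.6667 -0.6667] (approximately)
```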

nwo: apple/turicreate
sha: cce55aa5311300e3ce6af93cb45ba791fd1bdf49
path: deps/src/libxml2-2.9.1/python/libxml2.py
language: python
identifier: parserCtxt.htmlParseChunk
parameters: (self, chunk, size, terminate)
return_statement: return ret
docstring: Parse a Chunk of memory
function:
```python
def htmlParseChunk(self, chunk, size, terminate):
    """Parse a Chunk of memory """
    ret = libxml2mod.htmlParseChunk(self._o, chunk, size, terminate)
    return ret
```
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L5005-L5008

nwo: tensorflow/tensorflow
sha: 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
path: tensorflow/python/framework/ops.py
language: python
identifier: internal_convert_to_tensor_or_composite
parameters: (value, dtype=None, name=None, as_ref=False)
docstring: Converts the given object to a `Tensor` or `CompositeTensor`.
  (Argument, return, and error descriptions are reproduced in the function body below.)
function:
```python
def internal_convert_to_tensor_or_composite(value,
                                            dtype=None,
                                            name=None,
                                            as_ref=False):
  """Converts the given object to a `Tensor` or `CompositeTensor`.

  If `value` is a `CompositeTensor` it is returned unmodified. Otherwise, it
  is converted to a `Tensor` using `convert_to_tensor()`.

  Args:
    value: A `CompositeTensor`, or an object that can be consumed by
      `convert_to_tensor()`.
    dtype: (Optional.) The required `DType` of the returned `Tensor` or
      `CompositeTensor`.
    name: (Optional.) A name to use if a new `Tensor` is created.
    as_ref: True if the caller wants the results as ref tensors.

  Returns:
    A `Tensor` or `CompositeTensor`, based on `value`.

  Raises:
    ValueError: If `dtype` does not match the element type of `value`.
  """
  if isinstance(value, composite_tensor.CompositeTensor):
    value_dtype = getattr(value, "dtype", None)
    if dtype and not dtypes.as_dtype(dtype).is_compatible_with(value_dtype):
      raise ValueError(f"Tensor conversion dtype mismatch. "
                       f"Requested dtype is {dtypes.as_dtype(dtype).name}, "
                       f"Tensor has dtype {value.dtype.name}: {value!r}")
    return value
  else:
    return convert_to_tensor(
        value,
        dtype=dtype,
        name=name,
        as_ref=as_ref,
        accepted_result_types=(Tensor, composite_tensor.CompositeTensor))
```
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/framework/ops.py#L1816-L1852

nwo: wxWidgets/wxPython-Classic
sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/osx_cocoa/_misc.py
language: python
identifier: DateTime.__ge__
parameters: (*args, **kwargs)
return_statement: return _misc_.DateTime___ge__(*args, **kwargs)
docstring: __ge__(self, DateTime other) -> bool
function:
```python
def __ge__(*args, **kwargs):
    """__ge__(self, DateTime other) -> bool"""
    return _misc_.DateTime___ge__(*args, **kwargs)
```
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_misc.py#L4118-L4120

nwo: Genius-x/genius-x
sha: 9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0
path: cocos2d/tools/bindings-generator/clang/cindex.py
language: python
identifier: Type.get_size
parameters: (self)
return_statement: return conf.lib.clang_Type_getSizeOf(self)
docstring: Retrieve the size of the record.
function:
```python
def get_size(self):
    """
    Retrieve the size of the record.
    """
    return conf.lib.clang_Type_getSizeOf(self)
```
https://github.com/Genius-x/genius-x/blob/9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0/cocos2d/tools/bindings-generator/clang/cindex.py#L1806-L1810

nwo: tensorflow/tensorflow
sha: 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
path: tensorflow/python/framework/ops.py
language: python
identifier: device
parameters: (device_name_or_function)
docstring: Wrapper for `Graph.device()` using the default graph.
  See `tf.Graph.device` for more details.
  Args:
    device_name_or_function: The device name or function to use in the context.
  Returns:
    A context manager that specifies the default device to use for newly
    created ops.
  Raises:
    RuntimeError: If eager execution is enabled and a function is passed in.
function:
```python
def device(device_name_or_function):
  """Wrapper for `Graph.device()` using the default graph.

  See `tf.Graph.device` for more details.

  Args:
    device_name_or_function: The device name or function to use in the context.

  Returns:
    A context manager that specifies the default device to use for newly
    created ops.

  Raises:
    RuntimeError: If eager execution is enabled and a function is passed in.
  """
  if context.executing_eagerly():
    if callable(device_name_or_function):
      raise RuntimeError(
          "tf.device does not support functions when eager execution "
          "is enabled.")
    return context.device(device_name_or_function)
  elif executing_eagerly_outside_functions():
    @tf_contextlib.contextmanager
    def combined(device_name_or_function):
      with get_default_graph().device(device_name_or_function):
        if not callable(device_name_or_function):
          with context.device(device_name_or_function):
            yield
        else:
          yield
    return combined(device_name_or_function)
  else:
    return get_default_graph().device(device_name_or_function)
```
|
[
"def",
"device",
"(",
"device_name_or_function",
")",
":",
"if",
"context",
".",
"executing_eagerly",
"(",
")",
":",
"if",
"callable",
"(",
"device_name_or_function",
")",
":",
"raise",
"RuntimeError",
"(",
"\"tf.device does not support functions when eager execution \"",
"\"is enabled.\"",
")",
"return",
"context",
".",
"device",
"(",
"device_name_or_function",
")",
"elif",
"executing_eagerly_outside_functions",
"(",
")",
":",
"@",
"tf_contextlib",
".",
"contextmanager",
"def",
"combined",
"(",
"device_name_or_function",
")",
":",
"with",
"get_default_graph",
"(",
")",
".",
"device",
"(",
"device_name_or_function",
")",
":",
"if",
"not",
"callable",
"(",
"device_name_or_function",
")",
":",
"with",
"context",
".",
"device",
"(",
"device_name_or_function",
")",
":",
"yield",
"else",
":",
"yield",
"return",
"combined",
"(",
"device_name_or_function",
")",
"else",
":",
"return",
"get_default_graph",
"(",
")",
".",
"device",
"(",
"device_name_or_function",
")"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/framework/ops.py#L5464-L5496
|
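A short usage sketch for the wrapper documented above, assuming a TensorFlow 2.x install; only a device string is passed, since the code itself rejects callables under eager execution:

import tensorflow as tf

# Pin op placement with a device string; a callable here would raise
# RuntimeError when eager execution is enabled.
with tf.device("/CPU:0"):
    a = tf.constant([[1.0, 2.0], [3.0, 4.0]])
    b = tf.matmul(a, a)
print(b.device)   # e.g. /job:localhost/replica:0/task:0/device:CPU:0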
||
fasiondog/hikyuu
|
842751aa25283f9fdafc6f560ea262f79e67a307
|
hikyuu/draw/drawplot/matplotlib_draw.py
|
python
|
getMinLocatorAndFormatter
|
(dates)
|
return month_loc, month_fm
|
获取显示分钟线时使用的Major Locator和Major Formatter
|
获取显示分钟线时使用的Major Locator和Major Formatter
|
[
"获取显示分钟线时使用的Major",
"Locator和Major",
"Formatter"
] |
def getMinLocatorAndFormatter(dates):
"""获取显示分钟线时使用的Major Locator和Major Formatter"""
sep = len(dates) / 5
loc = [
(
i, str(d)
if i % sep != 0 else "{}-{}-{} {}:{}".format(d.year, d.month, d.day, d.hour, d.minute)
) for i, d in enumerate(dates)
]
fixed_loc = [i for i in range(len(dates)) if i != 0 and i % sep == 0]
month_loc = FixedLocator(fixed_loc)
month_fm = FuncFormatter(StockFuncFormatter(dict(loc)))
return month_loc, month_fm
|
[
"def",
"getMinLocatorAndFormatter",
"(",
"dates",
")",
":",
"sep",
"=",
"len",
"(",
"dates",
")",
"/",
"5",
"loc",
"=",
"[",
"(",
"i",
",",
"str",
"(",
"d",
")",
"if",
"i",
"%",
"sep",
"!=",
"0",
"else",
"\"{}-{}-{} {}:{}\"",
".",
"format",
"(",
"d",
".",
"year",
",",
"d",
".",
"month",
",",
"d",
".",
"day",
",",
"d",
".",
"hour",
",",
"d",
".",
"minute",
")",
")",
"for",
"i",
",",
"d",
"in",
"enumerate",
"(",
"dates",
")",
"]",
"fixed_loc",
"=",
"[",
"i",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"dates",
")",
")",
"if",
"i",
"!=",
"0",
"and",
"i",
"%",
"sep",
"==",
"0",
"]",
"month_loc",
"=",
"FixedLocator",
"(",
"fixed_loc",
")",
"month_fm",
"=",
"FuncFormatter",
"(",
"StockFuncFormatter",
"(",
"dict",
"(",
"loc",
")",
")",
")",
"return",
"month_loc",
",",
"month_fm"
] |
https://github.com/fasiondog/hikyuu/blob/842751aa25283f9fdafc6f560ea262f79e67a307/hikyuu/draw/drawplot/matplotlib_draw.py#L140-L153
|
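Two notes on the function above: under Python 3, sep = len(dates) / 5 yields a float, so i % sep == 0 essentially never fires; the code reads as Python 2 era and wants len(dates) // 5. A self-contained sketch of the same FixedLocator/FuncFormatter pairing with integer division (the index-to-label map and sizes are invented):

from matplotlib.ticker import FixedLocator, FuncFormatter
import matplotlib.pyplot as plt

n = 100
labels = {i: "t%d" % i for i in range(n)}   # hypothetical index -> label map
sep = max(1, n // 5)                        # integer step, unlike the original "/"
ticks = [i for i in range(n) if i != 0 and i % sep == 0]

fig, ax = plt.subplots()
ax.plot(range(n), range(n))
ax.xaxis.set_major_locator(FixedLocator(ticks))
ax.xaxis.set_major_formatter(FuncFormatter(lambda x, pos: labels.get(int(x), "")))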
|
smilehao/xlua-framework
|
a03801538be2b0e92d39332d445b22caca1ef61f
|
ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/build/lib/google/protobuf/service_reflection.py
|
python
|
_ServiceBuilder._GenerateNonImplementedMethod
|
(self, method)
|
return lambda inst, rpc_controller, request, callback: (
self._NonImplementedMethod(method.name, rpc_controller, callback))
|
Generates and returns a method that can be set for a service method.
Args:
method: Descriptor of the service method for which a method is to be
generated.
Returns:
A method that can be added to the service class.
|
Generates and returns a method that can be set for a service method.
|
[
"Generates",
"and",
"returns",
"a",
"method",
"that",
"can",
"be",
"set",
"for",
"a",
"service",
"methods",
"."
] |
def _GenerateNonImplementedMethod(self, method):
"""Generates and returns a method that can be set for a service methods.
Args:
method: Descriptor of the service method for which a method is to be
generated.
Returns:
A method that can be added to the service class.
"""
return lambda inst, rpc_controller, request, callback: (
self._NonImplementedMethod(method.name, rpc_controller, callback))
|
[
"def",
"_GenerateNonImplementedMethod",
"(",
"self",
",",
"method",
")",
":",
"return",
"lambda",
"inst",
",",
"rpc_controller",
",",
"request",
",",
"callback",
":",
"(",
"self",
".",
"_NonImplementedMethod",
"(",
"method",
".",
"name",
",",
"rpc_controller",
",",
"callback",
")",
")"
] |
https://github.com/smilehao/xlua-framework/blob/a03801538be2b0e92d39332d445b22caca1ef61f/ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/build/lib/google/protobuf/service_reflection.py#L205-L216
|
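The generator above exists so that each produced method closes over its own descriptor rather than a shared loop variable. The same late-binding trap and its fix, in a stand-alone sketch (class and method names are hypothetical):

class _ServiceStub:
    def _not_implemented(self, name):
        raise NotImplementedError("Method %s not implemented." % name)

def _make_stub_method(method_name):
    # The default argument pins method_name at definition time, the same
    # job the `method` parameter does in _GenerateNonImplementedMethod.
    return lambda self, request, name=method_name: self._not_implemented(name)

for name in ("Search", "Fetch"):
    setattr(_ServiceStub, name, _make_stub_method(name))

stub = _ServiceStub()
# stub.Search(request=None)   # raises NotImplementedError: Method Search ...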
|
apple/swift-clang
|
d7403439fc6641751840b723e7165fb02f52db95
|
bindings/python/clang/cindex.py
|
python
|
Cursor.get_arguments
|
(self)
|
Return an iterator for accessing the arguments of this cursor.
|
Return an iterator for accessing the arguments of this cursor.
|
[
"Return",
"an",
"iterator",
"for",
"accessing",
"the",
"arguments",
"of",
"this",
"cursor",
"."
] |
def get_arguments(self):
"""Return an iterator for accessing the arguments of this cursor."""
num_args = conf.lib.clang_Cursor_getNumArguments(self)
for i in range(0, num_args):
yield conf.lib.clang_Cursor_getArgument(self, i)
|
[
"def",
"get_arguments",
"(",
"self",
")",
":",
"num_args",
"=",
"conf",
".",
"lib",
".",
"clang_Cursor_getNumArguments",
"(",
"self",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"num_args",
")",
":",
"yield",
"conf",
".",
"lib",
".",
"clang_Cursor_getArgument",
"(",
"self",
",",
"i",
")"
] |
https://github.com/apple/swift-clang/blob/d7403439fc6641751840b723e7165fb02f52db95/bindings/python/clang/cindex.py#L1800-L1804
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_core.py
|
python
|
SizerFlags.Bottom
|
(*args, **kwargs)
|
return _core_.SizerFlags_Bottom(*args, **kwargs)
|
Bottom(self) -> SizerFlags
Aligns the object to the bottom of the available space, a shortcut for
calling Align(wx.ALIGN_BOTTOM)
|
Bottom(self) -> SizerFlags
|
[
"Bottom",
"(",
"self",
")",
"-",
">",
"SizerFlags"
] |
def Bottom(*args, **kwargs):
"""
Bottom(self) -> SizerFlags
Aligns the object to the bottom of the available space, a shortcut for
calling Align(wx.ALIGN_BOTTOM)
"""
return _core_.SizerFlags_Bottom(*args, **kwargs)
|
[
"def",
"Bottom",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"SizerFlags_Bottom",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L13841-L13848
|
|
MythTV/mythtv
|
d282a209cb8be85d036f85a62a8ec971b67d45f4
|
mythtv/programs/scripts/internetcontent/nv_python_libs/xsltfunctions/cinemarv_api.py
|
python
|
xpathFunctions.cinemarvLinkGeneration
|
(self, context, *args)
|
return self.persistence['cinemarvLinkGeneration']
|
Generate a link for the CinemaRV.com site. A read of the item's web page is required to
extract the flash video id.
Call example: 'mnvXpath:cinemarvLinkGeneration(string(link))'
return the url link
|
Generate a link for the CinemaRV.com site. A read of the item's web page is required to
extract the flash video id.
Call example: 'mnvXpath:cinemarvLinkGeneration(string(link))'
return the url link
|
[
"Generate",
"a",
"link",
"for",
"the",
"CinemaRV",
".",
"com",
"site",
".",
"A",
"read",
"of",
"the",
"item",
"s",
"web",
"page",
"is",
"required",
"to",
"extract",
"the",
"flash",
"video",
"id",
".",
"Call",
"example",
":",
"mnvXpath",
":",
"cinemarvLinkGeneration",
"(",
"string",
"(",
"link",
"))",
"return",
"the",
"url",
"link"
] |
def cinemarvLinkGeneration(self, context, *args):
'''Generate a link for the CinemaRV.com site. A read of the item's web page is required to
extract the flash video id.
Call example: 'mnvXpath:cinemarvLinkGeneration(string(link))'
return the url link
'''
webURL = args[0]
# If this is for the download then just return what was found for the "link" element
if 'cinemarvLinkGeneration' in self.persistence:
if self.persistence['cinemarvLinkGeneration'] is not None:
returnValue = self.persistence['cinemarvLinkGeneration']
self.persistence['cinemarvLinkGeneration'] = None
return returnValue
else:
self.persistence['cinemarvLinkGenerationVideoID'] = etree.XPath('//object[@id="flashObj"]//param[@name="flashVars"]/@value', namespaces=common.namespaces)
self.persistence['cinemarvLinkGenerationParser'] = etree.HTMLParser()
try:
webPageElement = etree.parse(webURL, self.persistence['cinemarvLinkGenerationParser'])
except Exception as errmsg:
sys.stderr.write('!Warning: The web page URL(%s) could not be read, error(%s)\n' % (webURL, errmsg))
return webURL
if webPageElement is None:
self.persistence['cinemarvLinkGeneration'] = webURL
return webURL
tmpVideoID = self.persistence['cinemarvLinkGenerationVideoID'](webPageElement)
if not len(tmpVideoID):
self.persistence['cinemarvLinkGeneration'] = webURL
return webURL
index = tmpVideoID[0].find('&')
if index == -1:
self.persistence['cinemarvLinkGeneration'] = webURL
return webURL
videocode = tmpVideoID[0][:index].replace('videoId=', '')
self.persistence['cinemarvLinkGeneration'] = common.linkWebPage('dummycontext', 'cinemarv')+videocode
return self.persistence['cinemarvLinkGeneration']
|
[
"def",
"cinemarvLinkGeneration",
"(",
"self",
",",
"context",
",",
"*",
"args",
")",
":",
"webURL",
"=",
"args",
"[",
"0",
"]",
"# If this is for the download then just return what was found for the \"link\" element",
"if",
"'cinemarvLinkGeneration'",
"in",
"self",
".",
"persistence",
":",
"if",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]",
"is",
"not",
"None",
":",
"returnValue",
"=",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]",
"=",
"None",
"return",
"returnValue",
"else",
":",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGenerationVideoID'",
"]",
"=",
"etree",
".",
"XPath",
"(",
"'//object[@id=\"flashObj\"]//param[@name=\"flashVars\"]/@value'",
",",
"namespaces",
"=",
"common",
".",
"namespaces",
")",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGenerationParser'",
"]",
"=",
"etree",
".",
"HTMLParser",
"(",
")",
"try",
":",
"webPageElement",
"=",
"etree",
".",
"parse",
"(",
"webURL",
",",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGenerationParser'",
"]",
")",
"except",
"Exception",
"as",
"errmsg",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'!Warning: The web page URL(%s) could not be read, error(%s)\\n'",
"%",
"(",
"webURL",
",",
"errmsg",
")",
")",
"return",
"webURL",
"if",
"webPageElement",
"is",
"None",
":",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]",
"=",
"webURL",
"return",
"webURL",
"tmpVideoID",
"=",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGenerationVideoID'",
"]",
"(",
"webPageElement",
")",
"if",
"not",
"len",
"(",
"tmpVideoID",
")",
":",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]",
"=",
"webURL",
"return",
"webURL",
"index",
"=",
"tmpVideoID",
"[",
"0",
"]",
".",
"find",
"(",
"'&'",
")",
"if",
"index",
"==",
"-",
"1",
":",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]",
"=",
"webURL",
"return",
"webURL",
"videocode",
"=",
"tmpVideoID",
"[",
"0",
"]",
"[",
":",
"index",
"]",
".",
"replace",
"(",
"'videoId='",
",",
"''",
")",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]",
"=",
"common",
".",
"linkWebPage",
"(",
"'dummycontext'",
",",
"'cinemarv'",
")",
"+",
"videocode",
"return",
"self",
".",
"persistence",
"[",
"'cinemarvLinkGeneration'",
"]"
] |
https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/programs/scripts/internetcontent/nv_python_libs/xsltfunctions/cinemarv_api.py#L103-L139
|
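The method above compiles its XPath and HTML parser once and caches them in self.persistence, so later calls skip recompilation. A reduced sketch of that compile-once pattern with lxml (the expression is copied from the code; the function around it is illustrative):

from lxml import etree

_FLASH_VARS = etree.XPath('//object[@id="flashObj"]//param[@name="flashVars"]/@value')
_PARSER = etree.HTMLParser()

def extract_video_code(html_text):
    # Both the XPath and the parser were built once at import time.
    tree = etree.fromstring(html_text, _PARSER)
    values = _FLASH_VARS(tree)
    if not values or '&' not in values[0]:
        return None
    return values[0].split('&', 1)[0].replace('videoId=', '')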
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pkg_resources/__init__.py
|
python
|
ZipManifests.build
|
(cls, path)
|
Build a dictionary similar to the zipimport directory
caches, except instead of tuples, store ZipInfo objects.
Use a platform-specific path separator (os.sep) for the path keys
for compatibility with pypy on Windows.
|
Build a dictionary similar to the zipimport directory
caches, except instead of tuples, store ZipInfo objects.
|
[
"Build",
"a",
"dictionary",
"similar",
"to",
"the",
"zipimport",
"directory",
"caches",
"except",
"instead",
"of",
"tuples",
"store",
"ZipInfo",
"objects",
"."
] |
def build(cls, path):
"""
Build a dictionary similar to the zipimport directory
caches, except instead of tuples, store ZipInfo objects.
Use a platform-specific path separator (os.sep) for the path keys
for compatibility with pypy on Windows.
"""
with zipfile.ZipFile(path) as zfile:
items = (
(
name.replace('/', os.sep),
zfile.getinfo(name),
)
for name in zfile.namelist()
)
return dict(items)
|
[
"def",
"build",
"(",
"cls",
",",
"path",
")",
":",
"with",
"zipfile",
".",
"ZipFile",
"(",
"path",
")",
"as",
"zfile",
":",
"items",
"=",
"(",
"(",
"name",
".",
"replace",
"(",
"'/'",
",",
"os",
".",
"sep",
")",
",",
"zfile",
".",
"getinfo",
"(",
"name",
")",
",",
")",
"for",
"name",
"in",
"zfile",
".",
"namelist",
"(",
")",
")",
"return",
"dict",
"(",
"items",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pkg_resources/__init__.py#L1667-L1683
|
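A quick check of the builder above: writing a throwaway archive shows the name -> ZipInfo mapping keyed with os.sep (the archive contents are invented):

import os, zipfile

with zipfile.ZipFile("demo.zip", "w") as zf:
    zf.writestr("pkg/module.py", "x = 1\n")

with zipfile.ZipFile("demo.zip") as zfile:
    manifest = {name.replace('/', os.sep): zfile.getinfo(name)
                for name in zfile.namelist()}

print(list(manifest))     # ['pkg\\module.py'] on Windows, ['pkg/module.py'] elsewhere
print(manifest[os.path.join("pkg", "module.py")].file_size)   # 6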
||
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/numbers.py
|
python
|
Complex.real
|
(self)
|
Retrieve the real component of this number.
This should subclass Real.
|
Retrieve the real component of this number.
|
[
"Retrieve",
"the",
"real",
"component",
"of",
"this",
"number",
"."
] |
def real(self):
"""Retrieve the real component of this number.
This should subclass Real.
"""
raise NotImplementedError
|
[
"def",
"real",
"(",
"self",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/numbers.py#L57-L62
|
||
kamyu104/LeetCode-Solutions
|
77605708a927ea3b85aee5a479db733938c7c211
|
Python/largest-number-after-mutating-substring.py
|
python
|
Solution.maximumNumber
|
(self, num, change)
|
return "".join(map(str, result))
|
:type num: str
:type change: List[int]
:rtype: str
|
:type num: str
:type change: List[int]
:rtype: str
|
[
":",
"type",
"num",
":",
"str",
":",
"type",
"change",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"str"
] |
def maximumNumber(self, num, change):
"""
:type num: str
:type change: List[int]
:rtype: str
"""
mutated = False
result = map(int, list(num))
for i, d in enumerate(result):
if change[d] < d:
if mutated:
break
elif change[d] > d:
result[i] = str(change[d])
mutated = True
return "".join(map(str, result))
|
[
"def",
"maximumNumber",
"(",
"self",
",",
"num",
",",
"change",
")",
":",
"mutated",
"=",
"False",
"result",
"=",
"map",
"(",
"int",
",",
"list",
"(",
"num",
")",
")",
"for",
"i",
",",
"d",
"in",
"enumerate",
"(",
"result",
")",
":",
"if",
"change",
"[",
"d",
"]",
"<",
"d",
":",
"if",
"mutated",
":",
"break",
"elif",
"change",
"[",
"d",
"]",
">",
"d",
":",
"result",
"[",
"i",
"]",
"=",
"str",
"(",
"change",
"[",
"d",
"]",
")",
"mutated",
"=",
"True",
"return",
"\"\"",
".",
"join",
"(",
"map",
"(",
"str",
",",
"result",
")",
")"
] |
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/largest-number-after-mutating-substring.py#L5-L20
|
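The solution above leans on Python 2 semantics: map(int, list(num)) must return a list for result[i] = ... to work, and it temporarily mixes int and str elements. A Python 3 rendering of the same greedy rule, mutate one contiguous segment and stop at the first digit that would shrink, for comparison:

def maximum_number(num, change):
    digits = [int(c) for c in num]
    mutated = False
    for i, d in enumerate(digits):
        if change[d] > d:
            digits[i] = change[d]      # keep ints; stringify once at the end
            mutated = True
        elif change[d] < d and mutated:
            break                      # the mutated segment must stay contiguous
    return "".join(map(str, digits))

assert maximum_number("132", [9, 8, 5, 0, 3, 6, 4, 2, 6, 8]) == "832"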
|
eclipse/sumo
|
7132a9b8b6eea734bdec38479026b4d8c4336d03
|
tools/traci/_trafficlight.py
|
python
|
TrafficLightDomain.setPhase
|
(self, tlsID, index)
|
setPhase(string, integer) -> None
Switches to the phase with the given index in the list of all phases for
the current program.
|
setPhase(string, integer) -> None
|
[
"setPhase",
"(",
"string",
"integer",
")",
"-",
">",
"None"
] |
def setPhase(self, tlsID, index):
"""setPhase(string, integer) -> None
Switches to the phase with the given index in the list of all phases for
the current program.
"""
self._setCmd(tc.TL_PHASE_INDEX, tlsID, "i", index)
|
[
"def",
"setPhase",
"(",
"self",
",",
"tlsID",
",",
"index",
")",
":",
"self",
".",
"_setCmd",
"(",
"tc",
".",
"TL_PHASE_INDEX",
",",
"tlsID",
",",
"\"i\"",
",",
"index",
")"
] |
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/traci/_trafficlight.py#L306-L312
|
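A hedged usage sketch for the call above; it assumes a SUMO installation, a config file, and a traffic light with id "tls0" (the last two are placeholders), and sticks to the documented traci.trafficlight API:

import traci

traci.start(["sumo", "-c", "scenario.sumocfg"])    # config path is hypothetical
traci.trafficlight.setPhase("tls0", 2)             # jump to phase index 2
print(traci.trafficlight.getPhase("tls0"))         # -> 2
traci.close()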
||
mongodb/mongo
|
d8ff665343ad29cf286ee2cf4a1960d29371937b
|
buildscripts/resmokelib/logging/loggers.py
|
python
|
configure_loggers
|
()
|
Configure the loggers and setup redirects.
|
Configure the loggers and setup redirects.
|
[
"Configure",
"the",
"loggers",
"and",
"setup",
"redirects",
"."
] |
def configure_loggers():
"""Configure the loggers and setup redirects."""
_setup_redirects()
buildlogger.BUILDLOGGER_FALLBACK = logging.Logger("buildlogger")
# The 'buildlogger' prefix is not added to the fallback logger since the prefix of the original
# logger will be there as part of the logged message.
buildlogger.BUILDLOGGER_FALLBACK.addHandler(
_fallback_buildlogger_handler(include_logger_name=False))
global BUILDLOGGER_SERVER # pylint: disable=global-statement
BUILDLOGGER_SERVER = _build_logger_server()
global ROOT_TESTS_LOGGER # pylint: disable=global-statement
ROOT_TESTS_LOGGER = new_root_logger(TESTS_LOGGER_NAME)
global ROOT_FIXTURE_LOGGER # pylint: disable=global-statement
ROOT_FIXTURE_LOGGER = new_root_logger(FIXTURE_LOGGER_NAME)
global ROOT_EXECUTOR_LOGGER # pylint: disable=global-statement
ROOT_EXECUTOR_LOGGER = new_root_logger(EXECUTOR_LOGGER_NAME)
|
[
"def",
"configure_loggers",
"(",
")",
":",
"_setup_redirects",
"(",
")",
"buildlogger",
".",
"BUILDLOGGER_FALLBACK",
"=",
"logging",
".",
"Logger",
"(",
"\"buildlogger\"",
")",
"# The 'buildlogger' prefix is not added to the fallback logger since the prefix of the original",
"# logger will be there as part of the logged message.",
"buildlogger",
".",
"BUILDLOGGER_FALLBACK",
".",
"addHandler",
"(",
"_fallback_buildlogger_handler",
"(",
"include_logger_name",
"=",
"False",
")",
")",
"global",
"BUILDLOGGER_SERVER",
"# pylint: disable=global-statement",
"BUILDLOGGER_SERVER",
"=",
"_build_logger_server",
"(",
")",
"global",
"ROOT_TESTS_LOGGER",
"# pylint: disable=global-statement",
"ROOT_TESTS_LOGGER",
"=",
"new_root_logger",
"(",
"TESTS_LOGGER_NAME",
")",
"global",
"ROOT_FIXTURE_LOGGER",
"# pylint: disable=global-statement",
"ROOT_FIXTURE_LOGGER",
"=",
"new_root_logger",
"(",
"FIXTURE_LOGGER_NAME",
")",
"global",
"ROOT_EXECUTOR_LOGGER",
"# pylint: disable=global-statement",
"ROOT_EXECUTOR_LOGGER",
"=",
"new_root_logger",
"(",
"EXECUTOR_LOGGER_NAME",
")"
] |
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/logging/loggers.py#L78-L96
|
||
indutny/candor
|
48e7260618f5091c80a3416828e2808cad3ea22e
|
tools/gyp/pylib/gyp/xcodeproj_file.py
|
python
|
PBXProject.AddOrGetFileInRootGroup
|
(self, path)
|
return group.AddOrGetFileByPath(path, hierarchical)
|
Returns a PBXFileReference corresponding to path in the correct group
according to RootGroupForPath's heuristics.
If an existing PBXFileReference for path exists, it will be returned.
Otherwise, one will be created and returned.
|
Returns a PBXFileReference corresponding to path in the correct group
according to RootGroupForPath's heuristics.
|
[
"Returns",
"a",
"PBXFileReference",
"corresponding",
"to",
"path",
"in",
"the",
"correct",
"group",
"according",
"to",
"RootGroupForPath",
"s",
"heuristics",
"."
] |
def AddOrGetFileInRootGroup(self, path):
"""Returns a PBXFileReference corresponding to path in the correct group
according to RootGroupForPath's heuristics.
If an existing PBXFileReference for path exists, it will be returned.
Otherwise, one will be created and returned.
"""
(group, hierarchical) = self.RootGroupForPath(path)
return group.AddOrGetFileByPath(path, hierarchical)
|
[
"def",
"AddOrGetFileInRootGroup",
"(",
"self",
",",
"path",
")",
":",
"(",
"group",
",",
"hierarchical",
")",
"=",
"self",
".",
"RootGroupForPath",
"(",
"path",
")",
"return",
"group",
".",
"AddOrGetFileByPath",
"(",
"path",
",",
"hierarchical",
")"
] |
https://github.com/indutny/candor/blob/48e7260618f5091c80a3416828e2808cad3ea22e/tools/gyp/pylib/gyp/xcodeproj_file.py#L2556-L2565
|
|
Harick1/caffe-yolo
|
eea92bf3ddfe4d0ff6b0b3ba9b15c029a83ed9a3
|
python/caffe/draw.py
|
python
|
get_pydot_graph
|
(caffe_net, rankdir, label_edges=True)
|
return pydot_graph
|
Create a data structure which represents the `caffe_net`.
Parameters
----------
caffe_net : object
rankdir : {'LR', 'TB', 'BT'}
Direction of graph layout.
label_edges : boolean, optional
Label the edges (default is True).
Returns
-------
pydot graph object
|
Create a data structure which represents the `caffe_net`.
|
[
"Create",
"a",
"data",
"structure",
"which",
"represents",
"the",
"caffe_net",
"."
] |
def get_pydot_graph(caffe_net, rankdir, label_edges=True):
"""Create a data structure which represents the `caffe_net`.
Parameters
----------
caffe_net : object
rankdir : {'LR', 'TB', 'BT'}
Direction of graph layout.
label_edges : boolean, optional
Label the edges (default is True).
Returns
-------
pydot graph object
"""
pydot_graph = pydot.Dot(caffe_net.name if caffe_net.name else 'Net',
graph_type='digraph',
rankdir=rankdir)
pydot_nodes = {}
pydot_edges = []
for layer in caffe_net.layer:
node_label = get_layer_label(layer, rankdir)
node_name = "%s_%s" % (layer.name, layer.type)
if (len(layer.bottom) == 1 and len(layer.top) == 1 and
layer.bottom[0] == layer.top[0]):
# We have an in-place neuron layer.
pydot_nodes[node_name] = pydot.Node(node_label,
**NEURON_LAYER_STYLE)
else:
layer_style = LAYER_STYLE_DEFAULT
layer_style['fillcolor'] = choose_color_by_layertype(layer.type)
pydot_nodes[node_name] = pydot.Node(node_label, **layer_style)
for bottom_blob in layer.bottom:
pydot_nodes[bottom_blob + '_blob'] = pydot.Node('%s' % bottom_blob,
**BLOB_STYLE)
edge_label = '""'
pydot_edges.append({'src': bottom_blob + '_blob',
'dst': node_name,
'label': edge_label})
for top_blob in layer.top:
pydot_nodes[top_blob + '_blob'] = pydot.Node('%s' % (top_blob))
if label_edges:
edge_label = get_edge_label(layer)
else:
edge_label = '""'
pydot_edges.append({'src': node_name,
'dst': top_blob + '_blob',
'label': edge_label})
# Now, add the nodes and edges to the graph.
for node in pydot_nodes.values():
pydot_graph.add_node(node)
for edge in pydot_edges:
pydot_graph.add_edge(
pydot.Edge(pydot_nodes[edge['src']],
pydot_nodes[edge['dst']],
label=edge['label']))
return pydot_graph
|
[
"def",
"get_pydot_graph",
"(",
"caffe_net",
",",
"rankdir",
",",
"label_edges",
"=",
"True",
")",
":",
"pydot_graph",
"=",
"pydot",
".",
"Dot",
"(",
"caffe_net",
".",
"name",
"if",
"caffe_net",
".",
"name",
"else",
"'Net'",
",",
"graph_type",
"=",
"'digraph'",
",",
"rankdir",
"=",
"rankdir",
")",
"pydot_nodes",
"=",
"{",
"}",
"pydot_edges",
"=",
"[",
"]",
"for",
"layer",
"in",
"caffe_net",
".",
"layer",
":",
"node_label",
"=",
"get_layer_label",
"(",
"layer",
",",
"rankdir",
")",
"node_name",
"=",
"\"%s_%s\"",
"%",
"(",
"layer",
".",
"name",
",",
"layer",
".",
"type",
")",
"if",
"(",
"len",
"(",
"layer",
".",
"bottom",
")",
"==",
"1",
"and",
"len",
"(",
"layer",
".",
"top",
")",
"==",
"1",
"and",
"layer",
".",
"bottom",
"[",
"0",
"]",
"==",
"layer",
".",
"top",
"[",
"0",
"]",
")",
":",
"# We have an in-place neuron layer.",
"pydot_nodes",
"[",
"node_name",
"]",
"=",
"pydot",
".",
"Node",
"(",
"node_label",
",",
"*",
"*",
"NEURON_LAYER_STYLE",
")",
"else",
":",
"layer_style",
"=",
"LAYER_STYLE_DEFAULT",
"layer_style",
"[",
"'fillcolor'",
"]",
"=",
"choose_color_by_layertype",
"(",
"layer",
".",
"type",
")",
"pydot_nodes",
"[",
"node_name",
"]",
"=",
"pydot",
".",
"Node",
"(",
"node_label",
",",
"*",
"*",
"layer_style",
")",
"for",
"bottom_blob",
"in",
"layer",
".",
"bottom",
":",
"pydot_nodes",
"[",
"bottom_blob",
"+",
"'_blob'",
"]",
"=",
"pydot",
".",
"Node",
"(",
"'%s'",
"%",
"bottom_blob",
",",
"*",
"*",
"BLOB_STYLE",
")",
"edge_label",
"=",
"'\"\"'",
"pydot_edges",
".",
"append",
"(",
"{",
"'src'",
":",
"bottom_blob",
"+",
"'_blob'",
",",
"'dst'",
":",
"node_name",
",",
"'label'",
":",
"edge_label",
"}",
")",
"for",
"top_blob",
"in",
"layer",
".",
"top",
":",
"pydot_nodes",
"[",
"top_blob",
"+",
"'_blob'",
"]",
"=",
"pydot",
".",
"Node",
"(",
"'%s'",
"%",
"(",
"top_blob",
")",
")",
"if",
"label_edges",
":",
"edge_label",
"=",
"get_edge_label",
"(",
"layer",
")",
"else",
":",
"edge_label",
"=",
"'\"\"'",
"pydot_edges",
".",
"append",
"(",
"{",
"'src'",
":",
"node_name",
",",
"'dst'",
":",
"top_blob",
"+",
"'_blob'",
",",
"'label'",
":",
"edge_label",
"}",
")",
"# Now, add the nodes and edges to the graph.",
"for",
"node",
"in",
"pydot_nodes",
".",
"values",
"(",
")",
":",
"pydot_graph",
".",
"add_node",
"(",
"node",
")",
"for",
"edge",
"in",
"pydot_edges",
":",
"pydot_graph",
".",
"add_edge",
"(",
"pydot",
".",
"Edge",
"(",
"pydot_nodes",
"[",
"edge",
"[",
"'src'",
"]",
"]",
",",
"pydot_nodes",
"[",
"edge",
"[",
"'dst'",
"]",
"]",
",",
"label",
"=",
"edge",
"[",
"'label'",
"]",
")",
")",
"return",
"pydot_graph"
] |
https://github.com/Harick1/caffe-yolo/blob/eea92bf3ddfe4d0ff6b0b3ba9b15c029a83ed9a3/python/caffe/draw.py#L130-L186
|
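A minimal stand-alone pydot sketch mirroring the shape of the builder above: nodes collected in a dict, edges accumulated as dicts, everything added to the Dot graph at the end (the node names are invented):

import pydot

graph = pydot.Dot("Net", graph_type="digraph", rankdir="LR")
nodes = {name: pydot.Node(name, shape="record") for name in ("data", "conv1")}
edges = [{"src": "data", "dst": "conv1", "label": '""'}]

for node in nodes.values():
    graph.add_node(node)
for e in edges:
    graph.add_edge(pydot.Edge(nodes[e["src"]], nodes[e["dst"]], label=e["label"]))

graph.write_png("net.png")   # rendering requires Graphviz on the PATH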
|
ricardoquesada/Spidermonkey
|
4a75ea2543408bd1b2c515aa95901523eeef7858
|
build/pymake/pymake/data.py
|
python
|
Target.resolvedeps
|
(self, makefile, targetstack, rulestack, recursive)
|
Resolve the actual path of this target, using vpath if necessary.
Recursively resolve dependencies of this target. This means finding implicit
rules which match the target, if appropriate.
Figure out whether this target needs to be rebuilt, and set self.outofdate
appropriately.
@param targetstack is the current stack of dependencies being resolved. If
this target is already in targetstack, bail to prevent infinite
recursion.
@param rulestack is the current stack of implicit rules being used to resolve
dependencies. A rule chain cannot use the same implicit rule twice.
|
Resolve the actual path of this target, using vpath if necessary.
|
[
"Resolve",
"the",
"actual",
"path",
"of",
"this",
"target",
"using",
"vpath",
"if",
"necessary",
"."
] |
def resolvedeps(self, makefile, targetstack, rulestack, recursive):
"""
Resolve the actual path of this target, using vpath if necessary.
Recursively resolve dependencies of this target. This means finding implicit
rules which match the target, if appropriate.
Figure out whether this target needs to be rebuilt, and set self.outofdate
appropriately.
@param targetstack is the current stack of dependencies being resolved. If
this target is already in targetstack, bail to prevent infinite
recursion.
@param rulestack is the current stack of implicit rules being used to resolve
dependencies. A rule chain cannot use the same implicit rule twice.
"""
assert makefile.parsingfinished
if self.target in targetstack:
raise ResolutionError("Recursive dependency: %s -> %s" % (
" -> ".join(targetstack), self.target))
targetstack = targetstack + [self.target]
indent = getindent(targetstack)
_log.info("%sConsidering target '%s'", indent, self.target)
self.resolvevpath(makefile)
# Sanity-check our rules. If we're single-colon, only one rule should have commands
ruleswithcommands = self.ruleswithcommands()
if len(self.rules) and not self.isdoublecolon():
if ruleswithcommands > 1:
# In GNU make this is a warning, not an error. I'm going to be stricter.
# TODO: provide locations
raise DataError("Target '%s' has multiple rules with commands." % self.target)
if ruleswithcommands == 0:
self.resolveimplicitrule(makefile, targetstack, rulestack)
# If a target is mentioned, but doesn't exist, has no commands and no
# prerequisites, it is special and exists just to say that targets which
# depend on it are always out of date. This is like .FORCE but more
# compatible with other makes.
# Otherwise, we don't know how to make it.
if not len(self.rules) and self.mtime is None and not util.any((len(rule.prerequisites) > 0
for rule in self.rules)):
raise ResolutionError("No rule to make target '%s' needed by %r" % (self.target,
targetstack))
if recursive:
for r in self.rules:
newrulestack = rulestack + [r]
for d in r.prerequisites:
dt = makefile.gettarget(d)
if dt.explicit:
continue
dt.resolvedeps(makefile, targetstack, newrulestack, True)
for v in makefile.getpatternvariablesfor(self.target):
self.variables.merge(v)
|
[
"def",
"resolvedeps",
"(",
"self",
",",
"makefile",
",",
"targetstack",
",",
"rulestack",
",",
"recursive",
")",
":",
"assert",
"makefile",
".",
"parsingfinished",
"if",
"self",
".",
"target",
"in",
"targetstack",
":",
"raise",
"ResolutionError",
"(",
"\"Recursive dependency: %s -> %s\"",
"%",
"(",
"\" -> \"",
".",
"join",
"(",
"targetstack",
")",
",",
"self",
".",
"target",
")",
")",
"targetstack",
"=",
"targetstack",
"+",
"[",
"self",
".",
"target",
"]",
"indent",
"=",
"getindent",
"(",
"targetstack",
")",
"_log",
".",
"info",
"(",
"\"%sConsidering target '%s'\"",
",",
"indent",
",",
"self",
".",
"target",
")",
"self",
".",
"resolvevpath",
"(",
"makefile",
")",
"# Sanity-check our rules. If we're single-colon, only one rule should have commands",
"ruleswithcommands",
"=",
"self",
".",
"ruleswithcommands",
"(",
")",
"if",
"len",
"(",
"self",
".",
"rules",
")",
"and",
"not",
"self",
".",
"isdoublecolon",
"(",
")",
":",
"if",
"ruleswithcommands",
">",
"1",
":",
"# In GNU make this is a warning, not an error. I'm going to be stricter.",
"# TODO: provide locations",
"raise",
"DataError",
"(",
"\"Target '%s' has multiple rules with commands.\"",
"%",
"self",
".",
"target",
")",
"if",
"ruleswithcommands",
"==",
"0",
":",
"self",
".",
"resolveimplicitrule",
"(",
"makefile",
",",
"targetstack",
",",
"rulestack",
")",
"# If a target is mentioned, but doesn't exist, has no commands and no",
"# prerequisites, it is special and exists just to say that targets which",
"# depend on it are always out of date. This is like .FORCE but more",
"# compatible with other makes.",
"# Otherwise, we don't know how to make it.",
"if",
"not",
"len",
"(",
"self",
".",
"rules",
")",
"and",
"self",
".",
"mtime",
"is",
"None",
"and",
"not",
"util",
".",
"any",
"(",
"(",
"len",
"(",
"rule",
".",
"prerequisites",
")",
">",
"0",
"for",
"rule",
"in",
"self",
".",
"rules",
")",
")",
":",
"raise",
"ResolutionError",
"(",
"\"No rule to make target '%s' needed by %r\"",
"%",
"(",
"self",
".",
"target",
",",
"targetstack",
")",
")",
"if",
"recursive",
":",
"for",
"r",
"in",
"self",
".",
"rules",
":",
"newrulestack",
"=",
"rulestack",
"+",
"[",
"r",
"]",
"for",
"d",
"in",
"r",
".",
"prerequisites",
":",
"dt",
"=",
"makefile",
".",
"gettarget",
"(",
"d",
")",
"if",
"dt",
".",
"explicit",
":",
"continue",
"dt",
".",
"resolvedeps",
"(",
"makefile",
",",
"targetstack",
",",
"newrulestack",
",",
"True",
")",
"for",
"v",
"in",
"makefile",
".",
"getpatternvariablesfor",
"(",
"self",
".",
"target",
")",
":",
"self",
".",
"variables",
".",
"merge",
"(",
"v",
")"
] |
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/build/pymake/pymake/data.py#L1093-L1155
|
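The walk above detects cycles by threading an explicit targetstack through the recursion and refusing to revisit a target already on it. The idea in isolation, over a plain dict graph (the graph contents are illustrative):

def resolve(graph, target, stack=()):
    # stack is the chain of targets currently being resolved; membership
    # means the recursion has looped back into its own ancestry.
    if target in stack:
        raise ValueError("Recursive dependency: %s -> %s"
                         % (" -> ".join(stack), target))
    stack = stack + (target,)
    for dep in graph.get(target, ()):
        resolve(graph, dep, stack)

resolve({"a": ["b"], "b": ["c"]}, "a")       # fine
# resolve({"a": ["b"], "b": ["a"]}, "a")     # raises: a -> b -> a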
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
build/android/pylib/utils/logging_utils.py
|
python
|
ColorStreamHandler.MakeDefault
|
(force_color=False)
|
Replaces the default logging handlers with a coloring handler. To use
a colorizing handler at the same time as others, either register them
after this call, or add the ColorStreamHandler on the logger using
Logger.addHandler()
Args:
force_color: Set to True to bypass the tty check and always colorize.
|
Replaces the default logging handlers with a coloring handler. To use
a colorizing handler at the same time as others, either register them
after this call, or add the ColorStreamHandler on the logger using
Logger.addHandler()
|
[
"Replaces",
"the",
"default",
"logging",
"handlers",
"with",
"a",
"coloring",
"handler",
".",
"To",
"use",
"a",
"colorizing",
"handler",
"at",
"the",
"same",
"time",
"as",
"others",
"either",
"register",
"them",
"after",
"this",
"call",
"or",
"add",
"the",
"ColorStreamHandler",
"on",
"the",
"logger",
"using",
"Logger",
".",
"addHandler",
"()"
] |
def MakeDefault(force_color=False):
"""
Replaces the default logging handlers with a coloring handler. To use
a colorizing handler at the same time as others, either register them
after this call, or add the ColorStreamHandler on the logger using
Logger.addHandler()
Args:
force_color: Set to True to bypass the tty check and always colorize.
"""
# If the existing handlers aren't removed, messages are duplicated
logging.getLogger().handlers = []
logging.getLogger().addHandler(ColorStreamHandler(force_color))
|
[
"def",
"MakeDefault",
"(",
"force_color",
"=",
"False",
")",
":",
"# If the existing handlers aren't removed, messages are duplicated",
"logging",
".",
"getLogger",
"(",
")",
".",
"handlers",
"=",
"[",
"]",
"logging",
".",
"getLogger",
"(",
")",
".",
"addHandler",
"(",
"ColorStreamHandler",
"(",
"force_color",
")",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/build/android/pylib/utils/logging_utils.py#L64-L76
|
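The same clear-then-add move on the root logger, shown with the standard library only (a plain StreamHandler stands in for ColorStreamHandler, which lives in the chromium tree):

import logging, sys

root = logging.getLogger()
root.handlers = []     # drop existing handlers so messages aren't duplicated
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter("%(levelname)s %(message)s"))
root.addHandler(handler)
root.setLevel(logging.INFO)
logging.info("emitted exactly once")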
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/SimpleXMLRPCServer.py
|
python
|
SimpleXMLRPCDispatcher.system_methodHelp
|
(self, method_name)
|
system.methodHelp('add') => "Adds two integers together"
Returns a string containing documentation for the specified method.
|
system.methodHelp('add') => "Adds two integers together"
|
[
"system",
".",
"methodHelp",
"(",
"add",
")",
"=",
">",
"Adds",
"two",
"integers",
"together"
] |
def system_methodHelp(self, method_name):
"""system.methodHelp('add') => "Adds two integers together"
Returns a string containing documentation for the specified method."""
method = None
if method_name in self.funcs:
method = self.funcs[method_name]
elif self.instance is not None:
# Instance can implement _methodHelp to return help for a method
if hasattr(self.instance, '_methodHelp'):
return self.instance._methodHelp(method_name)
# if the instance has a _dispatch method then we
# don't have enough information to provide help
elif not hasattr(self.instance, '_dispatch'):
try:
method = resolve_dotted_attribute(
self.instance,
method_name,
self.allow_dotted_names
)
except AttributeError:
pass
# Note that we aren't checking that the method actually
# be a callable object of some kind
if method is None:
return ""
else:
import pydoc
return pydoc.getdoc(method)
|
[
"def",
"system_methodHelp",
"(",
"self",
",",
"method_name",
")",
":",
"method",
"=",
"None",
"if",
"method_name",
"in",
"self",
".",
"funcs",
":",
"method",
"=",
"self",
".",
"funcs",
"[",
"method_name",
"]",
"elif",
"self",
".",
"instance",
"is",
"not",
"None",
":",
"# Instance can implement _methodHelp to return help for a method",
"if",
"hasattr",
"(",
"self",
".",
"instance",
",",
"'_methodHelp'",
")",
":",
"return",
"self",
".",
"instance",
".",
"_methodHelp",
"(",
"method_name",
")",
"# if the instance has a _dispatch method then we",
"# don't have enough information to provide help",
"elif",
"not",
"hasattr",
"(",
"self",
".",
"instance",
",",
"'_dispatch'",
")",
":",
"try",
":",
"method",
"=",
"resolve_dotted_attribute",
"(",
"self",
".",
"instance",
",",
"method_name",
",",
"self",
".",
"allow_dotted_names",
")",
"except",
"AttributeError",
":",
"pass",
"# Note that we aren't checking that the method actually",
"# be a callable object of some kind",
"if",
"method",
"is",
"None",
":",
"return",
"\"\"",
"else",
":",
"import",
"pydoc",
"return",
"pydoc",
".",
"getdoc",
"(",
"method",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/SimpleXMLRPCServer.py#L314-L344
|
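End to end, system.methodHelp is answered with the registered function's docstring via pydoc.getdoc, as the dispatcher above shows. A Python 3 sketch (the module moved to xmlrpc.server there; port and function are made up):

from xmlrpc.server import SimpleXMLRPCServer
import threading, xmlrpc.client

def add(x, y):
    """Adds two integers together"""
    return x + y

server = SimpleXMLRPCServer(("localhost", 8000), logRequests=False)
server.register_introspection_functions()    # exposes system.methodHelp et al.
server.register_function(add)
threading.Thread(target=server.serve_forever, daemon=True).start()

proxy = xmlrpc.client.ServerProxy("http://localhost:8000")
print(proxy.system.methodHelp("add"))        # -> Adds two integers together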
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/py3/scipy/optimize/_differentialevolution.py
|
python
|
DifferentialEvolutionSolver.converged
|
(self)
|
return (np.std(self.population_energies) <=
self.atol +
self.tol * np.abs(np.mean(self.population_energies)))
|
Return True if the solver has converged.
|
Return True if the solver has converged.
|
[
"Return",
"True",
"if",
"the",
"solver",
"has",
"converged",
"."
] |
def converged(self):
"""
Return True if the solver has converged.
"""
return (np.std(self.population_energies) <=
self.atol +
self.tol * np.abs(np.mean(self.population_energies)))
|
[
"def",
"converged",
"(",
"self",
")",
":",
"return",
"(",
"np",
".",
"std",
"(",
"self",
".",
"population_energies",
")",
"<=",
"self",
".",
"atol",
"+",
"self",
".",
"tol",
"*",
"np",
".",
"abs",
"(",
"np",
".",
"mean",
"(",
"self",
".",
"population_energies",
")",
")",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/optimize/_differentialevolution.py#L641-L647
|
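The test above is a relative-plus-absolute tolerance check on the population energies: converged iff std(E) <= atol + tol * |mean(E)|. Checked directly with numpy (the energies are made-up numbers):

import numpy as np

energies = np.array([1.001, 0.999, 1.000, 1.002])
tol, atol = 0.01, 0.0
converged = np.std(energies) <= atol + tol * np.abs(np.mean(energies))
print(converged)   # True: spread ~0.0011 vs threshold ~0.01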
|
falkTX/Carla
|
74a1ae82c90db85f20550ddcdc8a927b8fb7e414
|
source/modules/lilv/lilv-0.24.0/bindings/python/lilv.py
|
python
|
PluginClass.get_label
|
(self)
|
return Node.wrap(node_duplicate(plugin_class_get_label(self.plugin_class)))
|
Get the label of this plugin class, i.e. "Oscillators".
|
Get the label of this plugin class, i.e. "Oscillators".
|
[
"Get",
"the",
"label",
"of",
"this",
"plugin",
"class",
"ie",
"Oscillators",
"."
] |
def get_label(self):
"""Get the label of this plugin class, ie "Oscillators"."""
return Node.wrap(node_duplicate(plugin_class_get_label(self.plugin_class)))
|
[
"def",
"get_label",
"(",
"self",
")",
":",
"return",
"Node",
".",
"wrap",
"(",
"node_duplicate",
"(",
"plugin_class_get_label",
"(",
"self",
".",
"plugin_class",
")",
")",
")"
] |
https://github.com/falkTX/Carla/blob/74a1ae82c90db85f20550ddcdc8a927b8fb7e414/source/modules/lilv/lilv-0.24.0/bindings/python/lilv.py#L541-L543
|
|
cms-sw/cmssw
|
fd9de012d503d3405420bcbeec0ec879baa57cf2
|
Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py
|
python
|
SetupAlignment._create_tracker_tree
|
(self)
|
Method to create hidden 'TrackerTree.root'.
|
Method to create hidden 'TrackerTree.root'.
|
[
"Method",
"to",
"create",
"hidden",
"TrackerTree",
".",
"root",
"."
] |
def _create_tracker_tree(self):
"""Method to create hidden 'TrackerTree.root'."""
if self._global_tag is None or self._first_run is None:
print("Trying to create the tracker tree before setting the global", end=' ')
print("tag or the run to determine the geometry IOV.")
sys.exit(1)
config = mpsv_iniparser.ConfigData()
config.jobDataPath = "." # current directory
config.globalTag = self._global_tag
config.firstRun = self._first_run
self._tracker_tree_path = mpsv_trackerTree.check(config)
|
[
"def",
"_create_tracker_tree",
"(",
"self",
")",
":",
"if",
"self",
".",
"_global_tag",
"is",
"None",
"or",
"self",
".",
"_first_run",
"is",
"None",
":",
"print",
"(",
"\"Trying to create the tracker tree before setting the global\"",
",",
"end",
"=",
"' '",
")",
"print",
"(",
"\"tag or the run to determine the geometry IOV.\"",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"config",
"=",
"mpsv_iniparser",
".",
"ConfigData",
"(",
")",
"config",
".",
"jobDataPath",
"=",
"\".\"",
"# current directory",
"config",
".",
"globalTag",
"=",
"self",
".",
"_global_tag",
"config",
".",
"firstRun",
"=",
"self",
".",
"_first_run",
"self",
".",
"_tracker_tree_path",
"=",
"mpsv_trackerTree",
".",
"check",
"(",
"config",
")"
] |
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_alisetup.py#L643-L655
|
||
natanielruiz/android-yolo
|
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
|
jni-build/jni/include/tensorflow/contrib/framework/python/ops/sampling_ops.py
|
python
|
stratified_sample
|
(tensors, labels, target_probs, batch_size,
init_probs=None, enqueue_many=False, queue_capacity=16,
threads_per_queue=1, name=None)
|
Stochastically creates batches based on per-class probabilities.
This method discards examples. Internally, it creates one queue to amortize
the cost of disk reads, and one queue to hold the properly-proportioned
batch. See `stratified_sample_unknown_dist` for a function that performs
stratified sampling with one queue per class and doesn't require knowing the
class data-distribution ahead of time.
Args:
tensors: List of tensors for data. All tensors are either one item or a
batch, according to enqueue_many.
labels: Tensor for label of data. Label is a single integer or a batch,
depending on enqueue_many. It is not a one-hot vector.
target_probs: Target class proportions in batch. An object whose type has a
registered Tensor conversion function.
batch_size: Size of batch to be returned.
init_probs: Class proportions in the data. An object whose type has a
registered Tensor conversion function, or `None` for estimating the
initial distribution.
enqueue_many: Bool. If true, interpret input tensors as having a batch
dimension.
queue_capacity: Capacity of the large queue that holds input examples.
threads_per_queue: Number of threads for the large queue that holds input
examples and for the final queue with the proper class proportions.
name: Optional prefix for ops created by this function.
Raises:
ValueError: enqueue_many is True and labels doesn't have a batch
dimension, or if enqueue_many is False and labels isn't a scalar.
ValueError: enqueue_many is True, and batch dimension on data and labels
don't match.
ValueError: if probs don't sum to one.
ValueError: if a zero initial probability class has a nonzero target
probability.
TFAssertion: if labels aren't integers in [0, num classes).
Returns:
(data_batch, label_batch), where data_batch is a list of tensors of the same
length as `tensors`
Example:
# Get tensor for a single data and label example.
data, label = data_provider.Get(['data', 'label'])
# Get stratified batch according to per-class probabilities.
target_probs = [...distribution you want...]
[data_batch], labels = tf.contrib.framework.sampling_ops.stratified_sample(
[data], label, target_probs)
# Run batch through network.
...
|
Stochastically creates batches based on per-class probabilities.
|
[
"Stochastically",
"creates",
"batches",
"based",
"on",
"per",
"-",
"class",
"probabilities",
"."
] |
def stratified_sample(tensors, labels, target_probs, batch_size,
init_probs=None, enqueue_many=False, queue_capacity=16,
threads_per_queue=1, name=None):
"""Stochastically creates batches based on per-class probabilities.
This method discards examples. Internally, it creates one queue to amortize
the cost of disk reads, and one queue to hold the properly-proportioned
batch. See `stratified_sample_unknown_dist` for a function that performs
stratified sampling with one queue per class and doesn't require knowing the
class data-distribution ahead of time.
Args:
tensors: List of tensors for data. All tensors are either one item or a
batch, according to enqueue_many.
labels: Tensor for label of data. Label is a single integer or a batch,
depending on enqueue_many. It is not a one-hot vector.
target_probs: Target class proportions in batch. An object whose type has a
registered Tensor conversion function.
batch_size: Size of batch to be returned.
init_probs: Class proportions in the data. An object whose type has a
registered Tensor conversion function, or `None` for estimating the
initial distribution.
enqueue_many: Bool. If true, interpret input tensors as having a batch
dimension.
queue_capacity: Capacity of the large queue that holds input examples.
threads_per_queue: Number of threads for the large queue that holds input
examples and for the final queue with the proper class proportions.
name: Optional prefix for ops created by this function.
Raises:
ValueError: enqueue_many is True and labels doesn't have a batch
dimension, or if enqueue_many is False and labels isn't a scalar.
ValueError: enqueue_many is True, and batch dimension on data and labels
don't match.
ValueError: if probs don't sum to one.
ValueError: if a zero initial probability class has a nonzero target
probability.
TFAssertion: if labels aren't integers in [0, num classes).
Returns:
(data_batch, label_batch), where data_batch is a list of tensors of the same
length as `tensors`
Example:
# Get tensor for a single data and label example.
data, label = data_provider.Get(['data', 'label'])
# Get stratified batch according to per-class probabilities.
target_probs = [...distribution you want...]
[data_batch], labels = tf.contrib.framework.sampling_ops.stratified_sample(
[data], label, target_probs)
# Run batch through network.
...
"""
with ops.op_scope(tensors + [labels], name, 'stratified_sample'):
tensor_list = ops.convert_n_to_tensor_or_indexed_slices(tensors)
labels = ops.convert_to_tensor(labels)
target_probs = ops.convert_to_tensor(target_probs, dtype=dtypes.float32)
# Reduce the case of a single example to that of a batch of size 1.
if not enqueue_many:
tensor_list = [array_ops.expand_dims(tensor, 0) for tensor in tensor_list]
labels = array_ops.expand_dims(labels, 0)
# If `init_probs` is `None`, set up online estimation of data distribution.
if init_probs is None:
# We use `target_probs` to get the number of classes, so its shape must be
# fully defined at graph construction time.
target_probs.get_shape().assert_is_fully_defined()
init_probs = _estimate_data_distribution(
labels, target_probs.get_shape().num_elements())
else:
init_probs = ops.convert_to_tensor(init_probs, dtype=dtypes.float32)
# Validate that input is consistent.
tensor_list, labels, [init_probs, target_probs] = _verify_input(
tensor_list, labels, [init_probs, target_probs])
# Check that all zero initial probabilities also have zero target
# probabilities.
assert_op = logging_ops.Assert(
math_ops.reduce_all(math_ops.logical_or(
math_ops.not_equal(init_probs, 0),
math_ops.equal(target_probs, 0))),
['All classes with zero initial probability must also have zero target '
'probability: ', init_probs, target_probs])
init_probs = control_flow_ops.with_dependencies([assert_op], init_probs)
# Calculate acceptance sampling probabilities.
accept_probs = _calculate_acceptance_probabilities(init_probs, target_probs)
proportion_rejected = math_ops.reduce_sum((1 - accept_probs) * init_probs)
accept_probs = control_flow_ops.cond(
math_ops.less(proportion_rejected, .5),
lambda: accept_probs,
lambda: logging_ops.Print( # pylint: disable=g-long-lambda
accept_probs, [accept_probs],
message='Proportion of examples rejected by sampler is high.',
first_n=10))
# Make a single queue to hold input examples. Reshape output so examples
# don't have singleton batch dimension.
batched = input_ops.batch(tensor_list + [labels],
batch_size=1,
num_threads=threads_per_queue,
capacity=queue_capacity,
enqueue_many=True)
val_list = [array_ops.squeeze(x, [0]) for x in batched[:-1]]
label = array_ops.squeeze(batched[-1], [0])
# Set up second queue containing batches that have the desired class
# proportions.
batched = _get_stratified_batch_from_tensors(
val_list, label, accept_probs, batch_size, threads_per_queue)
return batched[:-1], batched[-1]
|
[
"def",
"stratified_sample",
"(",
"tensors",
",",
"labels",
",",
"target_probs",
",",
"batch_size",
",",
"init_probs",
"=",
"None",
",",
"enqueue_many",
"=",
"False",
",",
"queue_capacity",
"=",
"16",
",",
"threads_per_queue",
"=",
"1",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"op_scope",
"(",
"tensors",
"+",
"[",
"labels",
"]",
",",
"name",
",",
"'stratified_sample'",
")",
":",
"tensor_list",
"=",
"ops",
".",
"convert_n_to_tensor_or_indexed_slices",
"(",
"tensors",
")",
"labels",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"labels",
")",
"target_probs",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"target_probs",
",",
"dtype",
"=",
"dtypes",
".",
"float32",
")",
"# Reduce the case of a single example to that of a batch of size 1.",
"if",
"not",
"enqueue_many",
":",
"tensor_list",
"=",
"[",
"array_ops",
".",
"expand_dims",
"(",
"tensor",
",",
"0",
")",
"for",
"tensor",
"in",
"tensor_list",
"]",
"labels",
"=",
"array_ops",
".",
"expand_dims",
"(",
"labels",
",",
"0",
")",
"# If `init_probs` is `None`, set up online estimation of data distribution.",
"if",
"init_probs",
"is",
"None",
":",
"# We use `target_probs` to get the number of classes, so its shape must be",
"# fully defined at graph construction time.",
"target_probs",
".",
"get_shape",
"(",
")",
".",
"assert_is_fully_defined",
"(",
")",
"init_probs",
"=",
"_estimate_data_distribution",
"(",
"labels",
",",
"target_probs",
".",
"get_shape",
"(",
")",
".",
"num_elements",
"(",
")",
")",
"else",
":",
"init_probs",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"init_probs",
",",
"dtype",
"=",
"dtypes",
".",
"float32",
")",
"# Validate that input is consistent.",
"tensor_list",
",",
"labels",
",",
"[",
"init_probs",
",",
"target_probs",
"]",
"=",
"_verify_input",
"(",
"tensor_list",
",",
"labels",
",",
"[",
"init_probs",
",",
"target_probs",
"]",
")",
"# Check that all zero initial probabilities also have zero target",
"# probabilities.",
"assert_op",
"=",
"logging_ops",
".",
"Assert",
"(",
"math_ops",
".",
"reduce_all",
"(",
"math_ops",
".",
"logical_or",
"(",
"math_ops",
".",
"not_equal",
"(",
"init_probs",
",",
"0",
")",
",",
"math_ops",
".",
"equal",
"(",
"target_probs",
",",
"0",
")",
")",
")",
",",
"[",
"'All classes with zero initial probability must also have zero target '",
"'probability: '",
",",
"init_probs",
",",
"target_probs",
"]",
")",
"init_probs",
"=",
"control_flow_ops",
".",
"with_dependencies",
"(",
"[",
"assert_op",
"]",
",",
"init_probs",
")",
"# Calculate acceptance sampling probabilities.",
"accept_probs",
"=",
"_calculate_acceptance_probabilities",
"(",
"init_probs",
",",
"target_probs",
")",
"proportion_rejected",
"=",
"math_ops",
".",
"reduce_sum",
"(",
"(",
"1",
"-",
"accept_probs",
")",
"*",
"init_probs",
")",
"accept_probs",
"=",
"control_flow_ops",
".",
"cond",
"(",
"math_ops",
".",
"less",
"(",
"proportion_rejected",
",",
".5",
")",
",",
"lambda",
":",
"accept_probs",
",",
"lambda",
":",
"logging_ops",
".",
"Print",
"(",
"# pylint: disable=g-long-lambda",
"accept_probs",
",",
"[",
"accept_probs",
"]",
",",
"message",
"=",
"'Proportion of examples rejected by sampler is high.'",
",",
"first_n",
"=",
"10",
")",
")",
"# Make a single queue to hold input examples. Reshape output so examples",
"# don't have singleton batch dimension.",
"batched",
"=",
"input_ops",
".",
"batch",
"(",
"tensor_list",
"+",
"[",
"labels",
"]",
",",
"batch_size",
"=",
"1",
",",
"num_threads",
"=",
"threads_per_queue",
",",
"capacity",
"=",
"queue_capacity",
",",
"enqueue_many",
"=",
"True",
")",
"val_list",
"=",
"[",
"array_ops",
".",
"squeeze",
"(",
"x",
",",
"[",
"0",
"]",
")",
"for",
"x",
"in",
"batched",
"[",
":",
"-",
"1",
"]",
"]",
"label",
"=",
"array_ops",
".",
"squeeze",
"(",
"batched",
"[",
"-",
"1",
"]",
",",
"[",
"0",
"]",
")",
"# Set up second queue containing batches that have the desired class",
"# proportions.",
"batched",
"=",
"_get_stratified_batch_from_tensors",
"(",
"val_list",
",",
"label",
",",
"accept_probs",
",",
"batch_size",
",",
"threads_per_queue",
")",
"return",
"batched",
"[",
":",
"-",
"1",
"]",
",",
"batched",
"[",
"-",
"1",
"]"
] |
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/framework/python/ops/sampling_ops.py#L38-L149
|
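The arithmetic at the heart of the sampler above lives in _calculate_acceptance_probabilities, whose body is not shown here. A common realization consistent with the surrounding code, acceptance proportional to target/init and scaled so the largest ratio accepts with probability 1, is sketched below as an assumption, not as the source:

import numpy as np

def acceptance_probabilities(init_probs, target_probs):
    # Classes over-represented in the data get rejected more often; the
    # least-represented class is always accepted. Assumes init_probs > 0.
    ratios = target_probs / init_probs
    return ratios / ratios.max()

init = np.array([0.7, 0.2, 0.1])
target = np.array([1/3, 1/3, 1/3])
print(acceptance_probabilities(init, target))   # [0.1428... 0.5  1.0]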
||
linyouhappy/kongkongxiyou
|
7a69b2913eb29f4be77f9a62fb90cdd72c4160f1
|
cocosjs/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py
|
python
|
Cursor.enum_type
|
(self)
|
return self._enum_type
|
Return the integer type of an enum declaration.
Returns a Type corresponding to an integer. If the cursor is not for an
enum, this raises an exception.
|
Return the integer type of an enum declaration.
|
[
"Return",
"the",
"integer",
"type",
"of",
"an",
"enum",
"declaration",
"."
] |
def enum_type(self):
"""Return the integer type of an enum declaration.
Returns a Type corresponding to an integer. If the cursor is not for an
    enum, this raises an exception.
"""
if not hasattr(self, '_enum_type'):
assert self.kind == CursorKind.ENUM_DECL
self._enum_type = conf.lib.clang_getEnumDeclIntegerType(self)
return self._enum_type
|
[
"def",
"enum_type",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_enum_type'",
")",
":",
"assert",
"self",
".",
"kind",
"==",
"CursorKind",
".",
"ENUM_DECL",
"self",
".",
"_enum_type",
"=",
"conf",
".",
"lib",
".",
"clang_getEnumDeclIntegerType",
"(",
"self",
")",
"return",
"self",
".",
"_enum_type"
] |
https://github.com/linyouhappy/kongkongxiyou/blob/7a69b2913eb29f4be77f9a62fb90cdd72c4160f1/cocosjs/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py#L1349-L1359
|
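The hasattr guard above is plain lazy attribute caching: compute on first access, memoize, return the stored value afterwards. The pattern in miniature (the class and lookup are invented):

class Cursor:
    @property
    def enum_type(self):
        if not hasattr(self, "_enum_type"):
            self._enum_type = self._expensive_lookup()   # first access only
        return self._enum_type

    def _expensive_lookup(self):
        print("computed once")
        return "Int"

c = Cursor()
c.enum_type
c.enum_type    # "computed once" was printed a single time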
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/numpy/py2/numpy/ma/extras.py
|
python
|
mask_cols
|
(a, axis=None)
|
return mask_rowcols(a, 1)
|
Mask columns of a 2D array that contain masked values.
This function is a shortcut to ``mask_rowcols`` with `axis` equal to 1.
See Also
--------
mask_rowcols : Mask rows and/or columns of a 2D array.
masked_where : Mask where a condition is met.
Examples
--------
>>> import numpy.ma as ma
>>> a = np.zeros((3, 3), dtype=int)
>>> a[1, 1] = 1
>>> a
array([[0, 0, 0],
[0, 1, 0],
[0, 0, 0]])
>>> a = ma.masked_equal(a, 1)
>>> a
masked_array(data =
[[0 0 0]
[0 -- 0]
[0 0 0]],
mask =
[[False False False]
[False True False]
[False False False]],
fill_value=999999)
>>> ma.mask_cols(a)
masked_array(data =
[[0 -- 0]
[0 -- 0]
[0 -- 0]],
mask =
[[False True False]
[False True False]
[False True False]],
fill_value=999999)
|
Mask columns of a 2D array that contain masked values.
|
[
"Mask",
"columns",
"of",
"a",
"2D",
"array",
"that",
"contain",
"masked",
"values",
"."
] |
def mask_cols(a, axis=None):
"""
Mask columns of a 2D array that contain masked values.
This function is a shortcut to ``mask_rowcols`` with `axis` equal to 1.
See Also
--------
mask_rowcols : Mask rows and/or columns of a 2D array.
masked_where : Mask where a condition is met.
Examples
--------
>>> import numpy.ma as ma
>>> a = np.zeros((3, 3), dtype=int)
>>> a[1, 1] = 1
>>> a
array([[0, 0, 0],
[0, 1, 0],
[0, 0, 0]])
>>> a = ma.masked_equal(a, 1)
>>> a
masked_array(data =
[[0 0 0]
[0 -- 0]
[0 0 0]],
mask =
[[False False False]
[False True False]
[False False False]],
fill_value=999999)
>>> ma.mask_cols(a)
masked_array(data =
[[0 -- 0]
[0 -- 0]
[0 -- 0]],
mask =
[[False True False]
[False True False]
[False True False]],
fill_value=999999)
"""
return mask_rowcols(a, 1)
|
[
"def",
"mask_cols",
"(",
"a",
",",
"axis",
"=",
"None",
")",
":",
"return",
"mask_rowcols",
"(",
"a",
",",
"1",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/ma/extras.py#L963-L1006
|
|
pmq20/node-packer
|
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
|
lts/tools/inspector_protocol/jinja2/environment.py
|
python
|
Environment.make_globals
|
(self, d)
|
return dict(self.globals, **d)
|
Return a dict for the globals.
|
Return a dict for the globals.
|
[
"Return",
"a",
"dict",
"for",
"the",
"globals",
"."
] |
def make_globals(self, d):
"""Return a dict for the globals."""
if not d:
return self.globals
return dict(self.globals, **d)
|
[
"def",
"make_globals",
"(",
"self",
",",
"d",
")",
":",
"if",
"not",
"d",
":",
"return",
"self",
".",
"globals",
"return",
"dict",
"(",
"self",
".",
"globals",
",",
"*",
"*",
"d",
")"
] |
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/tools/inspector_protocol/jinja2/environment.py#L882-L886
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/supertooltip.py
|
python
|
ToolTipWindowBase.AlphaCycle
|
(self, event)
|
Handles the ``wx.EVT_TIMER`` event for :class:`SuperToolTip`.
:param `event`: a :class:`TimerEvent` event to be processed.
|
Handles the ``wx.EVT_TIMER`` event for :class:`SuperToolTip`.
|
[
"Handles",
"the",
"wx",
".",
"EVT_TIMER",
"event",
"for",
":",
"class",
":",
"SuperToolTip",
"."
] |
def AlphaCycle(self, event):
"""
Handles the ``wx.EVT_TIMER`` event for :class:`SuperToolTip`.
:param `event`: a :class:`TimerEvent` event to be processed.
"""
# Increase (or decrease) the alpha channel
self.amount += self.delta
if self.amount > 255 or self.amount < 0:
# We're done, stop the timer
self._alphaTimer.Stop()
if self.amount < 0:
# Destroy the SuperToolTip, we are fading out
self.Destroy()
return
# Make the SuperToolTip more or less transparent
self.MakeWindowTransparent(self.amount)
if not self.IsShown():
self.Show()
|
[
"def",
"AlphaCycle",
"(",
"self",
",",
"event",
")",
":",
"# Increase (or decrease) the alpha channel",
"self",
".",
"amount",
"+=",
"self",
".",
"delta",
"if",
"self",
".",
"amount",
">",
"255",
"or",
"self",
".",
"amount",
"<",
"0",
":",
"# We're done, stop the timer",
"self",
".",
"_alphaTimer",
".",
"Stop",
"(",
")",
"if",
"self",
".",
"amount",
"<",
"0",
":",
"# Destroy the SuperToolTip, we are fading out",
"self",
".",
"Destroy",
"(",
")",
"return",
"# Make the SuperToolTip more or less transparent",
"self",
".",
"MakeWindowTransparent",
"(",
"self",
".",
"amount",
")",
"if",
"not",
"self",
".",
"IsShown",
"(",
")",
":",
"self",
".",
"Show",
"(",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/supertooltip.py#L613-L634
|
||
klzgrad/naiveproxy
|
ed2c513637c77b18721fe428d7ed395b4d284c83
|
src/third_party/depot_tools/cpplint.py
|
python
|
IsErrorSuppressedByNolint
|
(category, linenum)
|
return (_global_error_suppressions.get(category, False) or
linenum in _error_suppressions.get(category, set()) or
linenum in _error_suppressions.get(None, set()))
|
Returns true if the specified error category is suppressed on this line.
Consults the global error_suppressions map populated by
ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions.
Args:
category: str, the category of the error.
linenum: int, the current line number.
Returns:
bool, True iff the error should be suppressed due to a NOLINT comment or
global suppression.
|
Returns true if the specified error category is suppressed on this line.
|
[
"Returns",
"true",
"if",
"the",
"specified",
"error",
"category",
"is",
"suppressed",
"on",
"this",
"line",
"."
] |
def IsErrorSuppressedByNolint(category, linenum):
"""Returns true if the specified error category is suppressed on this line.
Consults the global error_suppressions map populated by
ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions.
Args:
category: str, the category of the error.
linenum: int, the current line number.
Returns:
bool, True iff the error should be suppressed due to a NOLINT comment or
global suppression.
"""
return (_global_error_suppressions.get(category, False) or
linenum in _error_suppressions.get(category, set()) or
linenum in _error_suppressions.get(None, set()))
|
[
"def",
"IsErrorSuppressedByNolint",
"(",
"category",
",",
"linenum",
")",
":",
"return",
"(",
"_global_error_suppressions",
".",
"get",
"(",
"category",
",",
"False",
")",
"or",
"linenum",
"in",
"_error_suppressions",
".",
"get",
"(",
"category",
",",
"set",
"(",
")",
")",
"or",
"linenum",
"in",
"_error_suppressions",
".",
"get",
"(",
"None",
",",
"set",
"(",
")",
")",
")"
] |
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/third_party/depot_tools/cpplint.py#L604-L619
|
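A self-contained sketch of the lookup logic with stand-in suppression maps (in the real module these are populated from NOLINT comments and global suppressions):

_global_error_suppressions = {"build/include": True}
_error_suppressions = {"whitespace/tab": {12}, None: {40}}  # None = all categories

def is_suppressed(category, linenum):
    return (_global_error_suppressions.get(category, False) or
            linenum in _error_suppressions.get(category, set()) or
            linenum in _error_suppressions.get(None, set()))

print(is_suppressed("build/include", 1))    # True  (global suppression)
print(is_suppressed("whitespace/tab", 12))  # True  (NOLINT on line 12)
print(is_suppressed("whitespace/tab", 40))  # True  (NOLINT(*) on line 40)
print(is_suppressed("whitespace/tab", 2))   # False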
|
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Source/ThirdParty/CEF3/cef_source/tools/cef_parser.py
|
python
|
obj_analysis.get_name
|
(self)
|
return self.name
|
Return the name.
|
Return the name.
|
[
"Return",
"the",
"name",
"."
] |
def get_name(self):
""" Return the name. """
return self.name
|
[
"def",
"get_name",
"(",
"self",
")",
":",
"return",
"self",
".",
"name"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/cef_source/tools/cef_parser.py#L1733-L1735
|
|
cvxpy/cvxpy
|
5165b4fb750dfd237de8659383ef24b4b2e33aaf
|
cvxpy/atoms/lambda_sum_largest.py
|
python
|
lambda_sum_largest.numeric
|
(self, values)
|
return sum_largest(eigs, self.k).value
|
Returns the largest eigenvalue of A.
Requires that A be symmetric.
|
Returns the largest eigenvalue of A.
|
[
"Returns",
"the",
"largest",
"eigenvalue",
"of",
"A",
"."
] |
def numeric(self, values):
"""Returns the largest eigenvalue of A.
Requires that A be symmetric.
"""
eigs = LA.eigvalsh(values[0])
return sum_largest(eigs, self.k).value
|
[
"def",
"numeric",
"(",
"self",
",",
"values",
")",
":",
"eigs",
"=",
"LA",
".",
"eigvalsh",
"(",
"values",
"[",
"0",
"]",
")",
"return",
"sum_largest",
"(",
"eigs",
",",
"self",
".",
"k",
")",
".",
"value"
] |
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/atoms/lambda_sum_largest.py#L42-L48
|
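The numeric path is plain numpy: symmetric eigenvalues, then the sum of the k largest. A standalone sketch of the same computation:

import numpy as np

A = np.array([[3.0, 0.0], [0.0, 1.0]])
eigs = np.linalg.eigvalsh(A)     # same role as LA.eigvalsh(values[0])
k = 1
print(np.sort(eigs)[-k:].sum())  # 3.0 -- the largest eigenvalue when k=1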
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/propgrid.py
|
python
|
SystemColourProperty.ColourToString
|
(*args, **kwargs)
|
return _propgrid.SystemColourProperty_ColourToString(*args, **kwargs)
|
ColourToString(self, Colour col, int index, int argFlags=0) -> String
|
ColourToString(self, Colour col, int index, int argFlags=0) -> String
|
[
"ColourToString",
"(",
"self",
"Colour",
"col",
"int",
"index",
"int",
"argFlags",
"=",
"0",
")",
"-",
">",
"String"
] |
def ColourToString(*args, **kwargs):
"""ColourToString(self, Colour col, int index, int argFlags=0) -> String"""
return _propgrid.SystemColourProperty_ColourToString(*args, **kwargs)
|
[
"def",
"ColourToString",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"SystemColourProperty_ColourToString",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/propgrid.py#L3310-L3312
|
|
bareos/bareos
|
56a10bb368b0a81e977bb51304033fe49d59efb0
|
contrib/fd-plugins/options-plugin-sample/bareos-fd-file-interact.py
|
python
|
load_bareos_plugin
|
(context, plugindef)
|
return bRCs['bRC_OK']
|
This function is called by the Bareos-FD to load the plugin
We use it to instantiate the plugin class
|
This function is called by the Bareos-FD to load the plugin
We use it to instantiate the plugin class
|
[
"This",
"function",
"is",
"called",
"by",
"the",
"Bareos",
"-",
"FD",
"to",
"load",
"the",
"plugin",
"We",
"use",
"it",
"to",
"intantiate",
"the",
"plugin",
"class"
] |
def load_bareos_plugin(context, plugindef):
'''
This function is called by the Bareos-FD to load the plugin
We use it to instantiate the plugin class
'''
# BareosFdWrapper.bareos_fd_plugin_object is the module attribute that holds the plugin class object
BareosFdWrapper.bareos_fd_plugin_object = BareosFdPluginFileInteract (context, plugindef);
return bRCs['bRC_OK'];
|
[
"def",
"load_bareos_plugin",
"(",
"context",
",",
"plugindef",
")",
":",
"# BareosFdWrapper.bareos_fd_plugin_object is the module attribute that holds the plugin class object",
"BareosFdWrapper",
".",
"bareos_fd_plugin_object",
"=",
"BareosFdPluginFileInteract",
"(",
"context",
",",
"plugindef",
")",
"return",
"bRCs",
"[",
"'bRC_OK'",
"]"
] |
https://github.com/bareos/bareos/blob/56a10bb368b0a81e977bb51304033fe49d59efb0/contrib/fd-plugins/options-plugin-sample/bareos-fd-file-interact.py#L17-L24
|
|
oracle/graaljs
|
36a56e8e993d45fc40939a3a4d9c0c24990720f1
|
graal-nodejs/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
|
python
|
EncodePOSIXShellList
|
(list)
|
return " ".join(encoded_arguments)
|
Encodes |list| suitably for consumption by POSIX shells.
Returns EncodePOSIXShellArgument for each item in list, and joins them
together using the space character as an argument separator.
|
Encodes |list| suitably for consumption by POSIX shells.
|
[
"Encodes",
"|list|",
"suitably",
"for",
"consumption",
"by",
"POSIX",
"shells",
"."
] |
def EncodePOSIXShellList(list):
"""Encodes |list| suitably for consumption by POSIX shells.
Returns EncodePOSIXShellArgument for each item in list, and joins them
together using the space character as an argument separator.
"""
encoded_arguments = []
for argument in list:
encoded_arguments.append(EncodePOSIXShellArgument(argument))
return " ".join(encoded_arguments)
|
[
"def",
"EncodePOSIXShellList",
"(",
"list",
")",
":",
"encoded_arguments",
"=",
"[",
"]",
"for",
"argument",
"in",
"list",
":",
"encoded_arguments",
".",
"append",
"(",
"EncodePOSIXShellArgument",
"(",
"argument",
")",
")",
"return",
"\" \"",
".",
"join",
"(",
"encoded_arguments",
")"
] |
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py#L290-L300
|
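The stdlib offers the same behavior; a sketch using shlex.quote as a stand-in for EncodePOSIXShellArgument (which is defined elsewhere in gyp/common.py):

import shlex

argv = ["echo", "hello world", "$HOME"]
print(" ".join(shlex.quote(a) for a in argv))
# echo 'hello world' '$HOME'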
|
albertz/openlierox
|
d316c14a8eb57848ef56e9bfa7b23a56f694a51b
|
tools/DedicatedServerVideo/gdata/gauth.py
|
python
|
get_client_login_token_string
|
(http_body)
|
return None
|
Returns the token value for a ClientLoginToken.
Reads the token from the server's response to a Client Login request and
creates the token value string to use in requests.
Args:
http_body: str The body of the server's HTTP response to a Client Login
request
Returns:
The token value string for a ClientLoginToken.
|
Returns the token value for a ClientLoginToken.
|
[
"Returns",
"the",
"token",
"value",
"for",
"a",
"ClientLoginToken",
"."
] |
def get_client_login_token_string(http_body):
"""Returns the token value for a ClientLoginToken.
Reads the token from the server's response to a Client Login request and
creates the token value string to use in requests.
Args:
http_body: str The body of the server's HTTP response to a Client Login
request
Returns:
The token value string for a ClientLoginToken.
"""
for response_line in http_body.splitlines():
if response_line.startswith('Auth='):
# Strip off the leading Auth= and return the Authorization value.
return response_line[5:]
return None
|
[
"def",
"get_client_login_token_string",
"(",
"http_body",
")",
":",
"for",
"response_line",
"in",
"http_body",
".",
"splitlines",
"(",
")",
":",
"if",
"response_line",
".",
"startswith",
"(",
"'Auth='",
")",
":",
"# Strip off the leading Auth= and return the Authorization value.",
"return",
"response_line",
"[",
"5",
":",
"]",
"return",
"None"
] |
https://github.com/albertz/openlierox/blob/d316c14a8eb57848ef56e9bfa7b23a56f694a51b/tools/DedicatedServerVideo/gdata/gauth.py#L108-L125
|
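The parsing is a line scan for the Auth= prefix; a sketch against a made-up response body (the field values here are placeholders, not real tokens):

body = "SID=abc123\nLSID=def456\nAuth=secret-token"
token = None
for line in body.splitlines():
    if line.startswith("Auth="):
        token = line[5:]  # strip the leading 'Auth='
print(token)  # secret-token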
|
apache/mesos
|
97d9a4063332aae3825d78de71611657e05cf5e2
|
support/verify-reviews.py
|
python
|
parse_time
|
(timestamp)
|
return datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%SZ")
|
Parse time in ReviewBoard date format.
|
Parse time in ReviewBoard date format.
|
[
"Parse",
"time",
"in",
"ReviewBoard",
"date",
"format",
"."
] |
def parse_time(timestamp):
"""Parse time in ReviewBoard date format."""
return datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%SZ")
|
[
"def",
"parse_time",
"(",
"timestamp",
")",
":",
"return",
"datetime",
".",
"strptime",
"(",
"timestamp",
",",
"\"%Y-%m-%dT%H:%M:%SZ\"",
")"
] |
https://github.com/apache/mesos/blob/97d9a4063332aae3825d78de71611657e05cf5e2/support/verify-reviews.py#L117-L119
|
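A quick check of the format string against a sample ReviewBoard-style timestamp:

from datetime import datetime

print(datetime.strptime("2016-04-01T12:30:45Z", "%Y-%m-%dT%H:%M:%SZ"))
# 2016-04-01 12:30:45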
|
kushview/Element
|
1cc16380caa2ab79461246ba758b9de1f46db2a5
|
waflib/Tools/icpc.py
|
python
|
find_icpc
|
(conf)
|
Finds the program icpc, and execute it to ensure it really is icpc
|
Finds the program icpc, and execute it to ensure it really is icpc
|
[
"Finds",
"the",
"program",
"icpc",
"and",
"execute",
"it",
"to",
"ensure",
"it",
"really",
"is",
"icpc"
] |
def find_icpc(conf):
"""
Finds the program icpc, and execute it to ensure it really is icpc
"""
cxx = conf.find_program('icpc', var='CXX')
conf.get_cc_version(cxx, icc=True)
conf.env.CXX_NAME = 'icc'
|
[
"def",
"find_icpc",
"(",
"conf",
")",
":",
"cxx",
"=",
"conf",
".",
"find_program",
"(",
"'icpc'",
",",
"var",
"=",
"'CXX'",
")",
"conf",
".",
"get_cc_version",
"(",
"cxx",
",",
"icc",
"=",
"True",
")",
"conf",
".",
"env",
".",
"CXX_NAME",
"=",
"'icc'"
] |
https://github.com/kushview/Element/blob/1cc16380caa2ab79461246ba758b9de1f46db2a5/waflib/Tools/icpc.py#L14-L20
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
tools/grit/grit/format/policy_templates/writers/template_writer.py
|
python
|
TemplateWriter.CanBeMandatory
|
(self, policy)
|
return policy.get('features', {}).get('can_be_mandatory', True)
|
Checks if the given policy can be mandatory.
|
Checks if the given policy can be mandatory.
|
[
"Checks",
"if",
"the",
"given",
"policy",
"can",
"be",
"mandatory",
"."
] |
def CanBeMandatory(self, policy):
'''Checks if the given policy can be mandatory.'''
return policy.get('features', {}).get('can_be_mandatory', True)
|
[
"def",
"CanBeMandatory",
"(",
"self",
",",
"policy",
")",
":",
"return",
"policy",
".",
"get",
"(",
"'features'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'can_be_mandatory'",
",",
"True",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/grit/grit/format/policy_templates/writers/template_writer.py#L90-L92
|
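The chained dict.get defaults to True when either the 'features' map or the flag itself is absent; a sketch with hand-built policy dicts:

policy = {"name": "ExamplePolicy", "features": {"can_be_mandatory": False}}
print(policy.get("features", {}).get("can_be_mandatory", True))             # False
print({"name": "Other"}.get("features", {}).get("can_be_mandatory", True))  # True (default)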
|
vslavik/poedit
|
f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a
|
deps/boost/tools/build/src/build/project.py
|
python
|
ProjectRegistry.push_current
|
(self, project)
|
Temporarily changes the current project to 'project'. Should
be followed by 'pop-current'.
|
Temporarily changes the current project to 'project'. Should
be followed by 'pop-current'.
|
[
"Temporary",
"changes",
"the",
"current",
"project",
"to",
"project",
".",
"Should",
"be",
"followed",
"by",
"pop",
"-",
"current",
"."
] |
def push_current(self, project):
"""Temporary changes the current project to 'project'. Should
be followed by 'pop-current'."""
if __debug__:
from .targets import ProjectTarget
assert isinstance(project, ProjectTarget)
self.saved_current_project.append(self.current_project)
self.current_project = project
|
[
"def",
"push_current",
"(",
"self",
",",
"project",
")",
":",
"if",
"__debug__",
":",
"from",
".",
"targets",
"import",
"ProjectTarget",
"assert",
"isinstance",
"(",
"project",
",",
"ProjectTarget",
")",
"self",
".",
"saved_current_project",
".",
"append",
"(",
"self",
".",
"current_project",
")",
"self",
".",
"current_project",
"=",
"project"
] |
https://github.com/vslavik/poedit/blob/f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a/deps/boost/tools/build/src/build/project.py#L572-L579
|
||
FreeCAD/FreeCAD
|
ba42231b9c6889b89e064d6d563448ed81e376ec
|
src/App/FreeCADInit.py
|
python
|
FCADLogger.error
|
(self,msg,*args,**kargs)
|
"Error" level log printer
* msg: message string. May contain new style Python string formatter.
This function accepts additional positional and keyword arguments,
which are forwarded to string.format() to generate the logging
message. It is strongly recommended to not directly use Python
string formatting, but pass additional arguments here, because the
printer can skip string evaluation in case the logging level is
disabled.
|
"Error" level log printer
|
[
"Error",
"level",
"log",
"printer"
] |
def error(self,msg,*args,**kargs):
'''"Error" level log printer
* msg: message string. May contain new style Python string formatter.
This function accepts additional positional and keyword arguments,
which are forwarded to string.format() to generate the logging
message. It is strongly recommended to not directly use Python
string formatting, but pass additional arguments here, because the
printer can skip string evaluation in case the logging level is
disabled.
'''
if self._isEnabledFor(0):
frame = kargs.pop('frame',0)+1
self._log(0,msg,frame,args,kargs)
|
[
"def",
"error",
"(",
"self",
",",
"msg",
",",
"*",
"args",
",",
"*",
"*",
"kargs",
")",
":",
"if",
"self",
".",
"_isEnabledFor",
"(",
"0",
")",
":",
"frame",
"=",
"kargs",
".",
"pop",
"(",
"'frame'",
",",
"0",
")",
"+",
"1",
"self",
".",
"_log",
"(",
"0",
",",
"msg",
",",
"frame",
",",
"args",
",",
"kargs",
")"
] |
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/App/FreeCADInit.py#L397-L411
|
||
giuspen/cherrytree
|
84712f206478fcf9acf30174009ad28c648c6344
|
pygtk2/modules/exports.py
|
python
|
rgb_any_to_24
|
(rgb_in)
|
return rgb_in
|
Convert any RGB to RRGGBB if needed
|
Convert any RGB to RRGGBB if needed
|
[
"Convert",
"any",
"RGB",
"to",
"RRGGBB",
"if",
"needed"
] |
def rgb_any_to_24(rgb_in):
"""Convert any RGB to RRGGBB if needed"""
if len(rgb_in) == 12:
r = int(rgb_in[:4], 16)
g = int(rgb_in[4:8], 16)
b = int(rgb_in[8:], 16)
r >>= 8
g >>= 8
b >>= 8
return "%.2x%.2x%.2x" % (r, g, b)
if len(rgb_in) == 6: return rgb_in
if len(rgb_in) == 3: return 2*rgb_in[0]+2*rgb_in[1]+2*rgb_in[2]
print "! rgb_any_to_24(%s)" % rgb_in
return rgb_in
|
[
"def",
"rgb_any_to_24",
"(",
"rgb_in",
")",
":",
"if",
"len",
"(",
"rgb_in",
")",
"==",
"12",
":",
"r",
"=",
"int",
"(",
"rgb_in",
"[",
":",
"4",
"]",
",",
"16",
")",
"g",
"=",
"int",
"(",
"rgb_in",
"[",
"4",
":",
"8",
"]",
",",
"16",
")",
"b",
"=",
"int",
"(",
"rgb_in",
"[",
"8",
":",
"]",
",",
"16",
")",
"r",
">>=",
"8",
"g",
">>=",
"8",
"b",
">>=",
"8",
"return",
"\"%.2x%.2x%.2x\"",
"%",
"(",
"r",
",",
"g",
",",
"b",
")",
"if",
"len",
"(",
"rgb_in",
")",
"==",
"6",
":",
"return",
"rgb_in",
"if",
"len",
"(",
"rgb_in",
")",
"==",
"3",
":",
"return",
"2",
"*",
"rgb_in",
"[",
"0",
"]",
"+",
"2",
"*",
"rgb_in",
"[",
"1",
"]",
"+",
"2",
"*",
"rgb_in",
"[",
"2",
"]",
"print",
"\"! rgb_any_to_24(%s)\"",
"%",
"rgb_in",
"return",
"rgb_in"
] |
https://github.com/giuspen/cherrytree/blob/84712f206478fcf9acf30174009ad28c648c6344/pygtk2/modules/exports.py#L45-L58
|
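The 12-digit branch keeps the high byte of each 16-bit channel and the 3-digit branch doubles each nibble; a Python 3 sketch of both (the function above is Python 2, note the print statement):

def rgb48_to_rgb24(rgb_in):
    # take the high byte of each 16-bit channel
    r, g, b = (int(rgb_in[i:i + 4], 16) >> 8 for i in (0, 4, 8))
    return "%.2x%.2x%.2x" % (r, g, b)

print(rgb48_to_rgb24("ffff00008000"))  # ff0080
print(2 * "a" + 2 * "b" + 2 * "c")     # aabbcc -- the len == 3 case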
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
tools/auto_bisect/bisect_utils.py
|
python
|
RunGClientAndSync
|
(revisions=None, cwd=None)
|
return RunGClient(params, cwd=cwd)
|
Runs gclient and does a normal sync.
Args:
revisions: List of revisions that need to be synced.
E.g., "src@2ae43f...", "src/third_party/webkit@asr1234" etc.
cwd: Working directory to run from.
Returns:
The return code of the call.
|
Runs gclient and does a normal sync.
|
[
"Runs",
"gclient",
"and",
"does",
"a",
"normal",
"sync",
"."
] |
def RunGClientAndSync(revisions=None, cwd=None):
"""Runs gclient and does a normal sync.
Args:
revisions: List of revisions that need to be synced.
E.g., "src@2ae43f...", "src/third_party/webkit@asr1234" etc.
cwd: Working directory to run from.
Returns:
The return code of the call.
"""
params = ['sync', '--verbose', '--nohooks', '--force',
'--delete_unversioned_trees']
if revisions is not None:
for revision in revisions:
if revision is not None:
params.extend(['--revision', revision])
return RunGClient(params, cwd=cwd)
|
[
"def",
"RunGClientAndSync",
"(",
"revisions",
"=",
"None",
",",
"cwd",
"=",
"None",
")",
":",
"params",
"=",
"[",
"'sync'",
",",
"'--verbose'",
",",
"'--nohooks'",
",",
"'--force'",
",",
"'--delete_unversioned_trees'",
"]",
"if",
"revisions",
"is",
"not",
"None",
":",
"for",
"revision",
"in",
"revisions",
":",
"if",
"revision",
"is",
"not",
"None",
":",
"params",
".",
"extend",
"(",
"[",
"'--revision'",
",",
"revision",
"]",
")",
"return",
"RunGClient",
"(",
"params",
",",
"cwd",
"=",
"cwd",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/auto_bisect/bisect_utils.py#L358-L375
|
|
oracle/graaljs
|
36a56e8e993d45fc40939a3a4d9c0c24990720f1
|
graal-nodejs/tools/gyp/pylib/gyp/generator/ninja.py
|
python
|
NinjaWriter.GypPathToUniqueOutput
|
(self, path, qualified=True)
|
return os.path.normpath(
os.path.join(obj, self.base_dir, path_dir, path_basename)
)
|
Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions.
|
Translate a gyp path to a ninja path for writing output.
|
[
"Translate",
"a",
"gyp",
"path",
"to",
"a",
"ninja",
"path",
"for",
"writing",
"output",
"."
] |
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith("$"), path
# Translate the path following this scheme:
# Input: foo/bar.gyp, target targ, references baz/out.o
# Output: obj/foo/baz/targ.out.o (if qualified)
# obj/foo/baz/out.o (otherwise)
# (and obj.host instead of obj for cross-compiles)
#
# Why this scheme and not some other one?
# 1) for a given input, you can compute all derived outputs by matching
# its path, even if the input is brought via a gyp file with '..'.
# 2) simple files like libraries and stamps have a simple filename.
obj = "obj"
if self.toolset != "target":
obj += "." + self.toolset
path_dir, path_basename = os.path.split(path)
assert not os.path.isabs(path_dir), (
"'%s' can not be absolute path (see crbug.com/462153)." % path_dir
)
if qualified:
path_basename = self.name + "." + path_basename
return os.path.normpath(
os.path.join(obj, self.base_dir, path_dir, path_basename)
)
|
[
"def",
"GypPathToUniqueOutput",
"(",
"self",
",",
"path",
",",
"qualified",
"=",
"True",
")",
":",
"path",
"=",
"self",
".",
"ExpandSpecial",
"(",
"path",
")",
"assert",
"not",
"path",
".",
"startswith",
"(",
"\"$\"",
")",
",",
"path",
"# Translate the path following this scheme:",
"# Input: foo/bar.gyp, target targ, references baz/out.o",
"# Output: obj/foo/baz/targ.out.o (if qualified)",
"# obj/foo/baz/out.o (otherwise)",
"# (and obj.host instead of obj for cross-compiles)",
"#",
"# Why this scheme and not some other one?",
"# 1) for a given input, you can compute all derived outputs by matching",
"# its path, even if the input is brought via a gyp file with '..'.",
"# 2) simple files like libraries and stamps have a simple filename.",
"obj",
"=",
"\"obj\"",
"if",
"self",
".",
"toolset",
"!=",
"\"target\"",
":",
"obj",
"+=",
"\".\"",
"+",
"self",
".",
"toolset",
"path_dir",
",",
"path_basename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"assert",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"path_dir",
")",
",",
"(",
"\"'%s' can not be absolute path (see crbug.com/462153).\"",
"%",
"path_dir",
")",
"if",
"qualified",
":",
"path_basename",
"=",
"self",
".",
"name",
"+",
"\".\"",
"+",
"path_basename",
"return",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"obj",
",",
"self",
".",
"base_dir",
",",
"path_dir",
",",
"path_basename",
")",
")"
] |
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/tools/gyp/pylib/gyp/generator/ninja.py#L320-L356
|
|
cvxpy/cvxpy
|
5165b4fb750dfd237de8659383ef24b4b2e33aaf
|
cvxpy/atoms/gmatmul.py
|
python
|
gmatmul.sign_from_args
|
(self)
|
return (True, False)
|
Returns sign (is positive, is negative) of the expression.
|
Returns sign (is positive, is negative) of the expression.
|
[
"Returns",
"sign",
"(",
"is",
"positive",
"is",
"negative",
")",
"of",
"the",
"expression",
"."
] |
def sign_from_args(self) -> Tuple[bool, bool]:
"""Returns sign (is positive, is negative) of the expression.
"""
return (True, False)
|
[
"def",
"sign_from_args",
"(",
"self",
")",
"->",
"Tuple",
"[",
"bool",
",",
"bool",
"]",
":",
"return",
"(",
"True",
",",
"False",
")"
] |
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/atoms/gmatmul.py#L94-L97
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/indexes/multi.py
|
python
|
MultiIndex.__reduce__
|
(self)
|
return ibase._new_Index, (type(self), d), None
|
Necessary for making this object picklable
|
Necessary for making this object picklable
|
[
"Necessary",
"for",
"making",
"this",
"object",
"picklable"
] |
def __reduce__(self):
"""Necessary for making this object picklable"""
d = dict(
levels=list(self.levels),
codes=list(self.codes),
sortorder=self.sortorder,
names=list(self.names),
)
return ibase._new_Index, (type(self), d), None
|
[
"def",
"__reduce__",
"(",
"self",
")",
":",
"d",
"=",
"dict",
"(",
"levels",
"=",
"list",
"(",
"self",
".",
"levels",
")",
",",
"codes",
"=",
"list",
"(",
"self",
".",
"codes",
")",
",",
"sortorder",
"=",
"self",
".",
"sortorder",
",",
"names",
"=",
"list",
"(",
"self",
".",
"names",
")",
",",
")",
"return",
"ibase",
".",
"_new_Index",
",",
"(",
"type",
"(",
"self",
")",
",",
"d",
")",
",",
"None"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/indexes/multi.py#L1895-L1903
|
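The payoff is a plain pickle round-trip; a quick check (requires pandas):

import pickle
import pandas as pd

mi = pd.MultiIndex.from_tuples([("a", 1), ("b", 2)], names=["letter", "number"])
print(pickle.loads(pickle.dumps(mi)).equals(mi))  # True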
|
google/syzygy
|
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
|
third_party/numpy/files/numpy/lib/twodim_base.py
|
python
|
triu_indices_from
|
(arr, k=0)
|
return triu_indices(arr.shape[0],k)
|
Return the indices for the upper-triangle of an (n, n) array.
See `triu_indices` for full details.
Parameters
----------
arr : array_like
The indices will be valid for square arrays whose dimensions are
the same as arr.
k : int, optional
Diagonal offset (see `triu` for details).
See Also
--------
triu_indices, triu
Notes
-----
.. versionadded:: 1.4.0
|
Return the indices for the upper-triangle of an (n, n) array.
|
[
"Return",
"the",
"indices",
"for",
"the",
"upper",
"-",
"triangle",
"of",
"an",
"(",
"n",
"n",
")",
"array",
"."
] |
def triu_indices_from(arr, k=0):
"""
Return the indices for the upper-triangle of an (n, n) array.
See `triu_indices` for full details.
Parameters
----------
arr : array_like
The indices will be valid for square arrays whose dimensions are
the same as arr.
k : int, optional
Diagonal offset (see `triu` for details).
See Also
--------
triu_indices, triu
Notes
-----
.. versionadded:: 1.4.0
"""
if not (arr.ndim == 2 and arr.shape[0] == arr.shape[1]):
raise ValueError("input array must be 2-d and square")
return triu_indices(arr.shape[0],k)
|
[
"def",
"triu_indices_from",
"(",
"arr",
",",
"k",
"=",
"0",
")",
":",
"if",
"not",
"(",
"arr",
".",
"ndim",
"==",
"2",
"and",
"arr",
".",
"shape",
"[",
"0",
"]",
"==",
"arr",
".",
"shape",
"[",
"1",
"]",
")",
":",
"raise",
"ValueError",
"(",
"\"input array must be 2-d and square\"",
")",
"return",
"triu_indices",
"(",
"arr",
".",
"shape",
"[",
"0",
"]",
",",
"k",
")"
] |
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/lib/twodim_base.py#L864-L889
|
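A short usage example; with k=1 the indices select the strictly upper-triangular entries:

import numpy as np

a = np.arange(9).reshape(3, 3)
iu = np.triu_indices_from(a, k=1)
print(a[iu])  # [1 2 5]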
|
facebook/hhvm
|
cd8d20db628e93583fffa0194aaca937af9b2692
|
hphp/tools/gdb/hhbc.py
|
python
|
op_table
|
(name)
|
Get the symbol `name' as an int8_t[].
|
Get the symbol `name' as an int8_t[].
|
[
"Get",
"the",
"symbol",
"name",
"as",
"an",
"int8_t",
"[]",
"."
] |
def op_table(name):
"""Get the symbol `name' as an int8_t[]."""
try:
return V(name).address.cast(T('int8_t').pointer())
except:
# for some reason some of these tables have no type
# information. for those cases, just take the address and
# cast to a pointer. Note that this *doesn't* work for the
# tables with types, because gdb objects to the '&'!
return gdb.parse_and_eval("(unsigned char*)&'%s'" % (name))
|
[
"def",
"op_table",
"(",
"name",
")",
":",
"try",
":",
"return",
"V",
"(",
"name",
")",
".",
"address",
".",
"cast",
"(",
"T",
"(",
"'int8_t'",
")",
".",
"pointer",
"(",
")",
")",
"except",
":",
"# for some reason some of these tables have no type",
"# information. for those cases, just take the address and",
"# cast to a pointer. Note that this *doesn't* work for the",
"# tables with types, because gdb objects to the '&'!",
"return",
"gdb",
".",
"parse_and_eval",
"(",
"\"(unsigned char*)&'%s'\"",
"%",
"(",
"name",
")",
")"
] |
https://github.com/facebook/hhvm/blob/cd8d20db628e93583fffa0194aaca937af9b2692/hphp/tools/gdb/hhbc.py#L27-L36
|
||
echronos/echronos
|
c996f1d2c8af6c6536205eb319c1bf1d4d84569c
|
external_tools/ply_info/example/ansic/cparse.py
|
python
|
p_jump_statement_4
|
(t)
|
jump_statement : RETURN expression_opt SEMI
|
jump_statement : RETURN expression_opt SEMI
|
[
"jump_statement",
":",
"RETURN",
"expression_opt",
"SEMI"
] |
def p_jump_statement_4(t):
'jump_statement : RETURN expression_opt SEMI'
pass
|
[
"def",
"p_jump_statement_4",
"(",
"t",
")",
":",
"pass"
] |
https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/ply_info/example/ansic/cparse.py#L553-L555
|
||
PaddlePaddle/Anakin
|
5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730
|
tools/external_converter_v2/parser/onnx/med_trans_util.py
|
python
|
np_2_ak_tensor
|
(np_tensor)
|
return ak_tensor
|
onnx np array to tensor
:param np_tensor:
:return:
|
onnx np array to tensor
:param np_tensor:
:return:
|
[
"onnx",
"np",
"array",
"to",
"tensor",
":",
"param",
"np_tensor",
":",
":",
"return",
":"
] |
def np_2_ak_tensor(np_tensor):
"""
onnx np array to tensor
:param np_tensor:
:return:
"""
data_type_map2 ={
np.dtype('float32'): 'float',
np.dtype('int32'): 'int',
np.dtype('bool'): 'bool'
}
data_type_map = {
'float32': 'float',
'int32': 'int',
'bool': 'bool'
}
# print 'np_tensor: ', np_tensor['dtype']
#exit()
type_str = data_type_map.get(np_tensor['dtype'])
#assert type_str != None
ak_tensor = TensorProtoIO()
ak_tensor.set_shape(shape_2_ak_shape(np_tensor['shape']))
# ak_tensor.set_data(np_tensor['data'], type_str)
# print('type: ', type(np_tensor['data']), np_tensor['shape'], np_tensor['dtype'], type_str)
if (len(np_tensor['shape']) == 1):
ak_tensor.set_data(np_tensor['data'], type_str)
else:
ak_tensor.set_data(np_tensor['data'].flatten(), type_str)
return ak_tensor
|
[
"def",
"np_2_ak_tensor",
"(",
"np_tensor",
")",
":",
"data_type_map2",
"=",
"{",
"np",
".",
"dtype",
"(",
"'float32'",
")",
":",
"'float'",
",",
"np",
".",
"dtype",
"(",
"'int32'",
")",
":",
"'int'",
",",
"np",
".",
"dtype",
"(",
"'bool'",
")",
":",
"'bool'",
"}",
"data_type_map",
"=",
"{",
"'float32'",
":",
"'float'",
",",
"'int32'",
":",
"'int'",
",",
"'bool'",
":",
"'bool'",
"}",
"# print 'np_tensor: ', np_tensor['dtype']",
"#exit()",
"type_str",
"=",
"data_type_map",
".",
"get",
"(",
"np_tensor",
"[",
"'dtype'",
"]",
")",
"#assert type_str != None",
"ak_tensor",
"=",
"TensorProtoIO",
"(",
")",
"ak_tensor",
".",
"set_shape",
"(",
"shape_2_ak_shape",
"(",
"np_tensor",
"[",
"'shape'",
"]",
")",
")",
"# ak_tensor.set_data(np_tensor['data'], type_str)",
"# print('type: ', type(np_tensor['data']), np_tensor['shape'], np_tensor['dtype'], type_str)",
"if",
"(",
"len",
"(",
"np_tensor",
"[",
"'shape'",
"]",
")",
"==",
"1",
")",
":",
"ak_tensor",
".",
"set_data",
"(",
"np_tensor",
"[",
"'data'",
"]",
",",
"type_str",
")",
"else",
":",
"ak_tensor",
".",
"set_data",
"(",
"np_tensor",
"[",
"'data'",
"]",
".",
"flatten",
"(",
")",
",",
"type_str",
")",
"return",
"ak_tensor"
] |
https://github.com/PaddlePaddle/Anakin/blob/5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730/tools/external_converter_v2/parser/onnx/med_trans_util.py#L14-L42
|
|
IfcOpenShell/IfcOpenShell
|
2c2954b11a9c9d581bef03240836d4567e69ad0b
|
src/ifcopenshell-python/ifcopenshell/alignment.py
|
python
|
convert
|
(inst)
|
Looks up one of the conversion functions below in the global namespace
|
Looks up one of the conversion functions below in the global namespace
|
[
"Looks",
"up",
"one",
"of",
"the",
"conversion",
"functions",
"below",
"in",
"the",
"global",
"namespace"
] |
def convert(inst):
"""
Looks up one of the conversion functions below in the global namespace
"""
yield from globals()[f"convert_{inst.is_a()}_{inst.PredefinedType}"](inst)
|
[
"def",
"convert",
"(",
"inst",
")",
":",
"yield",
"from",
"globals",
"(",
")",
"[",
"f\"convert_{inst.is_a()}_{inst.PredefinedType}\"",
"]",
"(",
"inst",
")"
] |
https://github.com/IfcOpenShell/IfcOpenShell/blob/2c2954b11a9c9d581bef03240836d4567e69ad0b/src/ifcopenshell-python/ifcopenshell/alignment.py#L174-L178
|
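The dispatch is purely name-based: it builds convert_<class>_<PredefinedType> and looks it up in the module globals. A standalone sketch with hypothetical class and handler names:

class FakeInst:  # stand-in for an ifcopenshell entity instance
    PredefinedType = "LINE"
    def is_a(self):
        return "IfcCurveSegment"

def convert_IfcCurveSegment_LINE(inst):  # hypothetical handler
    yield "line segment handled"

def convert(inst):
    yield from globals()[f"convert_{inst.is_a()}_{inst.PredefinedType}"](inst)

print(list(convert(FakeInst())))  # ['line segment handled']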
||
oracle/graaljs
|
36a56e8e993d45fc40939a3a4d9c0c24990720f1
|
graal-nodejs/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
|
python
|
MsvsSettings._GetAndMunge
|
(self, field, path, default, prefix, append, map)
|
return _AppendOrReturn(append, result)
|
Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended.
|
Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended.
|
[
"Retrieve",
"a",
"value",
"from",
"|field|",
"at",
"|path|",
"or",
"return",
"|default|",
".",
"If",
"|append|",
"is",
"specified",
"and",
"the",
"item",
"is",
"found",
"it",
"will",
"be",
"appended",
"to",
"that",
"object",
"instead",
"of",
"returned",
".",
"If",
"|map|",
"is",
"specified",
"results",
"will",
"be",
"remapped",
"through",
"|map|",
"before",
"being",
"returned",
"or",
"appended",
"."
] |
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
|
[
"def",
"_GetAndMunge",
"(",
"self",
",",
"field",
",",
"path",
",",
"default",
",",
"prefix",
",",
"append",
",",
"map",
")",
":",
"result",
"=",
"_GenericRetrieve",
"(",
"field",
",",
"default",
",",
"path",
")",
"result",
"=",
"_DoRemapping",
"(",
"result",
",",
"map",
")",
"result",
"=",
"_AddPrefix",
"(",
"result",
",",
"prefix",
")",
"return",
"_AppendOrReturn",
"(",
"append",
",",
"result",
")"
] |
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py#L301-L309
|
|
omnisci/omniscidb
|
b9c95f1bd602b4ffc8b0edf18bfad61031e08d86
|
QueryEngine/scripts/generate_TableFunctionsFactory_init.py
|
python
|
Parser.parse_arg
|
(self)
|
return ArgNode(typ, annotations)
|
fmt: off
arg: type IDENTIFIER? ("|" annotation)*
fmt: on
|
fmt: off
|
[
"fmt",
":",
"off"
] |
def parse_arg(self):
"""fmt: off
arg: type IDENTIFIER? ("|" annotation)*
fmt: on
"""
typ = self.parse_type()
annotations = []
if not self.is_at_end() and self.match(Token.IDENTIFIER):
name = self.parse_identifier()
annotations.append(AnnotationNode('name', name))
while not self.is_at_end() and self.match(Token.VBAR):
ahead = self.lookahead()
if ahead.type == Token.IDENTIFIER and ahead.lexeme == 'output_row_size':
break
self.consume(Token.VBAR)
annotations.append(self.parse_annotation())
return ArgNode(typ, annotations)
|
[
"def",
"parse_arg",
"(",
"self",
")",
":",
"typ",
"=",
"self",
".",
"parse_type",
"(",
")",
"annotations",
"=",
"[",
"]",
"if",
"not",
"self",
".",
"is_at_end",
"(",
")",
"and",
"self",
".",
"match",
"(",
"Token",
".",
"IDENTIFIER",
")",
":",
"name",
"=",
"self",
".",
"parse_identifier",
"(",
")",
"annotations",
".",
"append",
"(",
"AnnotationNode",
"(",
"'name'",
",",
"name",
")",
")",
"while",
"not",
"self",
".",
"is_at_end",
"(",
")",
"and",
"self",
".",
"match",
"(",
"Token",
".",
"VBAR",
")",
":",
"ahead",
"=",
"self",
".",
"lookahead",
"(",
")",
"if",
"ahead",
".",
"type",
"==",
"Token",
".",
"IDENTIFIER",
"and",
"ahead",
".",
"lexeme",
"==",
"'output_row_size'",
":",
"break",
"self",
".",
"consume",
"(",
"Token",
".",
"VBAR",
")",
"annotations",
".",
"append",
"(",
"self",
".",
"parse_annotation",
"(",
")",
")",
"return",
"ArgNode",
"(",
"typ",
",",
"annotations",
")"
] |
https://github.com/omnisci/omniscidb/blob/b9c95f1bd602b4ffc8b0edf18bfad61031e08d86/QueryEngine/scripts/generate_TableFunctionsFactory_init.py#L1359-L1381
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/pyparsing.py
|
python
|
ParserElement.__add__
|
(self, other)
|
return And([self, other])
|
Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
converts them to :class:`Literal`s by default.
Example::
greet = Word(alphas) + "," + Word(alphas) + "!"
hello = "Hello, World!"
print (hello, "->", greet.parseString(hello))
prints::
Hello, World! -> ['Hello', ',', 'World', '!']
``...`` may be used as a parse expression as a short form of :class:`SkipTo`.
Literal('start') + ... + Literal('end')
is equivalent to:
Literal('start') + SkipTo('end')("_skipped*") + Literal('end')
Note that the skipped text is returned with '_skipped' as a results name,
and to support having multiple skips in the same parser, the value returned is
a list of all skipped text.
|
Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
converts them to :class:`Literal`s by default.
|
[
"Implementation",
"of",
"+",
"operator",
"-",
"returns",
":",
"class",
":",
"And",
".",
"Adding",
"strings",
"to",
"a",
"ParserElement",
"converts",
"them",
"to",
":",
"class",
":",
"Literal",
"s",
"by",
"default",
"."
] |
def __add__(self, other):
"""
Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
converts them to :class:`Literal`s by default.
Example::
greet = Word(alphas) + "," + Word(alphas) + "!"
hello = "Hello, World!"
print (hello, "->", greet.parseString(hello))
prints::
Hello, World! -> ['Hello', ',', 'World', '!']
``...`` may be used as a parse expression as a short form of :class:`SkipTo`.
Literal('start') + ... + Literal('end')
is equivalent to:
Literal('start') + SkipTo('end')("_skipped*") + Literal('end')
Note that the skipped text is returned with '_skipped' as a results name,
and to support having multiple skips in the same parser, the value returned is
a list of all skipped text.
"""
if other is Ellipsis:
return _PendingSkip(self)
if isinstance(other, basestring):
other = self._literalStringClass(other)
if not isinstance(other, ParserElement):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return And([self, other])
|
[
"def",
"__add__",
"(",
"self",
",",
"other",
")",
":",
"if",
"other",
"is",
"Ellipsis",
":",
"return",
"_PendingSkip",
"(",
"self",
")",
"if",
"isinstance",
"(",
"other",
",",
"basestring",
")",
":",
"other",
"=",
"self",
".",
"_literalStringClass",
"(",
"other",
")",
"if",
"not",
"isinstance",
"(",
"other",
",",
"ParserElement",
")",
":",
"warnings",
".",
"warn",
"(",
"\"Cannot combine element of type %s with ParserElement\"",
"%",
"type",
"(",
"other",
")",
",",
"SyntaxWarning",
",",
"stacklevel",
"=",
"2",
")",
"return",
"None",
"return",
"And",
"(",
"[",
"self",
",",
"other",
"]",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/pyparsing.py#L2138-L2174
|
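The docstring's own example, runnable against the standalone pyparsing package (the vendored copy ships the same API):

import pyparsing as pp

greet = pp.Word(pp.alphas) + "," + pp.Word(pp.alphas) + "!"
print(greet.parseString("Hello, World!"))
# ['Hello', ',', 'World', '!']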
|
OpenLightingProject/ola
|
d1433a1bed73276fbe55ce18c03b1c208237decc
|
python/ola/ClientWrapper.py
|
python
|
_Event.HasExpired
|
(self, now)
|
return self._run_at < now
|
Return true if this event has expired.
|
Return true if this event has expired.
|
[
"Return",
"true",
"if",
"this",
"event",
"has",
"expired",
"."
] |
def HasExpired(self, now):
"""Return true if this event has expired."""
return self._run_at < now
|
[
"def",
"HasExpired",
"(",
"self",
",",
"now",
")",
":",
"return",
"self",
".",
"_run_at",
"<",
"now"
] |
https://github.com/OpenLightingProject/ola/blob/d1433a1bed73276fbe55ce18c03b1c208237decc/python/ola/ClientWrapper.py#L93-L95
|
|
google/mysql-protobuf
|
467cda676afaa49e762c5c9164a43f6ad31a1fbf
|
storage/ndb/mcc/request_handler.py
|
python
|
make_rep
|
(req, body=None)
|
return rep
|
Utility which creates a reply object based on the headers in the request
object.
|
Utility which creates a reply object based on the headers in the request
object.
|
[
"Utility",
"which",
"creates",
"a",
"reply",
"object",
"based",
"on",
"the",
"headers",
"in",
"the",
"request",
"object",
"."
] |
def make_rep(req, body=None):
"""Utility which creates a reply object based on the headers in the request
object."""
rep = { 'head': { 'seq': req['head']['seq'] +1,
'cmd': req['head']['cmd'],
'rSeq': req['head']['seq'] }}
if body:
rep['body'] = body
return rep
|
[
"def",
"make_rep",
"(",
"req",
",",
"body",
"=",
"None",
")",
":",
"rep",
"=",
"{",
"'head'",
":",
"{",
"'seq'",
":",
"req",
"[",
"'head'",
"]",
"[",
"'seq'",
"]",
"+",
"1",
",",
"'cmd'",
":",
"req",
"[",
"'head'",
"]",
"[",
"'cmd'",
"]",
",",
"'rSeq'",
":",
"req",
"[",
"'head'",
"]",
"[",
"'seq'",
"]",
"}",
"}",
"if",
"body",
":",
"rep",
"[",
"'body'",
"]",
"=",
"body",
"return",
"rep"
] |
https://github.com/google/mysql-protobuf/blob/467cda676afaa49e762c5c9164a43f6ad31a1fbf/storage/ndb/mcc/request_handler.py#L95-L104
|
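The reply simply echoes the command, bumps the sequence number, and records the request's sequence as rSeq; a sketch with a hand-built request:

req = {"head": {"seq": 4, "cmd": "getStatus"}}
rep = {"head": {"seq": req["head"]["seq"] + 1,
                "cmd": req["head"]["cmd"],
                "rSeq": req["head"]["seq"]}}
print(rep)  # {'head': {'seq': 5, 'cmd': 'getStatus', 'rSeq': 4}}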
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/richtext.py
|
python
|
RichTextObject_GetBoxRects
|
(*args, **kwargs)
|
return _richtext.RichTextObject_GetBoxRects(*args, **kwargs)
|
RichTextObject_GetBoxRects(DC dc, RichTextBuffer buffer, RichTextAttr attr, Rect marginRect,
Rect borderRect, Rect contentRect,
Rect paddingRect, Rect outlineRect) -> bool
|
RichTextObject_GetBoxRects(DC dc, RichTextBuffer buffer, RichTextAttr attr, Rect marginRect,
Rect borderRect, Rect contentRect,
Rect paddingRect, Rect outlineRect) -> bool
|
[
"RichTextObject_GetBoxRects",
"(",
"DC",
"dc",
"RichTextBuffer",
"buffer",
"RichTextAttr",
"attr",
"Rect",
"marginRect",
"Rect",
"borderRect",
"Rect",
"contentRect",
"Rect",
"paddingRect",
"Rect",
"outlineRect",
")",
"-",
">",
"bool"
] |
def RichTextObject_GetBoxRects(*args, **kwargs):
"""
RichTextObject_GetBoxRects(DC dc, RichTextBuffer buffer, RichTextAttr attr, Rect marginRect,
Rect borderRect, Rect contentRect,
Rect paddingRect, Rect outlineRect) -> bool
"""
return _richtext.RichTextObject_GetBoxRects(*args, **kwargs)
|
[
"def",
"RichTextObject_GetBoxRects",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextObject_GetBoxRects",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L1478-L1484
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
catboost/python-package/catboost/core.py
|
python
|
CatBoost.drop_unused_features
|
(self)
|
Drop unused features information from model
|
Drop unused features information from model
|
[
"Drop",
"unused",
"features",
"information",
"from",
"model"
] |
def drop_unused_features(self):
"""
Drop unused features information from model
"""
self._base_drop_unused_features()
|
[
"def",
"drop_unused_features",
"(",
"self",
")",
":",
"self",
".",
"_base_drop_unused_features",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/catboost/python-package/catboost/core.py#L3108-L3112
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/richtext.py
|
python
|
RichTextBuffer.RemoveHandler
|
(*args, **kwargs)
|
return _richtext.RichTextBuffer_RemoveHandler(*args, **kwargs)
|
RemoveHandler(String name) -> bool
|
RemoveHandler(String name) -> bool
|
[
"RemoveHandler",
"(",
"String",
"name",
")",
"-",
">",
"bool"
] |
def RemoveHandler(*args, **kwargs):
"""RemoveHandler(String name) -> bool"""
return _richtext.RichTextBuffer_RemoveHandler(*args, **kwargs)
|
[
"def",
"RemoveHandler",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextBuffer_RemoveHandler",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/richtext.py#L2559-L2561
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/richtext.py
|
python
|
RichTextCtrl.GetCaretPosition
|
(*args, **kwargs)
|
return _richtext.RichTextCtrl_GetCaretPosition(*args, **kwargs)
|
GetCaretPosition(self) -> long
|
GetCaretPosition(self) -> long
|
[
"GetCaretPosition",
"(",
"self",
")",
"-",
">",
"long"
] |
def GetCaretPosition(*args, **kwargs):
"""GetCaretPosition(self) -> long"""
return _richtext.RichTextCtrl_GetCaretPosition(*args, **kwargs)
|
[
"def",
"GetCaretPosition",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextCtrl_GetCaretPosition",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L4072-L4074
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemFramework/v1/AWS/common-code/lib/urllib3/contrib/pyopenssl.py
|
python
|
inject_into_urllib3
|
()
|
Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.
|
Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.
|
[
"Monkey",
"-",
"patch",
"urllib3",
"with",
"PyOpenSSL",
"-",
"backed",
"SSL",
"-",
"support",
"."
] |
def inject_into_urllib3():
"Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
_validate_dependencies_met()
util.SSLContext = PyOpenSSLContext
util.ssl_.SSLContext = PyOpenSSLContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_PYOPENSSL = True
util.ssl_.IS_PYOPENSSL = True
|
[
"def",
"inject_into_urllib3",
"(",
")",
":",
"_validate_dependencies_met",
"(",
")",
"util",
".",
"SSLContext",
"=",
"PyOpenSSLContext",
"util",
".",
"ssl_",
".",
"SSLContext",
"=",
"PyOpenSSLContext",
"util",
".",
"HAS_SNI",
"=",
"HAS_SNI",
"util",
".",
"ssl_",
".",
"HAS_SNI",
"=",
"HAS_SNI",
"util",
".",
"IS_PYOPENSSL",
"=",
"True",
"util",
".",
"ssl_",
".",
"IS_PYOPENSSL",
"=",
"True"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/common-code/lib/urllib3/contrib/pyopenssl.py#L115-L125
|
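Typical use is a single call at import time, before any connection pools are created:

import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
# urllib3 now routes TLS through PyOpenSSL (e.g. for SNI on older Pythons)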
||
pgRouting/osm2pgrouting
|
8491929fc4037d308f271e84d59bb96da3c28aa2
|
tools/cpplint.py
|
python
|
_CppLintState.SetCountingStyle
|
(self, counting_style)
|
Sets the module's counting options.
|
Sets the module's counting options.
|
[
"Sets",
"the",
"module",
"s",
"counting",
"options",
"."
] |
def SetCountingStyle(self, counting_style):
"""Sets the module's counting options."""
self.counting = counting_style
|
[
"def",
"SetCountingStyle",
"(",
"self",
",",
"counting_style",
")",
":",
"self",
".",
"counting",
"=",
"counting_style"
] |
https://github.com/pgRouting/osm2pgrouting/blob/8491929fc4037d308f271e84d59bb96da3c28aa2/tools/cpplint.py#L783-L785
|
||
makefile/frcnn
|
8d9b9ebf8be8315ba2f374d460121b0adf1df29c
|
scripts/cpp_lint.py
|
python
|
CheckInvalidIncrement
|
(filename, clean_lines, linenum, error)
|
Checks for invalid increment *count++.
For example, the following function:
void increment_counter(int* count) {
*count++;
}
is invalid, because it effectively does count++, moving pointer, and should
be replaced with ++*count, (*count)++ or *count += 1.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
|
Checks for invalid increment *count++.
|
[
"Checks",
"for",
"invalid",
"increment",
"*",
"count",
"++",
"."
] |
def CheckInvalidIncrement(filename, clean_lines, linenum, error):
"""Checks for invalid increment *count++.
For example, the following function:
void increment_counter(int* count) {
*count++;
}
is invalid, because it effectively does count++, moving pointer, and should
be replaced with ++*count, (*count)++ or *count += 1.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
if _RE_PATTERN_INVALID_INCREMENT.match(line):
error(filename, linenum, 'runtime/invalid_increment', 5,
'Changing pointer instead of value (or unused value of operator*).')
|
[
"def",
"CheckInvalidIncrement",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"if",
"_RE_PATTERN_INVALID_INCREMENT",
".",
"match",
"(",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/invalid_increment'",
",",
"5",
",",
"'Changing pointer instead of value (or unused value of operator*).'",
")"
] |
https://github.com/makefile/frcnn/blob/8d9b9ebf8be8315ba2f374d460121b0adf1df29c/scripts/cpp_lint.py#L1733-L1752
|
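The pattern is defined elsewhere in cpplint; the literal below is quoted from upstream as best recalled, so treat the exact regex as an assumption. It flags *ident++; but not the parenthesized form:

import re

_RE_PATTERN_INVALID_INCREMENT = re.compile(r'^\s*\*\w+(\+\+|--);')
print(bool(_RE_PATTERN_INVALID_INCREMENT.match("*count++;")))    # True  -> flagged
print(bool(_RE_PATTERN_INVALID_INCREMENT.match("(*count)++;")))  # False -> fine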
||
RobotLocomotion/drake
|
0e18a34604c45ed65bc9018a54f7610f91cdad5b
|
tools/workspace/drake_visualizer/_drake_visualizer_builtin_scripts/experimental_show_deformable_mesh.py
|
python
|
ExperimentalDeformableMeshVisualizer.handle_update_message
|
(self, msg)
|
Updates vertex data for the deformable meshes specified in msg.
|
Updates vertex data for the deformable meshes specified in msg.
|
[
"Updates",
"vertex",
"data",
"for",
"the",
"deformable",
"meshes",
"specified",
"in",
"msg",
"."
] |
def handle_update_message(self, msg):
"""Updates vertex data for the deformable meshes specified in msg."""
if not len(self._poly_data_list) == msg.num_meshes:
print(
"Received a deformable mesh update message with '{}' meshes; "
"expected {} meshes.".format(
msg.num_meshes, len(self._poly_data_list)
)
)
return
for mesh_id in range(msg.num_meshes):
mesh = msg.meshes[mesh_id]
if mesh.name != self._names[mesh_id]:
print(
"The deformable mesh update message contains data for "
"a mesh named '{}', expected name '{}'.".format(
mesh.name, self._names[mesh_id]
)
)
return
if mesh.num_vertices != self._vertex_counts[mesh_id]:
print(
"The deformable mesh update message contains data for {} "
"vertices; expected {}.".format(
                    mesh.num_vertices, self._vertex_counts[mesh_id]  # was self._vertex_count, which is never defined
)
)
return
points = vtk.vtkPoints()
for i in range(0, mesh.num_vertices):
points.InsertNextPoint(
mesh.vertices_W[i][0],
mesh.vertices_W[i][1],
mesh.vertices_W[i][2]
)
# TODO(SeanCurtis-TRI): Instead of creating a new set of points and
# stomping on the old; can I just update the values? That might
# improve performance.
self._poly_data_list[mesh_id].SetPoints(points)
self._poly_item_list[mesh_id].setPolyData(
self._poly_data_list[mesh_id]
)
self._poly_item_list[mesh_id]._renderAllViews()
|
[
"def",
"handle_update_message",
"(",
"self",
",",
"msg",
")",
":",
"if",
"not",
"len",
"(",
"self",
".",
"_poly_data_list",
")",
"==",
"msg",
".",
"num_meshes",
":",
"print",
"(",
"\"Received a deformable mesh update message with '{}' meshes; \"",
"\"expected {} meshes.\"",
".",
"format",
"(",
"msg",
".",
"num_meshes",
",",
"len",
"(",
"self",
".",
"_poly_data_list",
")",
")",
")",
"return",
"for",
"mesh_id",
"in",
"range",
"(",
"msg",
".",
"num_meshes",
")",
":",
"mesh",
"=",
"msg",
".",
"meshes",
"[",
"mesh_id",
"]",
"if",
"mesh",
".",
"name",
"!=",
"self",
".",
"_names",
"[",
"mesh_id",
"]",
":",
"print",
"(",
"\"The deformable mesh update message contains data for \"",
"\"a mesh named '{}', expected name '{}'.\"",
".",
"format",
"(",
"mesh",
".",
"name",
",",
"self",
".",
"_names",
"[",
"mesh_id",
"]",
")",
")",
"return",
"if",
"mesh",
".",
"num_vertices",
"!=",
"self",
".",
"_vertex_counts",
"[",
"mesh_id",
"]",
":",
"print",
"(",
"\"The deformable mesh update message contains data for {} \"",
"\"vertices; expected {}.\"",
".",
"format",
"(",
"mesh",
".",
"num_vertices",
",",
"self",
".",
"_vertex_count",
")",
")",
"return",
"points",
"=",
"vtk",
".",
"vtkPoints",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"mesh",
".",
"num_vertices",
")",
":",
"points",
".",
"InsertNextPoint",
"(",
"mesh",
".",
"vertices_W",
"[",
"i",
"]",
"[",
"0",
"]",
",",
"mesh",
".",
"vertices_W",
"[",
"i",
"]",
"[",
"1",
"]",
",",
"mesh",
".",
"vertices_W",
"[",
"i",
"]",
"[",
"2",
"]",
")",
"# TODO(SeanCurtis-TRI): Instead of creating a new set of points and",
"# stomping on the old; can I just update the values? That might",
"# improve performance.",
"self",
".",
"_poly_data_list",
"[",
"mesh_id",
"]",
".",
"SetPoints",
"(",
"points",
")",
"self",
".",
"_poly_item_list",
"[",
"mesh_id",
"]",
".",
"setPolyData",
"(",
"self",
".",
"_poly_data_list",
"[",
"mesh_id",
"]",
")",
"self",
".",
"_poly_item_list",
"[",
"mesh_id",
"]",
".",
"_renderAllViews",
"(",
")"
] |
https://github.com/RobotLocomotion/drake/blob/0e18a34604c45ed65bc9018a54f7610f91cdad5b/tools/workspace/drake_visualizer/_drake_visualizer_builtin_scripts/experimental_show_deformable_mesh.py#L112-L154
|
||
aimerykong/Low-Rank-Bilinear-Pooling
|
487eb2c857fd9c95357a5166b0c15ad0fe135b28
|
caffe-20160312/python/caffe/draw.py
|
python
|
choose_color_by_layertype
|
(layertype)
|
return color
|
Define colors for nodes based on the layer type.
|
Define colors for nodes based on the layer type.
|
[
"Define",
"colors",
"for",
"nodes",
"based",
"on",
"the",
"layer",
"type",
"."
] |
def choose_color_by_layertype(layertype):
"""Define colors for nodes based on the layer type.
"""
color = '#6495ED' # Default
if layertype == 'Convolution' or layertype == 'Deconvolution':
color = '#FF5050'
elif layertype == 'Pooling':
color = '#FF9900'
elif layertype == 'InnerProduct':
color = '#CC33FF'
return color
|
[
"def",
"choose_color_by_layertype",
"(",
"layertype",
")",
":",
"color",
"=",
"'#6495ED'",
"# Default",
"if",
"layertype",
"==",
"'Convolution'",
"or",
"layertype",
"==",
"'Deconvolution'",
":",
"color",
"=",
"'#FF5050'",
"elif",
"layertype",
"==",
"'Pooling'",
":",
"color",
"=",
"'#FF9900'",
"elif",
"layertype",
"==",
"'InnerProduct'",
":",
"color",
"=",
"'#CC33FF'",
"return",
"color"
] |
https://github.com/aimerykong/Low-Rank-Bilinear-Pooling/blob/487eb2c857fd9c95357a5166b0c15ad0fe135b28/caffe-20160312/python/caffe/draw.py#L117-L127
|
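A usage example, assuming the function above is in scope; unknown layer types fall through to the default blue:

for t in ("Convolution", "Pooling", "InnerProduct", "ReLU"):
    print(t, choose_color_by_layertype(t))
# Convolution #FF5050 / Pooling #FF9900 / InnerProduct #CC33FF / ReLU #6495ED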
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/boto3/dynamodb/conditions.py
|
python
|
Attr.ne
|
(self, value)
|
return NotEquals(self, value)
|
Creates a condition where the attribute is not equal to the value
:param value: The value that the attribute is not equal to.
|
Creates a condition where the attribute is not equal to the value
|
[
"Creates",
"a",
"condition",
"where",
"the",
"attribute",
"is",
"not",
"equal",
"to",
"the",
"value"
] |
def ne(self, value):
"""Creates a condition where the attribute is not equal to the value
:param value: The value that the attribute is not equal to.
"""
return NotEquals(self, value)
|
[
"def",
"ne",
"(",
"self",
",",
"value",
")",
":",
"return",
"NotEquals",
"(",
"self",
",",
"value",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/boto3/dynamodb/conditions.py#L243-L248
|
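Typical boto3 usage builds the condition and hands it to a scan or query (the table object here is assumed to exist already):

from boto3.dynamodb.conditions import Attr

cond = Attr("status").ne("archived")
# e.g. table.scan(FilterExpression=cond)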
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/Editra/src/extern/aui/auibook.py
|
python
|
AuiNotebook.GetSashDClickUnsplit
|
(self)
|
return self._sash_dclick_unsplit
|
Returns whether a split :class:`AuiNotebook` can be unsplit by double-clicking
on the splitter sash.
|
Returns whether a split :class:`AuiNotebook` can be unsplit by double-clicking
on the splitter sash.
|
[
"Returns",
"whether",
"a",
"splitted",
":",
"class",
":",
"AuiNotebook",
"can",
"be",
"unsplitted",
"by",
"double",
"-",
"clicking",
"on",
"the",
"splitter",
"sash",
"."
] |
def GetSashDClickUnsplit(self):
"""
Returns whether a split :class:`AuiNotebook` can be unsplit by double-clicking
on the splitter sash.
"""
return self._sash_dclick_unsplit
|
[
"def",
"GetSashDClickUnsplit",
"(",
"self",
")",
":",
"return",
"self",
".",
"_sash_dclick_unsplit"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/aui/auibook.py#L3542-L3548
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/polynomial/_polybase.py
|
python
|
ABCPolyBase._get_coefficients
|
(self, other)
|
return other
|
Interpret other as polynomial coefficients.
The `other` argument is checked to see if it is of the same
class as self with identical domain and window. If so,
return its coefficients, otherwise return `other`.
.. versionadded:: 1.9.0
Parameters
----------
other : anything
Object to be checked.
Returns
-------
coef
The coefficients of `other` if it is a compatible instance
of ABCPolyBase, otherwise `other`.
Raises
------
TypeError
When `other` is an incompatible instance of ABCPolyBase.
|
Interpret other as polynomial coefficients.
|
[
"Interpret",
"other",
"as",
"polynomial",
"coefficients",
"."
] |
def _get_coefficients(self, other):
"""Interpret other as polynomial coefficients.
The `other` argument is checked to see if it is of the same
class as self with identical domain and window. If so,
return its coefficients, otherwise return `other`.
.. versionadded:: 1.9.0
Parameters
----------
other : anything
Object to be checked.
Returns
-------
coef
The coefficients of `other` if it is a compatible instance
of ABCPolyBase, otherwise `other`.
Raises
------
TypeError
When `other` is an incompatible instance of ABCPolyBase.
"""
if isinstance(other, ABCPolyBase):
if not isinstance(other, self.__class__):
raise TypeError("Polynomial types differ")
elif not np.all(self.domain == other.domain):
raise TypeError("Domains differ")
elif not np.all(self.window == other.window):
raise TypeError("Windows differ")
return other.coef
return other
|
[
"def",
"_get_coefficients",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"ABCPolyBase",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"self",
".",
"__class__",
")",
":",
"raise",
"TypeError",
"(",
"\"Polynomial types differ\"",
")",
"elif",
"not",
"np",
".",
"all",
"(",
"self",
".",
"domain",
"==",
"other",
".",
"domain",
")",
":",
"raise",
"TypeError",
"(",
"\"Domains differ\"",
")",
"elif",
"not",
"np",
".",
"all",
"(",
"self",
".",
"window",
"==",
"other",
".",
"window",
")",
":",
"raise",
"TypeError",
"(",
"\"Windows differ\"",
")",
"return",
"other",
".",
"coef",
"return",
"other"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/polynomial/_polybase.py#L229-L263
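A hedged illustration of the coefficient-interpretation rule documented above: a plain list is accepted as coefficients, while a Polynomial with a different window is rejected with TypeError.

from numpy.polynomial import Polynomial

p = Polynomial([1, 2])                     # 1 + 2x, default domain/window
print(p + [0, 1])                          # list taken as coefficients -> 1 + 3x
q = Polynomial([1, 2], window=[0, 2])      # same class, incompatible window
try:
    p + q
except TypeError as exc:
    print(exc)                             # "Windows differ"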
|
|
PrincetonUniversity/athena-public-version
|
9c266692b9423743d8e23509b3ab266a232a92d2
|
tst/style/cpplint.py
|
python
|
ParseNolintSuppressions
|
(filename, raw_line, linenum, error)
|
Updates the global list of line error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
|
Updates the global list of line error-suppressions.
|
[
"Updates",
"the",
"global",
"list",
"of",
"line",
"error",
"-",
"suppressions",
"."
] |
def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of line error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
"""
matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
if matched:
if matched.group(1):
suppressed_line = linenum + 1
else:
suppressed_line = linenum
category = matched.group(2)
if category in (None, '(*)'): # => "suppress all"
_error_suppressions.setdefault(None, set()).add(suppressed_line)
else:
if category.startswith('(') and category.endswith(')'):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
_error_suppressions.setdefault(category, set()).add(suppressed_line)
elif category not in _LEGACY_ERROR_CATEGORIES:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category)
|
[
"def",
"ParseNolintSuppressions",
"(",
"filename",
",",
"raw_line",
",",
"linenum",
",",
"error",
")",
":",
"matched",
"=",
"Search",
"(",
"r'\\bNOLINT(NEXTLINE)?\\b(\\([^)]+\\))?'",
",",
"raw_line",
")",
"if",
"matched",
":",
"if",
"matched",
".",
"group",
"(",
"1",
")",
":",
"suppressed_line",
"=",
"linenum",
"+",
"1",
"else",
":",
"suppressed_line",
"=",
"linenum",
"category",
"=",
"matched",
".",
"group",
"(",
"2",
")",
"if",
"category",
"in",
"(",
"None",
",",
"'(*)'",
")",
":",
"# => \"suppress all\"",
"_error_suppressions",
".",
"setdefault",
"(",
"None",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"suppressed_line",
")",
"else",
":",
"if",
"category",
".",
"startswith",
"(",
"'('",
")",
"and",
"category",
".",
"endswith",
"(",
"')'",
")",
":",
"category",
"=",
"category",
"[",
"1",
":",
"-",
"1",
"]",
"if",
"category",
"in",
"_ERROR_CATEGORIES",
":",
"_error_suppressions",
".",
"setdefault",
"(",
"category",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"suppressed_line",
")",
"elif",
"category",
"not",
"in",
"_LEGACY_ERROR_CATEGORIES",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/nolint'",
",",
"5",
",",
"'Unknown NOLINT error category: %s'",
"%",
"category",
")"
] |
https://github.com/PrincetonUniversity/athena-public-version/blob/9c266692b9423743d8e23509b3ab266a232a92d2/tst/style/cpplint.py#L721-L750
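A standalone sketch of the NOLINT regex used in the function above, run over hypothetical C++ source lines to show what the two capture groups carry.

import re

NOLINT_RE = r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?'
samples = [
    'int x;  // NOLINT',                      # suppress everything on this line
    'int y;  // NOLINT(runtime/int)',         # suppress one category
    '// NOLINTNEXTLINE(whitespace/braces)',   # suppress on the following line
]
for line in samples:
    m = re.search(NOLINT_RE, line)
    print(m.group(1), m.group(2))
# None None
# None (runtime/int)
# NEXTLINE (whitespace/braces)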
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/Editra/src/autocomp/pycomp.py
|
python
|
Scope.local
|
(self, loc)
|
Add an object to the scopes locals
@param loc: local object to add to locals
|
Add an object to the scopes locals
@param loc: local object to add to locals
|
[
"Add",
"an",
"object",
"to",
"the",
"scopes",
"locals",
"@param",
"loc",
":",
"local",
"object",
"to",
"add",
"to",
"locals"
] |
def local(self, loc):
"""Add an object to the scopes locals
@param loc: local object to add to locals
"""
self._checkexisting(loc)
self.locals.append((self.NextObjId(), loc))
|
[
"def",
"local",
"(",
"self",
",",
"loc",
")",
":",
"self",
".",
"_checkexisting",
"(",
"loc",
")",
"self",
".",
"locals",
".",
"append",
"(",
"(",
"self",
".",
"NextObjId",
"(",
")",
",",
"loc",
")",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/autocomp/pycomp.py#L464-L470
|
||
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2.py
|
python
|
uCSIsLetterlikeSymbols
|
(code)
|
return ret
|
Check whether the character is part of LetterlikeSymbols
UCS Block
|
Check whether the character is part of LetterlikeSymbols
UCS Block
|
[
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"LetterlikeSymbols",
"UCS",
"Block"
] |
def uCSIsLetterlikeSymbols(code):
"""Check whether the character is part of LetterlikeSymbols
UCS Block """
ret = libxml2mod.xmlUCSIsLetterlikeSymbols(code)
return ret
|
[
"def",
"uCSIsLetterlikeSymbols",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsLetterlikeSymbols",
"(",
"code",
")",
"return",
"ret"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L2692-L2696
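A pure-Python sketch of the same check, with no libxml2 dependency: the Letterlike Symbols UCS block spans U+2100..U+214F.

def is_letterlike_symbol(code: int) -> bool:
    # Letterlike Symbols block: U+2100..U+214F
    return 0x2100 <= code <= 0x214F

print(is_letterlike_symbol(ord('ℝ')))   # True  (U+211D, DOUBLE-STRUCK CAPITAL R)
print(is_letterlike_symbol(ord('A')))   # False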
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/grid.py
|
python
|
GridTableBase.InsertCols
|
(*args, **kwargs)
|
return _grid.GridTableBase_InsertCols(*args, **kwargs)
|
InsertCols(self, size_t pos=0, size_t numCols=1) -> bool
|
InsertCols(self, size_t pos=0, size_t numCols=1) -> bool
|
[
"InsertCols",
"(",
"self",
"size_t",
"pos",
"=",
"0",
"size_t",
"numCols",
"=",
"1",
")",
"-",
">",
"bool"
] |
def InsertCols(*args, **kwargs):
"""InsertCols(self, size_t pos=0, size_t numCols=1) -> bool"""
return _grid.GridTableBase_InsertCols(*args, **kwargs)
|
[
"def",
"InsertCols",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"GridTableBase_InsertCols",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/grid.py#L874-L876
|
|
smilehao/xlua-framework
|
a03801538be2b0e92d39332d445b22caca1ef61f
|
ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/google/protobuf/internal/cpp_message.py
|
python
|
BuildFile
|
(content)
|
Registers a new proto file in the underlying C++ descriptor pool.
|
Registers a new proto file in the underlying C++ descriptor pool.
|
[
"Registers",
"a",
"new",
"proto",
"file",
"in",
"the",
"underlying",
"C",
"++",
"descriptor",
"pool",
"."
] |
def BuildFile(content):
"""Registers a new proto file in the underlying C++ descriptor pool."""
_net_proto2___python.BuildFile(content)
|
[
"def",
"BuildFile",
"(",
"content",
")",
":",
"_net_proto2___python",
".",
"BuildFile",
"(",
"content",
")"
] |
https://github.com/smilehao/xlua-framework/blob/a03801538be2b0e92d39332d445b22caca1ef61f/ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/google/protobuf/internal/cpp_message.py#L63-L65
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/training/saving/functional_saver.py
|
python
|
MultiDeviceSaver.__init__
|
(self, saveable_objects)
|
Specify a list of `SaveableObject`s to save and restore.
Args:
saveable_objects: A list of `SaveableObject`s.
|
Specify a list of `SaveableObject`s to save and restore.
|
[
"Specify",
"a",
"list",
"of",
"SaveableObject",
"s",
"to",
"save",
"and",
"restore",
"."
] |
def __init__(self, saveable_objects):
"""Specify a list of `SaveableObject`s to save and restore.
Args:
saveable_objects: A list of `SaveableObject`s.
"""
saveable_objects = list(saveable_objects)
saveables_by_device = {}
for saveable in saveable_objects:
if not isinstance(saveable, saveable_object.SaveableObject):
raise ValueError(
"Expected a dictionary of SaveableObjects, got {}."
.format(saveable))
saveables_by_device.setdefault(saveable.device, []).append(saveable)
self._single_device_savers = {
device: _SingleDeviceSaver(saveables)
for device, saveables in saveables_by_device.items()}
|
[
"def",
"__init__",
"(",
"self",
",",
"saveable_objects",
")",
":",
"saveable_objects",
"=",
"list",
"(",
"saveable_objects",
")",
"saveables_by_device",
"=",
"{",
"}",
"for",
"saveable",
"in",
"saveable_objects",
":",
"if",
"not",
"isinstance",
"(",
"saveable",
",",
"saveable_object",
".",
"SaveableObject",
")",
":",
"raise",
"ValueError",
"(",
"\"Expected a dictionary of SaveableObjects, got {}.\"",
".",
"format",
"(",
"saveable",
")",
")",
"saveables_by_device",
".",
"setdefault",
"(",
"saveable",
".",
"device",
",",
"[",
"]",
")",
".",
"append",
"(",
"saveable",
")",
"self",
".",
"_single_device_savers",
"=",
"{",
"device",
":",
"_SingleDeviceSaver",
"(",
"saveables",
")",
"for",
"device",
",",
"saveables",
"in",
"saveables_by_device",
".",
"items",
"(",
")",
"}"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/training/saving/functional_saver.py#L128-L144
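A framework-free sketch of the grouping idiom in the constructor above: bucketing objects by a device attribute with dict.setdefault. The Saveable namedtuple is a hypothetical stand-in for TensorFlow's SaveableObject.

from collections import namedtuple

Saveable = namedtuple("Saveable", ["name", "device"])   # hypothetical stand-in
saveables = [Saveable("a", "/cpu:0"), Saveable("b", "/gpu:0"), Saveable("c", "/cpu:0")]

by_device = {}
for s in saveables:
    by_device.setdefault(s.device, []).append(s)

print({dev: [s.name for s in group] for dev, group in by_device.items()})
# {'/cpu:0': ['a', 'c'], '/gpu:0': ['b']}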
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pexpect/pexpect/pty_spawn.py
|
python
|
spawn.__str__
|
(self)
|
return '\n'.join(s)
|
This returns a human-readable string that represents the state of
the object.
|
This returns a human-readable string that represents the state of
the object.
|
[
"This",
"returns",
"a",
"human",
"-",
"readable",
"string",
"that",
"represents",
"the",
"state",
"of",
"the",
"object",
"."
] |
def __str__(self):
'''This returns a human-readable string that represents the state of
the object. '''
s = []
s.append(repr(self))
s.append('command: ' + str(self.command))
s.append('args: %r' % (self.args,))
s.append('buffer (last %s chars): %r' % (self.str_last_chars,self.buffer[-self.str_last_chars:]))
s.append('before (last %s chars): %r' % (self.str_last_chars,self.before[-self.str_last_chars:] if self.before else ''))
s.append('after: %r' % (self.after,))
s.append('match: %r' % (self.match,))
s.append('match_index: ' + str(self.match_index))
s.append('exitstatus: ' + str(self.exitstatus))
if hasattr(self, 'ptyproc'):
s.append('flag_eof: ' + str(self.flag_eof))
s.append('pid: ' + str(self.pid))
s.append('child_fd: ' + str(self.child_fd))
s.append('closed: ' + str(self.closed))
s.append('timeout: ' + str(self.timeout))
s.append('delimiter: ' + str(self.delimiter))
s.append('logfile: ' + str(self.logfile))
s.append('logfile_read: ' + str(self.logfile_read))
s.append('logfile_send: ' + str(self.logfile_send))
s.append('maxread: ' + str(self.maxread))
s.append('ignorecase: ' + str(self.ignorecase))
s.append('searchwindowsize: ' + str(self.searchwindowsize))
s.append('delaybeforesend: ' + str(self.delaybeforesend))
s.append('delayafterclose: ' + str(self.delayafterclose))
s.append('delayafterterminate: ' + str(self.delayafterterminate))
return '\n'.join(s)
|
[
"def",
"__str__",
"(",
"self",
")",
":",
"s",
"=",
"[",
"]",
"s",
".",
"append",
"(",
"repr",
"(",
"self",
")",
")",
"s",
".",
"append",
"(",
"'command: '",
"+",
"str",
"(",
"self",
".",
"command",
")",
")",
"s",
".",
"append",
"(",
"'args: %r'",
"%",
"(",
"self",
".",
"args",
",",
")",
")",
"s",
".",
"append",
"(",
"'buffer (last %s chars): %r'",
"%",
"(",
"self",
".",
"str_last_chars",
",",
"self",
".",
"buffer",
"[",
"-",
"self",
".",
"str_last_chars",
":",
"]",
")",
")",
"s",
".",
"append",
"(",
"'before (last %s chars): %r'",
"%",
"(",
"self",
".",
"str_last_chars",
",",
"self",
".",
"before",
"[",
"-",
"self",
".",
"str_last_chars",
":",
"]",
"if",
"self",
".",
"before",
"else",
"''",
")",
")",
"s",
".",
"append",
"(",
"'after: %r'",
"%",
"(",
"self",
".",
"after",
",",
")",
")",
"s",
".",
"append",
"(",
"'match: %r'",
"%",
"(",
"self",
".",
"match",
",",
")",
")",
"s",
".",
"append",
"(",
"'match_index: '",
"+",
"str",
"(",
"self",
".",
"match_index",
")",
")",
"s",
".",
"append",
"(",
"'exitstatus: '",
"+",
"str",
"(",
"self",
".",
"exitstatus",
")",
")",
"if",
"hasattr",
"(",
"self",
",",
"'ptyproc'",
")",
":",
"s",
".",
"append",
"(",
"'flag_eof: '",
"+",
"str",
"(",
"self",
".",
"flag_eof",
")",
")",
"s",
".",
"append",
"(",
"'pid: '",
"+",
"str",
"(",
"self",
".",
"pid",
")",
")",
"s",
".",
"append",
"(",
"'child_fd: '",
"+",
"str",
"(",
"self",
".",
"child_fd",
")",
")",
"s",
".",
"append",
"(",
"'closed: '",
"+",
"str",
"(",
"self",
".",
"closed",
")",
")",
"s",
".",
"append",
"(",
"'timeout: '",
"+",
"str",
"(",
"self",
".",
"timeout",
")",
")",
"s",
".",
"append",
"(",
"'delimiter: '",
"+",
"str",
"(",
"self",
".",
"delimiter",
")",
")",
"s",
".",
"append",
"(",
"'logfile: '",
"+",
"str",
"(",
"self",
".",
"logfile",
")",
")",
"s",
".",
"append",
"(",
"'logfile_read: '",
"+",
"str",
"(",
"self",
".",
"logfile_read",
")",
")",
"s",
".",
"append",
"(",
"'logfile_send: '",
"+",
"str",
"(",
"self",
".",
"logfile_send",
")",
")",
"s",
".",
"append",
"(",
"'maxread: '",
"+",
"str",
"(",
"self",
".",
"maxread",
")",
")",
"s",
".",
"append",
"(",
"'ignorecase: '",
"+",
"str",
"(",
"self",
".",
"ignorecase",
")",
")",
"s",
".",
"append",
"(",
"'searchwindowsize: '",
"+",
"str",
"(",
"self",
".",
"searchwindowsize",
")",
")",
"s",
".",
"append",
"(",
"'delaybeforesend: '",
"+",
"str",
"(",
"self",
".",
"delaybeforesend",
")",
")",
"s",
".",
"append",
"(",
"'delayafterclose: '",
"+",
"str",
"(",
"self",
".",
"delayafterclose",
")",
")",
"s",
".",
"append",
"(",
"'delayafterterminate: '",
"+",
"str",
"(",
"self",
".",
"delayafterterminate",
")",
")",
"return",
"'\\n'",
".",
"join",
"(",
"s",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pexpect/pexpect/pty_spawn.py#L208-L238
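A hedged usage sketch: printing a spawn object invokes the __str__ above and dumps the state fields it lists (assumes a POSIX system with pexpect installed).

import pexpect

child = pexpect.spawn('echo hello')
child.expect(pexpect.EOF)
print(child)   # shows command, args, buffer, pid, timeout, ... as built above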
|
|
SFTtech/openage
|
d6a08c53c48dc1e157807471df92197f6ca9e04d
|
openage/convert/processor/conversion/hd/media_subprocessor.py
|
python
|
HDMediaSubprocessor.create_graphics_requests
|
(full_data_set)
|
Create export requests for graphics referenced by CombinedSprite objects.
|
Create export requests for graphics referenced by CombinedSprite objects.
|
[
"Create",
"export",
"requests",
"for",
"graphics",
"referenced",
"by",
"CombinedSprite",
"objects",
"."
] |
def create_graphics_requests(full_data_set):
"""
Create export requests for graphics referenced by CombinedSprite objects.
"""
combined_sprites = full_data_set.combined_sprites.values()
handled_graphic_ids = set()
for sprite in combined_sprites:
ref_graphics = sprite.get_graphics()
graphic_targetdirs = sprite.resolve_graphics_location()
metadata_filename = f"{sprite.get_filename()}.{'sprite'}"
metadata_export = SpriteMetadataExport(sprite.resolve_sprite_location(),
metadata_filename)
full_data_set.metadata_exports.append(metadata_export)
for graphic in ref_graphics:
graphic_id = graphic.get_id()
if graphic_id in handled_graphic_ids:
continue
targetdir = graphic_targetdirs[graphic_id]
source_filename = f"{str(graphic['slp_id'].get_value())}.slp"
target_filename = "%s_%s.png" % (sprite.get_filename(),
str(graphic["slp_id"].get_value()))
export_request = MediaExportRequest(MediaType.GRAPHICS,
targetdir,
source_filename,
target_filename)
full_data_set.graphics_exports.update({graphic_id: export_request})
# Metadata from graphics
sequence_type = graphic["sequence_type"].get_value()
if sequence_type == 0x00:
layer_mode = LayerMode.OFF
elif sequence_type & 0x08:
layer_mode = LayerMode.ONCE
else:
layer_mode = LayerMode.LOOP
layer_pos = graphic["layer"].get_value()
frame_rate = round(graphic["frame_rate"].get_value(), ndigits=6)
if frame_rate < 0.000001:
frame_rate = None
replay_delay = round(graphic["replay_delay"].get_value(), ndigits=6)
if replay_delay < 0.000001:
replay_delay = None
frame_count = graphic["frame_count"].get_value()
angle_count = graphic["angle_count"].get_value()
mirror_mode = graphic["mirroring_mode"].get_value()
metadata_export.add_graphics_metadata(target_filename,
layer_mode,
layer_pos,
frame_rate,
replay_delay,
frame_count,
angle_count,
mirror_mode)
# Notify metadata export about SLP metadata when the file is exported
export_request.add_observer(metadata_export)
handled_graphic_ids.add(graphic_id)
combined_terrains = full_data_set.combined_terrains.values()
for texture in combined_terrains:
slp_id = texture.get_terrain()["slp_id"].get_value()
srcfile_prefix = texture.get_terrain()["filename"].get_value()
targetdir = texture.resolve_graphics_location()
source_filename = f"{str(srcfile_prefix)}_00_color.png"
target_filename = f"{texture.get_filename()}.png"
export_request = MediaExportRequest(MediaType.TERRAIN,
targetdir,
source_filename,
target_filename)
full_data_set.graphics_exports.update({slp_id: export_request})
|
[
"def",
"create_graphics_requests",
"(",
"full_data_set",
")",
":",
"combined_sprites",
"=",
"full_data_set",
".",
"combined_sprites",
".",
"values",
"(",
")",
"handled_graphic_ids",
"=",
"set",
"(",
")",
"for",
"sprite",
"in",
"combined_sprites",
":",
"ref_graphics",
"=",
"sprite",
".",
"get_graphics",
"(",
")",
"graphic_targetdirs",
"=",
"sprite",
".",
"resolve_graphics_location",
"(",
")",
"metadata_filename",
"=",
"f\"{sprite.get_filename()}.{'sprite'}\"",
"metadata_export",
"=",
"SpriteMetadataExport",
"(",
"sprite",
".",
"resolve_sprite_location",
"(",
")",
",",
"metadata_filename",
")",
"full_data_set",
".",
"metadata_exports",
".",
"append",
"(",
"metadata_export",
")",
"for",
"graphic",
"in",
"ref_graphics",
":",
"graphic_id",
"=",
"graphic",
".",
"get_id",
"(",
")",
"if",
"graphic_id",
"in",
"handled_graphic_ids",
":",
"continue",
"targetdir",
"=",
"graphic_targetdirs",
"[",
"graphic_id",
"]",
"source_filename",
"=",
"f\"{str(graphic['slp_id'].get_value())}.slp\"",
"target_filename",
"=",
"\"%s_%s.png\"",
"%",
"(",
"sprite",
".",
"get_filename",
"(",
")",
",",
"str",
"(",
"graphic",
"[",
"\"slp_id\"",
"]",
".",
"get_value",
"(",
")",
")",
")",
"export_request",
"=",
"MediaExportRequest",
"(",
"MediaType",
".",
"GRAPHICS",
",",
"targetdir",
",",
"source_filename",
",",
"target_filename",
")",
"full_data_set",
".",
"graphics_exports",
".",
"update",
"(",
"{",
"graphic_id",
":",
"export_request",
"}",
")",
"# Metadata from graphics",
"sequence_type",
"=",
"graphic",
"[",
"\"sequence_type\"",
"]",
".",
"get_value",
"(",
")",
"if",
"sequence_type",
"==",
"0x00",
":",
"layer_mode",
"=",
"LayerMode",
".",
"OFF",
"elif",
"sequence_type",
"&",
"0x08",
":",
"layer_mode",
"=",
"LayerMode",
".",
"ONCE",
"else",
":",
"layer_mode",
"=",
"LayerMode",
".",
"LOOP",
"layer_pos",
"=",
"graphic",
"[",
"\"layer\"",
"]",
".",
"get_value",
"(",
")",
"frame_rate",
"=",
"round",
"(",
"graphic",
"[",
"\"frame_rate\"",
"]",
".",
"get_value",
"(",
")",
",",
"ndigits",
"=",
"6",
")",
"if",
"frame_rate",
"<",
"0.000001",
":",
"frame_rate",
"=",
"None",
"replay_delay",
"=",
"round",
"(",
"graphic",
"[",
"\"replay_delay\"",
"]",
".",
"get_value",
"(",
")",
",",
"ndigits",
"=",
"6",
")",
"if",
"replay_delay",
"<",
"0.000001",
":",
"replay_delay",
"=",
"None",
"frame_count",
"=",
"graphic",
"[",
"\"frame_count\"",
"]",
".",
"get_value",
"(",
")",
"angle_count",
"=",
"graphic",
"[",
"\"angle_count\"",
"]",
".",
"get_value",
"(",
")",
"mirror_mode",
"=",
"graphic",
"[",
"\"mirroring_mode\"",
"]",
".",
"get_value",
"(",
")",
"metadata_export",
".",
"add_graphics_metadata",
"(",
"target_filename",
",",
"layer_mode",
",",
"layer_pos",
",",
"frame_rate",
",",
"replay_delay",
",",
"frame_count",
",",
"angle_count",
",",
"mirror_mode",
")",
"# Notify metadata export about SLP metadata when the file is exported",
"export_request",
".",
"add_observer",
"(",
"metadata_export",
")",
"handled_graphic_ids",
".",
"add",
"(",
"graphic_id",
")",
"combined_terrains",
"=",
"full_data_set",
".",
"combined_terrains",
".",
"values",
"(",
")",
"for",
"texture",
"in",
"combined_terrains",
":",
"slp_id",
"=",
"texture",
".",
"get_terrain",
"(",
")",
"[",
"\"slp_id\"",
"]",
".",
"get_value",
"(",
")",
"srcfile_prefix",
"=",
"texture",
".",
"get_terrain",
"(",
")",
"[",
"\"filename\"",
"]",
".",
"get_value",
"(",
")",
"targetdir",
"=",
"texture",
".",
"resolve_graphics_location",
"(",
")",
"source_filename",
"=",
"f\"{str(srcfile_prefix)}_00_color.png\"",
"target_filename",
"=",
"f\"{texture.get_filename()}.png\"",
"export_request",
"=",
"MediaExportRequest",
"(",
"MediaType",
".",
"TERRAIN",
",",
"targetdir",
",",
"source_filename",
",",
"target_filename",
")",
"full_data_set",
".",
"graphics_exports",
".",
"update",
"(",
"{",
"slp_id",
":",
"export_request",
"}",
")"
] |
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/processor/conversion/hd/media_subprocessor.py#L29-L111
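A standalone sketch of the layer-mode decision buried in the function above; the enum is a stand-in for openage's LayerMode, and the bit-flag branching mirrors the original.

from enum import Enum, auto

class LayerMode(Enum):          # stand-in for openage's LayerMode
    OFF = auto()
    ONCE = auto()
    LOOP = auto()

def layer_mode_for(sequence_type: int) -> LayerMode:
    if sequence_type == 0x00:
        return LayerMode.OFF
    if sequence_type & 0x08:    # bit 3 flags a play-once sequence
        return LayerMode.ONCE
    return LayerMode.LOOP

print([layer_mode_for(v).name for v in (0x00, 0x08, 0x01)])   # ['OFF', 'ONCE', 'LOOP']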
|
||
p4lang/PI
|
38d87e81253feff9fff0660d662c885be78fb719
|
tools/cpplint.py
|
python
|
_CppLintState.IncrementErrorCount
|
(self, category)
|
Bumps the module's error statistic.
|
Bumps the module's error statistic.
|
[
"Bumps",
"the",
"module",
"s",
"error",
"statistic",
"."
] |
def IncrementErrorCount(self, category):
"""Bumps the module's error statistic."""
self.error_count += 1
if self.counting in ('toplevel', 'detailed'):
if self.counting != 'detailed':
category = category.split('/')[0]
if category not in self.errors_by_category:
self.errors_by_category[category] = 0
self.errors_by_category[category] += 1
|
[
"def",
"IncrementErrorCount",
"(",
"self",
",",
"category",
")",
":",
"self",
".",
"error_count",
"+=",
"1",
"if",
"self",
".",
"counting",
"in",
"(",
"'toplevel'",
",",
"'detailed'",
")",
":",
"if",
"self",
".",
"counting",
"!=",
"'detailed'",
":",
"category",
"=",
"category",
".",
"split",
"(",
"'/'",
")",
"[",
"0",
"]",
"if",
"category",
"not",
"in",
"self",
".",
"errors_by_category",
":",
"self",
".",
"errors_by_category",
"[",
"category",
"]",
"=",
"0",
"self",
".",
"errors_by_category",
"[",
"category",
"]",
"+=",
"1"
] |
https://github.com/p4lang/PI/blob/38d87e81253feff9fff0660d662c885be78fb719/tools/cpplint.py#L1335-L1343
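A minimal driver for the counting logic above, with the module state reduced to a plain dict: in 'toplevel' mode the category collapses to its first path segment before being tallied.

errors_by_category = {}

def increment(category, counting='toplevel'):
    if counting != 'detailed':
        category = category.split('/')[0]
    if category not in errors_by_category:
        errors_by_category[category] = 0
    errors_by_category[category] += 1

for c in ('whitespace/indent', 'whitespace/braces', 'runtime/int'):
    increment(c)
print(errors_by_category)   # {'whitespace': 2, 'runtime': 1}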
|
||
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
|
python
|
xmlNode.schemaValidateOneElement
|
(self, ctxt)
|
return ret
|
Validate a branch of a tree, starting with the given @elem.
|
Validate a branch of a tree, starting with the given
|
[
"Validate",
"a",
"branch",
"of",
"a",
"tree",
"starting",
"with",
"the",
"given"
] |
def schemaValidateOneElement(self, ctxt):
"""Validate a branch of a tree, starting with the given @elem. """
if ctxt is None: ctxt__o = None
else: ctxt__o = ctxt._o
ret = libxml2mod.xmlSchemaValidateOneElement(ctxt__o, self._o)
return ret
|
[
"def",
"schemaValidateOneElement",
"(",
"self",
",",
"ctxt",
")",
":",
"if",
"ctxt",
"is",
"None",
":",
"ctxt__o",
"=",
"None",
"else",
":",
"ctxt__o",
"=",
"ctxt",
".",
"_o",
"ret",
"=",
"libxml2mod",
".",
"xmlSchemaValidateOneElement",
"(",
"ctxt__o",
",",
"self",
".",
"_o",
")",
"return",
"ret"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L2909-L2914
|