text
stringlengths 89
104k
| code_tokens
list | avg_line_len
float64 7.91
980
| score
float64 0
630
|
---|---|---|---|
def stop_gracefully(self):
    """Refuse to start more processes.

    Runs in response to SIGINT or SIGTERM; if this is not a background
    process, control-C and a plain ``kill`` command trigger it.  A second
    request while already shutting down escalates to an immediate stop.
    """
    if not self.shutting_down:
        # First request: finish in-flight jobs, start nothing new.
        self.log(logging.INFO, 'shutting down after current jobs finish')
        self.shutting_down = True
        return
    # Repeated request: operator wants out now.
    self.log(logging.INFO,
             'second shutdown request, shutting down now')
    self.scram()
|
[
"def",
"stop_gracefully",
"(",
"self",
")",
":",
"if",
"self",
".",
"shutting_down",
":",
"self",
".",
"log",
"(",
"logging",
".",
"INFO",
",",
"'second shutdown request, shutting down now'",
")",
"self",
".",
"scram",
"(",
")",
"else",
":",
"self",
".",
"log",
"(",
"logging",
".",
"INFO",
",",
"'shutting down after current jobs finish'",
")",
"self",
".",
"shutting_down",
"=",
"True"
] | 34.2 | 21.666667 |
def post_request(self, endpoint, body=None, timeout=-1):
    """Send a POST request to the given endpoint in UpCloud's API.

    Thin convenience wrapper: delegates to ``self.request`` with the
    HTTP method fixed to POST.
    """
    method = 'POST'
    return self.request(method, endpoint, body, timeout)
|
[
"def",
"post_request",
"(",
"self",
",",
"endpoint",
",",
"body",
"=",
"None",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"request",
"(",
"'POST'",
",",
"endpoint",
",",
"body",
",",
"timeout",
")"
] | 41.2 | 12.8 |
def update_activity(self, activity_form):
    """Updates an existing activity.

    arg:    activity_form (osid.learning.ActivityForm): the form
            containing the elements to be updated
    raise:  IllegalState - ``activity_form`` already used in an
            update transaction
    raise:  InvalidArgument - the form contains an invalid value
    raise:  NullArgument - ``activity_form`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    raise:  Unsupported - ``activity_form`` did not originate from
            ``get_activity_form_for_update()``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceAdminSession.update_resource_template
    collection = JSONClientValidated('learning',
                                     collection='Activity',
                                     runtime=self._runtime)
    # Validate the argument type and that the form was issued for update
    # (not create) before touching any session state.
    if not isinstance(activity_form, ABCActivityForm):
        raise errors.InvalidArgument('argument type is not an ActivityForm')
    if not activity_form.is_for_update():
        raise errors.InvalidArgument('the ActivityForm is for update only, not create')
    # self._forms tracks forms issued by this session, keyed by form id;
    # a form may be consumed by at most one update transaction.  A form
    # unknown to this session raises KeyError here -> Unsupported.
    try:
        if self._forms[activity_form.get_id().get_identifier()] == UPDATED:
            raise errors.IllegalState('activity_form already used in an update transaction')
    except KeyError:
        raise errors.Unsupported('activity_form did not originate from this session')
    if not activity_form.is_valid():
        raise errors.InvalidArgument('one or more of the form elements is invalid')
    collection.save(activity_form._my_map)
    # Mark the form consumed only after a successful save.
    self._forms[activity_form.get_id().get_identifier()] = UPDATED
    # Note: this is out of spec. The OSIDs don't require an object to be returned:
    return objects.Activity(
        osid_object_map=activity_form._my_map,
        runtime=self._runtime,
        proxy=self._proxy)
|
[
"def",
"update_activity",
"(",
"self",
",",
"activity_form",
")",
":",
"# Implemented from template for",
"# osid.resource.ResourceAdminSession.update_resource_template",
"collection",
"=",
"JSONClientValidated",
"(",
"'learning'",
",",
"collection",
"=",
"'Activity'",
",",
"runtime",
"=",
"self",
".",
"_runtime",
")",
"if",
"not",
"isinstance",
"(",
"activity_form",
",",
"ABCActivityForm",
")",
":",
"raise",
"errors",
".",
"InvalidArgument",
"(",
"'argument type is not an ActivityForm'",
")",
"if",
"not",
"activity_form",
".",
"is_for_update",
"(",
")",
":",
"raise",
"errors",
".",
"InvalidArgument",
"(",
"'the ActivityForm is for update only, not create'",
")",
"try",
":",
"if",
"self",
".",
"_forms",
"[",
"activity_form",
".",
"get_id",
"(",
")",
".",
"get_identifier",
"(",
")",
"]",
"==",
"UPDATED",
":",
"raise",
"errors",
".",
"IllegalState",
"(",
"'activity_form already used in an update transaction'",
")",
"except",
"KeyError",
":",
"raise",
"errors",
".",
"Unsupported",
"(",
"'activity_form did not originate from this session'",
")",
"if",
"not",
"activity_form",
".",
"is_valid",
"(",
")",
":",
"raise",
"errors",
".",
"InvalidArgument",
"(",
"'one or more of the form elements is invalid'",
")",
"collection",
".",
"save",
"(",
"activity_form",
".",
"_my_map",
")",
"self",
".",
"_forms",
"[",
"activity_form",
".",
"get_id",
"(",
")",
".",
"get_identifier",
"(",
")",
"]",
"=",
"UPDATED",
"# Note: this is out of spec. The OSIDs don't require an object to be returned:",
"return",
"objects",
".",
"Activity",
"(",
"osid_object_map",
"=",
"activity_form",
".",
"_my_map",
",",
"runtime",
"=",
"self",
".",
"_runtime",
",",
"proxy",
"=",
"self",
".",
"_proxy",
")"
] | 50.756098 | 22.902439 |
def create_option(cls, option, **kwargs):
    """Create a new Option.

    Synchronous by default; passing ``async=True`` in ``kwargs`` makes the
    underlying call return the request thread instead (call ``.get()`` on
    it for the result).

    :param Option option: attributes of the option to create (required)
    :return: Option, or the request thread when called asynchronously
    """
    # Always ask the low-level call for the payload only.  Both the sync
    # and async branches of the original returned the helper's result
    # unchanged, so a single call suffices.
    kwargs['_return_http_data_only'] = True
    result = cls._create_option_with_http_info(option, **kwargs)
    return result
|
[
"def",
"create_option",
"(",
"cls",
",",
"option",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"cls",
".",
"_create_option_with_http_info",
"(",
"option",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"cls",
".",
"_create_option_with_http_info",
"(",
"option",
",",
"*",
"*",
"kwargs",
")",
"return",
"data"
] | 38.142857 | 17.904762 |
def plot_fit_individuals_lens_plane_only(
        fit, should_plot_mask=True, extract_array_from_mask=False, zoom_around_mask=False, positions=None,
        should_plot_image_plane_pix=False,
        should_plot_image=False,
        should_plot_noise_map=False,
        should_plot_signal_to_noise_map=False,
        should_plot_model_image=False,
        should_plot_residual_map=False,
        should_plot_chi_squared_map=False,
        units='arcsec',
        output_path=None, output_format='show'):
    """Plot the model datas_ of an analysis, using the *Fitter* class object.

    The visualization and output type can be fully customized.

    Parameters
    -----------
    fit : autolens.lens.fitting.Fitter
        Class containing fit between the model datas_ and observed lens datas_
        (including residual_map, chi_squared_map etc.)
    output_path : str
        The path where the datas_ is output if the output_type is a file format (e.g. png, fits)
    output_format : str
        How the datas_ is output. File formats (e.g. png, fits) output the datas_ to harddisk. 'show' displays the datas_
        in the python interpreter window.
    """
    mask = lens_plotter_util.get_mask(fit=fit, should_plot_mask=should_plot_mask)
    kpc_per_arcsec = fit.tracer.image_plane.kpc_per_arcsec
    # Keyword arguments shared by every individual plotter call below;
    # factored out to avoid repeating the same eight kwargs six times.
    common_kwargs = dict(
        fit=fit, mask=mask, extract_array_from_mask=extract_array_from_mask,
        zoom_around_mask=zoom_around_mask,
        units=units, kpc_per_arcsec=kpc_per_arcsec,
        output_path=output_path, output_format=output_format)
    if should_plot_image:
        # The image-plane pixelization grid is only needed (and only
        # computed) when the image itself is plotted.
        image_plane_pix_grid = lens_plotter_util.get_image_plane_pix_grid(should_plot_image_plane_pix, fit)
        lens_plotter_util.plot_image(
            image_plane_pix_grid=image_plane_pix_grid, **common_kwargs)
    if should_plot_noise_map:
        lens_plotter_util.plot_noise_map(**common_kwargs)
    if should_plot_signal_to_noise_map:
        lens_plotter_util.plot_signal_to_noise_map(**common_kwargs)
    if should_plot_model_image:
        # plot_model_data additionally takes the image-plane positions.
        lens_plotter_util.plot_model_data(positions=positions, **common_kwargs)
    if should_plot_residual_map:
        lens_plotter_util.plot_residual_map(**common_kwargs)
    if should_plot_chi_squared_map:
        lens_plotter_util.plot_chi_squared_map(**common_kwargs)
|
[
"def",
"plot_fit_individuals_lens_plane_only",
"(",
"fit",
",",
"should_plot_mask",
"=",
"True",
",",
"extract_array_from_mask",
"=",
"False",
",",
"zoom_around_mask",
"=",
"False",
",",
"positions",
"=",
"None",
",",
"should_plot_image_plane_pix",
"=",
"False",
",",
"should_plot_image",
"=",
"False",
",",
"should_plot_noise_map",
"=",
"False",
",",
"should_plot_signal_to_noise_map",
"=",
"False",
",",
"should_plot_model_image",
"=",
"False",
",",
"should_plot_residual_map",
"=",
"False",
",",
"should_plot_chi_squared_map",
"=",
"False",
",",
"units",
"=",
"'arcsec'",
",",
"output_path",
"=",
"None",
",",
"output_format",
"=",
"'show'",
")",
":",
"mask",
"=",
"lens_plotter_util",
".",
"get_mask",
"(",
"fit",
"=",
"fit",
",",
"should_plot_mask",
"=",
"should_plot_mask",
")",
"kpc_per_arcsec",
"=",
"fit",
".",
"tracer",
".",
"image_plane",
".",
"kpc_per_arcsec",
"if",
"should_plot_image",
":",
"image_plane_pix_grid",
"=",
"lens_plotter_util",
".",
"get_image_plane_pix_grid",
"(",
"should_plot_image_plane_pix",
",",
"fit",
")",
"lens_plotter_util",
".",
"plot_image",
"(",
"fit",
"=",
"fit",
",",
"mask",
"=",
"mask",
",",
"extract_array_from_mask",
"=",
"extract_array_from_mask",
",",
"zoom_around_mask",
"=",
"zoom_around_mask",
",",
"image_plane_pix_grid",
"=",
"image_plane_pix_grid",
",",
"units",
"=",
"units",
",",
"kpc_per_arcsec",
"=",
"kpc_per_arcsec",
",",
"output_path",
"=",
"output_path",
",",
"output_format",
"=",
"output_format",
")",
"if",
"should_plot_noise_map",
":",
"lens_plotter_util",
".",
"plot_noise_map",
"(",
"fit",
"=",
"fit",
",",
"mask",
"=",
"mask",
",",
"extract_array_from_mask",
"=",
"extract_array_from_mask",
",",
"zoom_around_mask",
"=",
"zoom_around_mask",
",",
"units",
"=",
"units",
",",
"kpc_per_arcsec",
"=",
"kpc_per_arcsec",
",",
"output_path",
"=",
"output_path",
",",
"output_format",
"=",
"output_format",
")",
"if",
"should_plot_signal_to_noise_map",
":",
"lens_plotter_util",
".",
"plot_signal_to_noise_map",
"(",
"fit",
"=",
"fit",
",",
"mask",
"=",
"mask",
",",
"extract_array_from_mask",
"=",
"extract_array_from_mask",
",",
"zoom_around_mask",
"=",
"zoom_around_mask",
",",
"units",
"=",
"units",
",",
"kpc_per_arcsec",
"=",
"kpc_per_arcsec",
",",
"output_path",
"=",
"output_path",
",",
"output_format",
"=",
"output_format",
")",
"if",
"should_plot_model_image",
":",
"lens_plotter_util",
".",
"plot_model_data",
"(",
"fit",
"=",
"fit",
",",
"mask",
"=",
"mask",
",",
"extract_array_from_mask",
"=",
"extract_array_from_mask",
",",
"zoom_around_mask",
"=",
"zoom_around_mask",
",",
"positions",
"=",
"positions",
",",
"units",
"=",
"units",
",",
"kpc_per_arcsec",
"=",
"kpc_per_arcsec",
",",
"output_path",
"=",
"output_path",
",",
"output_format",
"=",
"output_format",
")",
"if",
"should_plot_residual_map",
":",
"lens_plotter_util",
".",
"plot_residual_map",
"(",
"fit",
"=",
"fit",
",",
"mask",
"=",
"mask",
",",
"extract_array_from_mask",
"=",
"extract_array_from_mask",
",",
"zoom_around_mask",
"=",
"zoom_around_mask",
",",
"units",
"=",
"units",
",",
"kpc_per_arcsec",
"=",
"kpc_per_arcsec",
",",
"output_path",
"=",
"output_path",
",",
"output_format",
"=",
"output_format",
")",
"if",
"should_plot_chi_squared_map",
":",
"lens_plotter_util",
".",
"plot_chi_squared_map",
"(",
"fit",
"=",
"fit",
",",
"mask",
"=",
"mask",
",",
"extract_array_from_mask",
"=",
"extract_array_from_mask",
",",
"zoom_around_mask",
"=",
"zoom_around_mask",
",",
"units",
"=",
"units",
",",
"kpc_per_arcsec",
"=",
"kpc_per_arcsec",
",",
"output_path",
"=",
"output_path",
",",
"output_format",
"=",
"output_format",
")"
] | 44.851351 | 27.851351 |
def _get_lr_tensor(self):
    """Get lr minimizing the surrogate.

    Computes lr_t = (1 - sqrt(mu))^2 / h_min.

    Returns:
      The lr_t tensor.
    """
    # tf.squared_difference(a, b) == (a - b)**2.
    # NOTE(review): assumes self._mu is non-negative so the sqrt is
    # real-valued — TODO confirm upstream clamping of mu.
    lr = tf.squared_difference(1.0, tf.sqrt(self._mu)) / self._h_min
    return lr
|
[
"def",
"_get_lr_tensor",
"(",
"self",
")",
":",
"lr",
"=",
"tf",
".",
"squared_difference",
"(",
"1.0",
",",
"tf",
".",
"sqrt",
"(",
"self",
".",
"_mu",
")",
")",
"/",
"self",
".",
"_h_min",
"return",
"lr"
] | 22.375 | 20.375 |
def rectmode(self, mode=None):
    '''
    Get or set the current rectmode.

    :param mode: CORNER, CENTER, CORNERS, or None to query
    :return: the current rectmode if mode is None or a valid mode
    :raises ShoebotError: if mode is neither None nor a valid mode
    '''
    # BUG FIX: the original assigned to ``self.rectmode``, which shadowed
    # this method on the instance — after one successful call, any later
    # ``obj.rectmode(...)`` raised TypeError, and querying before any set
    # returned the bound method object.  Store the value separately.
    if mode in (self.CORNER, self.CENTER, self.CORNERS):
        self._rectmode = mode
        return self._rectmode
    elif mode is None:
        # Fall back to None when no mode has been set yet.
        # NOTE(review): if __init__ elsewhere seeds a default mode, wire it
        # through ``_rectmode`` as well — TODO confirm against the class.
        return getattr(self, '_rectmode', None)
    else:
        raise ShoebotError(_("rectmode: invalid input"))
|
[
"def",
"rectmode",
"(",
"self",
",",
"mode",
"=",
"None",
")",
":",
"if",
"mode",
"in",
"(",
"self",
".",
"CORNER",
",",
"self",
".",
"CENTER",
",",
"self",
".",
"CORNERS",
")",
":",
"self",
".",
"rectmode",
"=",
"mode",
"return",
"self",
".",
"rectmode",
"elif",
"mode",
"is",
"None",
":",
"return",
"self",
".",
"rectmode",
"else",
":",
"raise",
"ShoebotError",
"(",
"_",
"(",
"\"rectmode: invalid input\"",
")",
")"
] | 31.071429 | 16.785714 |
def generate(env):
    """Add Builders and construction variables for javac to an Environment."""
    # Create/register the Java builders.  CreateJavaFileBuilder attaches
    # its builder to env as a side effect; the local name is otherwise
    # unused here.
    java_file = SCons.Tool.CreateJavaFileBuilder(env)
    java_class = SCons.Tool.CreateJavaClassFileBuilder(env)
    java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env)
    # Emit .class targets for .java sources (both with and without the
    # configured suffix) and for whole class directories.
    java_class.add_emitter(None, emit_java_classes)
    java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes)
    java_class_dir.emitter = emit_java_classes
    env.AddMethod(Java)
    # Default tool and flag settings; projects override these per-env.
    env['JAVAC'] = 'javac'
    env['JAVACFLAGS'] = SCons.Util.CLVar('')
    env['JAVABOOTCLASSPATH'] = []
    env['JAVACLASSPATH'] = []
    env['JAVASOURCEPATH'] = []
    # _javapathopt expands a path list into the corresponding javac
    # command-line option; the _JAVA* variables below invoke it lazily.
    env['_javapathopt'] = pathopt
    env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} '
    env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} '
    env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} '
    env['_JAVASOURCEPATHDEFAULT'] = '${TARGET.attributes.java_sourcedir}'
    # Full command line; wrapped in TEMPFILE so long command lines go via
    # a response file where the platform requires it.
    env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES'
    env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}"
    env['JAVACLASSSUFFIX'] = '.class'
    env['JAVASUFFIX'] = '.java'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"java_file",
"=",
"SCons",
".",
"Tool",
".",
"CreateJavaFileBuilder",
"(",
"env",
")",
"java_class",
"=",
"SCons",
".",
"Tool",
".",
"CreateJavaClassFileBuilder",
"(",
"env",
")",
"java_class_dir",
"=",
"SCons",
".",
"Tool",
".",
"CreateJavaClassDirBuilder",
"(",
"env",
")",
"java_class",
".",
"add_emitter",
"(",
"None",
",",
"emit_java_classes",
")",
"java_class",
".",
"add_emitter",
"(",
"env",
".",
"subst",
"(",
"'$JAVASUFFIX'",
")",
",",
"emit_java_classes",
")",
"java_class_dir",
".",
"emitter",
"=",
"emit_java_classes",
"env",
".",
"AddMethod",
"(",
"Java",
")",
"env",
"[",
"'JAVAC'",
"]",
"=",
"'javac'",
"env",
"[",
"'JAVACFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
"env",
"[",
"'JAVABOOTCLASSPATH'",
"]",
"=",
"[",
"]",
"env",
"[",
"'JAVACLASSPATH'",
"]",
"=",
"[",
"]",
"env",
"[",
"'JAVASOURCEPATH'",
"]",
"=",
"[",
"]",
"env",
"[",
"'_javapathopt'",
"]",
"=",
"pathopt",
"env",
"[",
"'_JAVABOOTCLASSPATH'",
"]",
"=",
"'${_javapathopt(\"-bootclasspath\", \"JAVABOOTCLASSPATH\")} '",
"env",
"[",
"'_JAVACLASSPATH'",
"]",
"=",
"'${_javapathopt(\"-classpath\", \"JAVACLASSPATH\")} '",
"env",
"[",
"'_JAVASOURCEPATH'",
"]",
"=",
"'${_javapathopt(\"-sourcepath\", \"JAVASOURCEPATH\", \"_JAVASOURCEPATHDEFAULT\")} '",
"env",
"[",
"'_JAVASOURCEPATHDEFAULT'",
"]",
"=",
"'${TARGET.attributes.java_sourcedir}'",
"env",
"[",
"'_JAVACCOM'",
"]",
"=",
"'$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES'",
"env",
"[",
"'JAVACCOM'",
"]",
"=",
"\"${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}\"",
"env",
"[",
"'JAVACLASSSUFFIX'",
"]",
"=",
"'.class'",
"env",
"[",
"'JAVASUFFIX'",
"]",
"=",
"'.java'"
] | 57.2 | 25.2 |
def operation(self, other, function, **kwargs):
    """Calculate an "elementwise" operation on this TimeSeries.

    With another TimeSeries ``other``::

        operation(t) = function(self(t), other(t))

    and the measurement times of the result are the union of both
    inputs' measurement times.  With a constant ``other``::

        operation(t) = function(self(t), other)

    and the measurement times are unchanged.
    """
    result = TimeSeries(**kwargs)
    if not isinstance(other, TimeSeries):
        # Constant operand: apply pointwise at our own times.
        for t, v in self:
            result[t] = function(v, other)
        return result
    # TimeSeries operand: cover the union of both time sets.
    for t, v in self:
        result[t] = function(v, other[t])
    for t, v in other:
        result[t] = function(self[t], v)
    return result
|
[
"def",
"operation",
"(",
"self",
",",
"other",
",",
"function",
",",
"*",
"*",
"kwargs",
")",
":",
"result",
"=",
"TimeSeries",
"(",
"*",
"*",
"kwargs",
")",
"if",
"isinstance",
"(",
"other",
",",
"TimeSeries",
")",
":",
"for",
"time",
",",
"value",
"in",
"self",
":",
"result",
"[",
"time",
"]",
"=",
"function",
"(",
"value",
",",
"other",
"[",
"time",
"]",
")",
"for",
"time",
",",
"value",
"in",
"other",
":",
"result",
"[",
"time",
"]",
"=",
"function",
"(",
"self",
"[",
"time",
"]",
",",
"value",
")",
"else",
":",
"for",
"time",
",",
"value",
"in",
"self",
":",
"result",
"[",
"time",
"]",
"=",
"function",
"(",
"value",
",",
"other",
")",
"return",
"result"
] | 36.269231 | 16.884615 |
def to_networkx(self, names_only=False):
    """Create a NetworkX graph representing the hierarchy of a Compound.

    Parameters
    ----------
    names_only : bool, optional, default=False
        Store only the names of the compounds in the graph.  When False
        (the default) the nodes are the compound objects themselves.

    Returns
    -------
    G : networkx.DiGraph
    """
    nx = import_('networkx')
    # Seed the node list with this compound (or just its name), then let
    # the recursive traversal fill in the rest of the hierarchy.
    root = self.name if names_only else self
    nodes, edges = self._iterate_children([root], [], names_only=names_only)
    graph = nx.DiGraph()
    graph.add_nodes_from(nodes)
    graph.add_edges_from(edges)
    return graph
|
[
"def",
"to_networkx",
"(",
"self",
",",
"names_only",
"=",
"False",
")",
":",
"nx",
"=",
"import_",
"(",
"'networkx'",
")",
"nodes",
"=",
"list",
"(",
")",
"edges",
"=",
"list",
"(",
")",
"if",
"names_only",
":",
"nodes",
".",
"append",
"(",
"self",
".",
"name",
")",
"else",
":",
"nodes",
".",
"append",
"(",
"self",
")",
"nodes",
",",
"edges",
"=",
"self",
".",
"_iterate_children",
"(",
"nodes",
",",
"edges",
",",
"names_only",
"=",
"names_only",
")",
"graph",
"=",
"nx",
".",
"DiGraph",
"(",
")",
"graph",
".",
"add_nodes_from",
"(",
"nodes",
")",
"graph",
".",
"add_edges_from",
"(",
"edges",
")",
"return",
"graph"
] | 29.703704 | 19.962963 |
def down(removekeys=False, tgt='*', tgt_type='glob', timeout=None, gather_job_timeout=None):
    '''
    .. versionchanged:: 2017.7.0
        The ``expr_form`` argument has been renamed to ``tgt_type``, earlier
        releases must use ``expr_form``.

    Print a list of all the down or unresponsive salt minions.
    Optionally remove keys of down minions.

    CLI Example:

    .. code-block:: bash

        salt-run manage.down
        salt-run manage.down removekeys=True
        salt-run manage.down tgt="webservers" tgt_type="nodegroup"
    '''
    ret = status(output=False,
                 tgt=tgt,
                 tgt_type=tgt_type,
                 timeout=timeout,
                 gather_job_timeout=gather_job_timeout
                 ).get('down', [])
    if removekeys and ret:
        # Construct the Wheel client once, outside the loop — the
        # original re-instantiated it for every down minion.
        wheel = salt.wheel.Wheel(__opts__)
        for minion in ret:
            wheel.call_func('key.delete', match=minion)
    return ret
|
[
"def",
"down",
"(",
"removekeys",
"=",
"False",
",",
"tgt",
"=",
"'*'",
",",
"tgt_type",
"=",
"'glob'",
",",
"timeout",
"=",
"None",
",",
"gather_job_timeout",
"=",
"None",
")",
":",
"ret",
"=",
"status",
"(",
"output",
"=",
"False",
",",
"tgt",
"=",
"tgt",
",",
"tgt_type",
"=",
"tgt_type",
",",
"timeout",
"=",
"timeout",
",",
"gather_job_timeout",
"=",
"gather_job_timeout",
")",
".",
"get",
"(",
"'down'",
",",
"[",
"]",
")",
"for",
"minion",
"in",
"ret",
":",
"if",
"removekeys",
":",
"wheel",
"=",
"salt",
".",
"wheel",
".",
"Wheel",
"(",
"__opts__",
")",
"wheel",
".",
"call_func",
"(",
"'key.delete'",
",",
"match",
"=",
"minion",
")",
"return",
"ret"
] | 30.758621 | 21.37931 |
def trigger(self, event):
    """AlertManager event handler for incoming events.

    :param event: incoming AlertManager message; ``event.data`` carries
        the alert topic, message and target role
    """
    data = event.data
    topic = data.get('topic', None)
    if topic is None:
        # Nothing to do without a topic; log and bail out.
        self.log('No alert topic to trigger', lvl=warn)
        return
    message = data.get('msg', 'Alert has been triggered')
    role = data.get('role', 'all')
    alert = {
        'topic': topic,
        'message': message,
        'role': role,
    }
    self._trigger(event, alert)
|
[
"def",
"trigger",
"(",
"self",
",",
"event",
")",
":",
"topic",
"=",
"event",
".",
"data",
".",
"get",
"(",
"'topic'",
",",
"None",
")",
"if",
"topic",
"is",
"None",
":",
"self",
".",
"log",
"(",
"'No alert topic to trigger'",
",",
"lvl",
"=",
"warn",
")",
"return",
"alert",
"=",
"{",
"'topic'",
":",
"topic",
",",
"'message'",
":",
"event",
".",
"data",
".",
"get",
"(",
"'msg'",
",",
"'Alert has been triggered'",
")",
",",
"'role'",
":",
"event",
".",
"data",
".",
"get",
"(",
"'role'",
",",
"'all'",
")",
"}",
"self",
".",
"_trigger",
"(",
"event",
",",
"alert",
")"
] | 28.055556 | 20.277778 |
def set_element(self, row, col, value):
    """
    Sets the float value at the specified location.

    :param row: the 0-based index of the row
    :type row: int
    :param col: the 0-based index of the column
    :type col: int
    :param value: the float value for that cell
    :type value: float
    """
    # Delegate to the wrapped JVM matrix object; "(IID)V" is the JNI
    # method signature: (int, int, double) -> void.
    javabridge.call(
        self.jobject, "setElement", "(IID)V", row, col, value)
|
[
"def",
"set_element",
"(",
"self",
",",
"row",
",",
"col",
",",
"value",
")",
":",
"javabridge",
".",
"call",
"(",
"self",
".",
"jobject",
",",
"\"setElement\"",
",",
"\"(IID)V\"",
",",
"row",
",",
"col",
",",
"value",
")"
] | 32.769231 | 13.692308 |
def rmon_alarm_entry_snmp_oid(self, **kwargs):
    """Auto Generated Code

    Builds the NETCONF XML for an RMON alarm-entry ``snmp-oid`` setting
    and hands the ``config`` element to the callback (``self._callback``
    unless one is supplied in ``kwargs``).
    """
    config = ET.Element("config")
    rmon = ET.SubElement(config, "rmon", xmlns="urn:brocade.com:mgmt:brocade-rmon")
    alarm_entry = ET.SubElement(rmon, "alarm-entry")
    # The alarm index is the list key for this entry.
    index_node = ET.SubElement(alarm_entry, "alarm-index")
    index_node.text = kwargs.pop('alarm_index')
    oid_node = ET.SubElement(alarm_entry, "snmp-oid")
    oid_node.text = kwargs.pop('snmp_oid')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
[
"def",
"rmon_alarm_entry_snmp_oid",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"config",
"=",
"ET",
".",
"Element",
"(",
"\"config\"",
")",
"rmon",
"=",
"ET",
".",
"SubElement",
"(",
"config",
",",
"\"rmon\"",
",",
"xmlns",
"=",
"\"urn:brocade.com:mgmt:brocade-rmon\"",
")",
"alarm_entry",
"=",
"ET",
".",
"SubElement",
"(",
"rmon",
",",
"\"alarm-entry\"",
")",
"alarm_index_key",
"=",
"ET",
".",
"SubElement",
"(",
"alarm_entry",
",",
"\"alarm-index\"",
")",
"alarm_index_key",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'alarm_index'",
")",
"snmp_oid",
"=",
"ET",
".",
"SubElement",
"(",
"alarm_entry",
",",
"\"snmp-oid\"",
")",
"snmp_oid",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'snmp_oid'",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"return",
"callback",
"(",
"config",
")"
] | 44.692308 | 15.692308 |
def _read_configuration_file(self, path):
    """Try to read and parse `path` as a configuration file.

    If the configurations were illegal (checked with
    `self._validate_options`), raises `IllegalConfiguration`.

    Returns (options, should_inherit).
    """
    parser = RawConfigParser(inline_comment_prefixes=('#', ';'))
    options = None
    should_inherit = True
    # Only proceed when the file is readable AND contains our section.
    if parser.read(path) and self._get_section_name(parser):
        # Collect every known CLI option (top-level and grouped) so we
        # can map each config key to its expected type/action.
        all_options = self._parser.option_list[:]
        for group in self._parser.option_groups:
            all_options.extend(group.option_list)
        option_list = {o.dest: o.type or o.action
                       for o in all_options}
        # First, read the default values
        new_options, _ = self._parse_args([])
        # Second, parse the configuration
        section_name = self._get_section_name(parser)
        for opt in parser.options(section_name):
            # 'inherit' is a meta-option controlling config cascading,
            # not a CLI option.
            if opt == 'inherit':
                should_inherit = parser.getboolean(section_name, opt)
                continue
            if opt.replace('_', '-') not in self.CONFIG_FILE_OPTIONS:
                log.warning("Unknown option '{}' ignored".format(opt))
                continue
            normalized_opt = opt.replace('-', '_')
            opt_type = option_list[normalized_opt]
            # Convert the raw string using the option's declared type;
            # store_true/store_false options are parsed as booleans.
            if opt_type in ('int', 'count'):
                value = parser.getint(section_name, opt)
            elif opt_type == 'string':
                value = parser.get(section_name, opt)
            else:
                assert opt_type in ('store_true', 'store_false')
                value = parser.getboolean(section_name, opt)
            setattr(new_options, normalized_opt, value)
        # Third, fix the set-options
        options = self._fix_set_options(new_options)
    if options is not None:
        if not self._validate_options(options):
            raise IllegalConfiguration('in file: {}'.format(path))
    return options, should_inherit
|
[
"def",
"_read_configuration_file",
"(",
"self",
",",
"path",
")",
":",
"parser",
"=",
"RawConfigParser",
"(",
"inline_comment_prefixes",
"=",
"(",
"'#'",
",",
"';'",
")",
")",
"options",
"=",
"None",
"should_inherit",
"=",
"True",
"if",
"parser",
".",
"read",
"(",
"path",
")",
"and",
"self",
".",
"_get_section_name",
"(",
"parser",
")",
":",
"all_options",
"=",
"self",
".",
"_parser",
".",
"option_list",
"[",
":",
"]",
"for",
"group",
"in",
"self",
".",
"_parser",
".",
"option_groups",
":",
"all_options",
".",
"extend",
"(",
"group",
".",
"option_list",
")",
"option_list",
"=",
"{",
"o",
".",
"dest",
":",
"o",
".",
"type",
"or",
"o",
".",
"action",
"for",
"o",
"in",
"all_options",
"}",
"# First, read the default values",
"new_options",
",",
"_",
"=",
"self",
".",
"_parse_args",
"(",
"[",
"]",
")",
"# Second, parse the configuration",
"section_name",
"=",
"self",
".",
"_get_section_name",
"(",
"parser",
")",
"for",
"opt",
"in",
"parser",
".",
"options",
"(",
"section_name",
")",
":",
"if",
"opt",
"==",
"'inherit'",
":",
"should_inherit",
"=",
"parser",
".",
"getboolean",
"(",
"section_name",
",",
"opt",
")",
"continue",
"if",
"opt",
".",
"replace",
"(",
"'_'",
",",
"'-'",
")",
"not",
"in",
"self",
".",
"CONFIG_FILE_OPTIONS",
":",
"log",
".",
"warning",
"(",
"\"Unknown option '{}' ignored\"",
".",
"format",
"(",
"opt",
")",
")",
"continue",
"normalized_opt",
"=",
"opt",
".",
"replace",
"(",
"'-'",
",",
"'_'",
")",
"opt_type",
"=",
"option_list",
"[",
"normalized_opt",
"]",
"if",
"opt_type",
"in",
"(",
"'int'",
",",
"'count'",
")",
":",
"value",
"=",
"parser",
".",
"getint",
"(",
"section_name",
",",
"opt",
")",
"elif",
"opt_type",
"==",
"'string'",
":",
"value",
"=",
"parser",
".",
"get",
"(",
"section_name",
",",
"opt",
")",
"else",
":",
"assert",
"opt_type",
"in",
"(",
"'store_true'",
",",
"'store_false'",
")",
"value",
"=",
"parser",
".",
"getboolean",
"(",
"section_name",
",",
"opt",
")",
"setattr",
"(",
"new_options",
",",
"normalized_opt",
",",
"value",
")",
"# Third, fix the set-options",
"options",
"=",
"self",
".",
"_fix_set_options",
"(",
"new_options",
")",
"if",
"options",
"is",
"not",
"None",
":",
"if",
"not",
"self",
".",
"_validate_options",
"(",
"options",
")",
":",
"raise",
"IllegalConfiguration",
"(",
"'in file: {}'",
".",
"format",
"(",
"path",
")",
")",
"return",
"options",
",",
"should_inherit"
] | 38.851852 | 19.944444 |
def get(tree, name):
    """Return a float value attribute NAME from TREE.

    :param tree: mapping to read from
    :param name: key to look up
    :return: the value converted to float, or ``float("nan")`` when the
        key is missing or the value cannot be converted.
    """
    if name not in tree:
        return float("nan")
    try:
        return float(tree[name])
    except (TypeError, ValueError):
        # ValueError: unparsable string.  TypeError: non-numeric object
        # such as None or a list — the original only caught ValueError
        # and crashed on those instead of returning nan.
        return float("nan")
|
[
"def",
"get",
"(",
"tree",
",",
"name",
")",
":",
"if",
"name",
"in",
"tree",
":",
"value",
"=",
"tree",
"[",
"name",
"]",
"else",
":",
"return",
"float",
"(",
"\"nan\"",
")",
"try",
":",
"a",
"=",
"float",
"(",
"value",
")",
"except",
"ValueError",
":",
"a",
"=",
"float",
"(",
"\"nan\"",
")",
"return",
"a"
] | 21.083333 | 17.333333 |
def download_as_pem(
    base_url=d1_common.const.URL_DATAONE_ROOT,
    timeout_sec=d1_common.const.DEFAULT_HTTP_TIMEOUT,
):
    """Download public certificate from a TLS/SSL web server as PEM encoded string.

    Also see download_as_der().

    Args:
      base_url : str
        A full URL to a DataONE service endpoint or a server hostname
      timeout_sec : int or float
        Timeout for the SSL socket operations

    Returns:
      str: The certificate as a PEM encoded string.
    """
    # Fetch the DER-encoded certificate, then re-encode it as PEM.
    der_cert_bytes = download_as_der(base_url, timeout_sec)
    return ssl.DER_cert_to_PEM_cert(der_cert_bytes)
|
[
"def",
"download_as_pem",
"(",
"base_url",
"=",
"d1_common",
".",
"const",
".",
"URL_DATAONE_ROOT",
",",
"timeout_sec",
"=",
"d1_common",
".",
"const",
".",
"DEFAULT_HTTP_TIMEOUT",
",",
")",
":",
"return",
"ssl",
".",
"DER_cert_to_PEM_cert",
"(",
"download_as_der",
"(",
"base_url",
",",
"timeout_sec",
")",
")"
] | 29.631579 | 21.736842 |
def _convert_to(maybe_device, convert_to):
    '''
    Convert a device name, UUID or LABEL to a device name, UUID or
    LABEL.

    Return the fs_spec required for fstab.

    maybe_device: a device path (``/dev/...``), or a ``UUID=``/``LABEL=``
        token identifying a device.
    convert_to: target representation: ``'device'``, ``'uuid'``,
        ``'label'``, or falsy to leave the input untouched.
    '''
    # Fast path. If we already have the information required, we can
    # save one blkid call
    if not convert_to or \
       (convert_to == 'device' and maybe_device.startswith('/')) or \
       maybe_device.startswith('{}='.format(convert_to.upper())):
        return maybe_device
    # Get the device information
    if maybe_device.startswith('/'):
        # Input is a device path: query blkid by device.
        blkid = __salt__['disk.blkid'](maybe_device)
    else:
        # Input is a UUID=/LABEL= token: query blkid by token.
        blkid = __salt__['disk.blkid'](token=maybe_device)
    # Only an unambiguous (single-device) blkid answer is usable;
    # otherwise None is returned.
    result = None
    if len(blkid) == 1:
        if convert_to == 'device':
            result = list(blkid.keys())[0]
        else:
            key = convert_to.upper()
            result = '{}={}'.format(key, list(blkid.values())[0][key])
    return result
|
[
"def",
"_convert_to",
"(",
"maybe_device",
",",
"convert_to",
")",
":",
"# Fast path. If we already have the information required, we can",
"# save one blkid call",
"if",
"not",
"convert_to",
"or",
"(",
"convert_to",
"==",
"'device'",
"and",
"maybe_device",
".",
"startswith",
"(",
"'/'",
")",
")",
"or",
"maybe_device",
".",
"startswith",
"(",
"'{}='",
".",
"format",
"(",
"convert_to",
".",
"upper",
"(",
")",
")",
")",
":",
"return",
"maybe_device",
"# Get the device information",
"if",
"maybe_device",
".",
"startswith",
"(",
"'/'",
")",
":",
"blkid",
"=",
"__salt__",
"[",
"'disk.blkid'",
"]",
"(",
"maybe_device",
")",
"else",
":",
"blkid",
"=",
"__salt__",
"[",
"'disk.blkid'",
"]",
"(",
"token",
"=",
"maybe_device",
")",
"result",
"=",
"None",
"if",
"len",
"(",
"blkid",
")",
"==",
"1",
":",
"if",
"convert_to",
"==",
"'device'",
":",
"result",
"=",
"list",
"(",
"blkid",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"else",
":",
"key",
"=",
"convert_to",
".",
"upper",
"(",
")",
"result",
"=",
"'{}={}'",
".",
"format",
"(",
"key",
",",
"list",
"(",
"blkid",
".",
"values",
"(",
")",
")",
"[",
"0",
"]",
"[",
"key",
"]",
")",
"return",
"result"
] | 28.806452 | 22.419355 |
def princomp(x):
    """Determine the principal components of a vector of measurements.

    x should be a M x N numpy array composed of M observations of n
    variables.  The output is:

    coeffs - the NxN correlation matrix that can be used to transform x
    into its components, with columns ordered by decreasing eigenvalue.

    The code for this function is based on "A Tutorial on Principal
    Component Analysis", Shlens, 2005
    http://www.snl.salk.edu/~shlens/pub/notes/pca.pdf (unpublished)
    """
    n_obs = x.shape[0]
    # Center the observations on the per-variable mean.
    centered = x - x.mean(0)
    # Sample covariance matrix of the centered data.
    cov = numpy.dot(centered.transpose(), centered) / (n_obs - 1)
    eigvals, eigvecs = numpy.linalg.eig(cov)
    # Reorder the eigenvector columns by decreasing eigenvalue.
    return eigvecs[:, (-eigvals).argsort()]
|
[
"def",
"princomp",
"(",
"x",
")",
":",
"(",
"M",
",",
"N",
")",
"=",
"x",
".",
"shape",
"Mean",
"=",
"x",
".",
"mean",
"(",
"0",
")",
"y",
"=",
"x",
"-",
"Mean",
"cov",
"=",
"numpy",
".",
"dot",
"(",
"y",
".",
"transpose",
"(",
")",
",",
"y",
")",
"/",
"(",
"M",
"-",
"1",
")",
"(",
"V",
",",
"PC",
")",
"=",
"numpy",
".",
"linalg",
".",
"eig",
"(",
"cov",
")",
"order",
"=",
"(",
"-",
"V",
")",
".",
"argsort",
"(",
")",
"coeff",
"=",
"PC",
"[",
":",
",",
"order",
"]",
"return",
"coeff"
] | 35.142857 | 23.238095 |
def TryAction(self, action, text = None, extension = ""):
"""Tries to execute the given action with optional source file
contents <text> and optional source file extension <extension>,
Returns the status (0 : failed, 1 : ok) and the contents of the
output file.
"""
builder = SCons.Builder.Builder(action=action)
self.env.Append( BUILDERS = {'SConfActionBuilder' : builder} )
ok = self.TryBuild(self.env.SConfActionBuilder, text, extension)
del self.env['BUILDERS']['SConfActionBuilder']
if ok:
outputStr = self.lastTarget.get_contents().decode()
return (1, outputStr)
return (0, "")
|
[
"def",
"TryAction",
"(",
"self",
",",
"action",
",",
"text",
"=",
"None",
",",
"extension",
"=",
"\"\"",
")",
":",
"builder",
"=",
"SCons",
".",
"Builder",
".",
"Builder",
"(",
"action",
"=",
"action",
")",
"self",
".",
"env",
".",
"Append",
"(",
"BUILDERS",
"=",
"{",
"'SConfActionBuilder'",
":",
"builder",
"}",
")",
"ok",
"=",
"self",
".",
"TryBuild",
"(",
"self",
".",
"env",
".",
"SConfActionBuilder",
",",
"text",
",",
"extension",
")",
"del",
"self",
".",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'SConfActionBuilder'",
"]",
"if",
"ok",
":",
"outputStr",
"=",
"self",
".",
"lastTarget",
".",
"get_contents",
"(",
")",
".",
"decode",
"(",
")",
"return",
"(",
"1",
",",
"outputStr",
")",
"return",
"(",
"0",
",",
"\"\"",
")"
] | 48.714286 | 18.785714 |
def join(self):
"""Note that the Executor must be close()'d elsewhere,
or join() will never return.
"""
self.inputfeeder_thread.join()
self.pool.join()
self.resulttracker_thread.join()
self.failuretracker_thread.join()
|
[
"def",
"join",
"(",
"self",
")",
":",
"self",
".",
"inputfeeder_thread",
".",
"join",
"(",
")",
"self",
".",
"pool",
".",
"join",
"(",
")",
"self",
".",
"resulttracker_thread",
".",
"join",
"(",
")",
"self",
".",
"failuretracker_thread",
".",
"join",
"(",
")"
] | 28.125 | 9.5 |
def rename(self, from_path, to_path):
"""
Rename file.
:type from_path: str
:param from_path: the path of the source file
:type to_path: str
:param to_path: the path of the destination file
:raises: :exc:`~exceptions.IOError`
"""
_complain_ifclosed(self.closed)
return self.fs.rename(from_path, to_path)
|
[
"def",
"rename",
"(",
"self",
",",
"from_path",
",",
"to_path",
")",
":",
"_complain_ifclosed",
"(",
"self",
".",
"closed",
")",
"return",
"self",
".",
"fs",
".",
"rename",
"(",
"from_path",
",",
"to_path",
")"
] | 31.083333 | 10.916667 |
def _get_format_callable(term, color, back_color):
"""Get string-coloring callable
Get callable for string output using ``color`` on ``back_color``
on ``term``
:param term: blessings.Terminal instance
:param color: Color that callable will color the string it's passed
:param back_color: Back color for the string
:returns: callable(s: str) -> str
"""
if isinstance(color, str):
ensure(
any(isinstance(back_color, t) for t in [str, type(None)]),
TypeError,
"back_color must be a str or NoneType"
)
if back_color:
return getattr(term, "_".join(
[color, "on", back_color]
))
elif back_color is None:
return getattr(term, color)
elif isinstance(color, int):
return term.on_color(color)
else:
raise TypeError("Invalid type {} for color".format(
type(color)
))
|
[
"def",
"_get_format_callable",
"(",
"term",
",",
"color",
",",
"back_color",
")",
":",
"if",
"isinstance",
"(",
"color",
",",
"str",
")",
":",
"ensure",
"(",
"any",
"(",
"isinstance",
"(",
"back_color",
",",
"t",
")",
"for",
"t",
"in",
"[",
"str",
",",
"type",
"(",
"None",
")",
"]",
")",
",",
"TypeError",
",",
"\"back_color must be a str or NoneType\"",
")",
"if",
"back_color",
":",
"return",
"getattr",
"(",
"term",
",",
"\"_\"",
".",
"join",
"(",
"[",
"color",
",",
"\"on\"",
",",
"back_color",
"]",
")",
")",
"elif",
"back_color",
"is",
"None",
":",
"return",
"getattr",
"(",
"term",
",",
"color",
")",
"elif",
"isinstance",
"(",
"color",
",",
"int",
")",
":",
"return",
"term",
".",
"on_color",
"(",
"color",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"Invalid type {} for color\"",
".",
"format",
"(",
"type",
"(",
"color",
")",
")",
")"
] | 35.758621 | 15.827586 |
def maximum(left, right):
"""Returns element-wise maximum of the input elements.
Both inputs can be Symbol or scalar number. Broadcasting is not supported.
Parameters
---------
left : Symbol or scalar
First symbol to be compared.
right : Symbol or scalar
Second symbol to be compared.
Returns
-------
Symbol or scalar
The element-wise maximum of the input symbols.
Examples
--------
>>> mx.sym.maximum(2, 3.5)
3.5
>>> x = mx.sym.Variable('x')
>>> y = mx.sym.Variable('y')
>>> z = mx.sym.maximum(x, 4)
>>> z.eval(x=mx.nd.array([3,5,2,10]))[0].asnumpy()
array([ 4., 5., 4., 10.], dtype=float32)
>>> z = mx.sym.maximum(x, y)
>>> z.eval(x=mx.nd.array([3,4]), y=mx.nd.array([10,2]))[0].asnumpy()
array([ 10., 4.], dtype=float32)
"""
if isinstance(left, Symbol) and isinstance(right, Symbol):
return _internal._Maximum(left, right)
if isinstance(left, Symbol) and isinstance(right, Number):
return _internal._MaximumScalar(left, scalar=right)
if isinstance(left, Number) and isinstance(right, Symbol):
return _internal._MaximumScalar(right, scalar=left)
if isinstance(left, Number) and isinstance(right, Number):
return left if left > right else right
else:
raise TypeError('types (%s, %s) not supported' % (str(type(left)), str(type(right))))
|
[
"def",
"maximum",
"(",
"left",
",",
"right",
")",
":",
"if",
"isinstance",
"(",
"left",
",",
"Symbol",
")",
"and",
"isinstance",
"(",
"right",
",",
"Symbol",
")",
":",
"return",
"_internal",
".",
"_Maximum",
"(",
"left",
",",
"right",
")",
"if",
"isinstance",
"(",
"left",
",",
"Symbol",
")",
"and",
"isinstance",
"(",
"right",
",",
"Number",
")",
":",
"return",
"_internal",
".",
"_MaximumScalar",
"(",
"left",
",",
"scalar",
"=",
"right",
")",
"if",
"isinstance",
"(",
"left",
",",
"Number",
")",
"and",
"isinstance",
"(",
"right",
",",
"Symbol",
")",
":",
"return",
"_internal",
".",
"_MaximumScalar",
"(",
"right",
",",
"scalar",
"=",
"left",
")",
"if",
"isinstance",
"(",
"left",
",",
"Number",
")",
"and",
"isinstance",
"(",
"right",
",",
"Number",
")",
":",
"return",
"left",
"if",
"left",
">",
"right",
"else",
"right",
"else",
":",
"raise",
"TypeError",
"(",
"'types (%s, %s) not supported'",
"%",
"(",
"str",
"(",
"type",
"(",
"left",
")",
")",
",",
"str",
"(",
"type",
"(",
"right",
")",
")",
")",
")"
] | 34.55 | 20.025 |
def dedent(text):
"""Equivalent of textwrap.dedent that ignores unindented first line.
This means it will still dedent strings like:
'''foo
is a bar
'''
For use in wrap_paragraphs.
"""
if text.startswith('\n'):
# text starts with blank line, don't ignore the first line
return textwrap.dedent(text)
# split first line
splits = text.split('\n',1)
if len(splits) == 1:
# only one line
return textwrap.dedent(text)
first, rest = splits
# dedent everything but the first line
rest = textwrap.dedent(rest)
return '\n'.join([first, rest])
|
[
"def",
"dedent",
"(",
"text",
")",
":",
"if",
"text",
".",
"startswith",
"(",
"'\\n'",
")",
":",
"# text starts with blank line, don't ignore the first line",
"return",
"textwrap",
".",
"dedent",
"(",
"text",
")",
"# split first line",
"splits",
"=",
"text",
".",
"split",
"(",
"'\\n'",
",",
"1",
")",
"if",
"len",
"(",
"splits",
")",
"==",
"1",
":",
"# only one line",
"return",
"textwrap",
".",
"dedent",
"(",
"text",
")",
"first",
",",
"rest",
"=",
"splits",
"# dedent everything but the first line",
"rest",
"=",
"textwrap",
".",
"dedent",
"(",
"rest",
")",
"return",
"'\\n'",
".",
"join",
"(",
"[",
"first",
",",
"rest",
"]",
")"
] | 24.2 | 18.72 |
def sandbox(cls, path):
'''Ensures path exists before yielding, cleans up after'''
# Ensure the path exists and is clean
try:
os.makedirs(path)
logger.debug('Making %s' % path)
except OSError:
if not os.path.isdir(path):
raise
finally:
cls.clean(path)
# Then yield, but make sure to clean up the directory afterwards
try:
yield
finally:
cls.clean(path)
|
[
"def",
"sandbox",
"(",
"cls",
",",
"path",
")",
":",
"# Ensure the path exists and is clean",
"try",
":",
"os",
".",
"makedirs",
"(",
"path",
")",
"logger",
".",
"debug",
"(",
"'Making %s'",
"%",
"path",
")",
"except",
"OSError",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"raise",
"finally",
":",
"cls",
".",
"clean",
"(",
"path",
")",
"# Then yield, but make sure to clean up the directory afterwards",
"try",
":",
"yield",
"finally",
":",
"cls",
".",
"clean",
"(",
"path",
")"
] | 30.5625 | 17.8125 |
def named_entity_labels(self):
"""The named entity labels without BIO prefixes."""
if not self.is_tagged(NAMED_ENTITIES):
self.tag_named_entities()
return [ne[LABEL] for ne in self[NAMED_ENTITIES]]
|
[
"def",
"named_entity_labels",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_tagged",
"(",
"NAMED_ENTITIES",
")",
":",
"self",
".",
"tag_named_entities",
"(",
")",
"return",
"[",
"ne",
"[",
"LABEL",
"]",
"for",
"ne",
"in",
"self",
"[",
"NAMED_ENTITIES",
"]",
"]"
] | 45.8 | 7.2 |
def definition(
self,
text,
definition):
"""*genarate a MMD definition*
**Key Arguments:**
- ``text`` -- the text to define
- ``definition`` -- the definition
**Return:**
- ``definition`` -- the MMD style definition
**Usage:**
To genarate a MMD definition:
.. code-block:: python
text = \"\"\"Pomaceous fruit of plants of the genus Malus in the family Rosaceae.
Also the makers of really great products.\"\"\"
definition = md.definition("Apple", text)
print definition
# OUTPUT:
# Apple
# : Pomaceous fruit of plants of the genus Malus in the family Rosaceae.
# Also the makers of really great products.
#
"""
text = text.strip()
definition = definition.strip()
regex = re.compile(r'\n(\S)')
definition = regex.sub("\n \g<1>", definition)
return "%(text)s\n: %(definition)s" % locals()
|
[
"def",
"definition",
"(",
"self",
",",
"text",
",",
"definition",
")",
":",
"text",
"=",
"text",
".",
"strip",
"(",
")",
"definition",
"=",
"definition",
".",
"strip",
"(",
")",
"regex",
"=",
"re",
".",
"compile",
"(",
"r'\\n(\\S)'",
")",
"definition",
"=",
"regex",
".",
"sub",
"(",
"\"\\n \\g<1>\"",
",",
"definition",
")",
"return",
"\"%(text)s\\n: %(definition)s\"",
"%",
"locals",
"(",
")"
] | 29.513514 | 22.243243 |
def save_image(self, image_file):
"""
Saves the image file to disk.
"""
self.ensure_pyplot()
command = 'plt.gcf().savefig("%s")'%image_file
#print 'SAVEFIG', command # dbg
self.process_input_line('bookmark ipy_thisdir', store_history=False)
self.process_input_line('cd -b ipy_savedir', store_history=False)
self.process_input_line(command, store_history=False)
self.process_input_line('cd -b ipy_thisdir', store_history=False)
self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
self.clear_cout()
|
[
"def",
"save_image",
"(",
"self",
",",
"image_file",
")",
":",
"self",
".",
"ensure_pyplot",
"(",
")",
"command",
"=",
"'plt.gcf().savefig(\"%s\")'",
"%",
"image_file",
"#print 'SAVEFIG', command # dbg",
"self",
".",
"process_input_line",
"(",
"'bookmark ipy_thisdir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"'cd -b ipy_savedir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"command",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"'cd -b ipy_thisdir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"'bookmark -d ipy_thisdir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"clear_cout",
"(",
")"
] | 46.230769 | 16.384615 |
def ensure_object_is_ndarray(item, title):
"""
Ensures that a given mapping matrix is a dense numpy array. Raises a
helpful TypeError if otherwise.
"""
assert isinstance(title, str)
if not isinstance(item, np.ndarray):
msg = "{} must be a np.ndarray. {} passed instead."
raise TypeError(msg.format(title, type(item)))
return None
|
[
"def",
"ensure_object_is_ndarray",
"(",
"item",
",",
"title",
")",
":",
"assert",
"isinstance",
"(",
"title",
",",
"str",
")",
"if",
"not",
"isinstance",
"(",
"item",
",",
"np",
".",
"ndarray",
")",
":",
"msg",
"=",
"\"{} must be a np.ndarray. {} passed instead.\"",
"raise",
"TypeError",
"(",
"msg",
".",
"format",
"(",
"title",
",",
"type",
"(",
"item",
")",
")",
")",
"return",
"None"
] | 30.333333 | 15.333333 |
def executions(self) -> List[Execution]:
"""
List of all executions from this session.
"""
return list(fill.execution for fill in self.wrapper.fills.values())
|
[
"def",
"executions",
"(",
"self",
")",
"->",
"List",
"[",
"Execution",
"]",
":",
"return",
"list",
"(",
"fill",
".",
"execution",
"for",
"fill",
"in",
"self",
".",
"wrapper",
".",
"fills",
".",
"values",
"(",
")",
")"
] | 37.2 | 8.8 |
def render_output_json(self, graph_data, output_file=None):
"""Write model data to file or stdout in JSON format."""
if output_file:
with open(output_file, 'wt') as json_output_f:
json.dump(graph_data, json_output_f)
else:
self.stdout.write(json.dumps(graph_data))
|
[
"def",
"render_output_json",
"(",
"self",
",",
"graph_data",
",",
"output_file",
"=",
"None",
")",
":",
"if",
"output_file",
":",
"with",
"open",
"(",
"output_file",
",",
"'wt'",
")",
"as",
"json_output_f",
":",
"json",
".",
"dump",
"(",
"graph_data",
",",
"json_output_f",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"graph_data",
")",
")"
] | 46 | 15.142857 |
def start_daemon_thread(*args, **kwargs):
"""Starts a thread and marks it as a daemon thread."""
thread = threading.Thread(*args, **kwargs)
thread.daemon = True
thread.start()
return thread
|
[
"def",
"start_daemon_thread",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"thread",
".",
"daemon",
"=",
"True",
"thread",
".",
"start",
"(",
")",
"return",
"thread"
] | 34 | 11.333333 |
def validate_version(err, value, source):
'Tests a manifest version number'
field_name = '<em:version>' if source == 'install.rdf' else 'version'
err.metadata['version'] = value
# May not be longer than 32 characters
if len(value) > 32:
err.error(
('metadata_helpers', '_test_version', 'too_long'),
'The value of {name} is too long'.format(name=field_name),
'Values supplied for {name} in the {source} file must be 32 '
'characters or less.'.format(name=field_name, source=source),
source)
# Must be a valid version number.
if not VERSION_PATTERN.match(value):
err.error(
('metadata_helpers', '_test_version', 'invalid_format'),
'The value of {name} is invalid'.format(name=field_name),
'The values supplied for version in the {source} file is not a '
'valid version string. It can only contain letters, numbers, and '
'the symbols +*.-_.'.format(name=field_name, source=source),
source)
|
[
"def",
"validate_version",
"(",
"err",
",",
"value",
",",
"source",
")",
":",
"field_name",
"=",
"'<em:version>'",
"if",
"source",
"==",
"'install.rdf'",
"else",
"'version'",
"err",
".",
"metadata",
"[",
"'version'",
"]",
"=",
"value",
"# May not be longer than 32 characters",
"if",
"len",
"(",
"value",
")",
">",
"32",
":",
"err",
".",
"error",
"(",
"(",
"'metadata_helpers'",
",",
"'_test_version'",
",",
"'too_long'",
")",
",",
"'The value of {name} is too long'",
".",
"format",
"(",
"name",
"=",
"field_name",
")",
",",
"'Values supplied for {name} in the {source} file must be 32 '",
"'characters or less.'",
".",
"format",
"(",
"name",
"=",
"field_name",
",",
"source",
"=",
"source",
")",
",",
"source",
")",
"# Must be a valid version number.",
"if",
"not",
"VERSION_PATTERN",
".",
"match",
"(",
"value",
")",
":",
"err",
".",
"error",
"(",
"(",
"'metadata_helpers'",
",",
"'_test_version'",
",",
"'invalid_format'",
")",
",",
"'The value of {name} is invalid'",
".",
"format",
"(",
"name",
"=",
"field_name",
")",
",",
"'The values supplied for version in the {source} file is not a '",
"'valid version string. It can only contain letters, numbers, and '",
"'the symbols +*.-_.'",
".",
"format",
"(",
"name",
"=",
"field_name",
",",
"source",
"=",
"source",
")",
",",
"source",
")"
] | 40.115385 | 24.269231 |
def _findlinestarts(code):
"""Find the offsets in a byte code which are start of lines in the source.
Generate pairs (offset, lineno) as described in Python/compile.c.
Arguments:
code: code object.
Yields:
Address and line number pairs.
"""
byte_increments = [ord(c) for c in code.co_lnotab[0::2]]
line_increments = [ord(c) for c in code.co_lnotab[1::2]]
lastlineno = None
lineno = code.co_firstlineno
addr = 0
for byte_incr, line_incr in zip(byte_increments, line_increments):
if byte_incr:
if lineno != lastlineno:
yield (addr, lineno)
lastlineno = lineno
addr += byte_incr
lineno += line_incr
if lineno != lastlineno:
yield (addr, lineno)
|
[
"def",
"_findlinestarts",
"(",
"code",
")",
":",
"byte_increments",
"=",
"[",
"ord",
"(",
"c",
")",
"for",
"c",
"in",
"code",
".",
"co_lnotab",
"[",
"0",
":",
":",
"2",
"]",
"]",
"line_increments",
"=",
"[",
"ord",
"(",
"c",
")",
"for",
"c",
"in",
"code",
".",
"co_lnotab",
"[",
"1",
":",
":",
"2",
"]",
"]",
"lastlineno",
"=",
"None",
"lineno",
"=",
"code",
".",
"co_firstlineno",
"addr",
"=",
"0",
"for",
"byte_incr",
",",
"line_incr",
"in",
"zip",
"(",
"byte_increments",
",",
"line_increments",
")",
":",
"if",
"byte_incr",
":",
"if",
"lineno",
"!=",
"lastlineno",
":",
"yield",
"(",
"addr",
",",
"lineno",
")",
"lastlineno",
"=",
"lineno",
"addr",
"+=",
"byte_incr",
"lineno",
"+=",
"line_incr",
"if",
"lineno",
"!=",
"lastlineno",
":",
"yield",
"(",
"addr",
",",
"lineno",
")"
] | 26.615385 | 20.423077 |
def calibrate(self,
dataset_id,
pre_launch_coeffs=False,
calib_coeffs=None):
"""Calibrate the data
"""
tic = datetime.now()
if calib_coeffs is None:
calib_coeffs = {}
units = {'reflectance': '%',
'brightness_temperature': 'K',
'counts': '',
'radiance': 'W*m-2*sr-1*cm ?'}
if dataset_id.name in ("3a", "3b") and self._is3b is None:
# Is it 3a or 3b:
is3b = np.expand_dims(
np.bitwise_and(
np.right_shift(self._data['scnlinbit'], 0), 1) == 1, 1)
self._is3b = np.repeat(is3b,
self._data['hrpt'][0].shape[0], axis=1)
try:
vis_idx = ['1', '2', '3a'].index(dataset_id.name)
ir_idx = None
except ValueError:
vis_idx = None
ir_idx = ['3b', '4', '5'].index(dataset_id.name)
if vis_idx is not None:
coeffs = calib_coeffs.get('ch' + dataset_id.name)
ds = create_xarray(
_vis_calibrate(self._data,
vis_idx,
dataset_id.calibration,
pre_launch_coeffs,
coeffs,
mask=(dataset_id.name == '3a' and self._is3b)))
else:
ds = create_xarray(
_ir_calibrate(self._header,
self._data,
ir_idx,
dataset_id.calibration,
mask=(dataset_id.name == '3b' and
np.logical_not(self._is3b))))
if dataset_id.name == '3a' and np.all(np.isnan(ds)):
raise ValueError("Empty dataset for channel 3A")
if dataset_id.name == '3b' and np.all(np.isnan(ds)):
raise ValueError("Empty dataset for channel 3B")
ds.attrs['units'] = units[dataset_id.calibration]
ds.attrs.update(dataset_id._asdict())
logger.debug("Calibration time %s", str(datetime.now() - tic))
return ds
|
[
"def",
"calibrate",
"(",
"self",
",",
"dataset_id",
",",
"pre_launch_coeffs",
"=",
"False",
",",
"calib_coeffs",
"=",
"None",
")",
":",
"tic",
"=",
"datetime",
".",
"now",
"(",
")",
"if",
"calib_coeffs",
"is",
"None",
":",
"calib_coeffs",
"=",
"{",
"}",
"units",
"=",
"{",
"'reflectance'",
":",
"'%'",
",",
"'brightness_temperature'",
":",
"'K'",
",",
"'counts'",
":",
"''",
",",
"'radiance'",
":",
"'W*m-2*sr-1*cm ?'",
"}",
"if",
"dataset_id",
".",
"name",
"in",
"(",
"\"3a\"",
",",
"\"3b\"",
")",
"and",
"self",
".",
"_is3b",
"is",
"None",
":",
"# Is it 3a or 3b:",
"is3b",
"=",
"np",
".",
"expand_dims",
"(",
"np",
".",
"bitwise_and",
"(",
"np",
".",
"right_shift",
"(",
"self",
".",
"_data",
"[",
"'scnlinbit'",
"]",
",",
"0",
")",
",",
"1",
")",
"==",
"1",
",",
"1",
")",
"self",
".",
"_is3b",
"=",
"np",
".",
"repeat",
"(",
"is3b",
",",
"self",
".",
"_data",
"[",
"'hrpt'",
"]",
"[",
"0",
"]",
".",
"shape",
"[",
"0",
"]",
",",
"axis",
"=",
"1",
")",
"try",
":",
"vis_idx",
"=",
"[",
"'1'",
",",
"'2'",
",",
"'3a'",
"]",
".",
"index",
"(",
"dataset_id",
".",
"name",
")",
"ir_idx",
"=",
"None",
"except",
"ValueError",
":",
"vis_idx",
"=",
"None",
"ir_idx",
"=",
"[",
"'3b'",
",",
"'4'",
",",
"'5'",
"]",
".",
"index",
"(",
"dataset_id",
".",
"name",
")",
"if",
"vis_idx",
"is",
"not",
"None",
":",
"coeffs",
"=",
"calib_coeffs",
".",
"get",
"(",
"'ch'",
"+",
"dataset_id",
".",
"name",
")",
"ds",
"=",
"create_xarray",
"(",
"_vis_calibrate",
"(",
"self",
".",
"_data",
",",
"vis_idx",
",",
"dataset_id",
".",
"calibration",
",",
"pre_launch_coeffs",
",",
"coeffs",
",",
"mask",
"=",
"(",
"dataset_id",
".",
"name",
"==",
"'3a'",
"and",
"self",
".",
"_is3b",
")",
")",
")",
"else",
":",
"ds",
"=",
"create_xarray",
"(",
"_ir_calibrate",
"(",
"self",
".",
"_header",
",",
"self",
".",
"_data",
",",
"ir_idx",
",",
"dataset_id",
".",
"calibration",
",",
"mask",
"=",
"(",
"dataset_id",
".",
"name",
"==",
"'3b'",
"and",
"np",
".",
"logical_not",
"(",
"self",
".",
"_is3b",
")",
")",
")",
")",
"if",
"dataset_id",
".",
"name",
"==",
"'3a'",
"and",
"np",
".",
"all",
"(",
"np",
".",
"isnan",
"(",
"ds",
")",
")",
":",
"raise",
"ValueError",
"(",
"\"Empty dataset for channel 3A\"",
")",
"if",
"dataset_id",
".",
"name",
"==",
"'3b'",
"and",
"np",
".",
"all",
"(",
"np",
".",
"isnan",
"(",
"ds",
")",
")",
":",
"raise",
"ValueError",
"(",
"\"Empty dataset for channel 3B\"",
")",
"ds",
".",
"attrs",
"[",
"'units'",
"]",
"=",
"units",
"[",
"dataset_id",
".",
"calibration",
"]",
"ds",
".",
"attrs",
".",
"update",
"(",
"dataset_id",
".",
"_asdict",
"(",
")",
")",
"logger",
".",
"debug",
"(",
"\"Calibration time %s\"",
",",
"str",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"tic",
")",
")",
"return",
"ds"
] | 36.216667 | 17.55 |
def get_extent(array, units, kpc_per_arcsec, xticks_manual, yticks_manual):
"""Get the extent of the dimensions of the array in the units of the figure (e.g. arc-seconds or kpc).
This is used to set the extent of the array and thus the y / x axis limits.
Parameters
-----------
array : data.array.scaled_array.ScaledArray
The 2D array of data which is plotted.
units : str
The units of the y / x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc').
kpc_per_arcsec : float
The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc.
xticks_manual : [] or None
If input, the xticks do not use the array's default xticks but instead overwrite them as these values.
yticks_manual : [] or None
If input, the yticks do not use the array's default yticks but instead overwrite them as these values.
"""
if xticks_manual is not None and yticks_manual is not None:
return np.asarray([xticks_manual[0], xticks_manual[3], yticks_manual[0], yticks_manual[3]])
if units in 'pixels':
return np.asarray([0, array.shape[1], 0, array.shape[0]])
elif units in 'arcsec' or kpc_per_arcsec is None:
return np.asarray([array.arc_second_minima[1], array.arc_second_maxima[1],
array.arc_second_minima[0], array.arc_second_maxima[0]])
elif units in 'kpc':
return list(map(lambda tick : tick*kpc_per_arcsec,
np.asarray([array.arc_second_minima[1], array.arc_second_maxima[1],
array.arc_second_minima[0], array.arc_second_maxima[0]])))
else:
raise exc.PlottingException('The units supplied to the plotted are not a valid string (must be pixels | '
'arcsec | kpc)')
|
[
"def",
"get_extent",
"(",
"array",
",",
"units",
",",
"kpc_per_arcsec",
",",
"xticks_manual",
",",
"yticks_manual",
")",
":",
"if",
"xticks_manual",
"is",
"not",
"None",
"and",
"yticks_manual",
"is",
"not",
"None",
":",
"return",
"np",
".",
"asarray",
"(",
"[",
"xticks_manual",
"[",
"0",
"]",
",",
"xticks_manual",
"[",
"3",
"]",
",",
"yticks_manual",
"[",
"0",
"]",
",",
"yticks_manual",
"[",
"3",
"]",
"]",
")",
"if",
"units",
"in",
"'pixels'",
":",
"return",
"np",
".",
"asarray",
"(",
"[",
"0",
",",
"array",
".",
"shape",
"[",
"1",
"]",
",",
"0",
",",
"array",
".",
"shape",
"[",
"0",
"]",
"]",
")",
"elif",
"units",
"in",
"'arcsec'",
"or",
"kpc_per_arcsec",
"is",
"None",
":",
"return",
"np",
".",
"asarray",
"(",
"[",
"array",
".",
"arc_second_minima",
"[",
"1",
"]",
",",
"array",
".",
"arc_second_maxima",
"[",
"1",
"]",
",",
"array",
".",
"arc_second_minima",
"[",
"0",
"]",
",",
"array",
".",
"arc_second_maxima",
"[",
"0",
"]",
"]",
")",
"elif",
"units",
"in",
"'kpc'",
":",
"return",
"list",
"(",
"map",
"(",
"lambda",
"tick",
":",
"tick",
"*",
"kpc_per_arcsec",
",",
"np",
".",
"asarray",
"(",
"[",
"array",
".",
"arc_second_minima",
"[",
"1",
"]",
",",
"array",
".",
"arc_second_maxima",
"[",
"1",
"]",
",",
"array",
".",
"arc_second_minima",
"[",
"0",
"]",
",",
"array",
".",
"arc_second_maxima",
"[",
"0",
"]",
"]",
")",
")",
")",
"else",
":",
"raise",
"exc",
".",
"PlottingException",
"(",
"'The units supplied to the plotted are not a valid string (must be pixels | '",
"'arcsec | kpc)'",
")"
] | 55.272727 | 31.848485 |
def _get_pwm_values(self, brightness=None, color=None):
"""
Get the pwm values for a specific state of the led.
If a state argument is omitted, current value is used.
:param brightness: The brightness of the state.
:param color: The color of the state.
:return: The pwm values.
"""
if brightness is None:
brightness = self.brightness
if color is None:
color = self.color
return [(x / 255) * brightness for x in self._rgb_to_rgbw(color)]
|
[
"def",
"_get_pwm_values",
"(",
"self",
",",
"brightness",
"=",
"None",
",",
"color",
"=",
"None",
")",
":",
"if",
"brightness",
"is",
"None",
":",
"brightness",
"=",
"self",
".",
"brightness",
"if",
"color",
"is",
"None",
":",
"color",
"=",
"self",
".",
"color",
"return",
"[",
"(",
"x",
"/",
"255",
")",
"*",
"brightness",
"for",
"x",
"in",
"self",
".",
"_rgb_to_rgbw",
"(",
"color",
")",
"]"
] | 33 | 17 |
def is_parent_of_repository(self, id_, repository_id):
"""Tests if an ``Id`` is a direct parent of a repository.
arg: id (osid.id.Id): an ``Id``
arg: repository_id (osid.id.Id): the ``Id`` of a repository
return: (boolean) - ``true`` if this ``id`` is a parent of
``repository_id,`` ``false`` otherwise
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``id`` or ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.BinHierarchySession.is_parent_of_bin
if self._catalog_session is not None:
return self._catalog_session.is_parent_of_catalog(id_=id_, catalog_id=repository_id)
return self._hierarchy_session.is_parent(id_=repository_id, parent_id=id_)
|
[
"def",
"is_parent_of_repository",
"(",
"self",
",",
"id_",
",",
"repository_id",
")",
":",
"# Implemented from template for",
"# osid.resource.BinHierarchySession.is_parent_of_bin",
"if",
"self",
".",
"_catalog_session",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_catalog_session",
".",
"is_parent_of_catalog",
"(",
"id_",
"=",
"id_",
",",
"catalog_id",
"=",
"repository_id",
")",
"return",
"self",
".",
"_hierarchy_session",
".",
"is_parent",
"(",
"id_",
"=",
"repository_id",
",",
"parent_id",
"=",
"id_",
")"
] | 53.2 | 21.5 |
def add_cookies_to_web_driver(driver, cookies):
"""
Sets cookies in an existing WebDriver session.
"""
for cookie in cookies:
driver.add_cookie(convert_cookie_to_dict(cookie))
return driver
|
[
"def",
"add_cookies_to_web_driver",
"(",
"driver",
",",
"cookies",
")",
":",
"for",
"cookie",
"in",
"cookies",
":",
"driver",
".",
"add_cookie",
"(",
"convert_cookie_to_dict",
"(",
"cookie",
")",
")",
"return",
"driver"
] | 30.142857 | 10.142857 |
def to_float(b:Collection[Tensor])->Collection[Tensor]:
"Recursively map lists of tensors in `b ` to FP16."
if is_listy(b): return [to_float(o) for o in b]
return b.float() if b.dtype not in [torch.int64, torch.int32, torch.int16] else b
|
[
"def",
"to_float",
"(",
"b",
":",
"Collection",
"[",
"Tensor",
"]",
")",
"->",
"Collection",
"[",
"Tensor",
"]",
":",
"if",
"is_listy",
"(",
"b",
")",
":",
"return",
"[",
"to_float",
"(",
"o",
")",
"for",
"o",
"in",
"b",
"]",
"return",
"b",
".",
"float",
"(",
")",
"if",
"b",
".",
"dtype",
"not",
"in",
"[",
"torch",
".",
"int64",
",",
"torch",
".",
"int32",
",",
"torch",
".",
"int16",
"]",
"else",
"b"
] | 61.5 | 21.5 |
def dispatch_to_series(left, right, func, str_rep=None, axis=None):
"""
Evaluate the frame operation func(left, right) by evaluating
column-by-column, dispatching to the Series implementation.
Parameters
----------
left : DataFrame
right : scalar or DataFrame
func : arithmetic or comparison operator
str_rep : str or None, default None
axis : {None, 0, 1, "index", "columns"}
Returns
-------
DataFrame
"""
# Note: we use iloc to access columns for compat with cases
# with non-unique columns.
import pandas.core.computation.expressions as expressions
right = lib.item_from_zerodim(right)
if lib.is_scalar(right) or np.ndim(right) == 0:
def column_op(a, b):
return {i: func(a.iloc[:, i], b)
for i in range(len(a.columns))}
elif isinstance(right, ABCDataFrame):
assert right._indexed_same(left)
def column_op(a, b):
return {i: func(a.iloc[:, i], b.iloc[:, i])
for i in range(len(a.columns))}
elif isinstance(right, ABCSeries) and axis == "columns":
# We only get here if called via left._combine_match_columns,
# in which case we specifically want to operate row-by-row
assert right.index.equals(left.columns)
def column_op(a, b):
return {i: func(a.iloc[:, i], b.iloc[i])
for i in range(len(a.columns))}
elif isinstance(right, ABCSeries):
assert right.index.equals(left.index) # Handle other cases later
def column_op(a, b):
return {i: func(a.iloc[:, i], b)
for i in range(len(a.columns))}
else:
# Remaining cases have less-obvious dispatch rules
raise NotImplementedError(right)
new_data = expressions.evaluate(column_op, str_rep, left, right)
result = left._constructor(new_data, index=left.index, copy=False)
# Pin columns instead of passing to constructor for compat with
# non-unique columns case
result.columns = left.columns
return result
|
[
"def",
"dispatch_to_series",
"(",
"left",
",",
"right",
",",
"func",
",",
"str_rep",
"=",
"None",
",",
"axis",
"=",
"None",
")",
":",
"# Note: we use iloc to access columns for compat with cases",
"# with non-unique columns.",
"import",
"pandas",
".",
"core",
".",
"computation",
".",
"expressions",
"as",
"expressions",
"right",
"=",
"lib",
".",
"item_from_zerodim",
"(",
"right",
")",
"if",
"lib",
".",
"is_scalar",
"(",
"right",
")",
"or",
"np",
".",
"ndim",
"(",
"right",
")",
"==",
"0",
":",
"def",
"column_op",
"(",
"a",
",",
"b",
")",
":",
"return",
"{",
"i",
":",
"func",
"(",
"a",
".",
"iloc",
"[",
":",
",",
"i",
"]",
",",
"b",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"a",
".",
"columns",
")",
")",
"}",
"elif",
"isinstance",
"(",
"right",
",",
"ABCDataFrame",
")",
":",
"assert",
"right",
".",
"_indexed_same",
"(",
"left",
")",
"def",
"column_op",
"(",
"a",
",",
"b",
")",
":",
"return",
"{",
"i",
":",
"func",
"(",
"a",
".",
"iloc",
"[",
":",
",",
"i",
"]",
",",
"b",
".",
"iloc",
"[",
":",
",",
"i",
"]",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"a",
".",
"columns",
")",
")",
"}",
"elif",
"isinstance",
"(",
"right",
",",
"ABCSeries",
")",
"and",
"axis",
"==",
"\"columns\"",
":",
"# We only get here if called via left._combine_match_columns,",
"# in which case we specifically want to operate row-by-row",
"assert",
"right",
".",
"index",
".",
"equals",
"(",
"left",
".",
"columns",
")",
"def",
"column_op",
"(",
"a",
",",
"b",
")",
":",
"return",
"{",
"i",
":",
"func",
"(",
"a",
".",
"iloc",
"[",
":",
",",
"i",
"]",
",",
"b",
".",
"iloc",
"[",
"i",
"]",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"a",
".",
"columns",
")",
")",
"}",
"elif",
"isinstance",
"(",
"right",
",",
"ABCSeries",
")",
":",
"assert",
"right",
".",
"index",
".",
"equals",
"(",
"left",
".",
"index",
")",
"# Handle other cases later",
"def",
"column_op",
"(",
"a",
",",
"b",
")",
":",
"return",
"{",
"i",
":",
"func",
"(",
"a",
".",
"iloc",
"[",
":",
",",
"i",
"]",
",",
"b",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"a",
".",
"columns",
")",
")",
"}",
"else",
":",
"# Remaining cases have less-obvious dispatch rules",
"raise",
"NotImplementedError",
"(",
"right",
")",
"new_data",
"=",
"expressions",
".",
"evaluate",
"(",
"column_op",
",",
"str_rep",
",",
"left",
",",
"right",
")",
"result",
"=",
"left",
".",
"_constructor",
"(",
"new_data",
",",
"index",
"=",
"left",
".",
"index",
",",
"copy",
"=",
"False",
")",
"# Pin columns instead of passing to constructor for compat with",
"# non-unique columns case",
"result",
".",
"columns",
"=",
"left",
".",
"columns",
"return",
"result"
] | 32.822581 | 20.112903 |
def add_urlhelpers(event):
"""
Add helpers to the template engine.
"""
event['static_url'] = lambda x: static_path(x, event['request'])
event['route_url'] = lambda name, *args, **kwargs: \
route_path(name, event['request'], *args, **kwargs)
event['parse_rest'] = parse_rest
event['has_permission'] = event['request'].has_permission
|
[
"def",
"add_urlhelpers",
"(",
"event",
")",
":",
"event",
"[",
"'static_url'",
"]",
"=",
"lambda",
"x",
":",
"static_path",
"(",
"x",
",",
"event",
"[",
"'request'",
"]",
")",
"event",
"[",
"'route_url'",
"]",
"=",
"lambda",
"name",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
":",
"route_path",
"(",
"name",
",",
"event",
"[",
"'request'",
"]",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"event",
"[",
"'parse_rest'",
"]",
"=",
"parse_rest",
"event",
"[",
"'has_permission'",
"]",
"=",
"event",
"[",
"'request'",
"]",
".",
"has_permission"
] | 39.888889 | 11.444444 |
def _create_link(self, act_node, name, instance):
"""Creates a link and checks if names are appropriate
"""
act_node._links[name] = instance
act_node._children[name] = instance
full_name = instance.v_full_name
if full_name not in self._root_instance._linked_by:
self._root_instance._linked_by[full_name] = {}
linking = self._root_instance._linked_by[full_name]
if act_node.v_full_name not in linking:
linking[act_node.v_full_name] = (act_node, set())
linking[act_node.v_full_name][1].add(name)
if name not in self._links_count:
self._links_count[name] = 0
self._links_count[name] = self._links_count[name] + 1
self._logger.debug('Added link `%s` under `%s` pointing '
'to `%s`.' % (name, act_node.v_full_name,
instance.v_full_name))
return instance
|
[
"def",
"_create_link",
"(",
"self",
",",
"act_node",
",",
"name",
",",
"instance",
")",
":",
"act_node",
".",
"_links",
"[",
"name",
"]",
"=",
"instance",
"act_node",
".",
"_children",
"[",
"name",
"]",
"=",
"instance",
"full_name",
"=",
"instance",
".",
"v_full_name",
"if",
"full_name",
"not",
"in",
"self",
".",
"_root_instance",
".",
"_linked_by",
":",
"self",
".",
"_root_instance",
".",
"_linked_by",
"[",
"full_name",
"]",
"=",
"{",
"}",
"linking",
"=",
"self",
".",
"_root_instance",
".",
"_linked_by",
"[",
"full_name",
"]",
"if",
"act_node",
".",
"v_full_name",
"not",
"in",
"linking",
":",
"linking",
"[",
"act_node",
".",
"v_full_name",
"]",
"=",
"(",
"act_node",
",",
"set",
"(",
")",
")",
"linking",
"[",
"act_node",
".",
"v_full_name",
"]",
"[",
"1",
"]",
".",
"add",
"(",
"name",
")",
"if",
"name",
"not",
"in",
"self",
".",
"_links_count",
":",
"self",
".",
"_links_count",
"[",
"name",
"]",
"=",
"0",
"self",
".",
"_links_count",
"[",
"name",
"]",
"=",
"self",
".",
"_links_count",
"[",
"name",
"]",
"+",
"1",
"self",
".",
"_logger",
".",
"debug",
"(",
"'Added link `%s` under `%s` pointing '",
"'to `%s`.'",
"%",
"(",
"name",
",",
"act_node",
".",
"v_full_name",
",",
"instance",
".",
"v_full_name",
")",
")",
"return",
"instance"
] | 40.782609 | 16.608696 |
def create_token_role(self, role,
allowed_policies=None, disallowed_policies=None,
orphan=None, period=None, renewable=None,
path_suffix=None, explicit_max_ttl=None):
"""POST /auth/token/roles/<role>
:param role:
:type role:
:param allowed_policies:
:type allowed_policies:
:param disallowed_policies:
:type disallowed_policies:
:param orphan:
:type orphan:
:param period:
:type period:
:param renewable:
:type renewable:
:param path_suffix:
:type path_suffix:
:param explicit_max_ttl:
:type explicit_max_ttl:
:return:
:rtype:
"""
params = {
'allowed_policies': allowed_policies,
'disallowed_policies': disallowed_policies,
'orphan': orphan,
'period': period,
'renewable': renewable,
'path_suffix': path_suffix,
'explicit_max_ttl': explicit_max_ttl
}
return self._adapter.post('/v1/auth/token/roles/{0}'.format(role), json=params)
|
[
"def",
"create_token_role",
"(",
"self",
",",
"role",
",",
"allowed_policies",
"=",
"None",
",",
"disallowed_policies",
"=",
"None",
",",
"orphan",
"=",
"None",
",",
"period",
"=",
"None",
",",
"renewable",
"=",
"None",
",",
"path_suffix",
"=",
"None",
",",
"explicit_max_ttl",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'allowed_policies'",
":",
"allowed_policies",
",",
"'disallowed_policies'",
":",
"disallowed_policies",
",",
"'orphan'",
":",
"orphan",
",",
"'period'",
":",
"period",
",",
"'renewable'",
":",
"renewable",
",",
"'path_suffix'",
":",
"path_suffix",
",",
"'explicit_max_ttl'",
":",
"explicit_max_ttl",
"}",
"return",
"self",
".",
"_adapter",
".",
"post",
"(",
"'/v1/auth/token/roles/{0}'",
".",
"format",
"(",
"role",
")",
",",
"json",
"=",
"params",
")"
] | 32.657143 | 16.057143 |
def reload(self):
''' reloads this object so if it was updated in the database it now
contains the new values'''
key = self.key()
redis = type(self).get_redis()
if not redis.exists(key):
raise ModelNotFoundError('This object has been deleted')
data = debyte_hash(redis.hgetall(key))
for fieldname, field in self.proxy:
value = field.recover(data, redis)
setattr(
self,
fieldname,
value
)
return self
|
[
"def",
"reload",
"(",
"self",
")",
":",
"key",
"=",
"self",
".",
"key",
"(",
")",
"redis",
"=",
"type",
"(",
"self",
")",
".",
"get_redis",
"(",
")",
"if",
"not",
"redis",
".",
"exists",
"(",
"key",
")",
":",
"raise",
"ModelNotFoundError",
"(",
"'This object has been deleted'",
")",
"data",
"=",
"debyte_hash",
"(",
"redis",
".",
"hgetall",
"(",
"key",
")",
")",
"for",
"fieldname",
",",
"field",
"in",
"self",
".",
"proxy",
":",
"value",
"=",
"field",
".",
"recover",
"(",
"data",
",",
"redis",
")",
"setattr",
"(",
"self",
",",
"fieldname",
",",
"value",
")",
"return",
"self"
] | 25.904762 | 21.52381 |
def configs_for_reader(reader=None, ppp_config_dir=None):
"""Generator of reader configuration files for one or more readers
Args:
reader (Optional[str]): Yield configs only for this reader
ppp_config_dir (Optional[str]): Additional configuration directory
to search for reader configuration files.
Returns: Generator of lists of configuration files
"""
search_paths = (ppp_config_dir,) if ppp_config_dir else tuple()
if reader is not None:
if not isinstance(reader, (list, tuple)):
reader = [reader]
# check for old reader names
new_readers = []
for reader_name in reader:
if reader_name.endswith('.yaml') or reader_name not in OLD_READER_NAMES:
new_readers.append(reader_name)
continue
new_name = OLD_READER_NAMES[reader_name]
# Satpy 0.11 only displays a warning
# Satpy 0.13 will raise an exception
raise ValueError("Reader name '{}' has been deprecated, use '{}' instead.".format(reader_name, new_name))
# Satpy 0.15 or 1.0, remove exception and mapping
reader = new_readers
# given a config filename or reader name
config_files = [r if r.endswith('.yaml') else r + '.yaml' for r in reader]
else:
reader_configs = glob_config(os.path.join('readers', '*.yaml'),
*search_paths)
config_files = set(reader_configs)
for config_file in config_files:
config_basename = os.path.basename(config_file)
reader_configs = config_search_paths(
os.path.join("readers", config_basename), *search_paths)
if not reader_configs:
# either the reader they asked for does not exist
# or satpy is improperly configured and can't find its own readers
raise ValueError("No reader(s) named: {}".format(reader))
yield reader_configs
|
[
"def",
"configs_for_reader",
"(",
"reader",
"=",
"None",
",",
"ppp_config_dir",
"=",
"None",
")",
":",
"search_paths",
"=",
"(",
"ppp_config_dir",
",",
")",
"if",
"ppp_config_dir",
"else",
"tuple",
"(",
")",
"if",
"reader",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"reader",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"reader",
"=",
"[",
"reader",
"]",
"# check for old reader names",
"new_readers",
"=",
"[",
"]",
"for",
"reader_name",
"in",
"reader",
":",
"if",
"reader_name",
".",
"endswith",
"(",
"'.yaml'",
")",
"or",
"reader_name",
"not",
"in",
"OLD_READER_NAMES",
":",
"new_readers",
".",
"append",
"(",
"reader_name",
")",
"continue",
"new_name",
"=",
"OLD_READER_NAMES",
"[",
"reader_name",
"]",
"# Satpy 0.11 only displays a warning",
"# Satpy 0.13 will raise an exception",
"raise",
"ValueError",
"(",
"\"Reader name '{}' has been deprecated, use '{}' instead.\"",
".",
"format",
"(",
"reader_name",
",",
"new_name",
")",
")",
"# Satpy 0.15 or 1.0, remove exception and mapping",
"reader",
"=",
"new_readers",
"# given a config filename or reader name",
"config_files",
"=",
"[",
"r",
"if",
"r",
".",
"endswith",
"(",
"'.yaml'",
")",
"else",
"r",
"+",
"'.yaml'",
"for",
"r",
"in",
"reader",
"]",
"else",
":",
"reader_configs",
"=",
"glob_config",
"(",
"os",
".",
"path",
".",
"join",
"(",
"'readers'",
",",
"'*.yaml'",
")",
",",
"*",
"search_paths",
")",
"config_files",
"=",
"set",
"(",
"reader_configs",
")",
"for",
"config_file",
"in",
"config_files",
":",
"config_basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"config_file",
")",
"reader_configs",
"=",
"config_search_paths",
"(",
"os",
".",
"path",
".",
"join",
"(",
"\"readers\"",
",",
"config_basename",
")",
",",
"*",
"search_paths",
")",
"if",
"not",
"reader_configs",
":",
"# either the reader they asked for does not exist",
"# or satpy is improperly configured and can't find its own readers",
"raise",
"ValueError",
"(",
"\"No reader(s) named: {}\"",
".",
"format",
"(",
"reader",
")",
")",
"yield",
"reader_configs"
] | 41.212766 | 22 |
def substitute_variables(cls, configuration, value, ref):
"""
Substitute variables in `value` from `configuration` where any path reference is relative to
`ref`.
Parameters
----------
configuration : dict
configuration (required to resolve intra-document references)
value :
value to resolve substitutions for
ref : str
path to `value` in the `configuration`
Returns
-------
value :
value after substitution
"""
if isinstance(value, str):
# Substitute all intra-document references
while True:
match = cls.REF_PATTERN.search(value)
if match is None:
break
path = os.path.join(os.path.dirname(ref), match.group('path'))
try:
value = value.replace(
match.group(0), str(util.get_value(configuration, path)))
except KeyError:
raise KeyError(path)
# Substitute all variable references
while True:
match = cls.VAR_PATTERN.search(value)
if match is None:
break
value = value.replace(
match.group(0),
str(util.get_value(cls.VARIABLES, match.group('path'), '/')))
return value
|
[
"def",
"substitute_variables",
"(",
"cls",
",",
"configuration",
",",
"value",
",",
"ref",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"# Substitute all intra-document references",
"while",
"True",
":",
"match",
"=",
"cls",
".",
"REF_PATTERN",
".",
"search",
"(",
"value",
")",
"if",
"match",
"is",
"None",
":",
"break",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"ref",
")",
",",
"match",
".",
"group",
"(",
"'path'",
")",
")",
"try",
":",
"value",
"=",
"value",
".",
"replace",
"(",
"match",
".",
"group",
"(",
"0",
")",
",",
"str",
"(",
"util",
".",
"get_value",
"(",
"configuration",
",",
"path",
")",
")",
")",
"except",
"KeyError",
":",
"raise",
"KeyError",
"(",
"path",
")",
"# Substitute all variable references",
"while",
"True",
":",
"match",
"=",
"cls",
".",
"VAR_PATTERN",
".",
"search",
"(",
"value",
")",
"if",
"match",
"is",
"None",
":",
"break",
"value",
"=",
"value",
".",
"replace",
"(",
"match",
".",
"group",
"(",
"0",
")",
",",
"str",
"(",
"util",
".",
"get_value",
"(",
"cls",
".",
"VARIABLES",
",",
"match",
".",
"group",
"(",
"'path'",
")",
",",
"'/'",
")",
")",
")",
"return",
"value"
] | 34.390244 | 18.634146 |
def calc_significand(prefix, dpd_bits, num_bits):
"""
prefix: High bits integer value
dpd_bits: dpd encoded bits
num_bits: bit length of dpd_bits
"""
# https://en.wikipedia.org/wiki/Decimal128_floating-point_format#Densely_packed_decimal_significand_field
num_segments = num_bits // 10
segments = []
for i in range(num_segments):
segments.append(dpd_bits & 0b1111111111)
dpd_bits >>= 10
segments.reverse()
v = prefix
for dpd in segments:
v = v * 1000 + dpd_to_int(dpd)
return v
|
[
"def",
"calc_significand",
"(",
"prefix",
",",
"dpd_bits",
",",
"num_bits",
")",
":",
"# https://en.wikipedia.org/wiki/Decimal128_floating-point_format#Densely_packed_decimal_significand_field",
"num_segments",
"=",
"num_bits",
"//",
"10",
"segments",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"num_segments",
")",
":",
"segments",
".",
"append",
"(",
"dpd_bits",
"&",
"0b1111111111",
")",
"dpd_bits",
">>=",
"10",
"segments",
".",
"reverse",
"(",
")",
"v",
"=",
"prefix",
"for",
"dpd",
"in",
"segments",
":",
"v",
"=",
"v",
"*",
"1000",
"+",
"dpd_to_int",
"(",
"dpd",
")",
"return",
"v"
] | 28.263158 | 17.315789 |
def xml(self, xml):
"""
Defines a XML body value to match.
Arguments:
xml (str|regex): body XML to match.
Returns:
self: current Mock instance.
"""
self._request.xml = xml
self.add_matcher(matcher('XMLMatcher', xml))
|
[
"def",
"xml",
"(",
"self",
",",
"xml",
")",
":",
"self",
".",
"_request",
".",
"xml",
"=",
"xml",
"self",
".",
"add_matcher",
"(",
"matcher",
"(",
"'XMLMatcher'",
",",
"xml",
")",
")"
] | 23.916667 | 14.75 |
async def _check_for_matching_user(self, **user_filters):
"""
This function checks if there is a user with the same uid in the
remote user service
Args:
**kwds : the filters of the user to check for
Returns:
(bool): wether or not there is a matching user
"""
# there is a matching user if there are no errors and no results from
user_data = self._get_matching_user(user_filters)
# return true if there were no errors and at lease one result
return not user_data['errors'] and len(user_data['data'][root_query()])
|
[
"async",
"def",
"_check_for_matching_user",
"(",
"self",
",",
"*",
"*",
"user_filters",
")",
":",
"# there is a matching user if there are no errors and no results from",
"user_data",
"=",
"self",
".",
"_get_matching_user",
"(",
"user_filters",
")",
"# return true if there were no errors and at lease one result",
"return",
"not",
"user_data",
"[",
"'errors'",
"]",
"and",
"len",
"(",
"user_data",
"[",
"'data'",
"]",
"[",
"root_query",
"(",
")",
"]",
")"
] | 44.928571 | 22.214286 |
def get_manifest_from_meta(metaurl, name):
"""
Extact manifest url from metadata url
:param metaurl: Url to metadata
:param name: Name of application to extract
:return:
"""
if 'http' in metaurl:
kit = yaml.safe_load(requests.get(url=metaurl).content)['kit']['applications']
else:
kit = yaml.safe_load(open(metaurl).read())['kit']['applications']
app_urls = [x['manifest'] for x in kit if x['name'] == name]
assert len(app_urls) == 1
return app_urls[0]
|
[
"def",
"get_manifest_from_meta",
"(",
"metaurl",
",",
"name",
")",
":",
"if",
"'http'",
"in",
"metaurl",
":",
"kit",
"=",
"yaml",
".",
"safe_load",
"(",
"requests",
".",
"get",
"(",
"url",
"=",
"metaurl",
")",
".",
"content",
")",
"[",
"'kit'",
"]",
"[",
"'applications'",
"]",
"else",
":",
"kit",
"=",
"yaml",
".",
"safe_load",
"(",
"open",
"(",
"metaurl",
")",
".",
"read",
"(",
")",
")",
"[",
"'kit'",
"]",
"[",
"'applications'",
"]",
"app_urls",
"=",
"[",
"x",
"[",
"'manifest'",
"]",
"for",
"x",
"in",
"kit",
"if",
"x",
"[",
"'name'",
"]",
"==",
"name",
"]",
"assert",
"len",
"(",
"app_urls",
")",
"==",
"1",
"return",
"app_urls",
"[",
"0",
"]"
] | 35.642857 | 15.785714 |
def migrate_constituencies(apps, schema_editor):
"""
Re-save constituencies to recompute fingerprints
"""
Constituency = apps.get_model("representatives", "Constituency")
for c in Constituency.objects.all():
c.save()
|
[
"def",
"migrate_constituencies",
"(",
"apps",
",",
"schema_editor",
")",
":",
"Constituency",
"=",
"apps",
".",
"get_model",
"(",
"\"representatives\"",
",",
"\"Constituency\"",
")",
"for",
"c",
"in",
"Constituency",
".",
"objects",
".",
"all",
"(",
")",
":",
"c",
".",
"save",
"(",
")"
] | 34 | 10.285714 |
def all_control_flow_elements_count(bpmn_graph):
"""
Returns the total count of all control flow elements
in the BPMNDiagramGraph instance.
:param bpmn_graph: an instance of BpmnDiagramGraph representing BPMN model.
:return: total count of the control flow elements in the BPMNDiagramGraph instance
"""
gateway_counts = get_gateway_counts(bpmn_graph)
events_counts = get_events_counts(bpmn_graph)
control_flow_elements_counts = gateway_counts.copy()
control_flow_elements_counts.update(events_counts)
return sum([
count for name, count in control_flow_elements_counts.items()
])
|
[
"def",
"all_control_flow_elements_count",
"(",
"bpmn_graph",
")",
":",
"gateway_counts",
"=",
"get_gateway_counts",
"(",
"bpmn_graph",
")",
"events_counts",
"=",
"get_events_counts",
"(",
"bpmn_graph",
")",
"control_flow_elements_counts",
"=",
"gateway_counts",
".",
"copy",
"(",
")",
"control_flow_elements_counts",
".",
"update",
"(",
"events_counts",
")",
"return",
"sum",
"(",
"[",
"count",
"for",
"name",
",",
"count",
"in",
"control_flow_elements_counts",
".",
"items",
"(",
")",
"]",
")"
] | 36.166667 | 22.277778 |
def execute(self, request):
"""Execute a request and return a response"""
url = request.uri
if request.parameters:
url += '?' + urlencode(request.parameters)
if request.headers:
headers = dict(self._headers, **request.headers)
else:
headers = self._headers
retry = 0
server = getattr(self._local, "server", None)
while True:
if not server:
self._local.server = server = self._get_server()
try:
parse_result = urlparse(server)
conn = get_pool().connection_from_host(parse_result.hostname,
parse_result.port,
parse_result.scheme)
kwargs = dict(
method=Method._VALUES_TO_NAMES[request.method],
url=parse_result.path + url,
body=request.body,
headers=headers,
timeout=self._timeout,
)
response = conn.urlopen(**kwargs)
return RestResponse(status=response.status,
body=response.data,
headers=response.headers)
except (IOError, urllib3.exceptions.HTTPError) as ex:
self._drop_server(server)
self._local.server = server = None
if retry >= self._max_retries:
logger.error("Client error: bailing out after %d failed retries",
self._max_retries, exc_info=1)
raise NoServerAvailable(ex)
logger.exception("Client error: %d retries left", self._max_retries - retry)
retry += 1
|
[
"def",
"execute",
"(",
"self",
",",
"request",
")",
":",
"url",
"=",
"request",
".",
"uri",
"if",
"request",
".",
"parameters",
":",
"url",
"+=",
"'?'",
"+",
"urlencode",
"(",
"request",
".",
"parameters",
")",
"if",
"request",
".",
"headers",
":",
"headers",
"=",
"dict",
"(",
"self",
".",
"_headers",
",",
"*",
"*",
"request",
".",
"headers",
")",
"else",
":",
"headers",
"=",
"self",
".",
"_headers",
"retry",
"=",
"0",
"server",
"=",
"getattr",
"(",
"self",
".",
"_local",
",",
"\"server\"",
",",
"None",
")",
"while",
"True",
":",
"if",
"not",
"server",
":",
"self",
".",
"_local",
".",
"server",
"=",
"server",
"=",
"self",
".",
"_get_server",
"(",
")",
"try",
":",
"parse_result",
"=",
"urlparse",
"(",
"server",
")",
"conn",
"=",
"get_pool",
"(",
")",
".",
"connection_from_host",
"(",
"parse_result",
".",
"hostname",
",",
"parse_result",
".",
"port",
",",
"parse_result",
".",
"scheme",
")",
"kwargs",
"=",
"dict",
"(",
"method",
"=",
"Method",
".",
"_VALUES_TO_NAMES",
"[",
"request",
".",
"method",
"]",
",",
"url",
"=",
"parse_result",
".",
"path",
"+",
"url",
",",
"body",
"=",
"request",
".",
"body",
",",
"headers",
"=",
"headers",
",",
"timeout",
"=",
"self",
".",
"_timeout",
",",
")",
"response",
"=",
"conn",
".",
"urlopen",
"(",
"*",
"*",
"kwargs",
")",
"return",
"RestResponse",
"(",
"status",
"=",
"response",
".",
"status",
",",
"body",
"=",
"response",
".",
"data",
",",
"headers",
"=",
"response",
".",
"headers",
")",
"except",
"(",
"IOError",
",",
"urllib3",
".",
"exceptions",
".",
"HTTPError",
")",
"as",
"ex",
":",
"self",
".",
"_drop_server",
"(",
"server",
")",
"self",
".",
"_local",
".",
"server",
"=",
"server",
"=",
"None",
"if",
"retry",
">=",
"self",
".",
"_max_retries",
":",
"logger",
".",
"error",
"(",
"\"Client error: bailing out after %d failed retries\"",
",",
"self",
".",
"_max_retries",
",",
"exc_info",
"=",
"1",
")",
"raise",
"NoServerAvailable",
"(",
"ex",
")",
"logger",
".",
"exception",
"(",
"\"Client error: %d retries left\"",
",",
"self",
".",
"_max_retries",
"-",
"retry",
")",
"retry",
"+=",
"1"
] | 42.325581 | 19.232558 |
def docker_environment(env):
"""
Transform dictionary of environment variables into Docker -e parameters.
>>> result = docker_environment({'param1': 'val1', 'param2': 'val2'})
>>> result in ['-e "param1=val1" -e "param2=val2"', '-e "param2=val2" -e "param1=val1"']
True
"""
return ' '.join(
["-e \"%s=%s\"" % (key, value.replace("$", "\\$").replace("\"", "\\\"").replace("`", "\\`"))
for key, value in env.items()])
|
[
"def",
"docker_environment",
"(",
"env",
")",
":",
"return",
"' '",
".",
"join",
"(",
"[",
"\"-e \\\"%s=%s\\\"\"",
"%",
"(",
"key",
",",
"value",
".",
"replace",
"(",
"\"$\"",
",",
"\"\\\\$\"",
")",
".",
"replace",
"(",
"\"\\\"\"",
",",
"\"\\\\\\\"\"",
")",
".",
"replace",
"(",
"\"`\"",
",",
"\"\\\\`\"",
")",
")",
"for",
"key",
",",
"value",
"in",
"env",
".",
"items",
"(",
")",
"]",
")"
] | 41 | 25.909091 |
def worker(compute, task_queue, result_queue, log_queue, complete,
*context):
"""A worker process, run by ``multiprocessing.Process``."""
try:
MapReduce._forked = True
log.debug('Worker process starting...')
configure_worker_logging(log_queue)
for obj in iter(task_queue.get, POISON_PILL):
if complete.is_set():
log.debug('Worker received signal - exiting early')
break
log.debug('Worker got %s', obj)
result_queue.put(compute(obj, *context))
log.debug('Worker finished %s', obj)
result_queue.put(POISON_PILL)
log.debug('Worker process exiting')
except Exception as e: # pylint: disable=broad-except
result_queue.put(ExceptionWrapper(e))
|
[
"def",
"worker",
"(",
"compute",
",",
"task_queue",
",",
"result_queue",
",",
"log_queue",
",",
"complete",
",",
"*",
"context",
")",
":",
"try",
":",
"MapReduce",
".",
"_forked",
"=",
"True",
"log",
".",
"debug",
"(",
"'Worker process starting...'",
")",
"configure_worker_logging",
"(",
"log_queue",
")",
"for",
"obj",
"in",
"iter",
"(",
"task_queue",
".",
"get",
",",
"POISON_PILL",
")",
":",
"if",
"complete",
".",
"is_set",
"(",
")",
":",
"log",
".",
"debug",
"(",
"'Worker received signal - exiting early'",
")",
"break",
"log",
".",
"debug",
"(",
"'Worker got %s'",
",",
"obj",
")",
"result_queue",
".",
"put",
"(",
"compute",
"(",
"obj",
",",
"*",
"context",
")",
")",
"log",
".",
"debug",
"(",
"'Worker finished %s'",
",",
"obj",
")",
"result_queue",
".",
"put",
"(",
"POISON_PILL",
")",
"log",
".",
"debug",
"(",
"'Worker process exiting'",
")",
"except",
"Exception",
"as",
"e",
":",
"# pylint: disable=broad-except",
"result_queue",
".",
"put",
"(",
"ExceptionWrapper",
"(",
"e",
")",
")"
] | 36.869565 | 18.73913 |
def do_b0(self, line):
"""Send the Master a BinaryInput (group 2) value of False at index 6. Command syntax is: b0"""
self.application.apply_update(opendnp3.Binary(False), index=6)
|
[
"def",
"do_b0",
"(",
"self",
",",
"line",
")",
":",
"self",
".",
"application",
".",
"apply_update",
"(",
"opendnp3",
".",
"Binary",
"(",
"False",
")",
",",
"index",
"=",
"6",
")"
] | 64.666667 | 16 |
def match_value_to_text(self, text):
"""
this is going to be the tricky bit - probably not possible
to get the 'exact' rating for a value. Will need to do sentiment
analysis of the text to see how it matches the rating. Even that
sounds like it wont work - maybe a ML algorithm would do it, but
that requires a large body of text already matched to values - and
values aren't even defined as far as I have found.
UPDATE - this could work if we assume values can be single words,
eg tax=0.3, freedom=0.7, healthcare=0.3, welfare=0.3 etc
"""
if self.nme in text:
res = 0.8
else:
res = 0.2
return self.nme + ' = ' + str(res) + ' match against ' + text
|
[
"def",
"match_value_to_text",
"(",
"self",
",",
"text",
")",
":",
"if",
"self",
".",
"nme",
"in",
"text",
":",
"res",
"=",
"0.8",
"else",
":",
"res",
"=",
"0.2",
"return",
"self",
".",
"nme",
"+",
"' = '",
"+",
"str",
"(",
"res",
")",
"+",
"' match against '",
"+",
"text"
] | 38.05 | 25.05 |
def pip_installer_url(version=None):
"""Get argument to give to ``pip`` to install HPCBench.
"""
version = version or hpcbench.__version__
version = str(version)
if '.dev' in version:
git_rev = 'master'
if 'TRAVIS_BRANCH' in os.environ:
git_rev = version.split('+', 1)[-1]
if '.' in git_rev: # get rid of date suffix
git_rev = git_rev.split('.', 1)[0]
git_rev = git_rev[1:] # get rid of scm letter
return 'git+{project_url}@{git_rev}#egg=hpcbench'.format(
project_url='http://github.com/BlueBrain/hpcbench',
git_rev=git_rev or 'master',
)
return 'hpcbench=={}'.format(version)
|
[
"def",
"pip_installer_url",
"(",
"version",
"=",
"None",
")",
":",
"version",
"=",
"version",
"or",
"hpcbench",
".",
"__version__",
"version",
"=",
"str",
"(",
"version",
")",
"if",
"'.dev'",
"in",
"version",
":",
"git_rev",
"=",
"'master'",
"if",
"'TRAVIS_BRANCH'",
"in",
"os",
".",
"environ",
":",
"git_rev",
"=",
"version",
".",
"split",
"(",
"'+'",
",",
"1",
")",
"[",
"-",
"1",
"]",
"if",
"'.'",
"in",
"git_rev",
":",
"# get rid of date suffix",
"git_rev",
"=",
"git_rev",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"[",
"0",
"]",
"git_rev",
"=",
"git_rev",
"[",
"1",
":",
"]",
"# get rid of scm letter",
"return",
"'git+{project_url}@{git_rev}#egg=hpcbench'",
".",
"format",
"(",
"project_url",
"=",
"'http://github.com/BlueBrain/hpcbench'",
",",
"git_rev",
"=",
"git_rev",
"or",
"'master'",
",",
")",
"return",
"'hpcbench=={}'",
".",
"format",
"(",
"version",
")"
] | 40.823529 | 10.823529 |
def cmyk_to_rgb(Class, c, m, y, k):
"""CMYK in % to RGB in 0-255
based on https://www.openprocessing.org/sketch/46231#
"""
c = float(c)/100.0
m = float(m)/100.0
y = float(y)/100.0
k = float(k)/100.0
nc = (c * (1-k)) + k
nm = (m * (1-k)) + k
ny = (y * (1-k)) + k
r = int((1-nc) * 255)
g = int((1-nm) * 255)
b = int((1-ny) * 255)
return dict(r=r, g=g, b=b)
|
[
"def",
"cmyk_to_rgb",
"(",
"Class",
",",
"c",
",",
"m",
",",
"y",
",",
"k",
")",
":",
"c",
"=",
"float",
"(",
"c",
")",
"/",
"100.0",
"m",
"=",
"float",
"(",
"m",
")",
"/",
"100.0",
"y",
"=",
"float",
"(",
"y",
")",
"/",
"100.0",
"k",
"=",
"float",
"(",
"k",
")",
"/",
"100.0",
"nc",
"=",
"(",
"c",
"*",
"(",
"1",
"-",
"k",
")",
")",
"+",
"k",
"nm",
"=",
"(",
"m",
"*",
"(",
"1",
"-",
"k",
")",
")",
"+",
"k",
"ny",
"=",
"(",
"y",
"*",
"(",
"1",
"-",
"k",
")",
")",
"+",
"k",
"r",
"=",
"int",
"(",
"(",
"1",
"-",
"nc",
")",
"*",
"255",
")",
"g",
"=",
"int",
"(",
"(",
"1",
"-",
"nm",
")",
"*",
"255",
")",
"b",
"=",
"int",
"(",
"(",
"1",
"-",
"ny",
")",
"*",
"255",
")",
"return",
"dict",
"(",
"r",
"=",
"r",
",",
"g",
"=",
"g",
",",
"b",
"=",
"b",
")"
] | 26.055556 | 14.666667 |
def get_tags(self, md5):
"""Get tags for this sample"""
tag_data = self.data_store.get_work_results('tags', md5)
return tag_data['tags'] if tag_data else None
|
[
"def",
"get_tags",
"(",
"self",
",",
"md5",
")",
":",
"tag_data",
"=",
"self",
".",
"data_store",
".",
"get_work_results",
"(",
"'tags'",
",",
"md5",
")",
"return",
"tag_data",
"[",
"'tags'",
"]",
"if",
"tag_data",
"else",
"None"
] | 44.75 | 13.25 |
def valid(self):
""" Check to see if we are still active. """
if self.finished is not None:
return False
with self._db_conn() as conn:
row = conn.get('''
SELECT (last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND) AS valid
FROM %s
WHERE
id = %%(task_id)s
AND execution_id = %%(execution_id)s
''' % self._queue.table_name,
now=datetime.utcnow(),
ttl=self._queue.execution_ttl,
task_id=self.task_id,
execution_id=self.execution_id)
return bool(row is not None and row.valid)
|
[
"def",
"valid",
"(",
"self",
")",
":",
"if",
"self",
".",
"finished",
"is",
"not",
"None",
":",
"return",
"False",
"with",
"self",
".",
"_db_conn",
"(",
")",
"as",
"conn",
":",
"row",
"=",
"conn",
".",
"get",
"(",
"'''\n SELECT (last_contact > %%(now)s - INTERVAL %%(ttl)s SECOND) AS valid\n FROM %s\n WHERE\n id = %%(task_id)s\n AND execution_id = %%(execution_id)s\n '''",
"%",
"self",
".",
"_queue",
".",
"table_name",
",",
"now",
"=",
"datetime",
".",
"utcnow",
"(",
")",
",",
"ttl",
"=",
"self",
".",
"_queue",
".",
"execution_ttl",
",",
"task_id",
"=",
"self",
".",
"task_id",
",",
"execution_id",
"=",
"self",
".",
"execution_id",
")",
"return",
"bool",
"(",
"row",
"is",
"not",
"None",
"and",
"row",
".",
"valid",
")"
] | 35.578947 | 13.894737 |
def modify_relationship(self, rel_uri, old_object, new_object):
"""
Modify a relationship from RELS-EXT for this object. As the Fedora API-M does not contain
a native "modifyRelationship", this method purges an existing one, then adds a new one,
pivoting on the predicate.
Calls :meth:`API_M.purgeRelationship`, :meth:`API_M.addRelationship`
Example usage::
predicate = 'info:fedora/fedora-system:def/relations-external#isMemberOfCollection'
old_object = 'info:fedora/foo:456'
new_object = 'info:fedora/foo:789'
object.modify_relationship(predicate, old_object, new_object)
:param rel_uri: URI for the existing relationship
:param old_object: previous target object for relationship; can be
:class:`DigitalObject` or string; if string begins with info:fedora/ it
will be treated as a resource, otherwise it will be treated as a literal
:param new_object: new target object for relationship; can be
:class:`DigitalObject` or string; if string begins with info:fedora/ it
will be treated as a resource, otherwise it will be treated as a literal
:rtype: boolean
"""
if isinstance(rel_uri, URIRef):
rel_uri = force_text(rel_uri)
# old_object
obj_old_is_literal = True
if isinstance(old_object, DigitalObject):
old_object = old_object.uri
obj_old_is_literal = False
elif (isinstance(old_object, str) or isinstance(old_object, six.string_types)) \
and old_object.startswith('info:fedora/'):
obj_old_is_literal = False
# new_object
obj_new_is_literal = True
if isinstance(new_object, DigitalObject):
new_object = new_object.uri
obj_new_is_literal = False
elif (isinstance(new_object, str) or isinstance(new_object, six.string_types)) \
and new_object.startswith('info:fedora/'):
obj_new_is_literal = False
# this call will change RELS-EXT, possibly creating it if it's
# missing. remove any cached info we have for that datastream.
if 'RELS-EXT' in self.dscache:
del self.dscache['RELS-EXT']
self._ds_list = None
# attempt purge
if self.api.purgeRelationship(self.pid, self.uri, rel_uri, old_object,
obj_old_is_literal) is not True:
return False
# attempt add
elif self.api.addRelationship(self.pid, self.uri, rel_uri, new_object,
obj_new_is_literal) is not True:
# if addRelationship fails, rollback to old_object
self.api.addRelationship(self.pid, self.uri, rel_uri, old_object,
obj_old_is_literal)
return False
else:
return True
|
[
"def",
"modify_relationship",
"(",
"self",
",",
"rel_uri",
",",
"old_object",
",",
"new_object",
")",
":",
"if",
"isinstance",
"(",
"rel_uri",
",",
"URIRef",
")",
":",
"rel_uri",
"=",
"force_text",
"(",
"rel_uri",
")",
"# old_object",
"obj_old_is_literal",
"=",
"True",
"if",
"isinstance",
"(",
"old_object",
",",
"DigitalObject",
")",
":",
"old_object",
"=",
"old_object",
".",
"uri",
"obj_old_is_literal",
"=",
"False",
"elif",
"(",
"isinstance",
"(",
"old_object",
",",
"str",
")",
"or",
"isinstance",
"(",
"old_object",
",",
"six",
".",
"string_types",
")",
")",
"and",
"old_object",
".",
"startswith",
"(",
"'info:fedora/'",
")",
":",
"obj_old_is_literal",
"=",
"False",
"# new_object",
"obj_new_is_literal",
"=",
"True",
"if",
"isinstance",
"(",
"new_object",
",",
"DigitalObject",
")",
":",
"new_object",
"=",
"new_object",
".",
"uri",
"obj_new_is_literal",
"=",
"False",
"elif",
"(",
"isinstance",
"(",
"new_object",
",",
"str",
")",
"or",
"isinstance",
"(",
"new_object",
",",
"six",
".",
"string_types",
")",
")",
"and",
"new_object",
".",
"startswith",
"(",
"'info:fedora/'",
")",
":",
"obj_new_is_literal",
"=",
"False",
"# this call will change RELS-EXT, possibly creating it if it's",
"# missing. remove any cached info we have for that datastream.",
"if",
"'RELS-EXT'",
"in",
"self",
".",
"dscache",
":",
"del",
"self",
".",
"dscache",
"[",
"'RELS-EXT'",
"]",
"self",
".",
"_ds_list",
"=",
"None",
"# attempt purge",
"if",
"self",
".",
"api",
".",
"purgeRelationship",
"(",
"self",
".",
"pid",
",",
"self",
".",
"uri",
",",
"rel_uri",
",",
"old_object",
",",
"obj_old_is_literal",
")",
"is",
"not",
"True",
":",
"return",
"False",
"# attempt add",
"elif",
"self",
".",
"api",
".",
"addRelationship",
"(",
"self",
".",
"pid",
",",
"self",
".",
"uri",
",",
"rel_uri",
",",
"new_object",
",",
"obj_new_is_literal",
")",
"is",
"not",
"True",
":",
"# if addRelationship fails, rollback to old_object",
"self",
".",
"api",
".",
"addRelationship",
"(",
"self",
".",
"pid",
",",
"self",
".",
"uri",
",",
"rel_uri",
",",
"old_object",
",",
"obj_old_is_literal",
")",
"return",
"False",
"else",
":",
"return",
"True"
] | 45.153846 | 24.230769 |
def plot_result(filt, full, prntres=True):
    r"""QC the inversion result of a digital filter design.

    Parameters
    ----------
    filt, full :
        As returned from ``fdesign.design`` with ``full_output=True``;
        ``full[2]`` holds the brute-force grid (spacing/shift), ``full[3]``
        the minimum field values, and ``full[4]`` the criterion flag
        (0 = minimum amplitude, otherwise maximum r).
    prntres : bool
        If True, ``fdesign.print_result`` is called as well.
    """
    # Check matplotlib (soft dependency)
    if not plt:
        print(plt_msg)
        return
    if prntres:
        print_result(filt, full)
    # Get spacing and shift values from full output of brute
    spacing = full[2][0, :, 0]
    shift = full[2][1, 0, :]
    # Get minimum field values from full output of brute
    minfield = np.squeeze(full[3])
    plt.figure("Brute force result", figsize=(9.5, 4.5))
    plt.subplots_adjust(wspace=.4, bottom=0.2)
    # Figure 1: Only if more than 1 spacing or more than 1 shift
    # Figure of minfield, depending if spacing/shift are vectors or floats
    if spacing.size > 1 or shift.size > 1:
        plt.subplot(121)
        if full[4] == 0:  # Min amp criterion
            plt.title("Minimal recovered fields")
            ylabel = 'Minimal recovered amplitude (log10)'
            field = np.log10(minfield)
            cmap = plt.cm.viridis
        else:  # Max r criterion (minfield stores 1/r)
            plt.title("Maximum recovered r")
            ylabel = 'Maximum recovered r'
            field = 1/minfield
            cmap = plt.cm.viridis_r
        if shift.size == 1:  # (a) if only one shift value,
            plt.plot(spacing, field)
            plt.xlabel('Spacing')
            plt.ylabel(ylabel)
        elif spacing.size == 1:  # (b) if only one spacing value
            plt.plot(shift, field)
            plt.xlabel('Shift')
            plt.ylabel(ylabel)
        else:  # (c) if several spacing and several shift values
            # Mask infinite entries so pcolormesh renders them blank.
            field = np.ma.masked_where(np.isinf(minfield), field)
            plt.pcolormesh(shift, spacing, field, cmap=cmap)
            plt.ylabel('Spacing')
            plt.xlabel('Shift')
            plt.colorbar()
    # Figure 2: Filter values
    if spacing.size > 1 or shift.size > 1:
        plt.subplot(122)
        plt.title('Filter values of best filter')
        for attr in ['j0', 'j1', 'sin', 'cos']:
            if hasattr(filt, attr):
                plt.plot(np.log10(filt.base),
                         np.log10(np.abs(getattr(filt, attr))), '.-', lw=.5,
                         label='abs('+attr+')')
                # Mark negative filter coefficients with black dots.
                plt.plot(np.log10(filt.base), np.log10(-getattr(filt, attr)), '.',
                         color='k', ms=4)
        # Off-plot point only to get "Neg. values" into the legend.
        plt.plot(np.inf, 0, '.', color='k', ms=4, label='Neg. values')
        plt.xlabel('Base (log10)')
        plt.ylabel('Abs(Amplitude) (log10)')
        plt.legend(loc='best')
    plt.gcf().canvas.draw()  # To force draw in notebook while running
    plt.show()
|
[
"def",
"plot_result",
"(",
"filt",
",",
"full",
",",
"prntres",
"=",
"True",
")",
":",
"# Check matplotlib (soft dependency)",
"if",
"not",
"plt",
":",
"print",
"(",
"plt_msg",
")",
"return",
"if",
"prntres",
":",
"print_result",
"(",
"filt",
",",
"full",
")",
"# Get spacing and shift values from full output of brute",
"spacing",
"=",
"full",
"[",
"2",
"]",
"[",
"0",
",",
":",
",",
"0",
"]",
"shift",
"=",
"full",
"[",
"2",
"]",
"[",
"1",
",",
"0",
",",
":",
"]",
"# Get minimum field values from full output of brute",
"minfield",
"=",
"np",
".",
"squeeze",
"(",
"full",
"[",
"3",
"]",
")",
"plt",
".",
"figure",
"(",
"\"Brute force result\"",
",",
"figsize",
"=",
"(",
"9.5",
",",
"4.5",
")",
")",
"plt",
".",
"subplots_adjust",
"(",
"wspace",
"=",
".4",
",",
"bottom",
"=",
"0.2",
")",
"# Figure 1: Only if more than 1 spacing or more than 1 shift",
"# Figure of minfield, depending if spacing/shift are vectors or floats",
"if",
"spacing",
".",
"size",
">",
"1",
"or",
"shift",
".",
"size",
">",
"1",
":",
"plt",
".",
"subplot",
"(",
"121",
")",
"if",
"full",
"[",
"4",
"]",
"==",
"0",
":",
"# Min amp",
"plt",
".",
"title",
"(",
"\"Minimal recovered fields\"",
")",
"ylabel",
"=",
"'Minimal recovered amplitude (log10)'",
"field",
"=",
"np",
".",
"log10",
"(",
"minfield",
")",
"cmap",
"=",
"plt",
".",
"cm",
".",
"viridis",
"else",
":",
"# Max r",
"plt",
".",
"title",
"(",
"\"Maximum recovered r\"",
")",
"ylabel",
"=",
"'Maximum recovered r'",
"field",
"=",
"1",
"/",
"minfield",
"cmap",
"=",
"plt",
".",
"cm",
".",
"viridis_r",
"if",
"shift",
".",
"size",
"==",
"1",
":",
"# (a) if only one shift value,",
"plt",
".",
"plot",
"(",
"spacing",
",",
"field",
")",
"plt",
".",
"xlabel",
"(",
"'Spacing'",
")",
"plt",
".",
"ylabel",
"(",
"ylabel",
")",
"elif",
"spacing",
".",
"size",
"==",
"1",
":",
"# (b) if only one spacing value",
"plt",
".",
"plot",
"(",
"shift",
",",
"field",
")",
"plt",
".",
"xlabel",
"(",
"'Shift'",
")",
"plt",
".",
"ylabel",
"(",
"ylabel",
")",
"else",
":",
"# (c) if several spacing and several shift values",
"field",
"=",
"np",
".",
"ma",
".",
"masked_where",
"(",
"np",
".",
"isinf",
"(",
"minfield",
")",
",",
"field",
")",
"plt",
".",
"pcolormesh",
"(",
"shift",
",",
"spacing",
",",
"field",
",",
"cmap",
"=",
"cmap",
")",
"plt",
".",
"ylabel",
"(",
"'Spacing'",
")",
"plt",
".",
"xlabel",
"(",
"'Shift'",
")",
"plt",
".",
"colorbar",
"(",
")",
"# Figure 2: Filter values",
"if",
"spacing",
".",
"size",
">",
"1",
"or",
"shift",
".",
"size",
">",
"1",
":",
"plt",
".",
"subplot",
"(",
"122",
")",
"plt",
".",
"title",
"(",
"'Filter values of best filter'",
")",
"for",
"attr",
"in",
"[",
"'j0'",
",",
"'j1'",
",",
"'sin'",
",",
"'cos'",
"]",
":",
"if",
"hasattr",
"(",
"filt",
",",
"attr",
")",
":",
"plt",
".",
"plot",
"(",
"np",
".",
"log10",
"(",
"filt",
".",
"base",
")",
",",
"np",
".",
"log10",
"(",
"np",
".",
"abs",
"(",
"getattr",
"(",
"filt",
",",
"attr",
")",
")",
")",
",",
"'.-'",
",",
"lw",
"=",
".5",
",",
"label",
"=",
"'abs('",
"+",
"attr",
"+",
"')'",
")",
"plt",
".",
"plot",
"(",
"np",
".",
"log10",
"(",
"filt",
".",
"base",
")",
",",
"np",
".",
"log10",
"(",
"-",
"getattr",
"(",
"filt",
",",
"attr",
")",
")",
",",
"'.'",
",",
"color",
"=",
"'k'",
",",
"ms",
"=",
"4",
")",
"plt",
".",
"plot",
"(",
"np",
".",
"inf",
",",
"0",
",",
"'.'",
",",
"color",
"=",
"'k'",
",",
"ms",
"=",
"4",
",",
"label",
"=",
"'Neg. values'",
")",
"plt",
".",
"xlabel",
"(",
"'Base (log10)'",
")",
"plt",
".",
"ylabel",
"(",
"'Abs(Amplitude) (log10)'",
")",
"plt",
".",
"legend",
"(",
"loc",
"=",
"'best'",
")",
"plt",
".",
"gcf",
"(",
")",
".",
"canvas",
".",
"draw",
"(",
")",
"# To force draw in notebook while running",
"plt",
".",
"show",
"(",
")"
] | 34.368421 | 17.263158 |
def offset_overlays(self, text, offset=0, **kw):
    """
    Generate overlays for regex matches found at or after *offset*.

    :param text: The text to be searched.
    :param offset: Match starting at that index. If zero, search everything.
    :returns: A generator of :class:`Overlay` objects (may be empty).
    """
    # OverlayedText takes care of unicode issues, at a slight speed cost.
    if not isinstance(text, OverlayedText):
        text = OverlayedText(text)
    haystack = unicode(text)[offset:]
    for match in self.regex.finditer(haystack):
        span = (offset + match.start(), offset + match.end())
        yield Overlay(text, span,
                      props=self.props,
                      value=self.value(rxmatch=match))
|
[
"def",
"offset_overlays",
"(",
"self",
",",
"text",
",",
"offset",
"=",
"0",
",",
"*",
"*",
"kw",
")",
":",
"# This may be a bit slower but overlayedtext takes care of",
"# unicode issues.",
"if",
"not",
"isinstance",
"(",
"text",
",",
"OverlayedText",
")",
":",
"text",
"=",
"OverlayedText",
"(",
"text",
")",
"for",
"m",
"in",
"self",
".",
"regex",
".",
"finditer",
"(",
"unicode",
"(",
"text",
")",
"[",
"offset",
":",
"]",
")",
":",
"yield",
"Overlay",
"(",
"text",
",",
"(",
"offset",
"+",
"m",
".",
"start",
"(",
")",
",",
"offset",
"+",
"m",
".",
"end",
"(",
")",
")",
",",
"props",
"=",
"self",
".",
"props",
",",
"value",
"=",
"self",
".",
"value",
"(",
"rxmatch",
"=",
"m",
")",
")"
] | 39.117647 | 14.529412 |
def print_graphic_information(self, num_curve, information):
    """
    Display information about one curve in the UI table.

    Inputs ; num_curve ; The index of the curve's line that we have to display.
             information ; The array which contains the information, of all curves to display.
    """
    # NOTE(review): ideally the 8 column widgets would be created dynamically
    # from len(label_information) instead of being hard-coded — see the
    # original commented-out QLabel experiment.
    label_information = information[0]
    data_information = information[1:]
    # The column headers are the same for every curve, so set them once.
    # (The original `while count_nb_label <= nb_label` loop re-assigned the
    # identical 8 labels len(label_information)+1 times — redundant work and
    # an off-by-one in the bound.)
    self.ui.column1_label.setText(label_information[0].strip('\"'))
    self.ui.column2_label.setText(label_information[1].strip('\"'))
    self.ui.column3_label.setText(label_information[2].strip('\"'))
    self.ui.column4_label.setText(label_information[3].strip('\"'))
    self.ui.column5_label.setText(label_information[4].strip('\"'))
    self.ui.column6_label.setText(label_information[5].strip('\"'))
    self.ui.column7_label.setText(label_information[6].strip('\"'))
    self.ui.column8_label.setText(label_information[7].strip('\"'))
    # Show only the requested curve. An out-of-range index is silently
    # ignored, matching the original linear-scan behaviour.
    if 0 <= num_curve < len(data_information):
        row = data_information[num_curve]
        self.ui.column1_result.setText(row[0])
        self.ui.column2_result.setText(row[1])
        self.ui.column3_result.setText(row[2])
        self.ui.column4_result.setText(row[3])
        self.ui.column5_result.setText(row[4])
        self.ui.column6_result.setText(row[5])
        self.ui.column7_result.setText(row[6])
        self.ui.column8_result.setText(row[7])
|
[
"def",
"print_graphic_information",
"(",
"self",
",",
"num_curve",
",",
"information",
")",
":",
"\"\"\"In this function, the best would to create labels each time we need to create one,\n following the number of labels in label_information.\n #self.essai = QtGui.QLabel(self.ui.tab)\n #self.essai.setGeometry(PyQt4.QtCore.QRect(870,650,111,16))\n #self.essai.setText(\"ESSAI\")\n \"\"\"",
"label_information",
"=",
"information",
"[",
"0",
"]",
"data_information",
"=",
"information",
"[",
"1",
":",
"]",
"count_nb_label",
"=",
"0",
"# Iterator on all labels of label_information",
"nb_label",
"=",
"len",
"(",
"label_information",
")",
"while",
"count_nb_label",
"<=",
"nb_label",
":",
"self",
".",
"ui",
".",
"column1_label",
".",
"setText",
"(",
"label_information",
"[",
"0",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"self",
".",
"ui",
".",
"column2_label",
".",
"setText",
"(",
"label_information",
"[",
"1",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"self",
".",
"ui",
".",
"column3_label",
".",
"setText",
"(",
"label_information",
"[",
"2",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"self",
".",
"ui",
".",
"column4_label",
".",
"setText",
"(",
"label_information",
"[",
"3",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"self",
".",
"ui",
".",
"column5_label",
".",
"setText",
"(",
"label_information",
"[",
"4",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"self",
".",
"ui",
".",
"column6_label",
".",
"setText",
"(",
"label_information",
"[",
"5",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"self",
".",
"ui",
".",
"column7_label",
".",
"setText",
"(",
"label_information",
"[",
"6",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"self",
".",
"ui",
".",
"column8_label",
".",
"setText",
"(",
"label_information",
"[",
"7",
"]",
".",
"strip",
"(",
"'\\\"'",
")",
")",
"count_nb_label",
"+=",
"1",
"line_of_data",
"=",
"0",
"# Iterator on each line of data_information.",
"while",
"line_of_data",
"<",
"len",
"(",
"data_information",
")",
":",
"if",
"line_of_data",
"==",
"num_curve",
":",
"self",
".",
"ui",
".",
"column1_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"0",
"]",
")",
"self",
".",
"ui",
".",
"column2_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"1",
"]",
")",
"self",
".",
"ui",
".",
"column3_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"2",
"]",
")",
"self",
".",
"ui",
".",
"column4_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"3",
"]",
")",
"self",
".",
"ui",
".",
"column5_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"4",
"]",
")",
"self",
".",
"ui",
".",
"column6_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"5",
"]",
")",
"self",
".",
"ui",
".",
"column7_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"6",
"]",
")",
"self",
".",
"ui",
".",
"column8_result",
".",
"setText",
"(",
"data_information",
"[",
"line_of_data",
"]",
"[",
"7",
"]",
")",
"line_of_data",
"+=",
"1"
] | 57.121951 | 24.97561 |
def recv_into(self, buffer, nbytes=None, flags=None):
    """
    Receive data on the connection and copy it directly into the provided
    buffer, rather than creating a new string.

    :param buffer: The writable buffer to copy into (anything supporting
        slice assignment from a memoryview, e.g. ``bytearray``).
    :param nbytes: (optional) The maximum number of bytes to read into the
        buffer. If not present, defaults to the size of the buffer. If
        larger than the size of the buffer, is reduced to the size of the
        buffer.
    :param flags: (optional) The only supported flag is ``MSG_PEEK``,
        all other flags are ignored.
    :return: The number of bytes read into the buffer.
    """
    if nbytes is None:
        nbytes = len(buffer)
    else:
        # Never read more than the caller's buffer can hold.
        nbytes = min(nbytes, len(buffer))
    # We need to create a temporary buffer. This is annoying, it would be
    # better if we could pass memoryviews straight into the SSL_read call,
    # but right now we can't. Revisit this if CFFI gets that ability.
    buf = _no_zero_allocator("char[]", nbytes)
    if flags is not None and flags & socket.MSG_PEEK:
        # MSG_PEEK: inspect pending data without consuming it.
        result = _lib.SSL_peek(self._ssl, buf, nbytes)
    else:
        result = _lib.SSL_read(self._ssl, buf, nbytes)
    # Translates a non-positive return code into the appropriate exception
    # (by the helper's name — behavior defined elsewhere in this module).
    self._raise_ssl_error(self._ssl, result)
    # This strange line is all to avoid a memory copy. The buffer protocol
    # should allow us to assign a CFFI buffer to the LHS of this line, but
    # on CPython 3.3+ that segfaults. As a workaround, we can temporarily
    # wrap it in a memoryview.
    buffer[:result] = memoryview(_ffi.buffer(buf, result))
    return result
|
[
"def",
"recv_into",
"(",
"self",
",",
"buffer",
",",
"nbytes",
"=",
"None",
",",
"flags",
"=",
"None",
")",
":",
"if",
"nbytes",
"is",
"None",
":",
"nbytes",
"=",
"len",
"(",
"buffer",
")",
"else",
":",
"nbytes",
"=",
"min",
"(",
"nbytes",
",",
"len",
"(",
"buffer",
")",
")",
"# We need to create a temporary buffer. This is annoying, it would be",
"# better if we could pass memoryviews straight into the SSL_read call,",
"# but right now we can't. Revisit this if CFFI gets that ability.",
"buf",
"=",
"_no_zero_allocator",
"(",
"\"char[]\"",
",",
"nbytes",
")",
"if",
"flags",
"is",
"not",
"None",
"and",
"flags",
"&",
"socket",
".",
"MSG_PEEK",
":",
"result",
"=",
"_lib",
".",
"SSL_peek",
"(",
"self",
".",
"_ssl",
",",
"buf",
",",
"nbytes",
")",
"else",
":",
"result",
"=",
"_lib",
".",
"SSL_read",
"(",
"self",
".",
"_ssl",
",",
"buf",
",",
"nbytes",
")",
"self",
".",
"_raise_ssl_error",
"(",
"self",
".",
"_ssl",
",",
"result",
")",
"# This strange line is all to avoid a memory copy. The buffer protocol",
"# should allow us to assign a CFFI buffer to the LHS of this line, but",
"# on CPython 3.3+ that segfaults. As a workaround, we can temporarily",
"# wrap it in a memoryview.",
"buffer",
"[",
":",
"result",
"]",
"=",
"memoryview",
"(",
"_ffi",
".",
"buffer",
"(",
"buf",
",",
"result",
")",
")",
"return",
"result"
] | 45.722222 | 23 |
def temp_files(self):
    """Return a list of the temporary files produced by this link.

    Only files that were explicitly marked for removal are included.
    """
    # Temp files are exactly those whose flags carry a removal bit.
    return [name for name, flags in self.file_dict.items()
            if flags & FileFlags.rm_mask]
|
[
"def",
"temp_files",
"(",
"self",
")",
":",
"ret_list",
"=",
"[",
"]",
"for",
"key",
",",
"val",
"in",
"self",
".",
"file_dict",
".",
"items",
"(",
")",
":",
"# For temp files we only want files that were marked for removal",
"if",
"val",
"&",
"FileFlags",
".",
"rm_mask",
":",
"ret_list",
".",
"append",
"(",
"key",
")",
"return",
"ret_list"
] | 37.727273 | 15.818182 |
def compare_version(a, b):  # Ignore PyDocStyleBear
    """Compare two version strings.

    :param a: str
    :param b: str
    :return: -1 / 0 / 1
    """
    def _to_ints(version):
        """Turn "1.2.3" into [1, 2, 3]; non-numeric parts become -1."""
        parts = []
        for piece in version.replace("-", ".").split("."):
            try:
                parts.append(int(piece))
            except ValueError:
                # rc*, alpha, beta etc. sort below their plain counterparts
                parts.append(-1)
        return parts

    left = _to_ints(a)
    right = _to_ints(b)
    # Pad the shorter list with zeros so both have the same length
    # ("1.0" compares equal to "1.0.0").
    size_gap = len(left) - len(right)
    if size_gap > 0:
        right = right + [0] * size_gap
    elif size_gap < 0:
        left = left + [0] * (-size_gap)
    # List comparison is lexicographic; map it onto the -1/0/1 convention.
    return (left > right) - (left < right)
|
[
"def",
"compare_version",
"(",
"a",
",",
"b",
")",
":",
"# Ignore PyDocStyleBear",
"def",
"_range",
"(",
"q",
")",
":",
"\"\"\"Convert a version string to array of integers.\n\n \"1.2.3\" -> [1, 2, 3]\n\n :param q: str\n :return: List[int]\n \"\"\"",
"r",
"=",
"[",
"]",
"for",
"n",
"in",
"q",
".",
"replace",
"(",
"\"-\"",
",",
"\".\"",
")",
".",
"split",
"(",
"\".\"",
")",
":",
"try",
":",
"r",
".",
"append",
"(",
"int",
"(",
"n",
")",
")",
"except",
"ValueError",
":",
"# sort rc*, alpha, beta etc. lower than their non-annotated counterparts",
"r",
".",
"append",
"(",
"-",
"1",
")",
"return",
"r",
"def",
"_append_zeros",
"(",
"x",
",",
"num_zeros",
")",
":",
"\"\"\"Append `num_zeros` zeros to a copy of `x` and return it.\n\n :param x: List[int]\n :param num_zeros: int\n :return: List[int]\n \"\"\"",
"nx",
"=",
"list",
"(",
"x",
")",
"for",
"_",
"in",
"range",
"(",
"num_zeros",
")",
":",
"nx",
".",
"append",
"(",
"0",
")",
"return",
"nx",
"def",
"_cardinal",
"(",
"x",
",",
"y",
")",
":",
"\"\"\"Make both input lists be of same cardinality.\n\n :param x: List[int]\n :param y: List[int]\n :return: List[int]\n \"\"\"",
"lx",
",",
"ly",
"=",
"len",
"(",
"x",
")",
",",
"len",
"(",
"y",
")",
"if",
"lx",
"==",
"ly",
":",
"return",
"x",
",",
"y",
"elif",
"lx",
">",
"ly",
":",
"return",
"x",
",",
"_append_zeros",
"(",
"y",
",",
"lx",
"-",
"ly",
")",
"else",
":",
"return",
"_append_zeros",
"(",
"x",
",",
"ly",
"-",
"lx",
")",
",",
"y",
"left",
",",
"right",
"=",
"_cardinal",
"(",
"_range",
"(",
"a",
")",
",",
"_range",
"(",
"b",
")",
")",
"return",
"(",
"left",
">",
"right",
")",
"-",
"(",
"left",
"<",
"right",
")"
] | 24.327273 | 17.836364 |
def set_snap_server_variables(host, port, snap_extension='.xml', path=None):
    """Change dynamically port and host variables in xml Snap! project files.

    Every file ending with *snap_extension* in *path* (or, when *path* is
    None, in the directory containing this module) is rewritten in place:
    the ``host`` variable, the host argument of the ``set $robot host``
    custom block, and the ``port`` variable are replaced with the given
    *host* and *port* values.
    """
    localdir = os.getcwd()
    if path is None:
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
    else:
        os.chdir(path)
    try:
        xml_files = [f for f in os.listdir('.') if f.endswith(snap_extension)]
        for filename in xml_files:
            with open(filename, 'r') as xf:
                xml = xf.read()
            # Change host variable
            xml = re.sub(r'''<variable name="host"><l>[\s\S]*?<\/l><\/variable>''',
                         '''<variable name="host"><l>{}</l></variable>'''.format(host), xml)
            # Change host argument of "set $robot host"
            xml = re.sub(r'''<custom-block s="set \$robot host to \%s"><l>[\s\S]*?<\/l>''',
                         '''<custom-block s="set $robot host to %s"><l>{}</l>'''.format(host), xml)
            # Change port variable
            xml = re.sub(r'''<variable name="port"><l>[\s\S]*?<\/l><\/variable>''',
                         '''<variable name="port"><l>{}</l></variable>'''.format(port), xml)
            with open(filename, 'w') as xf:
                xf.write(xml)
    finally:
        # Restore the caller's working directory even when listing,
        # reading or writing fails (the original leaked the chdir).
        os.chdir(localdir)
|
[
"def",
"set_snap_server_variables",
"(",
"host",
",",
"port",
",",
"snap_extension",
"=",
"'.xml'",
",",
"path",
"=",
"None",
")",
":",
"localdir",
"=",
"os",
".",
"getcwd",
"(",
")",
"if",
"path",
"is",
"None",
":",
"os",
".",
"chdir",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
")",
"else",
":",
"os",
".",
"chdir",
"(",
"path",
")",
"xml_files",
"=",
"[",
"f",
"for",
"f",
"in",
"os",
".",
"listdir",
"(",
"'.'",
")",
"if",
"f",
".",
"endswith",
"(",
"snap_extension",
")",
"]",
"for",
"filename",
"in",
"xml_files",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"xf",
":",
"xml",
"=",
"xf",
".",
"read",
"(",
")",
"# Change host variable",
"xml",
"=",
"re",
".",
"sub",
"(",
"r'''<variable name=\"host\"><l>[\\s\\S]*?<\\/l><\\/variable>'''",
",",
"'''<variable name=\"host\"><l>{}</l></variable>'''",
".",
"format",
"(",
"host",
")",
",",
"xml",
")",
"# Change host argument of \"set $robot host\"",
"xml",
"=",
"re",
".",
"sub",
"(",
"r'''<custom-block s=\"set \\$robot host to \\%s\"><l>[\\s\\S]*?<\\/l>'''",
",",
"'''<custom-block s=\"set $robot host to %s\"><l>{}</l>'''",
".",
"format",
"(",
"host",
")",
",",
"xml",
")",
"# Change port variable",
"xml",
"=",
"re",
".",
"sub",
"(",
"r'''<variable name=\"port\"><l>[\\s\\S]*?<\\/l><\\/variable>'''",
",",
"'''<variable name=\"port\"><l>{}</l></variable>'''",
".",
"format",
"(",
"port",
")",
",",
"xml",
")",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"xf",
":",
"xf",
".",
"write",
"(",
"xml",
")",
"os",
".",
"chdir",
"(",
"localdir",
")"
] | 47 | 24.76 |
def _mainthread_poll_readable(self):
    """Search for readable client sockets.

    Each readable socket is unregistered from the selector and handed off
    to a subthread running ``_subthread_handle_readable``.
    """
    for key, event_mask in self._recv_selector.select(self.block_time):
        if event_mask == selectors.EVENT_READ:
            # Unregister first so the main loop does not pick the same
            # socket up again while the subthread is working on it.
            self._recv_selector.unregister(key.fileobj)
            self._threads_limiter.start_thread(
                target=self._subthread_handle_readable,
                args=(key.fileobj,))
|
[
"def",
"_mainthread_poll_readable",
"(",
"self",
")",
":",
"events",
"=",
"self",
".",
"_recv_selector",
".",
"select",
"(",
"self",
".",
"block_time",
")",
"for",
"key",
",",
"mask",
"in",
"events",
":",
"if",
"mask",
"==",
"selectors",
".",
"EVENT_READ",
":",
"self",
".",
"_recv_selector",
".",
"unregister",
"(",
"key",
".",
"fileobj",
")",
"self",
".",
"_threads_limiter",
".",
"start_thread",
"(",
"target",
"=",
"self",
".",
"_subthread_handle_readable",
",",
"args",
"=",
"(",
"key",
".",
"fileobj",
",",
")",
")"
] | 48.545455 | 16.090909 |
def stop(id):
    """
    Stop a running job.
    """
    # Prefer the normalized job name; fall back to the raw id on failure.
    try:
        experiment = ExperimentClient().get(normalize_job_name(id))
    except FloydException:
        experiment = ExperimentClient().get(id)

    stoppable_states = ["queued", "queue_scheduled", "running"]
    if experiment.state not in stoppable_states:
        floyd_logger.info("Job in {} state cannot be stopped".format(experiment.state))
        sys.exit(1)

    if not ExperimentClient().stop(experiment.id):
        floyd_logger.error("Failed to stop job")
        sys.exit(1)

    floyd_logger.info("Experiment shutdown request submitted. Check status to confirm shutdown")
|
[
"def",
"stop",
"(",
"id",
")",
":",
"try",
":",
"experiment",
"=",
"ExperimentClient",
"(",
")",
".",
"get",
"(",
"normalize_job_name",
"(",
"id",
")",
")",
"except",
"FloydException",
":",
"experiment",
"=",
"ExperimentClient",
"(",
")",
".",
"get",
"(",
"id",
")",
"if",
"experiment",
".",
"state",
"not",
"in",
"[",
"\"queued\"",
",",
"\"queue_scheduled\"",
",",
"\"running\"",
"]",
":",
"floyd_logger",
".",
"info",
"(",
"\"Job in {} state cannot be stopped\"",
".",
"format",
"(",
"experiment",
".",
"state",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"not",
"ExperimentClient",
"(",
")",
".",
"stop",
"(",
"experiment",
".",
"id",
")",
":",
"floyd_logger",
".",
"error",
"(",
"\"Failed to stop job\"",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"floyd_logger",
".",
"info",
"(",
"\"Experiment shutdown request submitted. Check status to confirm shutdown\"",
")"
] | 32.722222 | 24.388889 |
def police_priority_map_exceed_map_pri2_exceed(self, **kwargs):
    """Auto Generated Code
    """
    # Build the NETCONF config subtree for the exceed/map-pri2-exceed leaf.
    config = ET.Element("config")
    priority_map = ET.SubElement(
        config, "police-priority-map",
        xmlns="urn:brocade.com:mgmt:brocade-policer")
    ET.SubElement(priority_map, "name").text = kwargs.pop('name')
    exceed = ET.SubElement(priority_map, "exceed")
    ET.SubElement(exceed, "map-pri2-exceed").text = kwargs.pop('map_pri2_exceed')
    # Send via the caller-supplied callback, defaulting to the device one.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
[
"def",
"police_priority_map_exceed_map_pri2_exceed",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"config",
"=",
"ET",
".",
"Element",
"(",
"\"config\"",
")",
"police_priority_map",
"=",
"ET",
".",
"SubElement",
"(",
"config",
",",
"\"police-priority-map\"",
",",
"xmlns",
"=",
"\"urn:brocade.com:mgmt:brocade-policer\"",
")",
"name_key",
"=",
"ET",
".",
"SubElement",
"(",
"police_priority_map",
",",
"\"name\"",
")",
"name_key",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'name'",
")",
"exceed",
"=",
"ET",
".",
"SubElement",
"(",
"police_priority_map",
",",
"\"exceed\"",
")",
"map_pri2_exceed",
"=",
"ET",
".",
"SubElement",
"(",
"exceed",
",",
"\"map-pri2-exceed\"",
")",
"map_pri2_exceed",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'map_pri2_exceed'",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"return",
"callback",
"(",
"config",
")"
] | 49.153846 | 20.153846 |
def plot(self, axis, title=None, saved=False):
    """
    Plots the planar average electrostatic potential against the long range
    and short range models from Freysoldt.

    Args:
        axis: key into ``self.metadata['pot_plot_data']`` selecting which
            axis' pre-computed plot data to use.
        title: title prefix for the plot and, when saving, the file name.
        saved: if True, save to "<title>FreyplnravgPlot.pdf" (returns
            None); otherwise return the ``matplotlib.pyplot`` module.
    """
    # Pre-computed plot data for this axis (filled in elsewhere before
    # plot() is called).
    x = self.metadata['pot_plot_data'][axis]['x']
    v_R = self.metadata['pot_plot_data'][axis]['Vr']
    dft_diff = self.metadata['pot_plot_data'][axis]['dft_diff']
    final_shift = self.metadata['pot_plot_data'][axis]['final_shift']
    check = self.metadata['pot_plot_data'][axis]['check']
    plt.figure()
    plt.clf()
    plt.plot(x, v_R, c="green", zorder=1, label="long range from model")
    plt.plot(x, dft_diff, c="red", label="DFT locpot diff")
    plt.plot(x, final_shift, c="blue", label="short range (aligned)")
    # Shade the sampling region; check holds [start, end) indices into x.
    tmpx = [x[i] for i in range(check[0], check[1])]
    plt.fill_between(tmpx, -100, 100, facecolor="red", alpha=0.15, label="sampling region")
    plt.xlim(round(x[0]), round(x[-1]))
    # Pad the y-range slightly beyond the extreme curve values.
    ymin = min(min(v_R), min(dft_diff), min(final_shift))
    ymax = max(max(v_R), max(dft_diff), max(final_shift))
    plt.ylim(-0.2 + ymin, 0.2 + ymax)
    plt.xlabel("distance along axis ($\AA$)", fontsize=15)
    plt.ylabel("Potential (V)", fontsize=15)
    plt.legend(loc=9)
    plt.axhline(y=0, linewidth=0.2, color="black")
    plt.title(str(title) + " defect potential", fontsize=18)
    plt.xlim(0, max(x))
    if saved:
        plt.savefig(str(title) + "FreyplnravgPlot.pdf")
    else:
        return plt
|
[
"def",
"plot",
"(",
"self",
",",
"axis",
",",
"title",
"=",
"None",
",",
"saved",
"=",
"False",
")",
":",
"x",
"=",
"self",
".",
"metadata",
"[",
"'pot_plot_data'",
"]",
"[",
"axis",
"]",
"[",
"'x'",
"]",
"v_R",
"=",
"self",
".",
"metadata",
"[",
"'pot_plot_data'",
"]",
"[",
"axis",
"]",
"[",
"'Vr'",
"]",
"dft_diff",
"=",
"self",
".",
"metadata",
"[",
"'pot_plot_data'",
"]",
"[",
"axis",
"]",
"[",
"'dft_diff'",
"]",
"final_shift",
"=",
"self",
".",
"metadata",
"[",
"'pot_plot_data'",
"]",
"[",
"axis",
"]",
"[",
"'final_shift'",
"]",
"check",
"=",
"self",
".",
"metadata",
"[",
"'pot_plot_data'",
"]",
"[",
"axis",
"]",
"[",
"'check'",
"]",
"plt",
".",
"figure",
"(",
")",
"plt",
".",
"clf",
"(",
")",
"plt",
".",
"plot",
"(",
"x",
",",
"v_R",
",",
"c",
"=",
"\"green\"",
",",
"zorder",
"=",
"1",
",",
"label",
"=",
"\"long range from model\"",
")",
"plt",
".",
"plot",
"(",
"x",
",",
"dft_diff",
",",
"c",
"=",
"\"red\"",
",",
"label",
"=",
"\"DFT locpot diff\"",
")",
"plt",
".",
"plot",
"(",
"x",
",",
"final_shift",
",",
"c",
"=",
"\"blue\"",
",",
"label",
"=",
"\"short range (aligned)\"",
")",
"tmpx",
"=",
"[",
"x",
"[",
"i",
"]",
"for",
"i",
"in",
"range",
"(",
"check",
"[",
"0",
"]",
",",
"check",
"[",
"1",
"]",
")",
"]",
"plt",
".",
"fill_between",
"(",
"tmpx",
",",
"-",
"100",
",",
"100",
",",
"facecolor",
"=",
"\"red\"",
",",
"alpha",
"=",
"0.15",
",",
"label",
"=",
"\"sampling region\"",
")",
"plt",
".",
"xlim",
"(",
"round",
"(",
"x",
"[",
"0",
"]",
")",
",",
"round",
"(",
"x",
"[",
"-",
"1",
"]",
")",
")",
"ymin",
"=",
"min",
"(",
"min",
"(",
"v_R",
")",
",",
"min",
"(",
"dft_diff",
")",
",",
"min",
"(",
"final_shift",
")",
")",
"ymax",
"=",
"max",
"(",
"max",
"(",
"v_R",
")",
",",
"max",
"(",
"dft_diff",
")",
",",
"max",
"(",
"final_shift",
")",
")",
"plt",
".",
"ylim",
"(",
"-",
"0.2",
"+",
"ymin",
",",
"0.2",
"+",
"ymax",
")",
"plt",
".",
"xlabel",
"(",
"\"distance along axis ($\\AA$)\"",
",",
"fontsize",
"=",
"15",
")",
"plt",
".",
"ylabel",
"(",
"\"Potential (V)\"",
",",
"fontsize",
"=",
"15",
")",
"plt",
".",
"legend",
"(",
"loc",
"=",
"9",
")",
"plt",
".",
"axhline",
"(",
"y",
"=",
"0",
",",
"linewidth",
"=",
"0.2",
",",
"color",
"=",
"\"black\"",
")",
"plt",
".",
"title",
"(",
"str",
"(",
"title",
")",
"+",
"\" defect potential\"",
",",
"fontsize",
"=",
"18",
")",
"plt",
".",
"xlim",
"(",
"0",
",",
"max",
"(",
"x",
")",
")",
"if",
"saved",
":",
"plt",
".",
"savefig",
"(",
"str",
"(",
"title",
")",
"+",
"\"FreyplnravgPlot.pdf\"",
")",
"else",
":",
"return",
"plt"
] | 42.628571 | 23.657143 |
def encode_int(n):
    """
    Encodes an int as a variable length signed 29-bit integer as defined by
    the spec.

    Encoded values are memoised in the module-level ``ENCODED_INT_CACHE``.

    @param n: The integer to be encoded
    @return: The encoded string
    @rtype: C{str}
    @raise OverflowError: Out of range.
    """
    global ENCODED_INT_CACHE
    # Fast path: previously encoded values are cached by value.
    try:
        return ENCODED_INT_CACHE[n]
    except KeyError:
        pass
    if n < MIN_29B_INT or n > MAX_29B_INT:
        raise OverflowError("Out of range")
    if n < 0:
        # Wrap negative values into the unsigned 29-bit space
        # (two's-complement style).
        n += 0x20000000
    bytes = ''
    real_value = None
    if n > 0x1fffff:
        # Four-byte form: keep the full value for the final byte; the
        # leading bytes are computed from the value shifted right by one.
        real_value = n
        n >>= 1
        bytes += chr(0x80 | ((n >> 21) & 0xff))
    if n > 0x3fff:
        # Continuation byte: high bit set, next 7 bits of payload.
        bytes += chr(0x80 | ((n >> 14) & 0xff))
    if n > 0x7f:
        bytes += chr(0x80 | ((n >> 7) & 0xff))
    if real_value is not None:
        # Restore the unshifted value before emitting the last byte.
        n = real_value
    if n > 0x1fffff:
        # Four-byte form: the final byte carries a full 8 bits.
        bytes += chr(n & 0xff)
    else:
        # Shorter forms: final byte carries 7 bits with the high bit clear.
        bytes += chr(n & 0x7f)
    ENCODED_INT_CACHE[n] = bytes
    return bytes
|
[
"def",
"encode_int",
"(",
"n",
")",
":",
"global",
"ENCODED_INT_CACHE",
"try",
":",
"return",
"ENCODED_INT_CACHE",
"[",
"n",
"]",
"except",
"KeyError",
":",
"pass",
"if",
"n",
"<",
"MIN_29B_INT",
"or",
"n",
">",
"MAX_29B_INT",
":",
"raise",
"OverflowError",
"(",
"\"Out of range\"",
")",
"if",
"n",
"<",
"0",
":",
"n",
"+=",
"0x20000000",
"bytes",
"=",
"''",
"real_value",
"=",
"None",
"if",
"n",
">",
"0x1fffff",
":",
"real_value",
"=",
"n",
"n",
">>=",
"1",
"bytes",
"+=",
"chr",
"(",
"0x80",
"|",
"(",
"(",
"n",
">>",
"21",
")",
"&",
"0xff",
")",
")",
"if",
"n",
">",
"0x3fff",
":",
"bytes",
"+=",
"chr",
"(",
"0x80",
"|",
"(",
"(",
"n",
">>",
"14",
")",
"&",
"0xff",
")",
")",
"if",
"n",
">",
"0x7f",
":",
"bytes",
"+=",
"chr",
"(",
"0x80",
"|",
"(",
"(",
"n",
">>",
"7",
")",
"&",
"0xff",
")",
")",
"if",
"real_value",
"is",
"not",
"None",
":",
"n",
"=",
"real_value",
"if",
"n",
">",
"0x1fffff",
":",
"bytes",
"+=",
"chr",
"(",
"n",
"&",
"0xff",
")",
"else",
":",
"bytes",
"+=",
"chr",
"(",
"n",
"&",
"0x7f",
")",
"ENCODED_INT_CACHE",
"[",
"n",
"]",
"=",
"bytes",
"return",
"bytes"
] | 19.291667 | 21.833333 |
def flush_stdout(self):
    """
    Empties the standard out redirect buffer and renders the
    contents to the body as a preformatted text box.
    """
    try:
        captured = self.stdout_interceptor.flush_all()
    except Exception:
        # Best effort: a failed flush is silently ignored.
        return
    if len(captured) > 0:
        self.body.append(render_texts.preformatted_text(captured))
        self._last_update_time = time.time()
    return captured
|
[
"def",
"flush_stdout",
"(",
"self",
")",
":",
"try",
":",
"contents",
"=",
"self",
".",
"stdout_interceptor",
".",
"flush_all",
"(",
")",
"except",
"Exception",
":",
"return",
"if",
"len",
"(",
"contents",
")",
">",
"0",
":",
"self",
".",
"body",
".",
"append",
"(",
"render_texts",
".",
"preformatted_text",
"(",
"contents",
")",
")",
"self",
".",
"_last_update_time",
"=",
"time",
".",
"time",
"(",
")",
"return",
"contents"
] | 29.866667 | 19.066667 |
def lookup(img, **kwargs):
    """Assign values to channels based on a table."""
    # Normalise the lookup table(s) into the [0, 1] range.
    luts = np.array(kwargs['luts'], dtype=np.float32) / 255.0

    def func(band_data, luts=luts, index=-1):
        # NaN/null values will become 0
        if len(luts.shape) == 2:
            lut = luts[:, index]
        else:
            lut = luts
        clipped = band_data.clip(0, lut.size - 1).astype(np.uint8)
        delayed_lookup = dask.delayed(_lookup_delayed)(lut, clipped)
        return da.from_delayed(delayed_lookup, shape=clipped.shape,
                               dtype=luts.dtype)

    return apply_enhancement(img.data, func, separate=True, pass_dask=True)
|
[
"def",
"lookup",
"(",
"img",
",",
"*",
"*",
"kwargs",
")",
":",
"luts",
"=",
"np",
".",
"array",
"(",
"kwargs",
"[",
"'luts'",
"]",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
"/",
"255.0",
"def",
"func",
"(",
"band_data",
",",
"luts",
"=",
"luts",
",",
"index",
"=",
"-",
"1",
")",
":",
"# NaN/null values will become 0",
"lut",
"=",
"luts",
"[",
":",
",",
"index",
"]",
"if",
"len",
"(",
"luts",
".",
"shape",
")",
"==",
"2",
"else",
"luts",
"band_data",
"=",
"band_data",
".",
"clip",
"(",
"0",
",",
"lut",
".",
"size",
"-",
"1",
")",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"new_delay",
"=",
"dask",
".",
"delayed",
"(",
"_lookup_delayed",
")",
"(",
"lut",
",",
"band_data",
")",
"new_data",
"=",
"da",
".",
"from_delayed",
"(",
"new_delay",
",",
"shape",
"=",
"band_data",
".",
"shape",
",",
"dtype",
"=",
"luts",
".",
"dtype",
")",
"return",
"new_data",
"return",
"apply_enhancement",
"(",
"img",
".",
"data",
",",
"func",
",",
"separate",
"=",
"True",
",",
"pass_dask",
"=",
"True",
")"
] | 42.466667 | 21.866667 |
def p_arr_decl_initialized(p):
    """ var_arr_decl : DIM idlist LP bound_list RP typedef RIGHTARROW const_vector
                     | DIM idlist LP bound_list RP typedef EQ const_vector
    """
    # NOTE: the docstring above is the PLY/yacc grammar rule for this
    # production -- it is parser configuration, not documentation.
    # Recursively verifies that the literal vector p[8] has exactly the
    # dimensions declared in the bound list p[4].
    def check_bound(boundlist, remaining):
        """ Checks if constant vector bounds matches the array one
        """
        lineno = p.lineno(8)
        if not boundlist:  # Returns on empty list
            # No dimensions left to match: `remaining` must be a scalar.
            if not isinstance(remaining, list):
                return True  # It's OK :-)
            syntax_error(lineno, 'Unexpected extra vector dimensions. It should be %i' % len(remaining))
        # A dimension is still expected but a scalar was found.
        if not isinstance(remaining, list):
            syntax_error(lineno, 'Mismatched vector size. Missing %i extra dimension(s)' % len(boundlist))
            return False
        # The current dimension's element count must match the declared bound.
        if len(remaining) != boundlist[0].count:
            syntax_error(lineno, 'Mismatched vector size. Expected %i elements, got %i.' % (boundlist[0].count,
                                                                                            len(remaining)))
            return False  # It's wrong. :-(
        # Recurse into each row with the remaining (inner) bounds.
        for row in remaining:
            if not check_bound(boundlist[1:], row):
                return False
        return True
    # No initializer vector (e.g. earlier syntax error): nothing to declare.
    if p[8] is None:
        p[0] = None
        return
    # Only declare the array when the initializer's shape checks out.
    if check_bound(p[4].children, p[8]):
        id_, lineno = p[2][0]
        SYMBOL_TABLE.declare_array(id_, lineno, p[6], p[4], default_value=p[8])
    p[0] = None
|
[
"def",
"p_arr_decl_initialized",
"(",
"p",
")",
":",
"def",
"check_bound",
"(",
"boundlist",
",",
"remaining",
")",
":",
"\"\"\" Checks if constant vector bounds matches the array one\n \"\"\"",
"lineno",
"=",
"p",
".",
"lineno",
"(",
"8",
")",
"if",
"not",
"boundlist",
":",
"# Returns on empty list",
"if",
"not",
"isinstance",
"(",
"remaining",
",",
"list",
")",
":",
"return",
"True",
"# It's OK :-)",
"syntax_error",
"(",
"lineno",
",",
"'Unexpected extra vector dimensions. It should be %i'",
"%",
"len",
"(",
"remaining",
")",
")",
"if",
"not",
"isinstance",
"(",
"remaining",
",",
"list",
")",
":",
"syntax_error",
"(",
"lineno",
",",
"'Mismatched vector size. Missing %i extra dimension(s)'",
"%",
"len",
"(",
"boundlist",
")",
")",
"return",
"False",
"if",
"len",
"(",
"remaining",
")",
"!=",
"boundlist",
"[",
"0",
"]",
".",
"count",
":",
"syntax_error",
"(",
"lineno",
",",
"'Mismatched vector size. Expected %i elements, got %i.'",
"%",
"(",
"boundlist",
"[",
"0",
"]",
".",
"count",
",",
"len",
"(",
"remaining",
")",
")",
")",
"return",
"False",
"# It's wrong. :-(",
"for",
"row",
"in",
"remaining",
":",
"if",
"not",
"check_bound",
"(",
"boundlist",
"[",
"1",
":",
"]",
",",
"row",
")",
":",
"return",
"False",
"return",
"True",
"if",
"p",
"[",
"8",
"]",
"is",
"None",
":",
"p",
"[",
"0",
"]",
"=",
"None",
"return",
"if",
"check_bound",
"(",
"p",
"[",
"4",
"]",
".",
"children",
",",
"p",
"[",
"8",
"]",
")",
":",
"id_",
",",
"lineno",
"=",
"p",
"[",
"2",
"]",
"[",
"0",
"]",
"SYMBOL_TABLE",
".",
"declare_array",
"(",
"id_",
",",
"lineno",
",",
"p",
"[",
"6",
"]",
",",
"p",
"[",
"4",
"]",
",",
"default_value",
"=",
"p",
"[",
"8",
"]",
")",
"p",
"[",
"0",
"]",
"=",
"None"
] | 36.128205 | 23.923077 |
def _make_in_prompt(self, number):
""" Given a prompt number, returns an HTML In prompt.
"""
try:
body = self.in_prompt % number
except TypeError:
# allow in_prompt to leave out number, e.g. '>>> '
body = self.in_prompt
return '<span class="in-prompt">%s</span>' % body
|
[
"def",
"_make_in_prompt",
"(",
"self",
",",
"number",
")",
":",
"try",
":",
"body",
"=",
"self",
".",
"in_prompt",
"%",
"number",
"except",
"TypeError",
":",
"# allow in_prompt to leave out number, e.g. '>>> '",
"body",
"=",
"self",
".",
"in_prompt",
"return",
"'<span class=\"in-prompt\">%s</span>'",
"%",
"body"
] | 37.444444 | 10.777778 |
def get_timestamp(str_len=13):
    """Return the current epoch time as a string of ``str_len`` digits.

    ``str_len`` must be an integer with 0 < str_len < 17; any other value
    raises ParamsError.
    """
    length_ok = isinstance(str_len, integer_types) and 0 < str_len < 17
    if not length_ok:
        raise ParamsError("timestamp length can only between 0 and 16.")
    digits = builtin_str(time.time()).replace(".", "")
    return digits[:str_len]
|
[
"def",
"get_timestamp",
"(",
"str_len",
"=",
"13",
")",
":",
"if",
"isinstance",
"(",
"str_len",
",",
"integer_types",
")",
"and",
"0",
"<",
"str_len",
"<",
"17",
":",
"return",
"builtin_str",
"(",
"time",
".",
"time",
"(",
")",
")",
".",
"replace",
"(",
"\".\"",
",",
"\"\"",
")",
"[",
":",
"str_len",
"]",
"raise",
"ParamsError",
"(",
"\"timestamp length can only between 0 and 16.\"",
")"
] | 42.285714 | 18.142857 |
def create_info_endpoint(self, name, data):
"""Create an endpoint to serve info GET requests."""
# make sure data is serializable
data = make_serializable(data)
# create generic restful resource to serve static JSON data
class InfoBase(Resource):
@staticmethod
def get():
return data
def info_factory(name):
"""Return an Info derivative resource."""
class NewClass(InfoBase):
pass
NewClass.__name__ = "{}_{}".format(name, InfoBase.__name__)
return NewClass
path = '/info/{}'.format(name)
self.api.add_resource(info_factory(name), path)
logger.info('Regestered informational resource to {} (available via GET)'.format(path))
logger.debug('Endpoint {} will now serve the following static data:\n{}'.format(path, data))
|
[
"def",
"create_info_endpoint",
"(",
"self",
",",
"name",
",",
"data",
")",
":",
"# make sure data is serializable",
"data",
"=",
"make_serializable",
"(",
"data",
")",
"# create generic restful resource to serve static JSON data",
"class",
"InfoBase",
"(",
"Resource",
")",
":",
"@",
"staticmethod",
"def",
"get",
"(",
")",
":",
"return",
"data",
"def",
"info_factory",
"(",
"name",
")",
":",
"\"\"\"Return an Info derivative resource.\"\"\"",
"class",
"NewClass",
"(",
"InfoBase",
")",
":",
"pass",
"NewClass",
".",
"__name__",
"=",
"\"{}_{}\"",
".",
"format",
"(",
"name",
",",
"InfoBase",
".",
"__name__",
")",
"return",
"NewClass",
"path",
"=",
"'/info/{}'",
".",
"format",
"(",
"name",
")",
"self",
".",
"api",
".",
"add_resource",
"(",
"info_factory",
"(",
"name",
")",
",",
"path",
")",
"logger",
".",
"info",
"(",
"'Regestered informational resource to {} (available via GET)'",
".",
"format",
"(",
"path",
")",
")",
"logger",
".",
"debug",
"(",
"'Endpoint {} will now serve the following static data:\\n{}'",
".",
"format",
"(",
"path",
",",
"data",
")",
")"
] | 40.090909 | 18.772727 |
def get_resource(self, resource_key, **variables):
"""Get a resource.
Attempts to get and return a cached version of the resource if
available, otherwise a new resource object is created and returned.
Args:
resource_key (`str`): Name of the type of `Resources` to find
variables: data to identify / store on the resource
Returns:
`PackageRepositoryResource` instance.
"""
handle = self.make_resource_handle(resource_key, **variables)
return self.get_resource_from_handle(handle, verify_repo=False)
|
[
"def",
"get_resource",
"(",
"self",
",",
"resource_key",
",",
"*",
"*",
"variables",
")",
":",
"handle",
"=",
"self",
".",
"make_resource_handle",
"(",
"resource_key",
",",
"*",
"*",
"variables",
")",
"return",
"self",
".",
"get_resource_from_handle",
"(",
"handle",
",",
"verify_repo",
"=",
"False",
")"
] | 39.133333 | 24.8 |
def set_dead_lock(self, deadLock):
"""
Set the dead lock time.
:Parameters:
#. deadLock (number): The time delay judging if the lock was left out mistakenly
after a system crash or other unexpected reasons. Normally Locker is stable
and takes care of not leaving any locking file hanging even it crashes or it
is forced to stop by a user signal.
"""
try:
deadLock = float(deadLock)
assert deadLock>=0
except:
raise Exception('deadLock must be a positive number')
self.__deadLock = deadLock
|
[
"def",
"set_dead_lock",
"(",
"self",
",",
"deadLock",
")",
":",
"try",
":",
"deadLock",
"=",
"float",
"(",
"deadLock",
")",
"assert",
"deadLock",
">=",
"0",
"except",
":",
"raise",
"Exception",
"(",
"'deadLock must be a positive number'",
")",
"self",
".",
"__deadLock",
"=",
"deadLock"
] | 39 | 20.875 |
def as_colr(self, label_args=None, value_args=None):
""" Like __str__, except it returns a colorized Colr instance. """
label_args = label_args or {'fore': 'red'}
value_args = value_args or {'fore': 'blue', 'style': 'bright'}
return Colr(self.default_format.format(
label=Colr(self.label, **label_args),
value=Colr(repr(self.value), **value_args),
))
|
[
"def",
"as_colr",
"(",
"self",
",",
"label_args",
"=",
"None",
",",
"value_args",
"=",
"None",
")",
":",
"label_args",
"=",
"label_args",
"or",
"{",
"'fore'",
":",
"'red'",
"}",
"value_args",
"=",
"value_args",
"or",
"{",
"'fore'",
":",
"'blue'",
",",
"'style'",
":",
"'bright'",
"}",
"return",
"Colr",
"(",
"self",
".",
"default_format",
".",
"format",
"(",
"label",
"=",
"Colr",
"(",
"self",
".",
"label",
",",
"*",
"*",
"label_args",
")",
",",
"value",
"=",
"Colr",
"(",
"repr",
"(",
"self",
".",
"value",
")",
",",
"*",
"*",
"value_args",
")",
",",
")",
")"
] | 50.875 | 14.125 |
    def subject(self, value):
        """The subject of the email(s)
        :param value: The subject of the email(s)
        :type value: Subject, string
        """
        if isinstance(value, Subject):
            if value.personalization is not None:
                # The Subject targets a specific personalization slot;
                # attach it there instead of at the message level.
                try:
                    personalization = \
                        self._personalizations[value.personalization]
                    has_internal_personalization = True
                except IndexError:
                    # No personalization exists at that index yet; create one
                    # and register it at the requested index below.
                    personalization = Personalization()
                    has_internal_personalization = False
                personalization.subject = value.subject
                if not has_internal_personalization:
                    self.add_personalization(
                        personalization,
                        index=value.personalization)
            else:
                self._subject = value
        else:
            # Plain strings are wrapped in a Subject helper object.
            self._subject = Subject(value)
|
[
"def",
"subject",
"(",
"self",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"Subject",
")",
":",
"if",
"value",
".",
"personalization",
"is",
"not",
"None",
":",
"try",
":",
"personalization",
"=",
"self",
".",
"_personalizations",
"[",
"value",
".",
"personalization",
"]",
"has_internal_personalization",
"=",
"True",
"except",
"IndexError",
":",
"personalization",
"=",
"Personalization",
"(",
")",
"has_internal_personalization",
"=",
"False",
"personalization",
".",
"subject",
"=",
"value",
".",
"subject",
"if",
"not",
"has_internal_personalization",
":",
"self",
".",
"add_personalization",
"(",
"personalization",
",",
"index",
"=",
"value",
".",
"personalization",
")",
"else",
":",
"self",
".",
"_subject",
"=",
"value",
"else",
":",
"self",
".",
"_subject",
"=",
"Subject",
"(",
"value",
")"
] | 37.08 | 12.8 |
def clear(self):
"""Clear all work items from the session.
This removes any associated results as well.
"""
with self._conn:
self._conn.execute('DELETE FROM results')
self._conn.execute('DELETE FROM work_items')
|
[
"def",
"clear",
"(",
"self",
")",
":",
"with",
"self",
".",
"_conn",
":",
"self",
".",
"_conn",
".",
"execute",
"(",
"'DELETE FROM results'",
")",
"self",
".",
"_conn",
".",
"execute",
"(",
"'DELETE FROM work_items'",
")"
] | 32.625 | 15.125 |
def witnesses(ctx):
    """ List witnesses and relevant information
    """
    header = [
        "weight",
        "account",
        "signing_key",
        "vote_id",
        "url",
        "total_missed",
        "last_confirmed_block_num",
    ]
    rows = [header]
    # Highest voting weight first.
    ranked = sorted(Witnesses(), key=lambda w: w.weight, reverse=True)
    for witness in ranked:
        witness.refresh()
        rows.append(
            [
                "{:.2f}%".format(witness.weight * 100),
                witness.account["name"],
                witness["signing_key"],
                witness["vote_id"],
                witness["url"],
                witness["total_missed"],
                witness["last_confirmed_block_num"],
            ]
        )
    print_table(rows)
|
[
"def",
"witnesses",
"(",
"ctx",
")",
":",
"t",
"=",
"[",
"[",
"\"weight\"",
",",
"\"account\"",
",",
"\"signing_key\"",
",",
"\"vote_id\"",
",",
"\"url\"",
",",
"\"total_missed\"",
",",
"\"last_confirmed_block_num\"",
",",
"]",
"]",
"for",
"witness",
"in",
"sorted",
"(",
"Witnesses",
"(",
")",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
".",
"weight",
",",
"reverse",
"=",
"True",
")",
":",
"witness",
".",
"refresh",
"(",
")",
"t",
".",
"append",
"(",
"[",
"\"{:.2f}%\"",
".",
"format",
"(",
"witness",
".",
"weight",
"*",
"100",
")",
",",
"witness",
".",
"account",
"[",
"\"name\"",
"]",
",",
"witness",
"[",
"\"signing_key\"",
"]",
",",
"witness",
"[",
"\"vote_id\"",
"]",
",",
"witness",
"[",
"\"url\"",
"]",
",",
"witness",
"[",
"\"total_missed\"",
"]",
",",
"witness",
"[",
"\"last_confirmed_block_num\"",
"]",
",",
"]",
")",
"print_table",
"(",
"t",
")"
] | 26.571429 | 17.071429 |
def pilcoords(self, (x,y)):
"""
Converts the coordinates (x,y) of the original array or FITS file to the current coordinates of the PIL image,
respecting cropping, rebinning, and upsampling.
This is only used once the PIL image is available, for drawing.
Note that we also have to take care about the different origin conventions here !
For PIL, (0,0) is top left, so the y axis needs to be inverted.
"""
pilx = int((x - 1 - self.xa) * float(self.upsamplefactor) / float(self.binfactor))
pily = int((self.yb - y) * float(self.upsamplefactor) / float(self.binfactor))
return (pilx, pily)
|
[
"def",
"pilcoords",
"(",
"self",
",",
"(",
"x",
",",
"y",
")",
")",
":",
"pilx",
"=",
"int",
"(",
"(",
"x",
"-",
"1",
"-",
"self",
".",
"xa",
")",
"*",
"float",
"(",
"self",
".",
"upsamplefactor",
")",
"/",
"float",
"(",
"self",
".",
"binfactor",
")",
")",
"pily",
"=",
"int",
"(",
"(",
"self",
".",
"yb",
"-",
"y",
")",
"*",
"float",
"(",
"self",
".",
"upsamplefactor",
")",
"/",
"float",
"(",
"self",
".",
"binfactor",
")",
")",
"return",
"(",
"pilx",
",",
"pily",
")"
] | 51.692308 | 30 |
def process_stdin(line):
    '''handle commands from user

    A ``None`` line (EOF) exits the process; blank lines are ignored.
    ``help`` lists the command table, ``exit`` flags shutdown, and any
    other known command is dispatched with its remaining arguments.
    '''
    if line is None:
        sys.exit(0)
    line = line.strip()
    if not line:
        return
    args = shlex.split(line)
    cmd = args[0]
    if cmd == 'help':
        # sorted() works on Python 3 dict views, unlike the old
        # ``k = command_map.keys(); k.sort()`` idiom which raised
        # AttributeError on Python 3.
        for cmd in sorted(command_map):
            (fn, help) = command_map[cmd]
            print("%-15s : %s" % (cmd, help))
        return
    if cmd == 'exit':
        mestate.exit = True
        return
    if cmd not in command_map:
        print("Unknown command '%s'" % line)
        return
    (fn, help) = command_map[cmd]
    try:
        fn(args[1:])
    except Exception as e:
        print("ERROR in command %s: %s" % (args[1:], str(e)))
|
[
"def",
"process_stdin",
"(",
"line",
")",
":",
"if",
"line",
"is",
"None",
":",
"sys",
".",
"exit",
"(",
"0",
")",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"not",
"line",
":",
"return",
"args",
"=",
"shlex",
".",
"split",
"(",
"line",
")",
"cmd",
"=",
"args",
"[",
"0",
"]",
"if",
"cmd",
"==",
"'help'",
":",
"k",
"=",
"command_map",
".",
"keys",
"(",
")",
"k",
".",
"sort",
"(",
")",
"for",
"cmd",
"in",
"k",
":",
"(",
"fn",
",",
"help",
")",
"=",
"command_map",
"[",
"cmd",
"]",
"print",
"(",
"\"%-15s : %s\"",
"%",
"(",
"cmd",
",",
"help",
")",
")",
"return",
"if",
"cmd",
"==",
"'exit'",
":",
"mestate",
".",
"exit",
"=",
"True",
"return",
"if",
"not",
"cmd",
"in",
"command_map",
":",
"print",
"(",
"\"Unknown command '%s'\"",
"%",
"line",
")",
"return",
"(",
"fn",
",",
"help",
")",
"=",
"command_map",
"[",
"cmd",
"]",
"try",
":",
"fn",
"(",
"args",
"[",
"1",
":",
"]",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"ERROR in command %s: %s\"",
"%",
"(",
"args",
"[",
"1",
":",
"]",
",",
"str",
"(",
"e",
")",
")",
")"
] | 22.733333 | 19.333333 |
def inject_func_as_property(self, func, method_name=None, class_=None):
"""
WARNING:
properties are more safely injected using metaclasses
References:
http://stackoverflow.com/questions/13850114/dynamically-adding-methods-with-or-without-metaclass-in-python
"""
if method_name is None:
method_name = get_funcname(func)
#new_method = func.__get__(self, self.__class__)
new_property = property(func)
setattr(self.__class__, method_name, new_property)
|
[
"def",
"inject_func_as_property",
"(",
"self",
",",
"func",
",",
"method_name",
"=",
"None",
",",
"class_",
"=",
"None",
")",
":",
"if",
"method_name",
"is",
"None",
":",
"method_name",
"=",
"get_funcname",
"(",
"func",
")",
"#new_method = func.__get__(self, self.__class__)",
"new_property",
"=",
"property",
"(",
"func",
")",
"setattr",
"(",
"self",
".",
"__class__",
",",
"method_name",
",",
"new_property",
")"
] | 37.923077 | 20.384615 |
def get_name(obj, setting_name='LONG_NAME_FORMAT'):
    """
    Returns the correct order of the name according to the current language.

    :param obj: object exposing the ``get_nickname`` / ``get_*_name`` /
        ``get_title`` / ``get_gender`` accessors used below
    :param setting_name: name of the format setting looked up via
        ``get_format`` (default ``'LONG_NAME_FORMAT'``)
    """
    nickname = obj.get_nickname()
    romanized_first_name = obj.get_romanized_first_name()
    romanized_last_name = obj.get_romanized_last_name()
    non_romanized_first_name = obj.get_non_romanized_first_name()
    non_romanized_last_name = obj.get_non_romanized_last_name()
    non_translated_title = obj.get_title()
    non_translated_gender = obj.get_gender()
    # when the title is blank, gettext returns weird header text. So if this
    # occurs, we will pass it on blank without gettext
    if non_translated_title:
        title = gettext(non_translated_title)
    else:
        title = non_translated_title
    if non_translated_gender:
        gender = gettext(non_translated_gender)
    else:
        gender = non_translated_gender
    format_string = u'{}'.format(get_format(setting_name))
    # Map each placeholder letter to a lazy getter so a value (and its
    # ``.upper()`` call) is only computed when the format string actually
    # uses it -- same behaviour as the original 28-line if-chain.
    placeholders = {
        'n': lambda: nickname,
        'N': lambda: nickname.upper(),
        'f': lambda: romanized_first_name,
        'F': lambda: romanized_first_name.upper(),
        'l': lambda: romanized_last_name,
        'L': lambda: romanized_last_name.upper(),
        'a': lambda: non_romanized_first_name,
        'A': lambda: non_romanized_first_name.upper(),
        'x': lambda: non_romanized_last_name,
        'X': lambda: non_romanized_last_name.upper(),
        't': lambda: title,
        'T': lambda: title.upper(),
        'g': lambda: gender,
        'G': lambda: gender.upper(),
    }
    format_kwargs = {key: getter() for key, getter in placeholders.items()
                     if '{%s}' % key in format_string}
    return format_string.format(**format_kwargs)
|
[
"def",
"get_name",
"(",
"obj",
",",
"setting_name",
"=",
"'LONG_NAME_FORMAT'",
")",
":",
"nickname",
"=",
"obj",
".",
"get_nickname",
"(",
")",
"romanized_first_name",
"=",
"obj",
".",
"get_romanized_first_name",
"(",
")",
"romanized_last_name",
"=",
"obj",
".",
"get_romanized_last_name",
"(",
")",
"non_romanized_first_name",
"=",
"obj",
".",
"get_non_romanized_first_name",
"(",
")",
"non_romanized_last_name",
"=",
"obj",
".",
"get_non_romanized_last_name",
"(",
")",
"non_translated_title",
"=",
"obj",
".",
"get_title",
"(",
")",
"non_translated_gender",
"=",
"obj",
".",
"get_gender",
"(",
")",
"# when the title is blank, gettext returns weird header text. So if this",
"# occurs, we will pass it on blank without gettext",
"if",
"non_translated_title",
":",
"title",
"=",
"gettext",
"(",
"non_translated_title",
")",
"else",
":",
"title",
"=",
"non_translated_title",
"if",
"non_translated_gender",
":",
"gender",
"=",
"gettext",
"(",
"non_translated_gender",
")",
"else",
":",
"gender",
"=",
"non_translated_gender",
"format_string",
"=",
"u'{}'",
".",
"format",
"(",
"get_format",
"(",
"setting_name",
")",
")",
"format_kwargs",
"=",
"{",
"}",
"if",
"'{n}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'n'",
":",
"nickname",
"}",
")",
"if",
"'{N}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'N'",
":",
"nickname",
".",
"upper",
"(",
")",
"}",
")",
"if",
"'{f}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'f'",
":",
"romanized_first_name",
"}",
")",
"if",
"'{F}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'F'",
":",
"romanized_first_name",
".",
"upper",
"(",
")",
"}",
")",
"if",
"'{l}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'l'",
":",
"romanized_last_name",
"}",
")",
"if",
"'{L}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'L'",
":",
"romanized_last_name",
".",
"upper",
"(",
")",
"}",
")",
"if",
"'{a}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'a'",
":",
"non_romanized_first_name",
"}",
")",
"if",
"'{A}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'A'",
":",
"non_romanized_first_name",
".",
"upper",
"(",
")",
"}",
")",
"if",
"'{x}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'x'",
":",
"non_romanized_last_name",
"}",
")",
"if",
"'{X}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'X'",
":",
"non_romanized_last_name",
".",
"upper",
"(",
")",
"}",
")",
"if",
"'{t}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'t'",
":",
"title",
"}",
")",
"if",
"'{T}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'T'",
":",
"title",
".",
"upper",
"(",
")",
"}",
")",
"if",
"'{g}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'g'",
":",
"gender",
"}",
")",
"if",
"'{G}'",
"in",
"format_string",
":",
"format_kwargs",
".",
"update",
"(",
"{",
"'G'",
":",
"gender",
".",
"upper",
"(",
")",
"}",
")",
"return",
"format_string",
".",
"format",
"(",
"*",
"*",
"format_kwargs",
")"
] | 40.054545 | 14.927273 |
def get_configs(cls):
"""Get rate limiters configuration
specified at application level
:rtype: dict of configurations
"""
import docido_sdk.config
http_config = docido_sdk.config.get('http') or {}
session_config = http_config.get('session') or {}
rate_limits = session_config.get('rate_limit') or {}
return rate_limits
|
[
"def",
"get_configs",
"(",
"cls",
")",
":",
"import",
"docido_sdk",
".",
"config",
"http_config",
"=",
"docido_sdk",
".",
"config",
".",
"get",
"(",
"'http'",
")",
"or",
"{",
"}",
"session_config",
"=",
"http_config",
".",
"get",
"(",
"'session'",
")",
"or",
"{",
"}",
"rate_limits",
"=",
"session_config",
".",
"get",
"(",
"'rate_limit'",
")",
"or",
"{",
"}",
"return",
"rate_limits"
] | 34.727273 | 12.636364 |
def gray2qimage(gray, normalize = False):
    """Convert the 2D numpy array `gray` into a 8-bit, indexed QImage_
    with a gray colormap. The first dimension represents the vertical
    image axis.
    The parameter `normalize` can be used to normalize an image's
    value range to 0..255:
    `normalize` = (nmin, nmax):
      scale & clip image values from nmin..nmax to 0..255
    `normalize` = nmax:
      lets nmin default to zero, i.e. scale & clip the range 0..nmax
      to 0..255
    `normalize` = True:
      scale image values to 0..255 (same as passing (gray.min(),
      gray.max()))
    If the source array `gray` contains masked values, the result will
    have only 255 shades of gray, and one color map entry will be used
    to make the corresponding pixels transparent.
    A full alpha channel cannot be supported with indexed images;
    instead, use `array2qimage` to convert into a 32-bit QImage.
    :param gray: image data which should be converted (copied) into a QImage_
    :type gray: 2D or 3D numpy.ndarray_ or `numpy.ma.array <masked arrays>`_
    :param normalize: normalization parameter (see above, default: no value changing)
    :type normalize: bool, scalar, or pair
    :rtype: QImage_ with RGB32 or ARGB32 format"""
    if _np.ndim(gray) != 2:
        # Parenthesize the conditional expression: previously
        # ``"a" + "b" if cond else ""`` bound as ``("a" + "b") if cond
        # else ""``, so any non-2D, non-3D input raised ValueError("").
        raise ValueError("gray2QImage can only convert 2D arrays" +
                         (" (try using array2qimage)" if _np.ndim(gray) == 3 else ""))
    h, w = gray.shape
    result = _qt.QImage(w, h, _qt.QImage.Format_Indexed8)
    if not _np.ma.is_masked(gray):
        for i in range(256):
            result.setColor(i, _qt.qRgb(i,i,i))
        _qimageview(result)[:] = _normalize255(gray, normalize)
    else:
        # map gray value 1 to gray value 0, in order to make room for
        # transparent colormap entry:
        result.setColor(0, _qt.qRgb(0,0,0))
        for i in range(2, 256):
            result.setColor(i-1, _qt.qRgb(i,i,i))
        _qimageview(result)[:] = _normalize255(gray, normalize, clip = (1, 255)) - 1
        result.setColor(255, 0)
        _qimageview(result)[gray.mask] = 255
    return result
|
[
"def",
"gray2qimage",
"(",
"gray",
",",
"normalize",
"=",
"False",
")",
":",
"if",
"_np",
".",
"ndim",
"(",
"gray",
")",
"!=",
"2",
":",
"raise",
"ValueError",
"(",
"\"gray2QImage can only convert 2D arrays\"",
"+",
"\" (try using array2qimage)\"",
"if",
"_np",
".",
"ndim",
"(",
"gray",
")",
"==",
"3",
"else",
"\"\"",
")",
"h",
",",
"w",
"=",
"gray",
".",
"shape",
"result",
"=",
"_qt",
".",
"QImage",
"(",
"w",
",",
"h",
",",
"_qt",
".",
"QImage",
".",
"Format_Indexed8",
")",
"if",
"not",
"_np",
".",
"ma",
".",
"is_masked",
"(",
"gray",
")",
":",
"for",
"i",
"in",
"range",
"(",
"256",
")",
":",
"result",
".",
"setColor",
"(",
"i",
",",
"_qt",
".",
"qRgb",
"(",
"i",
",",
"i",
",",
"i",
")",
")",
"_qimageview",
"(",
"result",
")",
"[",
":",
"]",
"=",
"_normalize255",
"(",
"gray",
",",
"normalize",
")",
"else",
":",
"# map gray value 1 to gray value 0, in order to make room for",
"# transparent colormap entry:",
"result",
".",
"setColor",
"(",
"0",
",",
"_qt",
".",
"qRgb",
"(",
"0",
",",
"0",
",",
"0",
")",
")",
"for",
"i",
"in",
"range",
"(",
"2",
",",
"256",
")",
":",
"result",
".",
"setColor",
"(",
"i",
"-",
"1",
",",
"_qt",
".",
"qRgb",
"(",
"i",
",",
"i",
",",
"i",
")",
")",
"_qimageview",
"(",
"result",
")",
"[",
":",
"]",
"=",
"_normalize255",
"(",
"gray",
",",
"normalize",
",",
"clip",
"=",
"(",
"1",
",",
"255",
")",
")",
"-",
"1",
"result",
".",
"setColor",
"(",
"255",
",",
"0",
")",
"_qimageview",
"(",
"result",
")",
"[",
"gray",
".",
"mask",
"]",
"=",
"255",
"return",
"result"
] | 37.071429 | 24 |
def at_time(self, time, asof=False, axis=None):
"""
Select values at particular time of day (e.g. 9:30AM).
Parameters
----------
time : datetime.time or str
axis : {0 or 'index', 1 or 'columns'}, default 0
.. versionadded:: 0.24.0
Returns
-------
Series or DataFrame
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
See Also
--------
between_time : Select values between particular times of the day.
first : Select initial periods of time series based on a date offset.
last : Select final periods of time series based on a date offset.
DatetimeIndex.indexer_at_time : Get just the index locations for
values at particular time of the day.
Examples
--------
>>> i = pd.date_range('2018-04-09', periods=4, freq='12H')
>>> ts = pd.DataFrame({'A': [1, 2, 3, 4]}, index=i)
>>> ts
A
2018-04-09 00:00:00 1
2018-04-09 12:00:00 2
2018-04-10 00:00:00 3
2018-04-10 12:00:00 4
>>> ts.at_time('12:00')
A
2018-04-09 12:00:00 2
2018-04-10 12:00:00 4
"""
if axis is None:
axis = self._stat_axis_number
axis = self._get_axis_number(axis)
index = self._get_axis(axis)
try:
indexer = index.indexer_at_time(time, asof=asof)
except AttributeError:
raise TypeError('Index must be DatetimeIndex')
return self._take(indexer, axis=axis)
|
[
"def",
"at_time",
"(",
"self",
",",
"time",
",",
"asof",
"=",
"False",
",",
"axis",
"=",
"None",
")",
":",
"if",
"axis",
"is",
"None",
":",
"axis",
"=",
"self",
".",
"_stat_axis_number",
"axis",
"=",
"self",
".",
"_get_axis_number",
"(",
"axis",
")",
"index",
"=",
"self",
".",
"_get_axis",
"(",
"axis",
")",
"try",
":",
"indexer",
"=",
"index",
".",
"indexer_at_time",
"(",
"time",
",",
"asof",
"=",
"asof",
")",
"except",
"AttributeError",
":",
"raise",
"TypeError",
"(",
"'Index must be DatetimeIndex'",
")",
"return",
"self",
".",
"_take",
"(",
"indexer",
",",
"axis",
"=",
"axis",
")"
] | 29.454545 | 20.327273 |
    def linkify_s_by_sd(self, services):
        """Add dependency in service objects

        For every service-dependency item, registers both a notification
        ("act") dependency and a check ("chk") dependency between the
        dependent service and its master service in ``services``.

        :param services: services collection used to resolve and link the
            dependencies
        :return: None
        """
        for servicedep in self:
            # Only used for debugging purpose when loops are detected
            setattr(servicedep, "service_description_string", "undefined")
            setattr(servicedep, "dependent_service_description_string", "undefined")
            # Skip incomplete dependency definitions.
            if getattr(servicedep, 'service_description', None) is None or\
                    getattr(servicedep, 'dependent_service_description', None) is None:
                continue
            # Notification failure dependency.
            services.add_act_dependency(servicedep.dependent_service_description,
                                        servicedep.service_description,
                                        servicedep.notification_failure_criteria,
                                        getattr(servicedep, 'dependency_period', ''),
                                        servicedep.inherits_parent)
            # Check (execution) failure dependency.
            services.add_chk_dependency(servicedep.dependent_service_description,
                                        servicedep.service_description,
                                        servicedep.execution_failure_criteria,
                                        getattr(servicedep, 'dependency_period', ''),
                                        servicedep.inherits_parent)
            # Only used for debugging purpose when loops are detected
            setattr(servicedep, "service_description_string",
                    services[servicedep.service_description].get_name())
            setattr(servicedep, "dependent_service_description_string",
                    services[servicedep.dependent_service_description].get_name())
|
[
"def",
"linkify_s_by_sd",
"(",
"self",
",",
"services",
")",
":",
"for",
"servicedep",
"in",
"self",
":",
"# Only used for debugging purpose when loops are detected",
"setattr",
"(",
"servicedep",
",",
"\"service_description_string\"",
",",
"\"undefined\"",
")",
"setattr",
"(",
"servicedep",
",",
"\"dependent_service_description_string\"",
",",
"\"undefined\"",
")",
"if",
"getattr",
"(",
"servicedep",
",",
"'service_description'",
",",
"None",
")",
"is",
"None",
"or",
"getattr",
"(",
"servicedep",
",",
"'dependent_service_description'",
",",
"None",
")",
"is",
"None",
":",
"continue",
"services",
".",
"add_act_dependency",
"(",
"servicedep",
".",
"dependent_service_description",
",",
"servicedep",
".",
"service_description",
",",
"servicedep",
".",
"notification_failure_criteria",
",",
"getattr",
"(",
"servicedep",
",",
"'dependency_period'",
",",
"''",
")",
",",
"servicedep",
".",
"inherits_parent",
")",
"services",
".",
"add_chk_dependency",
"(",
"servicedep",
".",
"dependent_service_description",
",",
"servicedep",
".",
"service_description",
",",
"servicedep",
".",
"execution_failure_criteria",
",",
"getattr",
"(",
"servicedep",
",",
"'dependency_period'",
",",
"''",
")",
",",
"servicedep",
".",
"inherits_parent",
")",
"# Only used for debugging purpose when loops are detected",
"setattr",
"(",
"servicedep",
",",
"\"service_description_string\"",
",",
"services",
"[",
"servicedep",
".",
"service_description",
"]",
".",
"get_name",
"(",
")",
")",
"setattr",
"(",
"servicedep",
",",
"\"dependent_service_description_string\"",
",",
"services",
"[",
"servicedep",
".",
"dependent_service_description",
"]",
".",
"get_name",
"(",
")",
")"
] | 54.129032 | 30.935484 |
def augknt(knots, order):
    """Augment knot sequence such that some boundary conditions
    are met.

    The first and last knots are each repeated ``order`` additional
    times, as needed for clamped B-spline evaluation.

    Parameters
    ----------
    knots : sequence of numbers
        Non-empty knot sequence.
    order : int
        Number of extra repetitions of each boundary knot.

    Returns
    -------
    numpy.ndarray
        Array of length ``len(knots) + 2 * order``.
    """
    knots = list(knots)
    # Build the list directly instead of abusing list comprehensions for
    # their ``append`` side effects (the original built throwaway lists).
    augmented = [knots[0]] * order + knots + [knots[-1]] * order
    return np.array(augmented)
|
[
"def",
"augknt",
"(",
"knots",
",",
"order",
")",
":",
"a",
"=",
"[",
"]",
"[",
"a",
".",
"append",
"(",
"knots",
"[",
"0",
"]",
")",
"for",
"t",
"in",
"range",
"(",
"0",
",",
"order",
")",
"]",
"[",
"a",
".",
"append",
"(",
"k",
")",
"for",
"k",
"in",
"knots",
"]",
"[",
"a",
".",
"append",
"(",
"knots",
"[",
"-",
"1",
"]",
")",
"for",
"t",
"in",
"range",
"(",
"0",
",",
"order",
")",
"]",
"return",
"np",
".",
"array",
"(",
"a",
")"
] | 33 | 11.125 |
def compute_K_numerical(dataframe, settings=None, keep_dir=None):
    """Infer geometric factors with a finite-element modeling code.

    Intended for meshes with topography or irregular electrode spacings,
    where analytic K factors are not valid.

    Parameters
    ----------
    dataframe : pandas.DataFrame
        Data frame containing the measurement data.
    settings : dict, optional
        Settings required by the modeling backend, e.g.::

            settings = {
                'rho': 100,
                'elem': 'elem.dat',
                'elec': 'elec.dat',
                'sink_node': '100',
                '2D': False,
            }

    keep_dir : path, optional
        If not None, the modeling directory is copied to this location.

    Returns
    -------
    K : :class:`numpy.ndarray`
        Geometric factors (also written directly into the dataframe).
    """
    backend = reda.rcParams.get('geom_factor.inversion_code', 'crtomo')
    # Guard clause: bail out early on any backend we do not support.
    if backend != 'crtomo':
        raise Exception(
            'Inversion code {0} not implemented for K computation'.format(
                backend
            ))
    import reda.utils.geom_fac_crtomo as geom_fac_crtomo
    if keep_dir is not None:
        keep_dir = os.path.abspath(keep_dir)
    return geom_fac_crtomo.compute_K(dataframe, settings, keep_dir)
|
[
"def",
"compute_K_numerical",
"(",
"dataframe",
",",
"settings",
"=",
"None",
",",
"keep_dir",
"=",
"None",
")",
":",
"inversion_code",
"=",
"reda",
".",
"rcParams",
".",
"get",
"(",
"'geom_factor.inversion_code'",
",",
"'crtomo'",
")",
"if",
"inversion_code",
"==",
"'crtomo'",
":",
"import",
"reda",
".",
"utils",
".",
"geom_fac_crtomo",
"as",
"geom_fac_crtomo",
"if",
"keep_dir",
"is",
"not",
"None",
":",
"keep_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"keep_dir",
")",
"K",
"=",
"geom_fac_crtomo",
".",
"compute_K",
"(",
"dataframe",
",",
"settings",
",",
"keep_dir",
")",
"else",
":",
"raise",
"Exception",
"(",
"'Inversion code {0} not implemented for K computation'",
".",
"format",
"(",
"inversion_code",
")",
")",
"return",
"K"
] | 29.065217 | 21.021739 |
def context_processor(self, func):
    """Register the decorated function as a template context processor.

    The function is wrapped into a coroutine and appended to the
    provider list; the wrapped callable is returned so this can be used
    as a decorator::

        @app.ps.jinja2.context_processor
        def my_context():
            return {...}
    """
    provider = to_coroutine(func)
    self.providers.append(provider)
    return provider
|
[
"def",
"context_processor",
"(",
"self",
",",
"func",
")",
":",
"func",
"=",
"to_coroutine",
"(",
"func",
")",
"self",
".",
"providers",
".",
"append",
"(",
"func",
")",
"return",
"func"
] | 28.272727 | 12.363636 |
def sector(self, start_ray, end_ray, start_distance=None, end_distance=None, units='b'):
    """Slices a sector from the selected dataset.

    Slice contains the start and end rays. If start and end rays are equal
    one ray is returned. If the start_ray is greater than the end_ray
    slicing continues over the 359-0 border.

    Parameters
    ----------
    start_ray : int
        Starting ray of of the slice first ray is 0
    end_ray : int
        End ray of the slice, last ray is 359

    Keywords
    --------
    start_distance : int
        Starting distance of the slice, if not defined sector starts
        form zero
    end_distance : int
        Ending distance of the slice, if not defined sector continues to
        the end last ray of the dataset
    units : str
        Units used in distance slicing. Option 'b' means that bin number
        is used as index. Option 'm' means that meters are used and the
        slicing index is calculated using bin width.

    Returns
    -------
    sector : ndarray
        Numpy array containing the sector values

    Raises
    ------
    ValueError
        If no dataset is selected or the ray indices are out of range.
    MissingMetadataError
        If metric units are requested but the 'rscale' attribute cannot
        be read from the file metadata.

    Examples
    --------
    Get one ray from the selected dataset

    >>> pvol = odimPVOL('pvol.h5')
    >>> pvol.select_dataset('A', 'DBZH')
    >>> ray = pvol.sector(10, 10)

    Get sector from selected dataset, rays from 100 to 200
    at distances from 5 km to 10 km.

    >>> pvol = odimPVOL('pvol.h5')
    >>> pvol.select_dataset('A', 'DBZH')
    >>> sector = pvol.sector(100, 200, 5000, 10000)
    """
    if self.dataset is None:
        raise ValueError('Dataset is not selected')

    # Validate parameter values
    ray_max, distance_max = self.dataset.shape
    if start_ray > ray_max:
        raise ValueError('Value of start_ray is bigger than the number of rays')
    if start_ray < 0:
        raise ValueError('start_ray must be non negative')

    # Resolve the bin width once, up front, whenever metric units are in
    # use.  The previous implementation only fetched 'rscale' inside the
    # start_distance branch, so calling with units='m', start_distance=None
    # and a metric end_distance raised NameError instead of slicing.
    rscale = None
    if units == 'm' and (start_distance is not None or end_distance is not None):
        try:
            rscale = next(self.attr_gen('rscale')).value
        except Exception:
            raise MissingMetadataError

    if start_distance is None:
        start_distance_index = 0
    elif units == 'b':
        start_distance_index = start_distance
    elif units == 'm':
        start_distance_index = int(start_distance / rscale)

    if end_distance is None:
        end_distance_index = self.dataset.shape[1]
    elif units == 'b':
        end_distance_index = end_distance
    elif units == 'm':
        end_distance_index = int(end_distance / rscale)

    if end_ray is None:
        # Single ray requested: return a 1-D slice.
        sector = self.dataset[start_ray, start_distance_index:end_distance_index]
    elif start_ray <= end_ray:
        sector = self.dataset[start_ray:end_ray + 1,
                              start_distance_index:end_distance_index]
    else:
        # Slice wraps over the 359 -> 0 ray border: take the tail and the
        # head separately and join them in ray order.
        sector1 = self.dataset[start_ray:, start_distance_index:end_distance_index]
        sector2 = self.dataset[:end_ray + 1, start_distance_index:end_distance_index]
        sector = np.concatenate((sector1, sector2), axis=0)
    return sector
|
[
"def",
"sector",
"(",
"self",
",",
"start_ray",
",",
"end_ray",
",",
"start_distance",
"=",
"None",
",",
"end_distance",
"=",
"None",
",",
"units",
"=",
"'b'",
")",
":",
"if",
"self",
".",
"dataset",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Dataset is not selected'",
")",
"# Validate parameter values ",
"ray_max",
",",
"distance_max",
"=",
"self",
".",
"dataset",
".",
"shape",
"if",
"start_ray",
">",
"ray_max",
":",
"raise",
"ValueError",
"(",
"'Value of start_ray is bigger than the number of rays'",
")",
"if",
"start_ray",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"'start_ray must be non negative'",
")",
"if",
"start_distance",
"is",
"None",
":",
"start_distance_index",
"=",
"0",
"else",
":",
"if",
"units",
"==",
"'b'",
":",
"start_distance_index",
"=",
"start_distance",
"elif",
"units",
"==",
"'m'",
":",
"try",
":",
"rscale",
"=",
"next",
"(",
"self",
".",
"attr_gen",
"(",
"'rscale'",
")",
")",
".",
"value",
"except",
":",
"raise",
"MissingMetadataError",
"start_distance_index",
"=",
"int",
"(",
"start_distance",
"/",
"rscale",
")",
"if",
"end_distance",
"is",
"None",
":",
"end_distance_index",
"=",
"self",
".",
"dataset",
".",
"shape",
"[",
"1",
"]",
"else",
":",
"if",
"units",
"==",
"'b'",
":",
"end_distance_index",
"=",
"end_distance",
"elif",
"units",
"==",
"'m'",
":",
"end_distance_index",
"=",
"int",
"(",
"end_distance",
"/",
"rscale",
")",
"if",
"end_ray",
"is",
"None",
":",
"sector",
"=",
"self",
".",
"dataset",
"[",
"start_ray",
",",
"start_distance_index",
":",
"end_distance_index",
"]",
"else",
":",
"if",
"start_ray",
"<=",
"end_ray",
":",
"sector",
"=",
"self",
".",
"dataset",
"[",
"start_ray",
":",
"end_ray",
"+",
"1",
",",
"start_distance_index",
":",
"end_distance_index",
"]",
"else",
":",
"sector1",
"=",
"self",
".",
"dataset",
"[",
"start_ray",
":",
",",
"start_distance_index",
":",
"end_distance_index",
"]",
"sector2",
"=",
"self",
".",
"dataset",
"[",
":",
"end_ray",
"+",
"1",
",",
"start_distance_index",
":",
"end_distance_index",
"]",
"sector",
"=",
"np",
".",
"concatenate",
"(",
"(",
"sector1",
",",
"sector2",
")",
",",
"axis",
"=",
"0",
")",
"return",
"sector"
] | 38.044944 | 21.101124 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.