| repo (string, 7-54 chars) | path (string, 4-192 chars) | url (string, 87-284 chars) | code (string, 78-104k chars) | code_tokens (sequence) | docstring (string, 1-46.9k chars) | docstring_tokens (sequence) | language (stringclasses, 1 value) | partition (stringclasses, 3 values) |
---|---|---|---|---|---|---|---|---|
Unbabel/unbabel-py | unbabel/api.py | https://github.com/Unbabel/unbabel-py/blob/3bd6397174e184d89d2a11149d87be5d12570c64/unbabel/api.py#L451-L476 | def get_language_pairs(self, train_langs=None):
'''
Returns the language pairs available on unbabel
'''
if train_langs is None:
result = self.api_call('language_pair/')
else:
result = self.api_call(
'language_pair/?train_langs={}'.format(train_langs))
try:
langs_json = json.loads(result.content)
if 'error' in langs_json:
return []
languages = [LangPair(Language(
shortname=lang_json["lang_pair"]["source_language"][
"shortname"],
name=lang_json["lang_pair"]["source_language"]["name"]),
Language(shortname=lang_json["lang_pair"][
"target_language"]["shortname"],
name=lang_json["lang_pair"][
"target_language"]["name"])
) for lang_json in langs_json["objects"]]
except Exception, e:
log.exception("Error decoding get language pairs")
raise e
return languages | [
"def",
"get_language_pairs",
"(",
"self",
",",
"train_langs",
"=",
"None",
")",
":",
"if",
"train_langs",
"is",
"None",
":",
"result",
"=",
"self",
".",
"api_call",
"(",
"'language_pair/'",
")",
"else",
":",
"result",
"=",
"self",
".",
"api_call",
"(",
"'language_pair/?train_langs={}'",
".",
"format",
"(",
"train_langs",
")",
")",
"try",
":",
"langs_json",
"=",
"json",
".",
"loads",
"(",
"result",
".",
"content",
")",
"if",
"'error'",
"in",
"langs_json",
":",
"return",
"[",
"]",
"languages",
"=",
"[",
"LangPair",
"(",
"Language",
"(",
"shortname",
"=",
"lang_json",
"[",
"\"lang_pair\"",
"]",
"[",
"\"source_language\"",
"]",
"[",
"\"shortname\"",
"]",
",",
"name",
"=",
"lang_json",
"[",
"\"lang_pair\"",
"]",
"[",
"\"source_language\"",
"]",
"[",
"\"name\"",
"]",
")",
",",
"Language",
"(",
"shortname",
"=",
"lang_json",
"[",
"\"lang_pair\"",
"]",
"[",
"\"target_language\"",
"]",
"[",
"\"shortname\"",
"]",
",",
"name",
"=",
"lang_json",
"[",
"\"lang_pair\"",
"]",
"[",
"\"target_language\"",
"]",
"[",
"\"name\"",
"]",
")",
")",
"for",
"lang_json",
"in",
"langs_json",
"[",
"\"objects\"",
"]",
"]",
"except",
"Exception",
",",
"e",
":",
"log",
".",
"exception",
"(",
"\"Error decoding get language pairs\"",
")",
"raise",
"e",
"return",
"languages"
] | Returns the language pairs available on unbabel | [
"Returns",
"the",
"language",
"pairs",
"available",
"on",
"unbabel"
] | python | train |
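A minimal usage sketch for `get_language_pairs` above; the `UnbabelApi` class name, constructor arguments, and credentials are assumptions, not taken from this record.

```python
# Hypothetical usage; the UnbabelApi constructor signature is assumed.
from unbabel.api import UnbabelApi

api = UnbabelApi(username='example-user', api_key='example-key')
pairs = api.get_language_pairs()  # [] when the API payload carries an 'error' key
for pair in pairs:
    print(pair)
```

Per the body above, JSON decoding failures are logged and re-raised rather than swallowed.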
adamziel/python_translate | python_translate/translations.py | https://github.com/adamziel/python_translate/blob/0aee83f434bd2d1b95767bcd63adb7ac7036c7df/python_translate/translations.py#L114-L130 | def get(self, id, domain='messages'):
"""
Gets a message translation.
@rtype: str
@return: The message translation
"""
assert isinstance(id, (str, unicode))
assert isinstance(domain, (str, unicode))
if self.defines(id, domain):
return self.messages[domain][id]
if self.fallback_catalogue is not None:
return self.fallback_catalogue.get(id, domain)
return id | [
"def",
"get",
"(",
"self",
",",
"id",
",",
"domain",
"=",
"'messages'",
")",
":",
"assert",
"isinstance",
"(",
"id",
",",
"(",
"str",
",",
"unicode",
")",
")",
"assert",
"isinstance",
"(",
"domain",
",",
"(",
"str",
",",
"unicode",
")",
")",
"if",
"self",
".",
"defines",
"(",
"id",
",",
"domain",
")",
":",
"return",
"self",
".",
"messages",
"[",
"domain",
"]",
"[",
"id",
"]",
"if",
"self",
".",
"fallback_catalogue",
"is",
"not",
"None",
":",
"return",
"self",
".",
"fallback_catalogue",
".",
"get",
"(",
"id",
",",
"domain",
")",
"return",
"id"
] | Gets a message translation.
@rtype: str
@return: The message translation | [
"Gets",
"a",
"message",
"translation",
"."
] | python | train |
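`get` walks the fallback chain and ultimately returns the id itself when no catalogue defines the message. A hedged sketch; `MessageCatalogue` and its constructor are assumed to mirror the Symfony translation API this library ports.

```python
# Hypothetical usage; the (locale, messages-dict) constructor is an assumption.
from python_translate.translations import MessageCatalogue

catalogue = MessageCatalogue('en', {'messages': {'hello': 'Hello!'}})
print(catalogue.get('hello'))    # 'Hello!'
print(catalogue.get('missing'))  # defined nowhere -> returns the id: 'missing'
```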
gabstopper/smc-python | smc/core/interfaces.py | https://github.com/gabstopper/smc-python/blob/e027b8a5dcfaf884eada32d113d41c1e56b32457/smc/core/interfaces.py#L143-L156 | def set_primary_heartbeat(self, interface_id):
"""
Set this interface as the primary heartbeat for this engine.
This will 'unset' the current primary heartbeat and move to
specified interface_id.
Clusters and Master NGFW Engines only.
:param str,int interface_id: interface specified for primary mgmt
:raises InterfaceNotFound: specified interface is not found
:raises UpdateElementFailed: failed modifying interfaces
:return: None
"""
self.interface.set_unset(interface_id, 'primary_heartbeat')
self._engine.update() | [
"def",
"set_primary_heartbeat",
"(",
"self",
",",
"interface_id",
")",
":",
"self",
".",
"interface",
".",
"set_unset",
"(",
"interface_id",
",",
"'primary_heartbeat'",
")",
"self",
".",
"_engine",
".",
"update",
"(",
")"
] | Set this interface as the primary heartbeat for this engine.
This will 'unset' the current primary heartbeat and move to
specified interface_id.
Clusters and Master NGFW Engines only.
:param str,int interface_id: interface specified for primary mgmt
:raises InterfaceNotFound: specified interface is not found
:raises UpdateElementFailed: failed modifying interfaces
:return: None | [
"Set",
"this",
"interface",
"as",
"the",
"primary",
"heartbeat",
"for",
"this",
"engine",
".",
"This",
"will",
"unset",
"the",
"current",
"primary",
"heartbeat",
"and",
"move",
"to",
"specified",
"interface_id",
".",
"Clusters",
"and",
"Master",
"NGFW",
"Engines",
"only",
".",
":",
"param",
"str",
"int",
"interface_id",
":",
"interface",
"specified",
"for",
"primary",
"mgmt",
":",
"raises",
"InterfaceNotFound",
":",
"specified",
"interface",
"is",
"not",
"found",
":",
"raises",
"UpdateElementFailed",
":",
"failed",
"modifying",
"interfaces",
":",
"return",
":",
"None"
] | python | train |
quantumlib/Cirq | cirq/devices/noise_model.py | https://github.com/quantumlib/Cirq/blob/0827da80dd7880e5b923eb69407e980ed9bc0bd2/cirq/devices/noise_model.py#L75-L92 | def noisy_moment(self, moment: 'cirq.Moment',
system_qubits: Sequence['cirq.Qid']) -> 'cirq.OP_TREE':
"""Adds noise to the operations from a moment.
Args:
moment: The moment to add noise to.
system_qubits: A list of all qubits in the system.
Returns:
An OP_TREE corresponding to the noisy operations for the moment.
"""
if not hasattr(self.noisy_moments, '_not_overridden'):
return self.noisy_moments([moment], system_qubits)
if not hasattr(self.noisy_operation, '_not_overridden'):
return [self.noisy_operation(op) for op in moment]
assert False, 'Should be unreachable.' | [
"def",
"noisy_moment",
"(",
"self",
",",
"moment",
":",
"'cirq.Moment'",
",",
"system_qubits",
":",
"Sequence",
"[",
"'cirq.Qid'",
"]",
")",
"->",
"'cirq.OP_TREE'",
":",
"if",
"not",
"hasattr",
"(",
"self",
".",
"noisy_moments",
",",
"'_not_overridden'",
")",
":",
"return",
"self",
".",
"noisy_moments",
"(",
"[",
"moment",
"]",
",",
"system_qubits",
")",
"if",
"not",
"hasattr",
"(",
"self",
".",
"noisy_operation",
",",
"'_not_overridden'",
")",
":",
"return",
"[",
"self",
".",
"noisy_operation",
"(",
"op",
")",
"for",
"op",
"in",
"moment",
"]",
"assert",
"False",
",",
"'Should be unreachable.'"
] | Adds noise to the operations from a moment.
Args:
moment: The moment to add noise to.
system_qubits: A list of all qubits in the system.
Returns:
An OP_TREE corresponding to the noisy operations for the moment. | [
"Adds",
"noise",
"to",
"the",
"operations",
"from",
"a",
"moment",
"."
] | python | train |
alantygel/ckanext-semantictags | ckanext/semantictags/db.py | https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L60-L74 | def by_id(cls, semantictag_id, autoflush=True):
'''Return the semantic tag with the given id, or None.
:param semantictag_id: the id of the semantic tag to return
:type semantictag_id: string
:returns: the semantic tag with the given id, or None if there is no tag with
that id
:rtype: ckan.model.semantictag.SemanticTag # TODO check this
'''
query = meta.Session.query(SemanticTag).filter(SemanticTag.id==semantictag_id)
query = query.autoflush(autoflush)
semantictag = query.first()
return semantictag | [
"def",
"by_id",
"(",
"cls",
",",
"semantictag_id",
",",
"autoflush",
"=",
"True",
")",
":",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"SemanticTag",
")",
".",
"filter",
"(",
"SemanticTag",
".",
"id",
"==",
"semantictag_id",
")",
"query",
"=",
"query",
".",
"autoflush",
"(",
"autoflush",
")",
"semantictag",
"=",
"query",
".",
"first",
"(",
")",
"return",
"semantictag"
] | Return the semantic tag with the given id, or None.
:param semantictag_id: the id of the semantic tag to return
:type semantictag_id: string
:returns: the semantic tag with the given id, or None if there is no tag with
that id
:rtype: ckan.model.semantictag.SemanticTag # TODO check this | [
"Return",
"the",
"semantic",
"tag",
"with",
"the",
"given",
"id",
"or",
"None",
"."
] | python | train |
bicv/LogGabor | LogGabor/LogGabor.py | https://github.com/bicv/LogGabor/blob/dea9560d8752cc9aa040ac3fd895cf9bb72b61f4/LogGabor/LogGabor.py#L61-L120 | def golden_pyramid(self, z, mask=False, spiral=True, fig_width=13):
"""
The Golden Laplacian Pyramid.
To represent the edges of the image at different levels, we may use a simple recursive approach constructing progressively a set of images of decreasing sizes, from a base to the summit of a pyramid. Using simple down-scaling and up-scaling operators we may approximate well a Laplacian operator. This is represented here by stacking images on a Golden Rectangle, that is where the aspect ratio is the golden section $\phi \eqdef \frac{1+\sqrt{5}}{2}$. We present here the base image on the left and the successive levels of the pyramid in a clockwise fashion (for clarity, we stopped at level $8$). Note that here we also use $\phi^2$ (that is $\phi+1$) as the down-scaling factor so that the resolution of the pyramid images corresponds across scales. Note at last that coefficients are very kurtotic: most are near zero, the distribution of coefficients has long tails.
"""
import matplotlib.pyplot as plt
opts= {'vmin':0., 'vmax':1., 'interpolation':'nearest', 'origin':'upper'}
N_X, N_Y = z.shape[0], z.shape[1]
if spiral:
phi = (np.sqrt(5)+1.)/2. # golden ratio
fig = plt.figure(figsize=(fig_width, N_X/N_Y*fig_width/phi), frameon=True)
xmin, ymin, size = 0, 0, 1.
else:
fig = plt.figure(figsize=(fig_width, N_X/N_Y*fig_width*self.n_levels), frameon=True)
axs = []
for i_sf_0 in range(len(self.sf_0)):
if spiral:
# https://matplotlib.org/api/_as_gen/matplotlib.figure.Figure.html#matplotlib.figure.Figure.add_axes says:
# Add an axes at position rect [left, bottom, width, height] where all quantities are in fractions of figure width and height.
ax = fig.add_axes((xmin/phi, ymin, size/phi, size), facecolor='w')
else:
ax = fig.add_axes((0, i_sf_0/self.n_levels, 1, 1/self.n_levels), facecolor='w')
ax.axis(c='r', lw=1)
plt.setp(ax, xticks=[], yticks=[])
im_RGB = np.zeros((self.pe.N_X, self.pe.N_Y, 3))
for i_theta, theta_ in enumerate(self.theta):
im_abs = np.absolute(z[:, :, i_theta, i_sf_0])
RGB = np.array([.5*np.sin(2*theta_ + 2*i*np.pi/3)+.5 for i in range(3)])
im_RGB += im_abs[:,:, np.newaxis] * RGB[np.newaxis, np.newaxis, :]
im_RGB /= im_RGB.max()
ax.imshow(1-im_RGB, **opts)
#ax.grid(b=False, which="both")
if mask:
linewidth_mask = 1 #
from matplotlib.patches import Ellipse
circ = Ellipse((.5*self.pe.N_Y, .5*self.pe.N_X),
self.pe.N_Y-linewidth_mask, self.pe.N_X-linewidth_mask,
fill=False, facecolor='none', edgecolor = 'black', alpha = 0.5, ls='dashed', lw=linewidth_mask)
ax.add_patch(circ)
if spiral:
i_orientation = np.mod(i_sf_0, 4)
if i_orientation==0:
xmin += size
ymin += size/phi**2
elif i_orientation==1:
xmin += size/phi**2
ymin += -size/phi
elif i_orientation==2:
xmin += -size/phi
elif i_orientation==3:
ymin += size
size /= phi
axs.append(ax)
return fig, axs | [
"def",
"golden_pyramid",
"(",
"self",
",",
"z",
",",
"mask",
"=",
"False",
",",
"spiral",
"=",
"True",
",",
"fig_width",
"=",
"13",
")",
":",
"import",
"matplotlib",
".",
"pyplot",
"as",
"plt",
"opts",
"=",
"{",
"'vmin'",
":",
"0.",
",",
"'vmax'",
":",
"1.",
",",
"'interpolation'",
":",
"'nearest'",
",",
"'origin'",
":",
"'upper'",
"}",
"N_X",
",",
"N_Y",
"=",
"z",
".",
"shape",
"[",
"0",
"]",
",",
"z",
".",
"shape",
"[",
"1",
"]",
"if",
"spiral",
":",
"phi",
"=",
"(",
"np",
".",
"sqrt",
"(",
"5",
")",
"+",
"1.",
")",
"/",
"2.",
"# golden ratio",
"fig",
"=",
"plt",
".",
"figure",
"(",
"figsize",
"=",
"(",
"fig_width",
",",
"N_X",
"/",
"N_Y",
"*",
"fig_width",
"/",
"phi",
")",
",",
"frameon",
"=",
"True",
")",
"xmin",
",",
"ymin",
",",
"size",
"=",
"0",
",",
"0",
",",
"1.",
"else",
":",
"fig",
"=",
"plt",
".",
"figure",
"(",
"figsize",
"=",
"(",
"fig_width",
",",
"N_X",
"/",
"N_Y",
"*",
"fig_width",
"*",
"self",
".",
"n_levels",
")",
",",
"frameon",
"=",
"True",
")",
"axs",
"=",
"[",
"]",
"for",
"i_sf_0",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"sf_0",
")",
")",
":",
"if",
"spiral",
":",
"# https://matplotlib.org/api/_as_gen/matplotlib.figure.Figure.html#matplotlib.figure.Figure.add_axes says:",
"# Add an axes at position rect [left, bottom, width, height] where all quantities are in fractions of figure width and height.",
"ax",
"=",
"fig",
".",
"add_axes",
"(",
"(",
"xmin",
"/",
"phi",
",",
"ymin",
",",
"size",
"/",
"phi",
",",
"size",
")",
",",
"facecolor",
"=",
"'w'",
")",
"else",
":",
"ax",
"=",
"fig",
".",
"add_axes",
"(",
"(",
"0",
",",
"i_sf_0",
"/",
"self",
".",
"n_levels",
",",
"1",
",",
"1",
"/",
"self",
".",
"n_levels",
")",
",",
"facecolor",
"=",
"'w'",
")",
"ax",
".",
"axis",
"(",
"c",
"=",
"'r'",
",",
"lw",
"=",
"1",
")",
"plt",
".",
"setp",
"(",
"ax",
",",
"xticks",
"=",
"[",
"]",
",",
"yticks",
"=",
"[",
"]",
")",
"im_RGB",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"pe",
".",
"N_X",
",",
"self",
".",
"pe",
".",
"N_Y",
",",
"3",
")",
")",
"for",
"i_theta",
",",
"theta_",
"in",
"enumerate",
"(",
"self",
".",
"theta",
")",
":",
"im_abs",
"=",
"np",
".",
"absolute",
"(",
"z",
"[",
":",
",",
":",
",",
"i_theta",
",",
"i_sf_0",
"]",
")",
"RGB",
"=",
"np",
".",
"array",
"(",
"[",
".5",
"*",
"np",
".",
"sin",
"(",
"2",
"*",
"theta_",
"+",
"2",
"*",
"i",
"*",
"np",
".",
"pi",
"/",
"3",
")",
"+",
".5",
"for",
"i",
"in",
"range",
"(",
"3",
")",
"]",
")",
"im_RGB",
"+=",
"im_abs",
"[",
":",
",",
":",
",",
"np",
".",
"newaxis",
"]",
"*",
"RGB",
"[",
"np",
".",
"newaxis",
",",
"np",
".",
"newaxis",
",",
":",
"]",
"im_RGB",
"/=",
"im_RGB",
".",
"max",
"(",
")",
"ax",
".",
"imshow",
"(",
"1",
"-",
"im_RGB",
",",
"*",
"*",
"opts",
")",
"#ax.grid(b=False, which=\"both\")",
"if",
"mask",
":",
"linewidth_mask",
"=",
"1",
"#",
"from",
"matplotlib",
".",
"patches",
"import",
"Ellipse",
"circ",
"=",
"Ellipse",
"(",
"(",
".5",
"*",
"self",
".",
"pe",
".",
"N_Y",
",",
".5",
"*",
"self",
".",
"pe",
".",
"N_X",
")",
",",
"self",
".",
"pe",
".",
"N_Y",
"-",
"linewidth_mask",
",",
"self",
".",
"pe",
".",
"N_X",
"-",
"linewidth_mask",
",",
"fill",
"=",
"False",
",",
"facecolor",
"=",
"'none'",
",",
"edgecolor",
"=",
"'black'",
",",
"alpha",
"=",
"0.5",
",",
"ls",
"=",
"'dashed'",
",",
"lw",
"=",
"linewidth_mask",
")",
"ax",
".",
"add_patch",
"(",
"circ",
")",
"if",
"spiral",
":",
"i_orientation",
"=",
"np",
".",
"mod",
"(",
"i_sf_0",
",",
"4",
")",
"if",
"i_orientation",
"==",
"0",
":",
"xmin",
"+=",
"size",
"ymin",
"+=",
"size",
"/",
"phi",
"**",
"2",
"elif",
"i_orientation",
"==",
"1",
":",
"xmin",
"+=",
"size",
"/",
"phi",
"**",
"2",
"ymin",
"+=",
"-",
"size",
"/",
"phi",
"elif",
"i_orientation",
"==",
"2",
":",
"xmin",
"+=",
"-",
"size",
"/",
"phi",
"elif",
"i_orientation",
"==",
"3",
":",
"ymin",
"+=",
"size",
"size",
"/=",
"phi",
"axs",
".",
"append",
"(",
"ax",
")",
"return",
"fig",
",",
"axs"
] | The Golden Laplacian Pyramid.
To represent the edges of the image at different levels, we may use a simple recursive approach constructing progressively a set of images of decreasing sizes, from a base to the summit of a pyramid. Using simple down-scaling and up-scaling operators we may approximate well a Laplacian operator. This is represented here by stacking images on a Golden Rectangle, that is where the aspect ratio is the golden section $\phi \eqdef \frac{1+\sqrt{5}}{2}$. We present here the base image on the left and the successive levels of the pyramid in a clockwise fashion (for clarity, we stopped at level $8$). Note that here we also use $\phi^2$ (that is $\phi+1$) as the down-scaling factor so that the resolution of the pyramid images corresponds across scales. Note at last that coefficients are very kurtotic: most are near zero, the distribution of coefficients has long tails. | [
"The",
"Golden",
"Laplacian",
"Pyramid",
".",
"To",
"represent",
"the",
"edges",
"of",
"the",
"image",
"at",
"different",
"levels",
"we",
"may",
"use",
"a",
"simple",
"recursive",
"approach",
"constructing",
"progressively",
"a",
"set",
"of",
"images",
"of",
"decreasing",
"sizes",
"from",
"a",
"base",
"to",
"the",
"summit",
"of",
"a",
"pyramid",
".",
"Using",
"simple",
"down",
"-",
"scaling",
"and",
"up",
"-",
"scaling",
"operators",
"we",
"may",
"approximate",
"well",
"a",
"Laplacian",
"operator",
".",
"This",
"is",
"represented",
"here",
"by",
"stacking",
"images",
"on",
"a",
"Golden",
"Rectangle",
"that",
"is",
"where",
"the",
"aspect",
"ratio",
"is",
"the",
"golden",
"section",
"$",
"\\",
"phi",
"\\",
"eqdef",
"\\",
"frac",
"{",
"1",
"+",
"\\",
"sqrt",
"{",
"5",
"}}",
"{",
"2",
"}",
"$",
".",
"We",
"present",
"here",
"the",
"base",
"image",
"on",
"the",
"left",
"and",
"the",
"successive",
"levels",
"of",
"the",
"pyramid",
"in",
"a",
"clockwise",
"fashion",
"(",
"for",
"clarity",
"we",
"stopped",
"at",
"level",
"$8$",
")",
".",
"Note",
"that",
"here",
"we",
"also",
"use",
"$",
"\\",
"phi^2$",
"(",
"that",
"is",
"$",
"\\",
"phi",
"+",
"1$",
")",
"as",
"the",
"down",
"-",
"scaling",
"factor",
"so",
"that",
"the",
"resolution",
"of",
"the",
"pyramid",
"images",
"correspond",
"across",
"scales",
".",
"Note",
"at",
"last",
"that",
"coefficient",
"are",
"very",
"kurtotic",
":",
"most",
"are",
"near",
"zero",
"the",
"distribution",
"of",
"coefficients",
"has",
"long",
"tails",
"."
] | python | test |
tensorflow/tensor2tensor | tensor2tensor/insights/graph.py | https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/insights/graph.py#L112-L126 | def get_vertex(self, key):
"""Returns or Creates a Vertex mapped by key.
Args:
key: A string reference for a vertex. May refer to a new Vertex in which
case it will be created.
Returns:
The Vertex mapped to by key.
"""
if key in self.vertex_map:
return self.vertex_map[key]
vertex = self.new_vertex()
self.vertex_map[key] = vertex
return vertex | [
"def",
"get_vertex",
"(",
"self",
",",
"key",
")",
":",
"if",
"key",
"in",
"self",
".",
"vertex_map",
":",
"return",
"self",
".",
"vertex_map",
"[",
"key",
"]",
"vertex",
"=",
"self",
".",
"new_vertex",
"(",
")",
"self",
".",
"vertex_map",
"[",
"key",
"]",
"=",
"vertex",
"return",
"vertex"
] | Returns or Creates a Vertex mapped by key.
Args:
key: A string reference for a vertex. May refer to a new Vertex in which
case it will be created.
Returns:
The Vertex mapped to by key. | [
"Returns",
"or",
"Creates",
"a",
"Vertex",
"mapped",
"by",
"key",
"."
] | python | train |
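`get_vertex` memoizes vertices: asking twice for the same key yields the same object. A small sketch, assuming the module's `Graph` class exposes the `new_vertex` and `vertex_map` members the method relies on.

```python
# Hypothetical usage; Graph() construction is an assumption.
from tensor2tensor.insights.graph import Graph

g = Graph()
v1 = g.get_vertex('step-0')  # created and cached
v2 = g.get_vertex('step-0')  # cache hit
assert v1 is v2
```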
frictionlessdata/datapackage-py | datapackage/pushpull.py | https://github.com/frictionlessdata/datapackage-py/blob/aca085ea54541b087140b58a81332f8728baeeb2/datapackage/pushpull.py#L202-L229 | def _convert_schemas(mapping, schemas):
"""Convert schemas to be compatible with storage schemas.
Foreign keys related operations.
Args:
mapping (dict): mapping between resource name and table name
schemas (list): schemas
Raises:
ValueError: if there is no resource
for some foreign key in given mapping
Returns:
list: converted schemas
"""
schemas = deepcopy(schemas)
for schema in schemas:
for fk in schema.get('foreignKeys', []):
resource = fk['reference']['resource']
if resource != 'self':
if resource not in mapping:
message = 'Not resource "%s" for foreign key "%s"'
message = message % (resource, fk)
raise ValueError(message)
fk['reference']['resource'] = mapping[resource]
return schemas | [
"def",
"_convert_schemas",
"(",
"mapping",
",",
"schemas",
")",
":",
"schemas",
"=",
"deepcopy",
"(",
"schemas",
")",
"for",
"schema",
"in",
"schemas",
":",
"for",
"fk",
"in",
"schema",
".",
"get",
"(",
"'foreignKeys'",
",",
"[",
"]",
")",
":",
"resource",
"=",
"fk",
"[",
"'reference'",
"]",
"[",
"'resource'",
"]",
"if",
"resource",
"!=",
"'self'",
":",
"if",
"resource",
"not",
"in",
"mapping",
":",
"message",
"=",
"'Not resource \"%s\" for foreign key \"%s\"'",
"message",
"=",
"message",
"%",
"(",
"resource",
",",
"fk",
")",
"raise",
"ValueError",
"(",
"message",
")",
"fk",
"[",
"'reference'",
"]",
"[",
"'resource'",
"]",
"=",
"mapping",
"[",
"resource",
"]",
"return",
"schemas"
] | Convert schemas to be compatible with storage schemas.
Foreign keys related operations.
Args:
mapping (dict): mapping between resource name and table name
schemas (list): schemas
Raises:
ValueError: if there is no resource
for some foreign key in given mapping
Returns:
list: converted schemas | [
"Convert",
"schemas",
"to",
"be",
"compatible",
"with",
"storage",
"schemas",
"."
] | python | valid |
connectordb/connectordb-python | connectordb/logger.py | https://github.com/connectordb/connectordb-python/blob/2092b0cb30898139a247176bcf433d5a4abde7cb/connectordb/logger.py#L272-L284 | def start(self):
"""Start the logger background synchronization service. This allows you to not need to
worry about syncing with ConnectorDB - you just insert into the Logger, and the Logger
will be synced every syncperiod."""
with self.synclock:
if self.syncthread is not None:
logging.warn(
"Logger: Start called on a syncer that is already running")
return
self.sync() # Attempt a sync right away
self.__setsync() | [
"def",
"start",
"(",
"self",
")",
":",
"with",
"self",
".",
"synclock",
":",
"if",
"self",
".",
"syncthread",
"is",
"not",
"None",
":",
"logging",
".",
"warn",
"(",
"\"Logger: Start called on a syncer that is already running\"",
")",
"return",
"self",
".",
"sync",
"(",
")",
"# Attempt a sync right away",
"self",
".",
"__setsync",
"(",
")"
] | Start the logger background synchronization service. This allows you to not need to
worry about syncing with ConnectorDB - you just insert into the Logger, and the Logger
will be synced every syncperiod. | [
"Start",
"the",
"logger",
"background",
"synchronization",
"service",
".",
"This",
"allows",
"you",
"to",
"not",
"need",
"to",
"worry",
"about",
"syncing",
"with",
"ConnectorDB",
"-",
"you",
"just",
"insert",
"into",
"the",
"Logger",
"and",
"the",
"Logger",
"will",
"by",
"synced",
"every",
"syncperiod",
"."
] | python | test |
pytroll/trollimage | trollimage/image.py | https://github.com/pytroll/trollimage/blob/d35a7665ad475ff230e457085523e21f2cd3f454/trollimage/image.py#L984-L1014 | def stretch_linear(self, ch_nb, cutoffs=(0.005, 0.005)):
"""Stretch linearly the contrast of the current image on channel
*ch_nb*, using *cutoffs* for left and right trimming.
"""
logger.debug("Perform a linear contrast stretch.")
if((self.channels[ch_nb].size ==
np.ma.count_masked(self.channels[ch_nb])) or
self.channels[ch_nb].min() == self.channels[ch_nb].max()):
logger.warning("Nothing to stretch !")
return
arr = self.channels[ch_nb]
carr = arr.compressed()
logger.debug("Calculate the histogram percentiles: ")
logger.debug("Left and right percentiles: " +
str(cutoffs[0] * 100) + " " + str(cutoffs[1] * 100))
left, right = np.percentile(
carr, [cutoffs[0] * 100, 100. - cutoffs[1] * 100])
delta_x = (right - left)
logger.debug("Interval: left=%f, right=%f width=%f",
left, right, delta_x)
if delta_x > 0.0:
self.channels[ch_nb] = np.ma.array((arr - left) / delta_x,
mask=arr.mask)
else:
logger.warning("Unable to make a contrast stretch!") | [
"def",
"stretch_linear",
"(",
"self",
",",
"ch_nb",
",",
"cutoffs",
"=",
"(",
"0.005",
",",
"0.005",
")",
")",
":",
"logger",
".",
"debug",
"(",
"\"Perform a linear contrast stretch.\"",
")",
"if",
"(",
"(",
"self",
".",
"channels",
"[",
"ch_nb",
"]",
".",
"size",
"==",
"np",
".",
"ma",
".",
"count_masked",
"(",
"self",
".",
"channels",
"[",
"ch_nb",
"]",
")",
")",
"or",
"self",
".",
"channels",
"[",
"ch_nb",
"]",
".",
"min",
"(",
")",
"==",
"self",
".",
"channels",
"[",
"ch_nb",
"]",
".",
"max",
"(",
")",
")",
":",
"logger",
".",
"warning",
"(",
"\"Nothing to stretch !\"",
")",
"return",
"arr",
"=",
"self",
".",
"channels",
"[",
"ch_nb",
"]",
"carr",
"=",
"arr",
".",
"compressed",
"(",
")",
"logger",
".",
"debug",
"(",
"\"Calculate the histogram percentiles: \"",
")",
"logger",
".",
"debug",
"(",
"\"Left and right percentiles: \"",
"+",
"str",
"(",
"cutoffs",
"[",
"0",
"]",
"*",
"100",
")",
"+",
"\" \"",
"+",
"str",
"(",
"cutoffs",
"[",
"1",
"]",
"*",
"100",
")",
")",
"left",
",",
"right",
"=",
"np",
".",
"percentile",
"(",
"carr",
",",
"[",
"cutoffs",
"[",
"0",
"]",
"*",
"100",
",",
"100.",
"-",
"cutoffs",
"[",
"1",
"]",
"*",
"100",
"]",
")",
"delta_x",
"=",
"(",
"right",
"-",
"left",
")",
"logger",
".",
"debug",
"(",
"\"Interval: left=%f, right=%f width=%f\"",
",",
"left",
",",
"right",
",",
"delta_x",
")",
"if",
"delta_x",
">",
"0.0",
":",
"self",
".",
"channels",
"[",
"ch_nb",
"]",
"=",
"np",
".",
"ma",
".",
"array",
"(",
"(",
"arr",
"-",
"left",
")",
"/",
"delta_x",
",",
"mask",
"=",
"arr",
".",
"mask",
")",
"else",
":",
"logger",
".",
"warning",
"(",
"\"Unable to make a contrast stretch!\"",
")"
] | Stretch linearly the contrast of the current image on channel
*ch_nb*, using *cutoffs* for left and right trimming. | [
"Stretch",
"linearly",
"the",
"contrast",
"of",
"the",
"current",
"image",
"on",
"channel",
"*",
"ch_nb",
"*",
"using",
"*",
"cutoffs",
"*",
"for",
"left",
"and",
"right",
"trimming",
"."
] | python | train |
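A hedged sketch of `stretch_linear`; it assumes `img` is a `trollimage.image.Image` whose channels are masked arrays, as the code above expects.

```python
# `img` is assumed to be a trollimage.image.Image with at least one channel.
img.stretch_linear(0, cutoffs=(0.01, 0.01))  # map the 1%/99% percentiles to 0/1
```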
wadda/gps3 | gps3/gps3.py | https://github.com/wadda/gps3/blob/91adcd7073b891b135b2a46d039ce2125cf09a09/gps3/gps3.py#L128-L133 | def close(self):
"""turn off stream and close socket"""
if self.streamSock:
self.watch(enable=False)
self.streamSock.close()
self.streamSock = None | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"streamSock",
":",
"self",
".",
"watch",
"(",
"enable",
"=",
"False",
")",
"self",
".",
"streamSock",
".",
"close",
"(",
")",
"self",
".",
"streamSock",
"=",
"None"
] | turn off stream and close socket | [
"turn",
"off",
"stream",
"and",
"close",
"socket"
] | python | train |
alorence/pysvg-py3 | pysvg/builders.py | https://github.com/alorence/pysvg-py3/blob/ce217a4da3ada44a71d3e2f391d37c67d95c724e/pysvg/builders.py#L23-L44 | def createCircle(self, cx, cy, r, strokewidth=1, stroke='black', fill='none'):
"""
Creates a circle
@type cx: string or int
@param cx: starting x-coordinate
@type cy: string or int
@param cy: starting y-coordinate
@type r: string or int
@param r: radius
@type strokewidth: string or int
@param strokewidth: width of the pen used to draw
@type stroke: string (either css constants like "black" or numerical values like "#FFFFFF")
@param stroke: color with which to draw the outer limits
@type fill: string (either css constants like "black" or numerical values like "#FFFFFF")
@param fill: color with which to fill the element (default: no filling)
@return: a circle object
"""
style_dict = {'fill':fill, 'stroke-width':strokewidth, 'stroke':stroke}
myStyle = StyleBuilder(style_dict)
c = Circle(cx, cy, r)
c.set_style(myStyle.getStyle())
return c | [
"def",
"createCircle",
"(",
"self",
",",
"cx",
",",
"cy",
",",
"r",
",",
"strokewidth",
"=",
"1",
",",
"stroke",
"=",
"'black'",
",",
"fill",
"=",
"'none'",
")",
":",
"style_dict",
"=",
"{",
"'fill'",
":",
"fill",
",",
"'stroke-width'",
":",
"strokewidth",
",",
"'stroke'",
":",
"stroke",
"}",
"myStyle",
"=",
"StyleBuilder",
"(",
"style_dict",
")",
"c",
"=",
"Circle",
"(",
"cx",
",",
"cy",
",",
"r",
")",
"c",
".",
"set_style",
"(",
"myStyle",
".",
"getStyle",
"(",
")",
")",
"return",
"c"
] | Creates a circle
@type cx: string or int
@param cx: starting x-coordinate
@type cy: string or int
@param cy: starting y-coordinate
@type r: string or int
@param r: radius
@type strokewidth: string or int
@param strokewidth: width of the pen used to draw
@type stroke: string (either css constants like "black" or numerical values like "#FFFFFF")
@param stroke: color with which to draw the outer limits
@type fill: string (either css constants like "black" or numerical values like "#FFFFFF")
@param fill: color with which to fill the element (default: no filling)
@return: a circle object | [
"Creates",
"a",
"circle"
] | python | train |
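A usage sketch for `createCircle`; the host class is assumed to be pysvg's `ShapeBuilder`, and the `Svg` container calls follow the pysvg tutorials rather than this record.

```python
# Hypothetical usage; ShapeBuilder as the host class is an assumption.
import pysvg.structure
import pysvg.builders

svg = pysvg.structure.Svg()
builder = pysvg.builders.ShapeBuilder()
circle = builder.createCircle(cx=60, cy=60, r=40,
                              strokewidth=2, stroke='navy', fill='lightblue')
svg.addElement(circle)
svg.save('circle.svg')
```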
gawel/irc3 | irc3/__init__.py | https://github.com/gawel/irc3/blob/cd27840a5809a1f803dc620860fe75d83d2a2ec8/irc3/__init__.py#L297-L301 | def kick(self, channel, target, reason=None):
"""kick target from channel"""
if reason:
target += ' :' + reason
self.send_line('KICK %s %s' % (channel, target), nowait=True) | [
"def",
"kick",
"(",
"self",
",",
"channel",
",",
"target",
",",
"reason",
"=",
"None",
")",
":",
"if",
"reason",
":",
"target",
"+=",
"' :'",
"+",
"reason",
"self",
".",
"send_line",
"(",
"'KICK %s %s'",
"%",
"(",
"channel",
",",
"target",
")",
",",
"nowait",
"=",
"True",
")"
] | kick target from channel | [
"kick",
"target",
"from",
"channel"
] | python | train |
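`kick` appends the optional reason after a space-colon separator and emits a raw KICK line; a two-line sketch, where `bot` is assumed to be a connected `irc3.IrcBot` instance:

```python
# `bot` is assumed to be a connected irc3.IrcBot instance.
bot.kick('#channel', 'baduser')              # -> KICK #channel baduser
bot.kick('#channel', 'baduser', 'flooding')  # -> KICK #channel baduser :flooding
```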
openstax/cnx-easybake | cnxeasybake/oven.py | https://github.com/openstax/cnx-easybake/blob/f8edf018fb7499f6f18af0145c326b93a737a782/cnxeasybake/oven.py#L635-L675 | def lookup(self, vtype, vname, target_id=None):
"""Return value of vname from the variable store vtype.
Valid vtypes are `strings`, `counters`, and `pending`. If the value
is not found in the current steps store, earlier steps will be
checked. If not found, '', 0, or (None, None) is returned.
"""
nullvals = {'strings': '', 'counters': 0, 'pending': (None, None)}
nullval = nullvals[vtype]
vstyle = None
if vtype == 'counters':
if len(vname) > 1:
vname, vstyle = vname
else:
vname = vname[0]
if target_id is not None:
try:
state = self.state[vtype][target_id]
steps = self.state[vtype][target_id].keys()
except KeyError:
log(WARN, u'Bad ID target lookup {}'.format(
target_id).encode('utf-8'))
return nullval
else:
state = self.state
steps = self.state['scope']
for step in steps:
if vname in state[step][vtype]:
if vtype == 'pending':
return(state[step][vtype][vname], step)
else:
val = state[step][vtype][vname]
if vstyle is not None:
return self.counter_style(val, vstyle)
return val
else:
return nullval | [
"def",
"lookup",
"(",
"self",
",",
"vtype",
",",
"vname",
",",
"target_id",
"=",
"None",
")",
":",
"nullvals",
"=",
"{",
"'strings'",
":",
"''",
",",
"'counters'",
":",
"0",
",",
"'pending'",
":",
"(",
"None",
",",
"None",
")",
"}",
"nullval",
"=",
"nullvals",
"[",
"vtype",
"]",
"vstyle",
"=",
"None",
"if",
"vtype",
"==",
"'counters'",
":",
"if",
"len",
"(",
"vname",
")",
">",
"1",
":",
"vname",
",",
"vstyle",
"=",
"vname",
"else",
":",
"vname",
"=",
"vname",
"[",
"0",
"]",
"if",
"target_id",
"is",
"not",
"None",
":",
"try",
":",
"state",
"=",
"self",
".",
"state",
"[",
"vtype",
"]",
"[",
"target_id",
"]",
"steps",
"=",
"self",
".",
"state",
"[",
"vtype",
"]",
"[",
"target_id",
"]",
".",
"keys",
"(",
")",
"except",
"KeyError",
":",
"log",
"(",
"WARN",
",",
"u'Bad ID target lookup {}'",
".",
"format",
"(",
"target_id",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"return",
"nullval",
"else",
":",
"state",
"=",
"self",
".",
"state",
"steps",
"=",
"self",
".",
"state",
"[",
"'scope'",
"]",
"for",
"step",
"in",
"steps",
":",
"if",
"vname",
"in",
"state",
"[",
"step",
"]",
"[",
"vtype",
"]",
":",
"if",
"vtype",
"==",
"'pending'",
":",
"return",
"(",
"state",
"[",
"step",
"]",
"[",
"vtype",
"]",
"[",
"vname",
"]",
",",
"step",
")",
"else",
":",
"val",
"=",
"state",
"[",
"step",
"]",
"[",
"vtype",
"]",
"[",
"vname",
"]",
"if",
"vstyle",
"is",
"not",
"None",
":",
"return",
"self",
".",
"counter_style",
"(",
"val",
",",
"vstyle",
")",
"return",
"val",
"else",
":",
"return",
"nullval"
] | Return value of vname from the variable store vtype.
Valid vtypes are `strings`, `counters`, and `pending`. If the value
is not found in the current steps store, earlier steps will be
checked. If not found, '', 0, or (None, None) is returned. | [
"Return",
"value",
"of",
"vname",
"from",
"the",
"variable",
"store",
"vtype",
"."
] | python | train |
ga4gh/ga4gh-client | ga4gh/client/client.py | https://github.com/ga4gh/ga4gh-client/blob/d23b00b89112ef0930d45ee75aa3c6de3db615c5/ga4gh/client/client.py#L568-L582 | def search_continuous_sets(self, dataset_id):
"""
Returns an iterator over the ContinuousSets fulfilling the specified
conditions from the specified Dataset.
:param str dataset_id: The ID of the
:class:`ga4gh.protocol.Dataset` of interest.
:return: An iterator over the :class:`ga4gh.protocol.ContinuousSet`
objects defined by the query parameters.
"""
request = protocol.SearchContinuousSetsRequest()
request.dataset_id = dataset_id
request.page_size = pb.int(self._page_size)
return self._run_search_request(
request, "continuoussets", protocol.SearchContinuousSetsResponse) | [
"def",
"search_continuous_sets",
"(",
"self",
",",
"dataset_id",
")",
":",
"request",
"=",
"protocol",
".",
"SearchContinuousSetsRequest",
"(",
")",
"request",
".",
"dataset_id",
"=",
"dataset_id",
"request",
".",
"page_size",
"=",
"pb",
".",
"int",
"(",
"self",
".",
"_page_size",
")",
"return",
"self",
".",
"_run_search_request",
"(",
"request",
",",
"\"continuoussets\"",
",",
"protocol",
".",
"SearchContinuousSetsResponse",
")"
] | Returns an iterator over the ContinuousSets fulfilling the specified
conditions from the specified Dataset.
:param str dataset_id: The ID of the
:class:`ga4gh.protocol.Dataset` of interest.
:return: An iterator over the :class:`ga4gh.protocol.ContinuousSet`
objects defined by the query parameters. | [
"Returns",
"an",
"iterator",
"over",
"the",
"ContinuousSets",
"fulfilling",
"the",
"specified",
"conditions",
"from",
"the",
"specified",
"Dataset",
"."
] | python | train |
apache/incubator-heron | heron/tools/cli/src/python/result.py | https://github.com/apache/incubator-heron/blob/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac/heron/tools/cli/src/python/result.py#L94-L101 | def add_context(self, err_context, succ_context=None):
""" Prepend msg to add some context information
:param err_context: context info
:return: None
"""
self.err_context = err_context
self.succ_context = succ_context | [
"def",
"add_context",
"(",
"self",
",",
"err_context",
",",
"succ_context",
"=",
"None",
")",
":",
"self",
".",
"err_context",
"=",
"err_context",
"self",
".",
"succ_context",
"=",
"succ_context"
] | Prepend msg to add some context information
:param err_context: context info
:return: None | [
"Prepend",
"msg",
"to",
"add",
"some",
"context",
"information"
] | python | valid |
chhantyal/sorl-thumbnail-async | thumbnail/__init__.py | https://github.com/chhantyal/sorl-thumbnail-async/blob/023d20aac79090a691d563dc26f558bb87239811/thumbnail/__init__.py#L9-L17 | def get_thumbnail(file_, name):
"""
get_thumbnail version that uses aliases defined in THUMBNAIL_OPTIONS_DICT
"""
options = settings.OPTIONS_DICT[name]
opt = copy(options)
geometry = opt.pop('geometry')
return original_get_thumbnail(file_, geometry, **opt) | [
"def",
"get_thumbnail",
"(",
"file_",
",",
"name",
")",
":",
"options",
"=",
"settings",
".",
"OPTIONS_DICT",
"[",
"name",
"]",
"opt",
"=",
"copy",
"(",
"options",
")",
"geometry",
"=",
"opt",
".",
"pop",
"(",
"'geometry'",
")",
"return",
"original_get_thumbnail",
"(",
"file_",
",",
"geometry",
",",
"*",
"*",
"opt",
")"
] | get_thumbnail version that uses aliases defined in THUMBNAIL_OPTIONS_DICT | [
"get_thumbnail",
"version",
"that",
"uses",
"aliasses",
"defined",
"in",
"THUMBNAIL_OPTIONS_DICT"
] | python | train |
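A sketch of the alias-based `get_thumbnail`; the `'small'` alias, its options, and the `photo.image` field are hypothetical.

```python
# Hypothetical settings entry backing the 'small' alias:
# THUMBNAIL_OPTIONS_DICT = {'small': {'geometry': '100x100', 'crop': 'center'}}
from thumbnail import get_thumbnail

thumb = get_thumbnail(photo.image, 'small')  # `photo` is an assumed model instance
print(thumb.url)  # .url follows sorl-thumbnail's ImageFile API
```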
hollenstein/maspy | maspy/auxiliary.py | https://github.com/hollenstein/maspy/blob/f15fcfd24df306d8420540460d902aa3073ec133/maspy/auxiliary.py#L115-L133 | def openSafeReplace(filepath, mode='w+b'):
"""Context manager to open a temporary file and replace the original file on
closing.
"""
tempfileName = None
#Check if the filepath can be accessed and is writable before creating the
#tempfile
if not _isFileAccessible(filepath):
raise IOError('File %s is not writable' % (filepath, ))
with tempfile.NamedTemporaryFile(delete=False, mode=mode) as tmpf:
tempfileName = tmpf.name
yield tmpf
#Check if the filepath can be accessed and is writable before moving the
#tempfile
if not _isFileAccessible(filepath):
raise IOError('File %s is not writable' % (filepath, ))
#Note: here unhandled exceptions may still occur because of race conditions,
#messing things up.
shutil.move(tempfileName, filepath) | [
"def",
"openSafeReplace",
"(",
"filepath",
",",
"mode",
"=",
"'w+b'",
")",
":",
"tempfileName",
"=",
"None",
"#Check if the filepath can be accessed and is writable before creating the",
"#tempfile",
"if",
"not",
"_isFileAccessible",
"(",
"filepath",
")",
":",
"raise",
"IOError",
"(",
"'File %s is not writtable'",
"%",
"(",
"filepath",
",",
")",
")",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"delete",
"=",
"False",
",",
"mode",
"=",
"mode",
")",
"as",
"tmpf",
":",
"tempfileName",
"=",
"tmpf",
".",
"name",
"yield",
"tmpf",
"#Check if the filepath can be accessed and is writable before moving the",
"#tempfile",
"if",
"not",
"_isFileAccessible",
"(",
"filepath",
")",
":",
"raise",
"IOError",
"(",
"'File %s is not writtable'",
"%",
"(",
"filepath",
",",
")",
")",
"#Note: here unhandled exceptions may still occur because of race conditions,",
"#messing things up.",
"shutil",
".",
"move",
"(",
"tempfileName",
",",
"filepath",
")"
] | Context manager to open a temporary file and replace the original file on
closing. | [
"Context",
"manager",
"to",
"open",
"a",
"temporary",
"file",
"and",
"replace",
"the",
"original",
"file",
"on",
"closing",
"."
] | python | train |
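Because `openSafeReplace` writes to a named temporary file and only moves it over the target on a clean exit, a crash mid-write leaves the original file untouched. A minimal sketch using the import path from this record:

```python
from maspy.auxiliary import openSafeReplace

with openSafeReplace('results.txt', mode='w+') as tmpf:
    tmpf.write('all-or-nothing content\n')
# on normal exit, the temporary file has replaced results.txt
```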
F5Networks/f5-common-python | f5/bigip/tm/ltm/node.py | https://github.com/F5Networks/f5-common-python/blob/7e67d5acd757a60e3d5f8c88c534bd72208f5494/f5/bigip/tm/ltm/node.py#L118-L134 | def _modify(self, **patch):
"""Override modify to check kwargs before request sent to device."""
if 'state' in patch:
if patch['state'] not in ['user-up', 'user-down', 'unchecked', 'fqdn-up']:
msg = "The node resource does not support a modify with the " \
"value of the 'state' attribute as %s. The accepted " \
"values are 'user-up', 'user-down', 'unchecked', or 'fqdn-up'" \
% patch['state']
raise NodeStateModifyUnsupported(msg)
if 'session' in patch:
if patch['session'] not in ['user-enabled', 'user-disabled']:
msg = "The node resource does not support a modify with the " \
"value of the 'session' attribute as %s. " \
"The accepted values are 'user-enabled' or " \
"'user-disabled'" % patch['session']
raise NodeStateModifyUnsupported(msg)
super(Node, self)._modify(**patch) | [
"def",
"_modify",
"(",
"self",
",",
"*",
"*",
"patch",
")",
":",
"if",
"'state'",
"in",
"patch",
":",
"if",
"patch",
"[",
"'state'",
"]",
"not",
"in",
"[",
"'user-up'",
",",
"'user-down'",
",",
"'unchecked'",
",",
"'fqdn-up'",
"]",
":",
"msg",
"=",
"\"The node resource does not support a modify with the \"",
"\"value of the 'state' attribute as %s. The accepted \"",
"\"values are 'user-up', 'user-down', 'unchecked', or 'fqdn-up'\"",
"%",
"patch",
"[",
"'state'",
"]",
"raise",
"NodeStateModifyUnsupported",
"(",
"msg",
")",
"if",
"'session'",
"in",
"patch",
":",
"if",
"patch",
"[",
"'session'",
"]",
"not",
"in",
"[",
"'user-enabled'",
",",
"'user-disabled'",
"]",
":",
"msg",
"=",
"\"The node resource does not support a modify with the \"",
"\"value of the 'session' attribute as %s. \"",
"\"The accepted values are 'user-enabled' or \"",
"\"'user-disabled'\"",
"%",
"patch",
"[",
"'session'",
"]",
"raise",
"NodeStateModifyUnsupported",
"(",
"msg",
")",
"super",
"(",
"Node",
",",
"self",
")",
".",
"_modify",
"(",
"*",
"*",
"patch",
")"
] | Override modify to check kwargs before request sent to device. | [
"Override",
"modify",
"to",
"check",
"kwargs",
"before",
"request",
"sent",
"to",
"device",
"."
] | python | train |
jhshi/wltrace | wltrace/wltrace.py | https://github.com/jhshi/wltrace/blob/4c8441162f7cddd47375da2effc52c95b97dc81d/wltrace/wltrace.py#L59-L77 | def load_trace(path, *args, **kwargs):
"""Read a packet trace file, return a :class:`wltrace.common.WlTrace` object.
This function first reads the file's magic
(first ``MAGIC_LEN`` bytes), and automatically determines the
file type, then calls the appropriate handler to process the file.
Args:
path (str): the file's path to be loaded.
Returns:
``WlTrace`` object.
"""
with open(path, 'rb') as f:
magic = f.read(MAGIC_LEN)
if magic not in FILE_TYPE_HANDLER:
raise Exception('Unknown file magic: %s' % (binascii.hexlify(magic)))
return FILE_TYPE_HANDLER[magic](path, *args, **kwargs) | [
"def",
"load_trace",
"(",
"path",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"open",
"(",
"path",
",",
"'rb'",
")",
"as",
"f",
":",
"magic",
"=",
"f",
".",
"read",
"(",
"MAGIC_LEN",
")",
"if",
"magic",
"not",
"in",
"FILE_TYPE_HANDLER",
":",
"raise",
"Exception",
"(",
"'Unknown file magic: %s'",
"%",
"(",
"binascii",
".",
"hexlify",
"(",
"magic",
")",
")",
")",
"return",
"FILE_TYPE_HANDLER",
"[",
"magic",
"]",
"(",
"path",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Read a packet trace file, return a :class:`wltrace.common.WlTrace` object.
This function first reads the file's magic
(first ``MAGIC_LEN`` bytes), and automatically determines the
file type, then calls the appropriate handler to process the file.
Args:
path (str): the file's path to be loaded.
Returns:
``WlTrace`` object. | [
"Read",
"a",
"packet",
"trace",
"file",
"return",
"a",
":",
"class",
":",
"wltrace",
".",
"common",
".",
"WlTrace",
"object",
"."
] | python | train |
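A usage sketch for `load_trace`; the file name is illustrative and the import path follows this record's module layout.

```python
from wltrace.wltrace import load_trace

trace = load_trace('capture.pcap')  # file type auto-detected from its magic bytes
print(type(trace))                  # a wltrace.common.WlTrace instance
```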
72squared/redpipe | redpipe/keyspaces.py | https://github.com/72squared/redpipe/blob/e6ee518bc9f3e2fee323c8c53d08997799bd9b1b/redpipe/keyspaces.py#L1263-L1280 | def lrange(self, name, start, stop):
"""
Returns a range of items.
:param name: str the name of the redis key
:param start: integer representing the start index of the range
:param stop: integer representing the end index of the range.
:return: Future()
"""
with self.pipe as pipe:
f = Future()
res = pipe.lrange(self.redis_key(name), start, stop)
def cb():
f.set([self.valueparse.decode(v) for v in res.result])
pipe.on_execute(cb)
return f | [
"def",
"lrange",
"(",
"self",
",",
"name",
",",
"start",
",",
"stop",
")",
":",
"with",
"self",
".",
"pipe",
"as",
"pipe",
":",
"f",
"=",
"Future",
"(",
")",
"res",
"=",
"pipe",
".",
"lrange",
"(",
"self",
".",
"redis_key",
"(",
"name",
")",
",",
"start",
",",
"stop",
")",
"def",
"cb",
"(",
")",
":",
"f",
".",
"set",
"(",
"[",
"self",
".",
"valueparse",
".",
"decode",
"(",
"v",
")",
"for",
"v",
"in",
"res",
".",
"result",
"]",
")",
"pipe",
".",
"on_execute",
"(",
"cb",
")",
"return",
"f"
] | Returns a range of items.
:param name: str the name of the redis key
:param start: integer representing the start index of the range
:param stop: integer representing the end index of the range.
:return: Future() | [
"Returns",
"a",
"range",
"of",
"items",
"."
] | python | train |
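`lrange` returns a `Future` that is only filled in when the pipeline executes, so many reads can be queued in one round trip. The keyspace declaration and the `pipe=` constructor below are assumptions about how redpipe keyspaces are typically defined.

```python
# Hypothetical keyspace; redpipe.List as the base class is an assumption.
import redpipe

class Messages(redpipe.List):
    keyspace = 'messages'

with redpipe.pipeline() as pipe:
    items = Messages(pipe=pipe).lrange('room:1', 0, -1)  # Future, still empty
    pipe.execute()  # fills every Future queued on this pipeline
print(items)  # now behaves like the decoded list
```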
btel/svg_utils | src/svgutils/transform.py | https://github.com/btel/svg_utils/blob/ee00726ebed1bd97fd496b15b6a8e7f233ebb5e3/src/svgutils/transform.py#L38-L50 | def rotate(self, angle, x=0, y=0):
"""Rotate element by given angle around given pivot.
Parameters
----------
angle : float
rotation angle in degrees
x, y : float
pivot coordinates in user coordinate system (defaults to top-left
corner of the figure)
"""
self.root.set("transform", "%s rotate(%f %f %f)" %
(self.root.get("transform") or '', angle, x, y)) | [
"def",
"rotate",
"(",
"self",
",",
"angle",
",",
"x",
"=",
"0",
",",
"y",
"=",
"0",
")",
":",
"self",
".",
"root",
".",
"set",
"(",
"\"transform\"",
",",
"\"%s rotate(%f %f %f)\"",
"%",
"(",
"self",
".",
"root",
".",
"get",
"(",
"\"transform\"",
")",
"or",
"''",
",",
"angle",
",",
"x",
",",
"y",
")",
")"
] | Rotate element by given angle around given pivot.
Parameters
----------
angle : float
rotation angle in degrees
x, y : float
pivot coordinates in user coordinate system (defaults to top-left
corner of the figure) | [
"Rotate",
"element",
"by",
"given",
"angle",
"around",
"given",
"pivot",
"."
] | python | train |
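A sketch that rotates a loaded SVG inside a new figure, following the canonical svgutils `transform` workflow; file names are illustrative.

```python
import svgutils.transform as sg

fig = sg.SVGFigure('10cm', '10cm')
plot = sg.fromfile('input.svg').getroot()
plot.rotate(90, x=100, y=100)  # appends "rotate(90 100 100)" to the transform
fig.append([plot])
fig.save('rotated.svg')
```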
wummel/linkchecker | linkcheck/fileutil.py | https://github.com/wummel/linkchecker/blob/c2ce810c3fb00b895a841a7be6b2e78c64e7b042/linkcheck/fileutil.py#L218-L227 | def is_writable(filename):
"""Check if
- the file is a regular file and is writable, or
- the file does not exist and its parent directory exists and is
writable
"""
if not os.path.exists(filename):
parentdir = os.path.dirname(filename)
return os.path.isdir(parentdir) and os.access(parentdir, os.W_OK)
return os.path.isfile(filename) and os.access(filename, os.W_OK) | [
"def",
"is_writable",
"(",
"filename",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"parentdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"filename",
")",
"return",
"os",
".",
"path",
".",
"isdir",
"(",
"parentdir",
")",
"and",
"os",
".",
"access",
"(",
"parentdir",
",",
"os",
".",
"W_OK",
")",
"return",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
"and",
"os",
".",
"access",
"(",
"filename",
",",
"os",
".",
"W_OK",
")"
] | Check if
- the file is a regular file and is writable, or
- the file does not exist and its parent directory exists and is
writable | [
"Check",
"if",
"-",
"the",
"file",
"is",
"a",
"regular",
"file",
"and",
"is",
"writable",
"or",
"-",
"the",
"file",
"does",
"not",
"exist",
"and",
"its",
"parent",
"directory",
"exists",
"and",
"is",
"writable"
] | python | train |
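A short sketch of `is_writable`, which also accepts not-yet-existing paths as long as the parent directory is writable:

```python
from linkcheck.fileutil import is_writable

# True on most systems: /tmp exists and is writable, so the file could be created.
print(is_writable('/tmp/new-report.log'))
# False: the parent directory does not exist.
print(is_writable('/no/such/dir/report.log'))
```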
psphere-project/psphere | psphere/__init__.py | https://github.com/psphere-project/psphere/blob/83a252e037c3d6e4f18bcd37380998bc9535e591/psphere/__init__.py#L153-L168 | def update(self, properties=None):
"""Updates the properties being held for this instance.
:param properties: The list of properties to update.
:type properties: list or None (default). If None, update all
currently cached properties.
"""
if properties is None:
try:
self.update_view_data(properties=list(self._cache.keys()))
except AttributeError:
# We end up here and ignore it if self._cache doesn't exist
pass
else:
self.update_view_data(properties=properties) | [
"def",
"update",
"(",
"self",
",",
"properties",
"=",
"None",
")",
":",
"if",
"properties",
"is",
"None",
":",
"try",
":",
"self",
".",
"update_view_data",
"(",
"properties",
"=",
"list",
"(",
"self",
".",
"_cache",
".",
"keys",
"(",
")",
")",
")",
"except",
"AttributeError",
":",
"# We end up here and ignore it self._cache doesn't exist",
"pass",
"else",
":",
"self",
".",
"update_view_data",
"(",
"properties",
"=",
"properties",
")"
] | Updates the properties being held for this instance.
:param properties: The list of properties to update.
:type properties: list or None (default). If None, update all
currently cached properties. | [
"Updates",
"the",
"properties",
"being",
"held",
"for",
"this",
"instance",
"."
] | python | train |
kamikaze/webdav | src/webdav/client.py | https://github.com/kamikaze/webdav/blob/6facff7224023d3e28c8e1592f3c58401c91a0e6/src/webdav/client.py#L335-L355 | def download_directory(self, remote_path, local_path, progress=None):
"""Downloads directory and downloads all nested files and directories from remote WebDAV to local.
If there is something on local path it deletes directories and files then creates new.
:param remote_path: the path to directory for downloading from WebDAV server.
:param local_path: the path to local directory for saving downloaded files and directories.
:param progress: Progress function. Not supported now.
"""
urn = Urn(remote_path, directory=True)
if not self.is_dir(urn.path()):
raise OptionNotValid(name='remote_path', value=remote_path)
if os.path.exists(local_path):
shutil.rmtree(local_path)
os.makedirs(local_path)
for resource_name in self.list(urn.path()):
_remote_path = f'{urn.path()}{resource_name}'
_local_path = os.path.join(local_path, resource_name)
self.download(local_path=_local_path, remote_path=_remote_path, progress=progress) | [
"def",
"download_directory",
"(",
"self",
",",
"remote_path",
",",
"local_path",
",",
"progress",
"=",
"None",
")",
":",
"urn",
"=",
"Urn",
"(",
"remote_path",
",",
"directory",
"=",
"True",
")",
"if",
"not",
"self",
".",
"is_dir",
"(",
"urn",
".",
"path",
"(",
")",
")",
":",
"raise",
"OptionNotValid",
"(",
"name",
"=",
"'remote_path'",
",",
"value",
"=",
"remote_path",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"local_path",
")",
":",
"shutil",
".",
"rmtree",
"(",
"local_path",
")",
"os",
".",
"makedirs",
"(",
"local_path",
")",
"for",
"resource_name",
"in",
"self",
".",
"list",
"(",
"urn",
".",
"path",
"(",
")",
")",
":",
"_remote_path",
"=",
"f'{urn.path()}{resource_name}'",
"_local_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"local_path",
",",
"resource_name",
")",
"self",
".",
"download",
"(",
"local_path",
"=",
"_local_path",
",",
"remote_path",
"=",
"_remote_path",
",",
"progress",
"=",
"progress",
")"
] | Downloads directory and downloads all nested files and directories from remote WebDAV to local.
If there is something on local path it deletes directories and files then creates new.
:param remote_path: the path to directory for downloading from WebDAV server.
:param local_path: the path to local directory for saving downloaded files and directories.
:param progress: Progress function. Not supported now. | [
"Downloads",
"directory",
"and",
"downloads",
"all",
"nested",
"files",
"and",
"directories",
"from",
"remote",
"WebDAV",
"to",
"local",
".",
"If",
"there",
"is",
"something",
"on",
"local",
"path",
"it",
"deletes",
"directories",
"and",
"files",
"then",
"creates",
"new",
"."
] | python | train |
aegirhall/console-menu | consolemenu/selection_menu.py | https://github.com/aegirhall/console-menu/blob/1a28959d6f1dd6ac79c87b11efd8529d05532422/consolemenu/selection_menu.py#L29-L50 | def get_selection(cls, strings, title="Select an option", subtitle=None, exit_option=True, _menu=None):
"""
Single-method way of getting a selection out of a list of strings.
Args:
strings (:obj:`list` of :obj:`str`): The list of strings this menu should be built from.
title (str): The title of the menu.
subtitle (str): The subtitle of the menu.
exit_option (bool): Specifies whether this menu should show an exit item by default. Defaults to True.
_menu: Should probably only be used for testing, pass in a list and the created menu used internally by
the method will be appended to it
Returns:
int: The index of the selected option.
"""
menu = cls(strings, title, subtitle, exit_option)
if _menu is not None:
_menu.append(menu)
menu.show()
menu.join()
return menu.selected_option | [
"def",
"get_selection",
"(",
"cls",
",",
"strings",
",",
"title",
"=",
"\"Select an option\"",
",",
"subtitle",
"=",
"None",
",",
"exit_option",
"=",
"True",
",",
"_menu",
"=",
"None",
")",
":",
"menu",
"=",
"cls",
"(",
"strings",
",",
"title",
",",
"subtitle",
",",
"exit_option",
")",
"if",
"_menu",
"is",
"not",
"None",
":",
"_menu",
".",
"append",
"(",
"menu",
")",
"menu",
".",
"show",
"(",
")",
"menu",
".",
"join",
"(",
")",
"return",
"menu",
".",
"selected_option"
] | Single-method way of getting a selection out of a list of strings.
Args:
strings (:obj:`list` of :obj:`str`): The list of strings this menu should be built from.
title (str): The title of the menu.
subtitle (str): The subtitle of the menu.
exit_option (bool): Specifies whether this menu should show an exit item by default. Defaults to True.
_menu: Should probably only be used for testing, pass in a list and the created menu used internally by
the method will be appended to it
Returns:
int: The index of the selected option. | [
"Single",
"-",
"method",
"way",
"of",
"getting",
"a",
"selection",
"out",
"of",
"a",
"list",
"of",
"strings",
"."
] | python | train |
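`get_selection` builds the menu, shows it, blocks until a choice is made, and returns the selected index (the exit item, when enabled, occupies the last slot). A minimal sketch; the top-level import path is assumed from the package name.

```python
from consolemenu import SelectionMenu  # import path assumed

options = ['alpha', 'beta', 'gamma']
index = SelectionMenu.get_selection(options, title='Pick one')
if index < len(options):
    print('chose', options[index])
else:
    print('exit selected')
```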
tensorflow/mesh | mesh_tensorflow/transformer/dataset.py | https://github.com/tensorflow/mesh/blob/3921196e5e43302e820da0a87329f25d7e2a3016/mesh_tensorflow/transformer/dataset.py#L330-L376 | def pretokenized_tfrecord_dataset(filenames,
text2self,
eos_included,
repeat,
batch_size,
sequence_length):
"""Reads tensor2tensor-style data files.
The dataset is defined by sets of TFRecord files of TFExample protos.
There should be a "targets" feature (a 1d tensor of integers)
If not text2self, there should also be an "inputs" feature.
Other features get ignored.
eos_included specifies whether the inputs and targets were written with an
EOS token, as in tensor2tensor
Args:
filenames: a list of strings
text2self: a boolean
eos_included: a boolean
repeat: a boolean
batch_size: an integer
sequence_length: an integer
Returns:
A tf.data.Dataset of batches
"""
dataset = tf.data.TFRecordDataset(filenames, buffer_size=64 * 1024 * 1024)
if repeat:
dataset = dataset.repeat()
keys = ["targets"] if text2self else ["inputs", "targets"]
def decode_example(serialized_example):
"""Return a dict of Tensors from a serialized tensorflow.Example."""
data_fields = {}
data_items_to_decoders = {}
for k in keys:
data_fields[k] = tf.VarLenFeature(tf.int64)
data_items_to_decoders[k] = tf.contrib.slim.tfexample_decoder.Tensor(k)
decoder = tf.contrib.slim.tfexample_decoder.TFExampleDecoder(
data_fields, data_items_to_decoders)
decode_items = list(sorted(data_items_to_decoders))
decoded = decoder.decode(serialized_example, items=decode_items)
if not eos_included:
decoded = [tf.concat([v, [1]], 0) for v in decoded]
return dict(zip(decode_items, decoded))
dataset = dataset.map(decode_example,
num_parallel_calls=tf.data.experimental.AUTOTUNE)
return pack_and_batch(dataset, batch_size, sequence_length) | [
"def",
"pretokenized_tfrecord_dataset",
"(",
"filenames",
",",
"text2self",
",",
"eos_included",
",",
"repeat",
",",
"batch_size",
",",
"sequence_length",
")",
":",
"dataset",
"=",
"tf",
".",
"data",
".",
"TFRecordDataset",
"(",
"filenames",
",",
"buffer_size",
"=",
"64",
"*",
"1024",
"*",
"1024",
")",
"if",
"repeat",
":",
"dataset",
"=",
"dataset",
".",
"repeat",
"(",
")",
"keys",
"=",
"[",
"\"targets\"",
"]",
"if",
"text2self",
"else",
"[",
"\"inputs\"",
",",
"\"targets\"",
"]",
"def",
"decode_example",
"(",
"serialized_example",
")",
":",
"\"\"\"Return a dict of Tensors from a serialized tensorflow.Example.\"\"\"",
"data_fields",
"=",
"{",
"}",
"data_items_to_decoders",
"=",
"{",
"}",
"for",
"k",
"in",
"keys",
":",
"data_fields",
"[",
"k",
"]",
"=",
"tf",
".",
"VarLenFeature",
"(",
"tf",
".",
"int64",
")",
"data_items_to_decoders",
"[",
"k",
"]",
"=",
"tf",
".",
"contrib",
".",
"slim",
".",
"tfexample_decoder",
".",
"Tensor",
"(",
"k",
")",
"decoder",
"=",
"tf",
".",
"contrib",
".",
"slim",
".",
"tfexample_decoder",
".",
"TFExampleDecoder",
"(",
"data_fields",
",",
"data_items_to_decoders",
")",
"decode_items",
"=",
"list",
"(",
"sorted",
"(",
"data_items_to_decoders",
")",
")",
"decoded",
"=",
"decoder",
".",
"decode",
"(",
"serialized_example",
",",
"items",
"=",
"decode_items",
")",
"if",
"not",
"eos_included",
":",
"decoded",
"=",
"[",
"tf",
".",
"concat",
"(",
"[",
"v",
",",
"[",
"1",
"]",
"]",
",",
"0",
")",
"for",
"v",
"in",
"decoded",
"]",
"return",
"dict",
"(",
"zip",
"(",
"decode_items",
",",
"decoded",
")",
")",
"dataset",
"=",
"dataset",
".",
"map",
"(",
"decode_example",
",",
"num_parallel_calls",
"=",
"tf",
".",
"data",
".",
"experimental",
".",
"AUTOTUNE",
")",
"return",
"pack_and_batch",
"(",
"dataset",
",",
"batch_size",
",",
"sequence_length",
")"
] | Reads tensor2tensor-style data files.
The dataset is defined by sets of TFRecord files of TFExample protos.
There should be a "targets" feature (a 1d tensor of integers)
If not text2self, there should also be an "inputs" feature.
Other features get ignored.
eos_included specifies whether the inputs and targets were written with an
EOS token, as in tensor2tensor
Args:
filenames: a list of strings
text2self: a boolean
eos_included: a boolean
repeat: a boolean
batch_size: an integer
sequence_length: an integer
Returns:
A tf.data.Dataset of batches | [
"Reads",
"tensor2tensor",
"-",
"style",
"data",
"files",
"."
] | python | train |
wmayner/pyphi | pyphi/db.py | https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/db.py#L34-L45 | def find(key):
"""Return the value associated with a key.
If there is no value with the given key, returns ``None``.
"""
docs = list(collection.find({KEY_FIELD: key}))
# Return None if we didn't find anything.
if not docs:
return None
pickled_value = docs[0][VALUE_FIELD]
# Unpickle and return the value.
return pickle.loads(pickled_value) | [
"def",
"find",
"(",
"key",
")",
":",
"docs",
"=",
"list",
"(",
"collection",
".",
"find",
"(",
"{",
"KEY_FIELD",
":",
"key",
"}",
")",
")",
"# Return None if we didn't find anything.",
"if",
"not",
"docs",
":",
"return",
"None",
"pickled_value",
"=",
"docs",
"[",
"0",
"]",
"[",
"VALUE_FIELD",
"]",
"# Unpickle and return the value.",
"return",
"pickle",
".",
"loads",
"(",
"pickled_value",
")"
] | Return the value associated with a key.
If there is no value with the given key, returns ``None``. | [
"Return",
"the",
"value",
"associated",
"with",
"a",
"key",
"."
] | python | train |
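A hedged companion sketch for `find`: a write path it could read back, assuming the same pymongo `collection`, `KEY_FIELD`, and `VALUE_FIELD` as in the record (the `insert` helper is hypothetical):

import pickle

def insert(key, value):
    # Pickle so arbitrary Python objects round-trip through MongoDB.
    collection.insert_one({KEY_FIELD: key, VALUE_FIELD: pickle.dumps(value)})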
trailofbits/manticore | manticore/utils/fallback_emulator.py | https://github.com/trailofbits/manticore/blob/54c5a15b1119c523ae54c09972413e8b97f11629/manticore/utils/fallback_emulator.py#L64-L86 | def _create_emulated_mapping(self, uc, address):
"""
Create a mapping in Unicorn and note that we'll need it if we retry.
:param uc: The Unicorn instance.
:param address: The address which is contained by the mapping.
:rtype: Map
"""
m = self._cpu.memory.map_containing(address)
permissions = UC_PROT_NONE
if 'r' in m.perms:
permissions |= UC_PROT_READ
if 'w' in m.perms:
permissions |= UC_PROT_WRITE
if 'x' in m.perms:
permissions |= UC_PROT_EXEC
uc.mem_map(m.start, len(m), permissions)
self._should_be_mapped[m.start] = (len(m), permissions)
return m | [
"def",
"_create_emulated_mapping",
"(",
"self",
",",
"uc",
",",
"address",
")",
":",
"m",
"=",
"self",
".",
"_cpu",
".",
"memory",
".",
"map_containing",
"(",
"address",
")",
"permissions",
"=",
"UC_PROT_NONE",
"if",
"'r'",
"in",
"m",
".",
"perms",
":",
"permissions",
"|=",
"UC_PROT_READ",
"if",
"'w'",
"in",
"m",
".",
"perms",
":",
"permissions",
"|=",
"UC_PROT_WRITE",
"if",
"'x'",
"in",
"m",
".",
"perms",
":",
"permissions",
"|=",
"UC_PROT_EXEC",
"uc",
".",
"mem_map",
"(",
"m",
".",
"start",
",",
"len",
"(",
"m",
")",
",",
"permissions",
")",
"self",
".",
"_should_be_mapped",
"[",
"m",
".",
"start",
"]",
"=",
"(",
"len",
"(",
"m",
")",
",",
"permissions",
")",
"return",
"m"
] | Create a mapping in Unicorn and note that we'll need it if we retry.
:param uc: The Unicorn instance.
:param address: The address which is contained by the mapping.
:rtype: Map
"Create",
"a",
"mapping",
"in",
"Unicorn",
"and",
"note",
"that",
"we",
"ll",
"need",
"it",
"if",
"we",
"retry",
".",
":",
"param",
"uc",
":",
"The",
"Unicorn",
"instance",
".",
":",
"param",
"address",
":",
"The",
"address",
"which",
"is",
"contained",
"by",
"the",
"mapping",
".",
":",
"rtype",
"Map"
] | python | valid |
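The permission translation above is a common Unicorn pattern; a standalone sketch of just that step (the function name is illustrative):

from unicorn import UC_PROT_NONE, UC_PROT_READ, UC_PROT_WRITE, UC_PROT_EXEC

def perms_to_unicorn(perms):
    # Map an "rwx"-style permission string to Unicorn protection flags.
    flags = UC_PROT_NONE
    if 'r' in perms:
        flags |= UC_PROT_READ
    if 'w' in perms:
        flags |= UC_PROT_WRITE
    if 'x' in perms:
        flags |= UC_PROT_EXEC
    return flags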
flatangle/flatlib | flatlib/dignities/accidental.py | https://github.com/flatangle/flatlib/blob/44e05b2991a296c678adbc17a1d51b6a21bc867c/flatlib/dignities/accidental.py#L237-L252 | def eqMutualReceptions(self):
""" Returns a list with mutual receptions with the
object and other planets, when the reception is the
same for both (both ruler or both exaltation).
It basically returns a list with every ruler-ruler and
exalt-exalt mutual receptions
"""
mrs = self.reMutualReceptions()
res = []
for ID, receptions in mrs.items():
for pair in receptions:
if pair[0] == pair[1]:
res.append(pair[0])
return res | [
"def",
"eqMutualReceptions",
"(",
"self",
")",
":",
"mrs",
"=",
"self",
".",
"reMutualReceptions",
"(",
")",
"res",
"=",
"[",
"]",
"for",
"ID",
",",
"receptions",
"in",
"mrs",
".",
"items",
"(",
")",
":",
"for",
"pair",
"in",
"receptions",
":",
"if",
"pair",
"[",
"0",
"]",
"==",
"pair",
"[",
"1",
"]",
":",
"res",
".",
"append",
"(",
"pair",
"[",
"0",
"]",
")",
"return",
"res"
] | Returns a list with mutual receptions with the
object and other planets, when the reception is the
same for both (both ruler or both exaltation).
It basically returns a list with every ruler-ruler and
exalt-exalt mutual receptions | [
"Returns",
"a",
"list",
"with",
"mutual",
"receptions",
"with",
"the",
"object",
"and",
"other",
"planets",
"when",
"the",
"reception",
"is",
"the",
"same",
"for",
"both",
"(",
"both",
"ruler",
"or",
"both",
"exaltation",
")",
".",
"It",
"basically",
"return",
"a",
"list",
"with",
"every",
"ruler",
"-",
"ruler",
"and",
"exalt",
"-",
"exalt",
"mutual",
"receptions"
] | python | train |
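The pair-filtering idea in `eqMutualReceptions`, shown on plain made-up data:

mrs = {'Venus': [('ruler', 'ruler'), ('ruler', 'exaltation')],
       'Mars': [('exaltation', 'exaltation')]}
equal = [pair[0] for receptions in mrs.values()
         for pair in receptions if pair[0] == pair[1]]
# equal == ['ruler', 'exaltation']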
goldsmith/Wikipedia | wikipedia/wikipedia.py | https://github.com/goldsmith/Wikipedia/blob/2065c568502b19b8634241b47fd96930d1bf948d/wikipedia/wikipedia.py#L653-L676 | def section(self, section_title):
'''
Get the plain text content of a section from `self.sections`.
Returns None if `section_title` isn't found, otherwise returns a whitespace stripped string.
This is a convenience method that wraps self.content.
.. warning:: Calling `section` on a section that has subheadings will NOT return
the full text of all of the subsections. It only gets the text between
`section_title` and the next subheading, which is often empty.
'''
section = u"== {} ==".format(section_title)
try:
index = self.content.index(section) + len(section)
except ValueError:
return None
try:
next_index = self.content.index("==", index)
except ValueError:
next_index = len(self.content)
return self.content[index:next_index].lstrip("=").strip() | [
"def",
"section",
"(",
"self",
",",
"section_title",
")",
":",
"section",
"=",
"u\"== {} ==\"",
".",
"format",
"(",
"section_title",
")",
"try",
":",
"index",
"=",
"self",
".",
"content",
".",
"index",
"(",
"section",
")",
"+",
"len",
"(",
"section",
")",
"except",
"ValueError",
":",
"return",
"None",
"try",
":",
"next_index",
"=",
"self",
".",
"content",
".",
"index",
"(",
"\"==\"",
",",
"index",
")",
"except",
"ValueError",
":",
"next_index",
"=",
"len",
"(",
"self",
".",
"content",
")",
"return",
"self",
".",
"content",
"[",
"index",
":",
"next_index",
"]",
".",
"lstrip",
"(",
"\"=\"",
")",
".",
"strip",
"(",
")"
] | Get the plain text content of a section from `self.sections`.
Returns None if `section_title` isn't found, otherwise returns a whitespace stripped string.
This is a convenience method that wraps self.content.
.. warning:: Calling `section` on a section that has subheadings will NOT return
the full text of all of the subsections. It only gets the text between
`section_title` and the next subheading, which is often empty. | [
"Get",
"the",
"plain",
"text",
"content",
"of",
"a",
"section",
"from",
"self",
".",
"sections",
".",
"Returns",
"None",
"if",
"section_title",
"isn",
"t",
"found",
"otherwise",
"returns",
"a",
"whitespace",
"stripped",
"string",
"."
] | python | train |
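A short usage sketch via the package's public entry points (title and heading are illustrative):

import wikipedia

page = wikipedia.page("Python (programming language)")
history = page.section("History")   # None if no "== History ==" heading exists
if history is not None:
    print(history[:200])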
fastai/fastai | docs_src/nbval/plugin.py | https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/docs_src/nbval/plugin.py#L796-L831 | def coalesce_streams(outputs):
"""
Merge all stream outputs with shared names into single streams
to ensure deterministic outputs.
Parameters
----------
outputs : iterable of NotebookNodes
Outputs being processed
"""
if not outputs:
return outputs
new_outputs = []
streams = {}
for output in outputs:
if (output.output_type == 'stream'):
if output.name in streams:
streams[output.name].text += output.text
else:
new_outputs.append(output)
streams[output.name] = output
else:
new_outputs.append(output)
# process \r and \b characters
for output in streams.values():
old = output.text
while len(output.text) < len(old):
old = output.text
# Cancel out anything-but-newline followed by backspace
output.text = backspace_pat.sub('', output.text)
# Replace all carriage returns not followed by newline
output.text = carriagereturn_pat.sub('', output.text)
return new_outputs | [
"def",
"coalesce_streams",
"(",
"outputs",
")",
":",
"if",
"not",
"outputs",
":",
"return",
"outputs",
"new_outputs",
"=",
"[",
"]",
"streams",
"=",
"{",
"}",
"for",
"output",
"in",
"outputs",
":",
"if",
"(",
"output",
".",
"output_type",
"==",
"'stream'",
")",
":",
"if",
"output",
".",
"name",
"in",
"streams",
":",
"streams",
"[",
"output",
".",
"name",
"]",
".",
"text",
"+=",
"output",
".",
"text",
"else",
":",
"new_outputs",
".",
"append",
"(",
"output",
")",
"streams",
"[",
"output",
".",
"name",
"]",
"=",
"output",
"else",
":",
"new_outputs",
".",
"append",
"(",
"output",
")",
"# process \\r and \\b characters",
"for",
"output",
"in",
"streams",
".",
"values",
"(",
")",
":",
"old",
"=",
"output",
".",
"text",
"while",
"len",
"(",
"output",
".",
"text",
")",
"<",
"len",
"(",
"old",
")",
":",
"old",
"=",
"output",
".",
"text",
"# Cancel out anything-but-newline followed by backspace",
"output",
".",
"text",
"=",
"backspace_pat",
".",
"sub",
"(",
"''",
",",
"output",
".",
"text",
")",
"# Replace all carriage returns not followed by newline",
"output",
".",
"text",
"=",
"carriagereturn_pat",
".",
"sub",
"(",
"''",
",",
"output",
".",
"text",
")",
"return",
"new_outputs"
] | Merge all stream outputs with shared names into single streams
to ensure deterministic outputs.
Parameters
----------
outputs : iterable of NotebookNodes
Outputs being processed | [
"Merge",
"all",
"stream",
"outputs",
"with",
"shared",
"names",
"into",
"single",
"streams",
"to",
"ensure",
"deterministic",
"outputs",
"."
] | python | train |
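The two regexes referenced above are module-level in the original file; plausible definitions matching the inline comments (an assumption, not the verbatim source):

import re

# Anything-but-newline immediately followed by a backspace character.
backspace_pat = re.compile('[^\n]\x08')
# A carriage return not followed by a newline.
carriagereturn_pat = re.compile(r'\r(?!\n)')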
ArchiveTeam/wpull | wpull/url.py | https://github.com/ArchiveTeam/wpull/blob/ddf051aa3322479325ba20aa778cb2cb97606bf5/wpull/url.py#L402-L415 | def parse_url_or_log(url, encoding='utf-8'):
'''Parse and return a URLInfo.
This function logs a warning if the URL cannot be parsed and returns
None.
'''
try:
url_info = URLInfo.parse(url, encoding=encoding)
except ValueError as error:
_logger.warning(__(
_('Unable to parse URL ‘{url}’: {error}.'),
url=wpull.string.printable_str(url), error=error))
else:
return url_info | [
"def",
"parse_url_or_log",
"(",
"url",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"try",
":",
"url_info",
"=",
"URLInfo",
".",
"parse",
"(",
"url",
",",
"encoding",
"=",
"encoding",
")",
"except",
"ValueError",
"as",
"error",
":",
"_logger",
".",
"warning",
"(",
"__",
"(",
"_",
"(",
"'Unable to parse URL ‘{url}’: {error}.'),",
"",
"",
"url",
"=",
"wpull",
".",
"string",
".",
"printable_str",
"(",
"url",
")",
",",
"error",
"=",
"error",
")",
")",
"else",
":",
"return",
"url_info"
] | Parse and return a URLInfo.
This function logs a warning if the URL cannot be parsed and returns
None. | [
"Parse",
"and",
"return",
"a",
"URLInfo",
"."
] | python | train |
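Usage is a simple guard pattern (the URL is illustrative):

url_info = parse_url_or_log('http://example.com/page?q=1')
if url_info is not None:
    print(url_info)   # parsed URLInfo; on failure a warning was logged instead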
rackerlabs/fastfood | fastfood/shell.py | https://github.com/rackerlabs/fastfood/blob/543970c4cedbb3956e84a7986469fdd7e4ee8fc8/fastfood/shell.py#L117-L252 | def main(argv=None):
"""fastfood command line interface."""
# pylint: disable=missing-docstring
import argparse
import traceback
class HelpfulParser(argparse.ArgumentParser):
def error(self, message, print_help=False):
if 'too few arguments' in message:
sys.argv.insert(0, os.path.basename(sys.argv.pop(0)))
message = ("%s. Try getting help with `%s -h`"
% (message, " ".join(sys.argv)))
if print_help:
self.print_help()
sys.stderr.write('\nerror: %s\n' % message)
sys.exit(2)
parser = HelpfulParser(
prog=NAMESPACE,
description=__doc__.splitlines()[0],
epilog="\n".join(__doc__.splitlines()[1:]),
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
version_string = 'version %s' % fastfood.__version__
parser.description = '%s ( %s )' % (parser.description, version_string)
# version_group = subparsers.add_group()
version_group = parser.add_argument_group(
title='version info',
description='Use these arguments to get version info.')
vers_arg = version_group.add_argument(
'-V', '--version', action='version',
help="Return the current fastfood version.",
version='%s %s' % (parser.prog, version_string))
class LatestVersionAction(vers_arg.__class__):
def __call__(self, prsr, *args, **kw):
info = _release_info()
vers = info['info']['version']
release = info['releases'][vers][0]
uploaded = datetime.strptime(
release['upload_time'], '%Y-%m-%dT%H:%M:%S')
sym = EXCLAIM if vers != fastfood.__version__ else CHECK
message = u"{} fastfood version {} uploaded {}\n"
message = message.format(sym, vers, uploaded.ctime())
prsr.exit(message=message)
version_group.add_argument(
'-L', '--latest', action=LatestVersionAction,
help="Lookup the latest relase from PyPI.")
verbose = parser.add_mutually_exclusive_group()
verbose.add_argument('-v', dest='loglevel', action='store_const',
const=logging.INFO,
help="Set log-level to INFO.")
verbose.add_argument('-vv', dest='loglevel', action='store_const',
const=logging.DEBUG,
help="Set log-level to DEBUG.")
parser.set_defaults(loglevel=logging.WARNING)
home = os.getenv('HOME') or os.path.expanduser('~') or os.getcwd()
parser.add_argument(
'--template-pack', help='template pack location',
default=getenv(
'template_pack', os.path.join(home, '.fastfood')))
parser.add_argument(
'--cookbooks', help='cookbooks directory',
default=getenv(
'cookbooks', os.path.join(home, 'cookbooks')))
subparsers = parser.add_subparsers(
dest='_subparsers', title='fastfood commands',
description='operations...',
help='...')
#
# `fastfood list`
#
list_parser = subparsers.add_parser(
'list', help='List available stencils',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
list_parser.add_argument('stencil_set', nargs='?',
help="Stencil set to list stencils from")
list_parser.set_defaults(func=_fastfood_list)
#
# `fastfood show <stencil_set>`
#
show_parser = subparsers.add_parser(
'show', help='Show stencil set information',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
show_parser.add_argument('stencil_set',
help="Stencil set to list stencils from")
show_parser.set_defaults(func=_fastfood_show)
#
# `fastfood build`
#
build_parser = subparsers.add_parser(
'build', help='Create or update a cookbook using a config',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
build_parser.add_argument('config_file',
help="JSON config file")
build_parser.add_argument('--force', '-f', action='store_true',
default=False, help="Overwrite existing files.")
build_parser.set_defaults(func=_fastfood_build)
setattr(_LOCAL, 'argparser', parser)
if not argv:
argv = None
args = parser.parse_args(args=argv)
if hasattr(args, 'options'):
args.options = {k: v for k, v in args.options}
logging.basicConfig(level=args.loglevel)
try:
args.func(args)
except exc.FastfoodError as err:
title = exc.get_friendly_title(err)
print('%s %s: %s' % (RED_X, title, str(err)),
file=sys.stderr)
sys.stderr.flush()
sys.exit(1)
except Exception as err:
print('%s Unexpected error. Please report this traceback.'
% INTERROBANG,
file=sys.stderr)
traceback.print_exc()
# todo: traceback in -v or -vv mode?
sys.stderr.flush()
sys.exit(1)
except KeyboardInterrupt:
sys.exit("\nStahp") | [
"def",
"main",
"(",
"argv",
"=",
"None",
")",
":",
"# pylint: disable=missing-docstring",
"import",
"argparse",
"import",
"traceback",
"class",
"HelpfulParser",
"(",
"argparse",
".",
"ArgumentParser",
")",
":",
"def",
"error",
"(",
"self",
",",
"message",
",",
"print_help",
"=",
"False",
")",
":",
"if",
"'too few arguments'",
"in",
"message",
":",
"sys",
".",
"argv",
".",
"insert",
"(",
"0",
",",
"os",
".",
"path",
".",
"basename",
"(",
"sys",
".",
"argv",
".",
"pop",
"(",
"0",
")",
")",
")",
"message",
"=",
"(",
"\"%s. Try getting help with `%s -h`\"",
"%",
"(",
"message",
",",
"\" \"",
".",
"join",
"(",
"sys",
".",
"argv",
")",
")",
")",
"if",
"print_help",
":",
"self",
".",
"print_help",
"(",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nerror: %s\\n'",
"%",
"message",
")",
"sys",
".",
"exit",
"(",
"2",
")",
"parser",
"=",
"HelpfulParser",
"(",
"prog",
"=",
"NAMESPACE",
",",
"description",
"=",
"__doc__",
".",
"splitlines",
"(",
")",
"[",
"0",
"]",
",",
"epilog",
"=",
"\"\\n\"",
".",
"join",
"(",
"__doc__",
".",
"splitlines",
"(",
")",
"[",
"1",
":",
"]",
")",
",",
"formatter_class",
"=",
"argparse",
".",
"ArgumentDefaultsHelpFormatter",
",",
")",
"version_string",
"=",
"'version %s'",
"%",
"fastfood",
".",
"__version__",
"parser",
".",
"description",
"=",
"'%s ( %s )'",
"%",
"(",
"parser",
".",
"description",
",",
"version_string",
")",
"# version_group = subparsers.add_group()",
"version_group",
"=",
"parser",
".",
"add_argument_group",
"(",
"title",
"=",
"'version info'",
",",
"description",
"=",
"'Use these arguments to get version info.'",
")",
"vers_arg",
"=",
"version_group",
".",
"add_argument",
"(",
"'-V'",
",",
"'--version'",
",",
"action",
"=",
"'version'",
",",
"help",
"=",
"\"Return the current fastfood version.\"",
",",
"version",
"=",
"'%s %s'",
"%",
"(",
"parser",
".",
"prog",
",",
"version_string",
")",
")",
"class",
"LatestVersionAction",
"(",
"vers_arg",
".",
"__class__",
")",
":",
"def",
"__call__",
"(",
"self",
",",
"prsr",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"info",
"=",
"_release_info",
"(",
")",
"vers",
"=",
"info",
"[",
"'info'",
"]",
"[",
"'version'",
"]",
"release",
"=",
"info",
"[",
"'releases'",
"]",
"[",
"vers",
"]",
"[",
"0",
"]",
"uploaded",
"=",
"datetime",
".",
"strptime",
"(",
"release",
"[",
"'upload_time'",
"]",
",",
"'%Y-%m-%dT%H:%M:%S'",
")",
"sym",
"=",
"EXCLAIM",
"if",
"vers",
"!=",
"fastfood",
".",
"__version__",
"else",
"CHECK",
"message",
"=",
"u\"{} fastfood version {} uploaded {}\\n\"",
"message",
"=",
"message",
".",
"format",
"(",
"sym",
",",
"vers",
",",
"uploaded",
".",
"ctime",
"(",
")",
")",
"prsr",
".",
"exit",
"(",
"message",
"=",
"message",
")",
"version_group",
".",
"add_argument",
"(",
"'-L'",
",",
"'--latest'",
",",
"action",
"=",
"LatestVersionAction",
",",
"help",
"=",
"\"Lookup the latest relase from PyPI.\"",
")",
"verbose",
"=",
"parser",
".",
"add_mutually_exclusive_group",
"(",
")",
"verbose",
".",
"add_argument",
"(",
"'-v'",
",",
"dest",
"=",
"'loglevel'",
",",
"action",
"=",
"'store_const'",
",",
"const",
"=",
"logging",
".",
"INFO",
",",
"help",
"=",
"\"Set log-level to INFO.\"",
")",
"verbose",
".",
"add_argument",
"(",
"'-vv'",
",",
"dest",
"=",
"'loglevel'",
",",
"action",
"=",
"'store_const'",
",",
"const",
"=",
"logging",
".",
"DEBUG",
",",
"help",
"=",
"\"Set log-level to DEBUG.\"",
")",
"parser",
".",
"set_defaults",
"(",
"loglevel",
"=",
"logging",
".",
"WARNING",
")",
"home",
"=",
"os",
".",
"getenv",
"(",
"'HOME'",
")",
"or",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~'",
")",
"or",
"os",
".",
"getcwd",
"(",
")",
"parser",
".",
"add_argument",
"(",
"'--template-pack'",
",",
"help",
"=",
"'template pack location'",
",",
"default",
"=",
"getenv",
"(",
"'template_pack'",
",",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"'.fastfood'",
")",
")",
")",
"parser",
".",
"add_argument",
"(",
"'--cookbooks'",
",",
"help",
"=",
"'cookbooks directory'",
",",
"default",
"=",
"getenv",
"(",
"'cookbooks'",
",",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"'cookbooks'",
")",
")",
")",
"subparsers",
"=",
"parser",
".",
"add_subparsers",
"(",
"dest",
"=",
"'_subparsers'",
",",
"title",
"=",
"'fastfood commands'",
",",
"description",
"=",
"'operations...'",
",",
"help",
"=",
"'...'",
")",
"#",
"# `fastfood list`",
"#",
"list_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'list'",
",",
"help",
"=",
"'List available stencils'",
",",
"formatter_class",
"=",
"argparse",
".",
"ArgumentDefaultsHelpFormatter",
")",
"list_parser",
".",
"add_argument",
"(",
"'stencil_set'",
",",
"nargs",
"=",
"'?'",
",",
"help",
"=",
"\"Stencil set to list stencils from\"",
")",
"list_parser",
".",
"set_defaults",
"(",
"func",
"=",
"_fastfood_list",
")",
"#",
"# `fastfood show <stencil_set>`",
"#",
"show_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'show'",
",",
"help",
"=",
"'Show stencil set information'",
",",
"formatter_class",
"=",
"argparse",
".",
"ArgumentDefaultsHelpFormatter",
")",
"show_parser",
".",
"add_argument",
"(",
"'stencil_set'",
",",
"help",
"=",
"\"Stencil set to list stencils from\"",
")",
"show_parser",
".",
"set_defaults",
"(",
"func",
"=",
"_fastfood_show",
")",
"#",
"# `fastfood build`",
"#",
"build_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'build'",
",",
"help",
"=",
"'Create or update a cookbook using a config'",
",",
"formatter_class",
"=",
"argparse",
".",
"ArgumentDefaultsHelpFormatter",
")",
"build_parser",
".",
"add_argument",
"(",
"'config_file'",
",",
"help",
"=",
"\"JSON config file\"",
")",
"build_parser",
".",
"add_argument",
"(",
"'--force'",
",",
"'-f'",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"\"Overwrite existing files.\"",
")",
"build_parser",
".",
"set_defaults",
"(",
"func",
"=",
"_fastfood_build",
")",
"setattr",
"(",
"_LOCAL",
",",
"'argparser'",
",",
"parser",
")",
"if",
"not",
"argv",
":",
"argv",
"=",
"None",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"args",
"=",
"argv",
")",
"if",
"hasattr",
"(",
"args",
",",
"'options'",
")",
":",
"args",
".",
"options",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"args",
".",
"options",
"}",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"args",
".",
"loglevel",
")",
"try",
":",
"args",
".",
"func",
"(",
"args",
")",
"except",
"exc",
".",
"FastfoodError",
"as",
"err",
":",
"title",
"=",
"exc",
".",
"get_friendly_title",
"(",
"err",
")",
"print",
"(",
"'%s %s: %s'",
"%",
"(",
"RED_X",
",",
"title",
",",
"str",
"(",
"err",
")",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"stderr",
".",
"flush",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"Exception",
"as",
"err",
":",
"print",
"(",
"'%s Unexpected error. Please report this traceback.'",
"%",
"INTERROBANG",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"traceback",
".",
"print_exc",
"(",
")",
"# todo: tracack in -v or -vv mode?",
"sys",
".",
"stderr",
".",
"flush",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"KeyboardInterrupt",
":",
"sys",
".",
"exit",
"(",
"\"\\nStahp\"",
")"
] | fastfood command line interface. | [
"fastfood",
"command",
"line",
"interface",
"."
] | python | train |
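Entry-point sketch; the subcommands mirror the parser built above (arguments are illustrative):

if __name__ == '__main__':
    main(['-v', 'list'])   # or: main(['build', 'config.json', '--force'])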
annoviko/pyclustering | pyclustering/nnet/sync.py | https://github.com/annoviko/pyclustering/blob/98aa0dd89fd36f701668fb1eb29c8fb5662bf7d0/pyclustering/nnet/sync.py#L120-L128 | def output(self):
"""!
@brief (list) Returns output dynamic of the Sync network (phase coordinates of each oscillator in the network) during simulation.
"""
if ( (self._ccore_sync_dynamic_pointer is not None) and ( (self._dynamic is None) or (len(self._dynamic) == 0) ) ):
self._dynamic = wrapper.sync_dynamic_get_output(self._ccore_sync_dynamic_pointer);
return self._dynamic; | [
"def",
"output",
"(",
"self",
")",
":",
"if",
"(",
"(",
"self",
".",
"_ccore_sync_dynamic_pointer",
"is",
"not",
"None",
")",
"and",
"(",
"(",
"self",
".",
"_dynamic",
"is",
"None",
")",
"or",
"(",
"len",
"(",
"self",
".",
"_dynamic",
")",
"==",
"0",
")",
")",
")",
":",
"self",
".",
"_dynamic",
"=",
"wrapper",
".",
"sync_dynamic_get_output",
"(",
"self",
".",
"_ccore_sync_dynamic_pointer",
")",
"return",
"self",
".",
"_dynamic"
] | !
@brief (list) Returns output dynamic of the Sync network (phase coordinates of each oscillator in the network) during simulation. | [
"!"
] | python | valid |
obulpathi/cdn-fastly-python | fastly/__init__.py | https://github.com/obulpathi/cdn-fastly-python/blob/db2564b047e8af4bce72c3b88d6c27d3d0291425/fastly/__init__.py#L608-L611 | def get_request_setting(self, service_id, version_number, name):
"""Gets the specified Request Settings object."""
content = self._fetch("/service/%s/version/%d/request_settings/%s" % (service_id, version_number, name))
return FastlyRequestSetting(self, content) | [
"def",
"get_request_setting",
"(",
"self",
",",
"service_id",
",",
"version_number",
",",
"name",
")",
":",
"content",
"=",
"self",
".",
"_fetch",
"(",
"\"/service/%s/version/%d/request_settings/%s\"",
"%",
"(",
"service_id",
",",
"version_number",
",",
"name",
")",
")",
"return",
"FastlyRequestSetting",
"(",
"self",
",",
"content",
")"
] | Gets the specified Request Settings object. | [
"Gets",
"the",
"specified",
"Request",
"Settings",
"object",
"."
] | python | train |
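A hedged usage sketch, assuming `api` is an already-authenticated client instance exposing the method above (IDs and names are illustrative):

setting = api.get_request_setting("SU1Z0isxPaozGVKXdv0eY", 1, "force-tls")
print(setting)   # a FastlyRequestSetting built from the decoded response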
fastai/fastai | fastai/callbacks/mlflow.py | https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/mlflow.py#L16-L24 | def on_train_begin(self, **kwargs: Any) -> None:
"Prepare MLflow experiment and log params"
self.client = mlflow.tracking.MlflowClient(self.uri)
exp = self.client.get_experiment_by_name(self.exp_name)
self.exp_id = self.client.create_experiment(self.exp_name) if exp is None else exp.experiment_id
run = self.client.create_run(experiment_id=self.exp_id)
self.run = run.info.run_uuid
for k,v in self.params.items():
self.client.log_param(run_id=self.run, key=k, value=v) | [
"def",
"on_train_begin",
"(",
"self",
",",
"*",
"*",
"kwargs",
":",
"Any",
")",
"->",
"None",
":",
"self",
".",
"client",
"=",
"mlflow",
".",
"tracking",
".",
"MlflowClient",
"(",
"self",
".",
"uri",
")",
"exp",
"=",
"self",
".",
"client",
".",
"get_experiment_by_name",
"(",
"self",
".",
"exp_name",
")",
"self",
".",
"exp_id",
"=",
"self",
".",
"client",
".",
"create_experiment",
"(",
"self",
".",
"exp_name",
")",
"if",
"exp",
"is",
"None",
"else",
"exp",
".",
"experiment_id",
"run",
"=",
"self",
".",
"client",
".",
"create_run",
"(",
"experiment_id",
"=",
"self",
".",
"exp_id",
")",
"self",
".",
"run",
"=",
"run",
".",
"info",
".",
"run_uuid",
"for",
"k",
",",
"v",
"in",
"self",
".",
"params",
".",
"items",
"(",
")",
":",
"self",
".",
"client",
".",
"log_param",
"(",
"run_id",
"=",
"self",
".",
"run",
",",
"key",
"=",
"k",
",",
"value",
"=",
"v",
")"
] | Prepare MLflow experiment and log params | [
"Prepare",
"MLflow",
"experiment",
"and",
"log",
"params"
] | python | train |
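The same MlflowClient pattern, standalone; a sketch assuming a tracking server at a local URI (experiment name and params are illustrative):

import mlflow

client = mlflow.tracking.MlflowClient("http://localhost:5000")
exp = client.get_experiment_by_name("demo")
exp_id = client.create_experiment("demo") if exp is None else exp.experiment_id
run = client.create_run(experiment_id=exp_id)
for k, v in {"lr": 1e-3, "bs": 64}.items():
    client.log_param(run_id=run.info.run_uuid, key=k, value=v)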
20c/facsimile | facsimile/base.py | https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/base.py#L294-L376 | def check_config(self):
"""
called after config was modified to sanity check
raises on error
"""
# sanity checks - no config access past here
if not getattr(self, 'stages', None):
raise NotImplementedError("member variable 'stages' must be defined")
# start at stage
if self.__start:
self.__stage_start = self.find_stage(self.__start)
else:
self.__stage_start = 0
# end at stage
if self.__end:
self.__stage_end = self.find_stage(self.__end) + 1
self.opt_end = self.__end
else:
self.__stage_end = len(self.stages)
# only stage
if self.__only:
if self.__start or self.__end:
raise Exception(
"stage option 'only' cannot be used with start or end")
self.__stage_start = self.find_stage(self.__only)
self.__stage_end = self.__stage_start + 1
if self.__devel:
self.__devel = True
# force deploy skip
if self.__stage_end >= len(self.stages):
self.status_msg("removing deploy stage for development build")
# XXX self.__stage_end = self.__stage_end - 1
if self.stage_start >= self.stage_end:
raise Exception("start and end produce no stages")
if self.bits not in [32, 64]:
raise Exception(
"can't do a %d bit build: unknown build process" % self.bits)
if self.bits == 64 and not self.is_64b:
raise Exception(
"this machine is not 64 bit, cannot perform 64 bit build")
if self.system == 'windows':
self.compilertag = 'vc10'
elif self.system == 'linux':
self.compilertag = 'gcc44'
else:
raise RuntimeError("can't decide compilertag on " + self.system)
self.build_suffix = ''
if not self.is_unixy:
if self.__static:
runtime = 'MT'
else:
runtime = 'MD'
if self.__release:
self.configuration_name = 'Release'
else:
runtime += 'd'
self.configuration_name = 'Debug'
self.build_suffix = '-' + runtime
self.runtime = runtime
else:
self.configuration_name = 'CFNAME_INVALID_ON_LINUX'
self.runtime = 'RUNTIME_INVALID_ON_LINUX'
if self.test_config != '-':
self.test_config = os.path.abspath(self.test_config)
# split version
if self.version:
ver = self.version.split('.')
self.version_major = int(ver[0])
self.version_minor = int(ver[1])
self.version_patch = int(ver[2])
if(len(ver) == 4):
self.version_build = int(ver[3]) | [
"def",
"check_config",
"(",
"self",
")",
":",
"# sanity checks - no config access past here",
"if",
"not",
"getattr",
"(",
"self",
",",
"'stages'",
",",
"None",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"member variable 'stages' must be defined\"",
")",
"# start at stage",
"if",
"self",
".",
"__start",
":",
"self",
".",
"__stage_start",
"=",
"self",
".",
"find_stage",
"(",
"self",
".",
"__start",
")",
"else",
":",
"self",
".",
"__stage_start",
"=",
"0",
"# end at stage",
"if",
"self",
".",
"__end",
":",
"self",
".",
"__stage_end",
"=",
"self",
".",
"find_stage",
"(",
"self",
".",
"__end",
")",
"+",
"1",
"self",
".",
"opt_end",
"=",
"self",
".",
"__end",
"else",
":",
"self",
".",
"__stage_end",
"=",
"len",
"(",
"self",
".",
"stages",
")",
"# only stage",
"if",
"self",
".",
"__only",
":",
"if",
"self",
".",
"__start",
"or",
"self",
".",
"__end",
":",
"raise",
"Exception",
"(",
"\"stage option 'only' cannot be used with start or end\"",
")",
"self",
".",
"__stage_start",
"=",
"self",
".",
"find_stage",
"(",
"self",
".",
"__only",
")",
"self",
".",
"__stage_end",
"=",
"self",
".",
"__stage_start",
"+",
"1",
"if",
"self",
".",
"__devel",
":",
"self",
".",
"__devel",
"=",
"True",
"# force deploy skip",
"if",
"self",
".",
"__stage_end",
">=",
"len",
"(",
"self",
".",
"stages",
")",
":",
"self",
".",
"status_msg",
"(",
"\"removing deploy stage for development build\"",
")",
"# XXX self.__stage_end = self.__stage_end - 1",
"if",
"self",
".",
"stage_start",
">=",
"self",
".",
"stage_end",
":",
"raise",
"Exception",
"(",
"\"start and end produce no stages\"",
")",
"if",
"self",
".",
"bits",
"not",
"in",
"[",
"32",
",",
"64",
"]",
":",
"raise",
"Exception",
"(",
"\"can't do a %d bit build: unknown build process\"",
"%",
"self",
".",
"bits",
")",
"if",
"self",
".",
"bits",
"==",
"64",
"and",
"not",
"self",
".",
"is_64b",
":",
"raise",
"Exception",
"(",
"\"this machine is not 64 bit, cannot perform 64 bit build\"",
")",
"if",
"self",
".",
"system",
"==",
"'windows'",
":",
"self",
".",
"compilertag",
"=",
"'vc10'",
"elif",
"self",
".",
"system",
"==",
"'linux'",
":",
"self",
".",
"compilertag",
"=",
"'gcc44'",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"can't decide compilertag on \"",
"+",
"self",
".",
"system",
")",
"self",
".",
"build_suffix",
"=",
"''",
"if",
"not",
"self",
".",
"is_unixy",
":",
"if",
"self",
".",
"__static",
":",
"runtime",
"=",
"'MT'",
"else",
":",
"runtime",
"=",
"'MD'",
"if",
"self",
".",
"__release",
":",
"self",
".",
"configuration_name",
"=",
"'Release'",
"else",
":",
"runtime",
"+=",
"'d'",
"self",
".",
"configuration_name",
"=",
"'Debug'",
"self",
".",
"build_suffix",
"=",
"'-'",
"+",
"runtime",
"self",
".",
"runtime",
"=",
"runtime",
"else",
":",
"self",
".",
"configuration_name",
"=",
"'CFNAME_INVALID_ON_LINUX'",
"self",
".",
"runtime",
"=",
"'RUNTIME_INVALID_ON_LINUX'",
"if",
"self",
".",
"test_config",
"!=",
"'-'",
":",
"self",
".",
"test_config",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"test_config",
")",
"# split version",
"if",
"self",
".",
"version",
":",
"ver",
"=",
"self",
".",
"version",
".",
"split",
"(",
"'.'",
")",
"self",
".",
"version_major",
"=",
"int",
"(",
"ver",
"[",
"0",
"]",
")",
"self",
".",
"version_minor",
"=",
"int",
"(",
"ver",
"[",
"1",
"]",
")",
"self",
".",
"version_patch",
"=",
"int",
"(",
"ver",
"[",
"2",
"]",
")",
"if",
"(",
"len",
"(",
"ver",
")",
"==",
"4",
")",
":",
"self",
".",
"version_build",
"=",
"int",
"(",
"ver",
"[",
"3",
"]",
")"
] | called after config was modified to sanity check
raises on error | [
"called",
"after",
"config",
"was",
"modified",
"to",
"sanity",
"check",
"raises",
"on",
"error"
] | python | train |
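The version handling at the end splits dotted strings positionally; a tiny standalone sketch:

ver = "1.2.3.4".split('.')
major, minor, patch = int(ver[0]), int(ver[1]), int(ver[2])
build = int(ver[3]) if len(ver) == 4 else None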
HazyResearch/metal | metal/label_model/label_model.py | https://github.com/HazyResearch/metal/blob/c24e3772e25ac6d0917b8b7af4c1bcb92928f84a/metal/label_model/label_model.py#L41-L62 | def _create_L_ind(self, L):
"""Convert a label matrix with labels in 0...k to a one-hot format
Args:
L: An [n,m] scipy.sparse label matrix with values in {0,1,...,k}
Returns:
L_ind: An [n,m*k] dense np.ndarray with values in {0,1}
Note that no column is required for 0 (abstain) labels.
"""
# TODO: Update LabelModel to keep L variants as sparse matrices
# throughout and remove this line.
if issparse(L):
L = L.todense()
L_ind = np.zeros((self.n, self.m * self.k))
for y in range(1, self.k + 1):
# A[x::y] slices A starting at x at intervals of y
# e.g., np.arange(9)[0::3] == np.array([0,3,6])
L_ind[:, (y - 1) :: self.k] = np.where(L == y, 1, 0)
return L_ind | [
"def",
"_create_L_ind",
"(",
"self",
",",
"L",
")",
":",
"# TODO: Update LabelModel to keep L variants as sparse matrices",
"# throughout and remove this line.",
"if",
"issparse",
"(",
"L",
")",
":",
"L",
"=",
"L",
".",
"todense",
"(",
")",
"L_ind",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"n",
",",
"self",
".",
"m",
"*",
"self",
".",
"k",
")",
")",
"for",
"y",
"in",
"range",
"(",
"1",
",",
"self",
".",
"k",
"+",
"1",
")",
":",
"# A[x::y] slices A starting at x at intervals of y",
"# e.g., np.arange(9)[0::3] == np.array([0,3,6])",
"L_ind",
"[",
":",
",",
"(",
"y",
"-",
"1",
")",
":",
":",
"self",
".",
"k",
"]",
"=",
"np",
".",
"where",
"(",
"L",
"==",
"y",
",",
"1",
",",
"0",
")",
"return",
"L_ind"
] | Convert a label matrix with labels in 0...k to a one-hot format
Args:
L: An [n,m] scipy.sparse label matrix with values in {0,1,...,k}
Returns:
L_ind: An [n,m*k] dense np.ndarray with values in {0,1}
Note that no column is required for 0 (abstain) labels. | [
"Convert",
"a",
"label",
"matrix",
"with",
"labels",
"in",
"0",
"...",
"k",
"to",
"a",
"one",
"-",
"hot",
"format"
] | python | train |
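A worked example of the one-hot layout outside the class (n=2 data points, m=2 labeling functions, k=2 classes; 0 is abstain):

import numpy as np

L = np.array([[0, 2],
              [1, 2]])
n, m, k = 2, 2, 2
L_ind = np.zeros((n, m * k))
for y in range(1, k + 1):
    L_ind[:, (y - 1)::k] = np.where(L == y, 1, 0)
# L_ind == [[0., 0., 0., 1.],
#           [1., 0., 0., 1.]]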
rpcope1/PythonConfluenceAPI | PythonConfluenceAPI/cfapi.py | https://github.com/rpcope1/PythonConfluenceAPI/blob/b7f0ca2a390f964715fdf3a60b5b0c5ef7116d40/PythonConfluenceAPI/cfapi.py#L69-L106 | def _service_request(self, request_type, sub_uri, params=None, callback=None,
raise_for_status=True, raw=False, **kwargs):
"""
Base method for handling HTTP requests via the current requests session.
:param request_type: The request type as a string (e.g. "POST", "GET", "PUT", etc.)
:param sub_uri: The REST end point (sub-uri) to communicate with.
:param params: (Optional) HTTP Request parameters. Default: none
:param callback: (Optional) A callback function to be executed on the resulting requests response.
This synchronous implementation will return the results of the callback.
Default: None. This method returns either the decoded JSON or the raw request content.
:param raise_for_status: (Optional) When set True, we raise requests.HTTPError on 4xx or 5xx status. When
set False, non-2xx/3xx status code is ignored. Default: True
:param raw: (Optional) If no callback is set, return the raw content from the request if this is set True.
If False, the method attempts to parse the request as JSON data and return the results.
Default: False
:param kwargs: Additional parameters to pass to the session request call.
:return: The concurrent.futures object that holds the future for the API method call.
"""
api_logger.debug("Sending request: {} ({})".format(sub_uri, request_type))
if not self.session:
self._start_http_session()
uri = urljoin(self.uri_base, sub_uri)
if params:
kwargs.update(params=params)
if callback:
def base_callback(_, response):
if raise_for_status:
response.raise_for_status()
response.encoding = 'utf-8'
return callback(response)
else:
def base_callback(_, response):
if raise_for_status:
response.raise_for_status()
response.encoding = 'utf-8'
return response.content if raw else json.loads(response.text)
response_future = self.session.request(request_type, uri, background_callback=base_callback, **kwargs)
return response_future | [
"def",
"_service_request",
"(",
"self",
",",
"request_type",
",",
"sub_uri",
",",
"params",
"=",
"None",
",",
"callback",
"=",
"None",
",",
"raise_for_status",
"=",
"True",
",",
"raw",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"api_logger",
".",
"debug",
"(",
"\"Sending request: {} ({})\"",
".",
"format",
"(",
"sub_uri",
",",
"request_type",
")",
")",
"if",
"not",
"self",
".",
"session",
":",
"self",
".",
"_start_http_session",
"(",
")",
"uri",
"=",
"urljoin",
"(",
"self",
".",
"uri_base",
",",
"sub_uri",
")",
"if",
"params",
":",
"kwargs",
".",
"update",
"(",
"params",
"=",
"params",
")",
"if",
"callback",
":",
"def",
"base_callback",
"(",
"_",
",",
"response",
")",
":",
"if",
"raise_for_status",
":",
"response",
".",
"raise_for_status",
"(",
")",
"response",
".",
"encoding",
"=",
"'utf-8'",
"return",
"callback",
"(",
"response",
")",
"else",
":",
"def",
"base_callback",
"(",
"_",
",",
"response",
")",
":",
"if",
"raise_for_status",
":",
"response",
".",
"raise_for_status",
"(",
")",
"response",
".",
"encoding",
"=",
"'utf-8'",
"return",
"response",
".",
"content",
"if",
"raw",
"else",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
"response_future",
"=",
"self",
".",
"session",
".",
"request",
"(",
"request_type",
",",
"uri",
",",
"background_callback",
"=",
"base_callback",
",",
"*",
"*",
"kwargs",
")",
"return",
"response_future"
] | Base method for handling HTTP requests via the current requests session.
:param request_type: The request type as a string (e.g. "POST", "GET", "PUT", etc.)
:param sub_uri: The REST end point (sub-uri) to communicate with.
:param params: (Optional) HTTP Request parameters. Default: none
:param callback: (Optional) A callback function to be excuted on the resulting requests response.
This synchronous implementation will return the results of the callback.
Default: None. This method returns either the decoded JSON or the raw request content.
:param raise_for_status: (Optional) When set True, we raise requests.HTTPError on 4xx or 5xx status. When
set False, non-2xx/3xx status code is ignored. Default: True
:param raw: (Optional) If no callback is set, return the raw content from the request if this is set True.
If False, the method attempts to parse the request as JSON data and return the resutls.
Default: False
:param kwargs: Additional parameters to pass to the session request call.
:return: The concurrent.futures object that holds the future for the API method call. | [
"Base",
"method",
"for",
"handling",
"HTTP",
"requests",
"via",
"the",
"current",
"requests",
"session",
".",
":",
"param",
"request_type",
":",
"The",
"request",
"type",
"as",
"a",
"string",
"(",
"e",
".",
"g",
".",
"POST",
"GET",
"PUT",
"etc",
".",
")",
":",
"param",
"sub_uri",
":",
"The",
"REST",
"end",
"point",
"(",
"sub",
"-",
"uri",
")",
"to",
"communicate",
"with",
".",
":",
"param",
"params",
":",
"(",
"Optional",
")",
"HTTP",
"Request",
"parameters",
".",
"Default",
":",
"none",
":",
"param",
"callback",
":",
"(",
"Optional",
")",
"A",
"callback",
"function",
"to",
"be",
"excuted",
"on",
"the",
"resulting",
"requests",
"response",
".",
"This",
"synchronous",
"implementation",
"will",
"return",
"the",
"results",
"of",
"the",
"callback",
".",
"Default",
":",
"None",
".",
"This",
"method",
"returns",
"either",
"the",
"decoded",
"JSON",
"or",
"the",
"raw",
"request",
"content",
".",
":",
"param",
"raise_for_status",
":",
"(",
"Optional",
")",
"When",
"set",
"True",
"we",
"raise",
"requests",
".",
"HTTPError",
"on",
"4xx",
"or",
"5xx",
"status",
".",
"When",
"set",
"False",
"non",
"-",
"2xx",
"/",
"3xx",
"status",
"code",
"is",
"ignored",
".",
"Default",
":",
"True",
":",
"param",
"raw",
":",
"(",
"Optional",
")",
"If",
"no",
"callback",
"is",
"set",
"return",
"the",
"raw",
"content",
"from",
"the",
"request",
"if",
"this",
"is",
"set",
"True",
".",
"If",
"False",
"the",
"method",
"attempts",
"to",
"parse",
"the",
"request",
"as",
"JSON",
"data",
"and",
"return",
"the",
"resutls",
".",
"Default",
":",
"False",
":",
"param",
"kwargs",
":",
"Additional",
"parameters",
"to",
"pass",
"to",
"the",
"session",
"request",
"call",
".",
":",
"return",
":",
"The",
"concurrent",
".",
"futures",
"object",
"that",
"holds",
"the",
"future",
"for",
"the",
"API",
"method",
"call",
"."
] | python | train |
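A hedged call sketch through a client instance `api` (endpoint and params are illustrative; exactly what the future resolves to depends on the requests-futures version in use):

future = api._service_request("GET", "content/12345",
                              params={"expand": "body.storage"})
result = future.result()   # completes once the background callback has run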
peterbrittain/asciimatics | asciimatics/screen.py | https://github.com/peterbrittain/asciimatics/blob/f471427d7786ce2d5f1eeb2dae0e67d19e46e085/asciimatics/screen.py#L1465-L1474 | def _unhandled_event_default(event):
"""
Default unhandled event handler for handling simple scene navigation.
"""
if isinstance(event, KeyboardEvent):
c = event.key_code
if c in (ord("X"), ord("x"), ord("Q"), ord("q")):
raise StopApplication("User terminated app")
if c in (ord(" "), ord("\n"), ord("\r")):
raise NextScene() | [
"def",
"_unhandled_event_default",
"(",
"event",
")",
":",
"if",
"isinstance",
"(",
"event",
",",
"KeyboardEvent",
")",
":",
"c",
"=",
"event",
".",
"key_code",
"if",
"c",
"in",
"(",
"ord",
"(",
"\"X\"",
")",
",",
"ord",
"(",
"\"x\"",
")",
",",
"ord",
"(",
"\"Q\"",
")",
",",
"ord",
"(",
"\"q\"",
")",
")",
":",
"raise",
"StopApplication",
"(",
"\"User terminated app\"",
")",
"if",
"c",
"in",
"(",
"ord",
"(",
"\" \"",
")",
",",
"ord",
"(",
"\"\\n\"",
")",
",",
"ord",
"(",
"\"\\r\"",
")",
")",
":",
"raise",
"NextScene",
"(",
")"
] | Default unhandled event handler for handling simple scene navigation. | [
"Default",
"unhandled",
"event",
"handler",
"for",
"handling",
"simple",
"scene",
"navigation",
"."
] | python | train |
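Callers can extend the defaults with their own handler; a sketch that additionally quits on Escape (wired in via the `unhandled_input` argument of `play`):

from asciimatics.event import KeyboardEvent
from asciimatics.exceptions import StopApplication
from asciimatics.screen import Screen

def my_unhandled(event):
    # Quit on ESC as well; all other unhandled input is simply ignored here.
    if isinstance(event, KeyboardEvent) and event.key_code == Screen.KEY_ESCAPE:
        raise StopApplication("Escape pressed")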
codeinn/vcs | vcs/utils/diffs.py | https://github.com/codeinn/vcs/blob/e6cd94188e9c36d273411bf3adc0584ac6ab92a0/vcs/utils/diffs.py#L363-L370 | def raw_diff(self):
"""
Returns raw string as udiff
"""
udiff_copy = self.copy_iterator()
if self.__format == 'gitdiff':
udiff_copy = self._parse_gitdiff(udiff_copy)
return u''.join(udiff_copy) | [
"def",
"raw_diff",
"(",
"self",
")",
":",
"udiff_copy",
"=",
"self",
".",
"copy_iterator",
"(",
")",
"if",
"self",
".",
"__format",
"==",
"'gitdiff'",
":",
"udiff_copy",
"=",
"self",
".",
"_parse_gitdiff",
"(",
"udiff_copy",
")",
"return",
"u''",
".",
"join",
"(",
"udiff_copy",
")"
] | Returns raw string as udiff | [
"Returns",
"raw",
"string",
"as",
"udiff"
] | python | train |
RudolfCardinal/pythonlib | cardinal_pythonlib/sqlalchemy/list_types.py | https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/list_types.py#L271-L275 | def process_literal_param(self, value: Optional[List[int]],
dialect: Dialect) -> str:
"""Convert things on the way from Python to the database."""
retval = self._intlist_to_dbstr(value)
return retval | [
"def",
"process_literal_param",
"(",
"self",
",",
"value",
":",
"Optional",
"[",
"List",
"[",
"int",
"]",
"]",
",",
"dialect",
":",
"Dialect",
")",
"->",
"str",
":",
"retval",
"=",
"self",
".",
"_intlist_to_dbstr",
"(",
"value",
")",
"return",
"retval"
] | Convert things on the way from Python to the database. | [
"Convert",
"things",
"on",
"the",
"way",
"from",
"Python",
"to",
"the",
"database",
"."
] | python | train |
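The private `_intlist_to_dbstr` helper is not shown in the record; a plausible sketch assuming comma-separated storage (not the verbatim source):

def _intlist_to_dbstr(self, value):
    # None/empty -> empty string; otherwise "1,2,3"-style storage.
    if not value:
        return ''
    return ','.join(str(v) for v in value)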
AndrewAnnex/SpiceyPy | spiceypy/spiceypy.py | https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L9961-L9986 | def pxfrm2(frame_from, frame_to, etfrom, etto):
"""
Return the 3x3 matrix that transforms position vectors from one
specified frame at a specified epoch to another specified
frame at another specified epoch.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pxfrm2_c.html
:param frame_from: Name of the frame to transform from.
:type frame_from: str
:param frame_to: Name of the frame to transform to.
:type frame_to: str
:param etfrom: Evaluation time of frame_from.
:type etfrom: float
:param etto: Evaluation time of frame_to.
:type etto: float
:return: A position transformation matrix from frame_from to frame_to
:rtype: 3x3 Element Array of floats
"""
frame_from = stypes.stringToCharP(frame_from)
frame_to = stypes.stringToCharP(frame_to)
etfrom = ctypes.c_double(etfrom)
etto = ctypes.c_double(etto)
outmatrix = stypes.emptyDoubleMatrix()
libspice.pxfrm2_c(frame_from, frame_to, etfrom, etto, outmatrix)
return stypes.cMatrixToNumpy(outmatrix) | [
"def",
"pxfrm2",
"(",
"frame_from",
",",
"frame_to",
",",
"etfrom",
",",
"etto",
")",
":",
"frame_from",
"=",
"stypes",
".",
"stringToCharP",
"(",
"frame_from",
")",
"frame_to",
"=",
"stypes",
".",
"stringToCharP",
"(",
"frame_to",
")",
"etfrom",
"=",
"ctypes",
".",
"c_double",
"(",
"etfrom",
")",
"etto",
"=",
"ctypes",
".",
"c_double",
"(",
"etto",
")",
"outmatrix",
"=",
"stypes",
".",
"emptyDoubleMatrix",
"(",
")",
"libspice",
".",
"pxfrm2_c",
"(",
"frame_from",
",",
"frame_to",
",",
"etfrom",
",",
"etto",
",",
"outmatrix",
")",
"return",
"stypes",
".",
"cMatrixToNumpy",
"(",
"outmatrix",
")"
] | Return the 3x3 matrix that transforms position vectors from one
specified frame at a specified epoch to another specified
frame at another specified epoch.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pxfrm2_c.html
:param frame_from: Name of the frame to transform from.
:type frame_from: str
:param frame_to: Name of the frame to transform to.
:type frame_to: str
:param etfrom: Evaluation time of frame_from.
:type etfrom: float
:param etto: Evaluation time of frame_to.
:type etto: float
:return: A position transformation matrix from frame_from to frame_to
:rtype: 3x3 Element Array of floats | [
"Return",
"the",
"3x3",
"matrix",
"that",
"transforms",
"position",
"vectors",
"from",
"one",
"specified",
"frame",
"at",
"a",
"specified",
"epoch",
"to",
"another",
"specified",
"frame",
"at",
"another",
"specified",
"epoch",
"."
] | python | train |
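A usage sketch, assuming the needed SPICE kernels (a leapseconds kernel plus a PCK covering IAU_EARTH) were already loaded with spice.furnsh (times and frames are illustrative):

import spiceypy as spice

et0 = spice.str2et("2020-01-01T00:00:00")
# Rotate Earth-fixed position vectors at et0 into J2000 at et0 + 60 s.
m = spice.pxfrm2("IAU_EARTH", "J2000", et0, et0 + 60.0)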
quintusdias/glymur | glymur/jp2box.py | https://github.com/quintusdias/glymur/blob/8b8fb091130fff00f1028dc82219e69e3f9baf6d/glymur/jp2box.py#L929-L957 | def parse(cls, fptr, offset, length):
"""Parse component mapping box.
Parameters
----------
fptr : file
Open file object.
offset : int
Start position of box in bytes.
length : int
Length of the box in bytes.
Returns
-------
ComponentMappingBox
Instance of the current component mapping box.
"""
num_bytes = offset + length - fptr.tell()
num_components = int(num_bytes / 4)
read_buffer = fptr.read(num_bytes)
data = struct.unpack('>' + 'HBB' * num_components, read_buffer)
component_index = data[0:num_bytes:3]
mapping_type = data[1:num_bytes:3]
palette_index = data[2:num_bytes:3]
return cls(component_index, mapping_type, palette_index,
length=length, offset=offset) | [
"def",
"parse",
"(",
"cls",
",",
"fptr",
",",
"offset",
",",
"length",
")",
":",
"num_bytes",
"=",
"offset",
"+",
"length",
"-",
"fptr",
".",
"tell",
"(",
")",
"num_components",
"=",
"int",
"(",
"num_bytes",
"/",
"4",
")",
"read_buffer",
"=",
"fptr",
".",
"read",
"(",
"num_bytes",
")",
"data",
"=",
"struct",
".",
"unpack",
"(",
"'>'",
"+",
"'HBB'",
"*",
"num_components",
",",
"read_buffer",
")",
"component_index",
"=",
"data",
"[",
"0",
":",
"num_bytes",
":",
"3",
"]",
"mapping_type",
"=",
"data",
"[",
"1",
":",
"num_bytes",
":",
"3",
"]",
"palette_index",
"=",
"data",
"[",
"2",
":",
"num_bytes",
":",
"3",
"]",
"return",
"cls",
"(",
"component_index",
",",
"mapping_type",
",",
"palette_index",
",",
"length",
"=",
"length",
",",
"offset",
"=",
"offset",
")"
] | Parse component mapping box.
Parameters
----------
fptr : file
Open file object.
offset : int
Start position of box in bytes.
length : int
Length of the box in bytes.
Returns
-------
ComponentMappingBox
Instance of the current component mapping box. | [
"Parse",
"component",
"mapping",
"box",
"."
] | python | train |
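The strided slicing over the unpacked tuple is the core trick; a self-contained sketch with two fabricated CMAP entries:

import struct

buf = struct.pack('>HBBHBB', 0, 0, 1, 0, 1, 2)   # two (cmp, mtyp, pcol) triples
data = struct.unpack('>' + 'HBB' * 2, buf)
component_index = data[0::3]   # (0, 0)
mapping_type = data[1::3]      # (0, 1)
palette_index = data[2::3]     # (1, 2)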
awslabs/serverless-application-model | examples/apps/lex-book-trip-python/lambda_function.py | https://github.com/awslabs/serverless-application-model/blob/cccb0c96b5c91e53355ebc07e542467303a5eedd/examples/apps/lex-book-trip-python/lambda_function.py#L97-L113 | def generate_car_price(location, days, age, car_type):
"""
Generates a number within a reasonable range that might be expected for a car rental.
The price is fixed for a given location.
"""
car_types = ['economy', 'standard', 'midsize', 'full size', 'minivan', 'luxury']
base_location_cost = 0
for i in range(len(location)):
base_location_cost += ord(location.lower()[i]) - 97
age_multiplier = 1.10 if age < 25 else 1
# Select economy if car_type is not found
if car_type not in car_types:
car_type = car_types[0]
return days * ((100 + base_location_cost) + ((car_types.index(car_type) * 50) * age_multiplier)) | [
"def",
"generate_car_price",
"(",
"location",
",",
"days",
",",
"age",
",",
"car_type",
")",
":",
"car_types",
"=",
"[",
"'economy'",
",",
"'standard'",
",",
"'midsize'",
",",
"'full size'",
",",
"'minivan'",
",",
"'luxury'",
"]",
"base_location_cost",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"location",
")",
")",
":",
"base_location_cost",
"+=",
"ord",
"(",
"location",
".",
"lower",
"(",
")",
"[",
"i",
"]",
")",
"-",
"97",
"age_multiplier",
"=",
"1.10",
"if",
"age",
"<",
"25",
"else",
"1",
"# Select economy is car_type is not found",
"if",
"car_type",
"not",
"in",
"car_types",
":",
"car_type",
"=",
"car_types",
"[",
"0",
"]",
"return",
"days",
"*",
"(",
"(",
"100",
"+",
"base_location_cost",
")",
"+",
"(",
"(",
"car_types",
".",
"index",
"(",
"car_type",
")",
"*",
"50",
")",
"*",
"age_multiplier",
")",
")"
] | Generates a number within a reasonable range that might be expected for a car rental.
The price is fixed for a given location. | [
"Generates",
"a",
"number",
"within",
"a",
"reasonable",
"range",
"that",
"might",
"be",
"expected",
"for",
"a",
"flight",
".",
"The",
"price",
"is",
"fixed",
"for",
"a",
"given",
"pair",
"of",
"locations",
"."
] | python | train |
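A worked pricing example (values are illustrative):

price = generate_car_price("boston", 3, 30, "midsize")
# base_location_cost = 1+14+18+19+14+13 = 79; index("midsize") = 2; age >= 25
assert price == 3 * ((100 + 79) + 2 * 50)   # 837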
exa-analytics/exa | exa/core/editor.py | https://github.com/exa-analytics/exa/blob/40fb3c22b531d460dbc51e603de75b856cc28f0d/exa/core/editor.py#L88-L96 | def head(self, n=10):
"""
Display the top of the file.
Args:
n (int): Number of lines to display
"""
r = self.__repr__().split('\n')
print('\n'.join(r[:n]), end=' ') | [
"def",
"head",
"(",
"self",
",",
"n",
"=",
"10",
")",
":",
"r",
"=",
"self",
".",
"__repr__",
"(",
")",
".",
"split",
"(",
"'\\n'",
")",
"print",
"(",
"'\\n'",
".",
"join",
"(",
"r",
"[",
":",
"n",
"]",
")",
",",
"end",
"=",
"' '",
")"
] | Display the top of the file.
Args:
n (int): Number of lines to display | [
"Display",
"the",
"top",
"of",
"the",
"file",
"."
] | python | train |
tango-controls/pytango | tango/pytango_pprint.py | https://github.com/tango-controls/pytango/blob/9cf78c517c9cdc1081ff6d080a9646a740cc1d36/tango/pytango_pprint.py#L59-L62 | def __struct_params_s(obj, separator=', ', f=repr, fmt='%s = %s'):
"""method wrapper for printing all elements of a struct"""
s = separator.join([__single_param(obj, n, f, fmt) for n in dir(obj) if __inc_param(obj, n)])
return s | [
"def",
"__struct_params_s",
"(",
"obj",
",",
"separator",
"=",
"', '",
",",
"f",
"=",
"repr",
",",
"fmt",
"=",
"'%s = %s'",
")",
":",
"s",
"=",
"separator",
".",
"join",
"(",
"[",
"__single_param",
"(",
"obj",
",",
"n",
",",
"f",
",",
"fmt",
")",
"for",
"n",
"in",
"dir",
"(",
"obj",
")",
"if",
"__inc_param",
"(",
"obj",
",",
"n",
")",
"]",
")",
"return",
"s"
] | method wrapper for printing all elements of a struct | [
"method",
"wrapper",
"for",
"printing",
"all",
"elements",
"of",
"a",
"struct"
] | python | train |
lpantano/seqcluster | seqcluster/libs/annotation.py | https://github.com/lpantano/seqcluster/blob/774e23add8cd4fdc83d626cea3bd1f458e7d060d/seqcluster/libs/annotation.py#L9-L36 | def read_gtf_line(cols, field="name"):
"""parse gtf line to get class/name information"""
field = field.lower()
try:
group = cols[2]
attrs = cols[8].split(";")
name = [attr.strip().split(" ")[1] for attr in attrs if attr.strip().split(" ")[0].lower().endswith(field)]
if not name:
name = [attr.strip().split(" ")[1] for attr in attrs if attr.strip().split(" ")[0].lower().endswith("gene_id")]
if not name:
name = ["None"]
biotype = [attr.strip().split(" ")[1] for attr in attrs if attr.strip().split(" ")[0].lower().endswith("biotype")]
if biotype:
group = biotype[0]
c = cols[0]
s = int(cols[3])
e = int(cols[4])
st = cols[6]
return [c, s, e, st, group, name[0]]
except Exception as e:
logger.error(cols)
logger.error("File is not in correct format")
logger.error("Expect chr source feature start end . strand attributes")
logger.error("Attributes are 'gene_name SNCA; gene_id ENSG; '")
logger.error("The 3rd column is used as type of small RNA (like miRNA)")
logger.error("at least should contains '; *name NAME; '")
logger.error(e)
raise | [
"def",
"read_gtf_line",
"(",
"cols",
",",
"field",
"=",
"\"name\"",
")",
":",
"field",
"=",
"field",
".",
"lower",
"(",
")",
"try",
":",
"group",
"=",
"cols",
"[",
"2",
"]",
"attrs",
"=",
"cols",
"[",
"8",
"]",
".",
"split",
"(",
"\";\"",
")",
"name",
"=",
"[",
"attr",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\" \"",
")",
"[",
"1",
"]",
"for",
"attr",
"in",
"attrs",
"if",
"attr",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\" \"",
")",
"[",
"0",
"]",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"field",
")",
"]",
"if",
"not",
"name",
":",
"name",
"=",
"[",
"attr",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\" \"",
")",
"[",
"1",
"]",
"for",
"attr",
"in",
"attrs",
"if",
"attr",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\" \"",
")",
"[",
"0",
"]",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"\"gene_id\"",
")",
"]",
"if",
"not",
"name",
":",
"name",
"=",
"[",
"\"None\"",
"]",
"biotype",
"=",
"[",
"attr",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\" \"",
")",
"[",
"1",
"]",
"for",
"attr",
"in",
"attrs",
"if",
"attr",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\" \"",
")",
"[",
"0",
"]",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"\"biotype\"",
")",
"]",
"if",
"biotype",
":",
"group",
"=",
"biotype",
"[",
"0",
"]",
"c",
"=",
"cols",
"[",
"0",
"]",
"s",
"=",
"int",
"(",
"cols",
"[",
"3",
"]",
")",
"e",
"=",
"int",
"(",
"cols",
"[",
"4",
"]",
")",
"st",
"=",
"cols",
"[",
"6",
"]",
"return",
"[",
"c",
",",
"s",
",",
"e",
",",
"st",
",",
"group",
",",
"name",
"[",
"0",
"]",
"]",
"except",
"(",
"Exception",
",",
"e",
")",
":",
"logger",
".",
"error",
"(",
"cols",
")",
"logger",
".",
"error",
"(",
"\"File is not in correct format\"",
")",
"logger",
".",
"error",
"(",
"\"Expect chr source feature start end . strand attributes\"",
")",
"logger",
".",
"error",
"(",
"\"Attributes are 'gene_name SNCA; gene_id ENSG; '\"",
")",
"logger",
".",
"error",
"(",
"\"The 3rd column is used as type of small RNA (like miRNA)\"",
")",
"logger",
".",
"error",
"(",
"\"at least should contains '; *name NAME; '\"",
")",
"logger",
".",
"error",
"(",
"e",
")",
"raise"
] | parse gtf line to get class/name information | [
"parse",
"gtf",
"line",
"to",
"get",
"class",
"/",
"name",
"information"
] | python | train |
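A worked call on a fabricated GTF line (note the attribute values keep their quotes, since the parser splits on spaces only):

line = 'chr1\tsrc\tmiRNA\t100\t200\t.\t+\t.\tgene_id "ENSG0001"; gene_name "SNCA"'
read_gtf_line(line.split('\t'), field="name")
# -> ['chr1', 100, 200, '+', 'miRNA', '"SNCA"']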
douban/brownant | brownant/utils.py | https://github.com/douban/brownant/blob/3c7e6d30f67b8f0f8ca1f823ea3daed74e8725cd/brownant/utils.py#L4-L22 | def to_bytes_safe(text, encoding="utf-8"):
"""Convert the input value into bytes type.
If the input value is string type and could be encode as UTF-8 bytes, the
encoded value will be returned. Otherwise, the encoding has failed, the
origin value will be returned as well.
:param text: the input value which could be string or bytes.
:param encoding: the expected encoding be used while converting the string
input into bytes.
:rtype: :class:`~__builtin__.bytes`
"""
if not isinstance(text, (bytes, text_type)):
raise TypeError("must be string type")
if isinstance(text, text_type):
return text.encode(encoding)
return text | [
"def",
"to_bytes_safe",
"(",
"text",
",",
"encoding",
"=",
"\"utf-8\"",
")",
":",
"if",
"not",
"isinstance",
"(",
"text",
",",
"(",
"bytes",
",",
"text_type",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"must be string type\"",
")",
"if",
"isinstance",
"(",
"text",
",",
"text_type",
")",
":",
"return",
"text",
".",
"encode",
"(",
"encoding",
")",
"return",
"text"
] | Convert the input value into bytes type.
If the input value is string type and could be encode as UTF-8 bytes, the
encoded value will be returned. Otherwise, the encoding has failed, the
origin value will be returned as well.
:param text: the input value which could be string or bytes.
:param encoding: the expected encoding be used while converting the string
input into bytes.
:rtype: :class:`~__builtin__.bytes` | [
"Convert",
"the",
"input",
"value",
"into",
"bytes",
"type",
"."
] | python | train |
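Behavior at a glance (the unicode literal is illustrative):

to_bytes_safe(u"héllo")     # b'h\xc3\xa9llo' (str is UTF-8 encoded)
to_bytes_safe(b"already")   # b'already' (bytes pass through unchanged)
to_bytes_safe(42)           # raises TypeError("must be string type")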
BD2KGenomics/protect | attic/ProTECT.py | https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L301-L361 | def run_star(job, fastqs, univ_options, star_options):
"""
This module uses STAR to align the RNA fastqs to the reference
ARGUMENTS
1. fastqs: REFER RETURN VALUE of run_cutadapt()
2. univ_options: Dict of universal arguments used by almost all tools
univ_options
+- 'dockerhub': <dockerhub to use>
3. star_options: Dict of parameters specific to STAR
star_options
|- 'index_tar': <JSid for the STAR index tarball>
+- 'n': <number of threads to allocate>
RETURN VALUES
1. output_files: Dict of aligned bams
output_files
|- 'rnaAligned.toTranscriptome.out.bam': <JSid>
+- 'rnaAligned.sortedByCoord.out.bam': Dict of genome bam + bai
|- 'rna_fix_pg_sorted.bam': <JSid>
+- 'rna_fix_pg_sorted.bam.bai': <JSid>
This module corresponds to node 9 on the tree
"""
assert star_options['type'] in ('star', 'starlong')
job.fileStore.logToMaster('Running STAR on %s' %univ_options['patient'])
work_dir = job.fileStore.getLocalTempDir()
input_files = {
'rna_cutadapt_1.fastq': fastqs['rna_cutadapt_1.fastq'],
'rna_cutadapt_2.fastq': fastqs['rna_cutadapt_2.fastq'],
'star_index.tar.gz': star_options['index_tar']}
input_files = get_files_from_filestore(job, input_files, work_dir,
docker=True)
parameters = ['--runThreadN', str(star_options['n']),
'--genomeDir', input_files['star_index'],
'--outFileNamePrefix', 'rna',
'--readFilesIn',
input_files['rna_cutadapt_1.fastq'],
input_files['rna_cutadapt_2.fastq'],
'--outSAMattributes', 'NH', 'HI', 'AS', 'NM', 'MD',
'--outSAMtype', 'BAM', 'SortedByCoordinate',
'--quantMode', 'TranscriptomeSAM',
'--outSAMunmapped', 'Within']
if star_options['type'] == 'star':
docker_call(tool='star', tool_parameters=parameters, work_dir=work_dir,
dockerhub=univ_options['dockerhub'])
else:
docker_call(tool='starlong', tool_parameters=parameters, work_dir=work_dir,
dockerhub=univ_options['dockerhub'])
output_files = defaultdict()
for bam_file in ['rnaAligned.toTranscriptome.out.bam',
'rnaAligned.sortedByCoord.out.bam']:
output_files[bam_file] = job.fileStore.writeGlobalFile('/'.join([
work_dir, bam_file]))
job.fileStore.deleteGlobalFile(fastqs['rna_cutadapt_1.fastq'])
job.fileStore.deleteGlobalFile(fastqs['rna_cutadapt_2.fastq'])
index_star = job.wrapJobFn(index_bamfile,
output_files['rnaAligned.sortedByCoord.out.bam'],
'rna', univ_options, disk='120G')
job.addChild(index_star)
output_files['rnaAligned.sortedByCoord.out.bam'] = index_star.rv()
return output_files | [
"def",
"run_star",
"(",
"job",
",",
"fastqs",
",",
"univ_options",
",",
"star_options",
")",
":",
"assert",
"star_options",
"[",
"'type'",
"]",
"in",
"(",
"'star'",
",",
"'starlong'",
")",
"job",
".",
"fileStore",
".",
"logToMaster",
"(",
"'Running STAR on %s'",
"%",
"univ_options",
"[",
"'patient'",
"]",
")",
"work_dir",
"=",
"job",
".",
"fileStore",
".",
"getLocalTempDir",
"(",
")",
"input_files",
"=",
"{",
"'rna_cutadapt_1.fastq'",
":",
"fastqs",
"[",
"'rna_cutadapt_1.fastq'",
"]",
",",
"'rna_cutadapt_2.fastq'",
":",
"fastqs",
"[",
"'rna_cutadapt_2.fastq'",
"]",
",",
"'star_index.tar.gz'",
":",
"star_options",
"[",
"'index_tar'",
"]",
"}",
"input_files",
"=",
"get_files_from_filestore",
"(",
"job",
",",
"input_files",
",",
"work_dir",
",",
"docker",
"=",
"True",
")",
"parameters",
"=",
"[",
"'--runThreadN'",
",",
"str",
"(",
"star_options",
"[",
"'n'",
"]",
")",
",",
"'--genomeDir'",
",",
"input_files",
"[",
"'star_index'",
"]",
",",
"'--outFileNamePrefix'",
",",
"'rna'",
",",
"'--readFilesIn'",
",",
"input_files",
"[",
"'rna_cutadapt_1.fastq'",
"]",
",",
"input_files",
"[",
"'rna_cutadapt_2.fastq'",
"]",
",",
"'--outSAMattributes'",
",",
"'NH'",
",",
"'HI'",
",",
"'AS'",
",",
"'NM'",
",",
"'MD'",
",",
"'--outSAMtype'",
",",
"'BAM'",
",",
"'SortedByCoordinate'",
",",
"'--quantMode'",
",",
"'TranscriptomeSAM'",
",",
"'--outSAMunmapped'",
",",
"'Within'",
"]",
"if",
"star_options",
"[",
"'type'",
"]",
"==",
"'star'",
":",
"docker_call",
"(",
"tool",
"=",
"'star'",
",",
"tool_parameters",
"=",
"parameters",
",",
"work_dir",
"=",
"work_dir",
",",
"dockerhub",
"=",
"univ_options",
"[",
"'dockerhub'",
"]",
")",
"else",
":",
"docker_call",
"(",
"tool",
"=",
"'starlong'",
",",
"tool_parameters",
"=",
"parameters",
",",
"work_dir",
"=",
"work_dir",
",",
"dockerhub",
"=",
"univ_options",
"[",
"'dockerhub'",
"]",
")",
"output_files",
"=",
"defaultdict",
"(",
")",
"for",
"bam_file",
"in",
"[",
"'rnaAligned.toTranscriptome.out.bam'",
",",
"'rnaAligned.sortedByCoord.out.bam'",
"]",
":",
"output_files",
"[",
"bam_file",
"]",
"=",
"job",
".",
"fileStore",
".",
"writeGlobalFile",
"(",
"'/'",
".",
"join",
"(",
"[",
"work_dir",
",",
"bam_file",
"]",
")",
")",
"job",
".",
"fileStore",
".",
"deleteGlobalFile",
"(",
"fastqs",
"[",
"'rna_cutadapt_1.fastq'",
"]",
")",
"job",
".",
"fileStore",
".",
"deleteGlobalFile",
"(",
"fastqs",
"[",
"'rna_cutadapt_2.fastq'",
"]",
")",
"index_star",
"=",
"job",
".",
"wrapJobFn",
"(",
"index_bamfile",
",",
"output_files",
"[",
"'rnaAligned.sortedByCoord.out.bam'",
"]",
",",
"'rna'",
",",
"univ_options",
",",
"disk",
"=",
"'120G'",
")",
"job",
".",
"addChild",
"(",
"index_star",
")",
"output_files",
"[",
"'rnaAligned.sortedByCoord.out.bam'",
"]",
"=",
"index_star",
".",
"rv",
"(",
")",
"return",
"output_files"
] | This module uses STAR to align the RNA fastqs to the reference
ARGUMENTS
1. fastqs: REFER RETURN VALUE of run_cutadapt()
2. univ_options: Dict of universal arguments used by almost all tools
univ_options
+- 'dockerhub': <dockerhub to use>
3. star_options: Dict of parameters specific to STAR
star_options
|- 'index_tar': <JSid for the STAR index tarball>
+- 'n': <number of threads to allocate>
RETURN VALUES
1. output_files: Dict of aligned bams
output_files
|- 'rnaAligned.toTranscriptome.out.bam': <JSid>
+- 'rnaAligned.sortedByCoord.out.bam': Dict of genome bam + bai
|- 'rna_fix_pg_sorted.bam': <JSid>
+- 'rna_fix_pg_sorted.bam.bai': <JSid>
This module corresponds to node 9 on the tree | [
"This",
"module",
"uses",
"STAR",
"to",
"align",
"the",
"RNA",
"fastqs",
"to",
"the",
"reference"
] | python | train |
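
A hedged sketch of wiring run_star into a Toil workflow; the fastq/option dictionaries and the jobstore path are placeholders (real runs pass Toil filestore IDs), and only the Toil entry points (Job.wrapJobFn, Job.Runner) are standard API:

    from toil.job import Job

    # hypothetical inputs -- real pipelines pass filestore IDs and full option dicts
    fastqs = {'rna_cutadapt_1.fastq': None, 'rna_cutadapt_2.fastq': None}
    univ_options = {'dockerhub': 'someuser', 'patient': 'test_patient'}
    star_options = {'index_tar': None, 'n': 8, 'type': 'star'}

    options = Job.Runner.getDefaultOptions('./jobstore')
    root = Job.wrapJobFn(run_star, fastqs, univ_options, star_options)
    Job.Runner.startToil(root, options)
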
wglass/lighthouse | lighthouse/zookeeper.py | https://github.com/wglass/lighthouse/blob/f4ce6550895acc31e433ede0c05d366718a3ffe5/lighthouse/zookeeper.py#L109-L126 | def handle_connection_change(self, state):
"""
Callback for handling changes in the kazoo client's connection state.
If the connection becomes lost or suspended, the `connected` Event
is cleared. Other given states imply that the connection is
established so `connected` is set.
"""
if state == client.KazooState.LOST:
if not self.shutdown.is_set():
logger.info("Zookeeper session lost!")
self.connected.clear()
elif state == client.KazooState.SUSPENDED:
logger.info("Zookeeper connection suspended!")
self.connected.clear()
else:
logger.info("Zookeeper connection (re)established.")
self.connected.set() | [
"def",
"handle_connection_change",
"(",
"self",
",",
"state",
")",
":",
"if",
"state",
"==",
"client",
".",
"KazooState",
".",
"LOST",
":",
"if",
"not",
"self",
".",
"shutdown",
".",
"is_set",
"(",
")",
":",
"logger",
".",
"info",
"(",
"\"Zookeeper session lost!\"",
")",
"self",
".",
"connected",
".",
"clear",
"(",
")",
"elif",
"state",
"==",
"client",
".",
"KazooState",
".",
"SUSPENDED",
":",
"logger",
".",
"info",
"(",
"\"Zookeeper connection suspended!\"",
")",
"self",
".",
"connected",
".",
"clear",
"(",
")",
"else",
":",
"logger",
".",
"info",
"(",
"\"Zookeeper connection (re)established.\"",
")",
"self",
".",
"connected",
".",
"set",
"(",
")"
] | Callback for handling changes in the kazoo client's connection state.
If the connection becomes lost or suspended, the `connected` Event
is cleared. Other given states imply that the connection is
established so `connected` is set. | [
"Callback",
"for",
"handling",
"changes",
"in",
"the",
"kazoo",
"client",
"s",
"connection",
"state",
"."
] | python | train |
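
A hedged sketch of registering a handler like the one above with a kazoo client; Watcher is a stand-in assumed to also carry the handle_connection_change method and a logger, only the two Events it touches are sketched:

    import threading
    from kazoo.client import KazooClient

    class Watcher:
        # assumed to also mix in handle_connection_change from the record above
        def __init__(self):
            self.connected = threading.Event()
            self.shutdown = threading.Event()

    watcher = Watcher()
    zk = KazooClient(hosts='127.0.0.1:2181')
    zk.add_listener(watcher.handle_connection_change)  # kazoo invokes it on state changes
    zk.start()
    watcher.connected.wait(timeout=5)  # set by the listener once (re)established
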
geopy/geopy | geopy/geocoders/what3words.py | https://github.com/geopy/geopy/blob/02c838d965e76497f3c3d61f53808c86b5c58224/geopy/geocoders/what3words.py#L102-L152 | def geocode(self,
query,
lang='en',
exactly_one=True,
timeout=DEFAULT_SENTINEL):
"""
Return a location point for a `3 words` query. If the `3 words` address
doesn't exist, a :class:`geopy.exc.GeocoderQueryError` exception will be
thrown.
:param str query: The 3-word address you wish to geocode.
:param str lang: two character language codes as supported by
the API (https://docs.what3words.com/api/v2/#lang).
:param bool exactly_one: Return one result or a list of results, if
available. Due to the address scheme there is always exactly one
result for each `3 words` address, so this parameter is rather
useless for this geocoder.
.. versionchanged:: 1.14.0
``exactly_one=False`` now returns a list of a single location.
This option wasn't respected before.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
if not self._check_query(query):
raise exc.GeocoderQueryError(
"Search string must be 'word.word.word'"
)
params = {
'addr': self.format_string % query,
'lang': lang.lower(),
'key': self.api_key,
}
url = "?".join((self.geocode_api, urlencode(params)))
logger.debug("%s.geocode: %s", self.__class__.__name__, url)
return self._parse_json(
self._call_geocoder(url, timeout=timeout),
exactly_one=exactly_one
) | [
"def",
"geocode",
"(",
"self",
",",
"query",
",",
"lang",
"=",
"'en'",
",",
"exactly_one",
"=",
"True",
",",
"timeout",
"=",
"DEFAULT_SENTINEL",
")",
":",
"if",
"not",
"self",
".",
"_check_query",
"(",
"query",
")",
":",
"raise",
"exc",
".",
"GeocoderQueryError",
"(",
"\"Search string must be 'word.word.word'\"",
")",
"params",
"=",
"{",
"'addr'",
":",
"self",
".",
"format_string",
"%",
"query",
",",
"'lang'",
":",
"lang",
".",
"lower",
"(",
")",
",",
"'key'",
":",
"self",
".",
"api_key",
",",
"}",
"url",
"=",
"\"?\"",
".",
"join",
"(",
"(",
"self",
".",
"geocode_api",
",",
"urlencode",
"(",
"params",
")",
")",
")",
"logger",
".",
"debug",
"(",
"\"%s.geocode: %s\"",
",",
"self",
".",
"__class__",
".",
"__name__",
",",
"url",
")",
"return",
"self",
".",
"_parse_json",
"(",
"self",
".",
"_call_geocoder",
"(",
"url",
",",
"timeout",
"=",
"timeout",
")",
",",
"exactly_one",
"=",
"exactly_one",
")"
] | Return a location point for a `3 words` query. If the `3 words` address
doesn't exist, a :class:`geopy.exc.GeocoderQueryError` exception will be
thrown.
:param str query: The 3-word address you wish to geocode.
:param str lang: two character language codes as supported by
the API (https://docs.what3words.com/api/v2/#lang).
:param bool exactly_one: Return one result or a list of results, if
available. Due to the address scheme there is always exactly one
result for each `3 words` address, so this parameter is rather
useless for this geocoder.
.. versionchanged:: 1.14.0
``exactly_one=False`` now returns a list of a single location.
This option wasn't respected before.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``. | [
"Return",
"a",
"location",
"point",
"for",
"a",
"3",
"words",
"query",
".",
"If",
"the",
"3",
"words",
"address",
"doesn",
"t",
"exist",
"a",
":",
"class",
":",
"geopy",
".",
"exc",
".",
"GeocoderQueryError",
"exception",
"will",
"be",
"thrown",
"."
] | python | train |
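
A usage sketch for the geocoder above; the API key is a placeholder and a network round-trip is required:

    from geopy.geocoders import What3Words

    geolocator = What3Words('YOUR-W3W-KEY')
    location = geolocator.geocode('prom.cape.pump', lang='en')  # must be word.word.word
    if location is not None:
        print(location.latitude, location.longitude)
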
mitsei/dlkit | dlkit/handcar/learning/sessions.py | https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/handcar/learning/sessions.py#L3098-L3122 | def get_activity_form_for_update(self, activity_id=None):
"""Gets the activity form for updating an existing activity.
A new activity form should be requested for each update
transaction.
arg: activityId (osid.id.Id): the Id of the Activity
return: (osid.learning.ActivityForm) - the activity form
raise: NotFound - activityId is not found
raise: NullArgument - activityId is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
"""
if activity_id is None:
raise NullArgument()
try:
url_path = construct_url('activities',
bank_id=self._catalog_idstr,
act_id=activity_id)
activity = objects.Activity(self._get_request(url_path))
except Exception:
raise
activity_form = objects.ActivityForm(activity._my_map)
self._forms[activity_form.get_id().get_identifier()] = not UPDATED
return activity_form | [
"def",
"get_activity_form_for_update",
"(",
"self",
",",
"activity_id",
"=",
"None",
")",
":",
"if",
"activity_id",
"is",
"None",
":",
"raise",
"NullArgument",
"(",
")",
"try",
":",
"url_path",
"=",
"construct_url",
"(",
"'activities'",
",",
"bank_id",
"=",
"self",
".",
"_catalog_idstr",
",",
"act_id",
"=",
"activity_id",
")",
"activity",
"=",
"objects",
".",
"Activity",
"(",
"self",
".",
"_get_request",
"(",
"url_path",
")",
")",
"except",
"Exception",
":",
"raise",
"activity_form",
"=",
"objects",
".",
"ActivityForm",
"(",
"activity",
".",
"_my_map",
")",
"self",
".",
"_forms",
"[",
"activity_form",
".",
"get_id",
"(",
")",
".",
"get_identifier",
"(",
")",
"]",
"=",
"not",
"UPDATED",
"return",
"activity_form"
] | Gets the activity form for updating an existing activity.
A new activity form should be requested for each update
transaction.
arg: activityId (osid.id.Id): the Id of the Activity
return: (osid.learning.ActivityForm) - the activity form
raise: NotFound - activityId is not found
raise: NullArgument - activityId is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented. | [
"Gets",
"the",
"activity",
"form",
"for",
"updating",
"an",
"existing",
"activity",
".",
"A",
"new",
"activity",
"form",
"should",
"be",
"requested",
"for",
"each",
"update",
"transaction",
".",
"arg",
":",
"activityId",
"(",
"osid",
".",
"id",
".",
"Id",
")",
":",
"the",
"Id",
"of",
"the",
"Activity",
"return",
":",
"(",
"osid",
".",
"learning",
".",
"ActivityForm",
")",
"-",
"the",
"activity",
"form",
"raise",
":",
"NotFound",
"-",
"activityId",
"is",
"not",
"found",
"raise",
":",
"NullArgument",
"-",
"activityId",
"is",
"null",
"raise",
":",
"OperationFailed",
"-",
"unable",
"to",
"complete",
"request",
"raise",
":",
"PermissionDenied",
"-",
"authorization",
"failure",
"compliance",
":",
"mandatory",
"-",
"This",
"method",
"must",
"be",
"implemented",
"."
] | python | train |
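
A heavily hedged sketch of the surrounding update flow; the session acquisition, the form mutation, and the update_activity companion call are assumptions based on the usual OSID pattern, not taken from the record:

    # `session` is assumed to be an already-configured handcar session
    # exposing the method above; `activity_id` is a valid osid.id.Id
    form = session.get_activity_form_for_update(activity_id)
    form.display_name = 'Renamed activity'   # hypothetical form mutation
    session.update_activity(form)            # hypothetical companion call
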
tanghaibao/goatools | goatools/cli/docopt_parse.py | https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/cli/docopt_parse.py#L82-L96 | def _chk_docunknown(args, exp):
"""Return any unknown args."""
unknown = []
for arg in args:
if arg[:2] == '--':
val = arg[2:]
if val not in exp:
unknown.append(arg)
elif arg[:1] == '-':
val = arg[1:]
if val not in exp:
unknown.append(arg)
if '-h' in unknown or '--help' in unknown:
return []
return unknown | [
"def",
"_chk_docunknown",
"(",
"args",
",",
"exp",
")",
":",
"unknown",
"=",
"[",
"]",
"for",
"arg",
"in",
"args",
":",
"if",
"arg",
"[",
":",
"2",
"]",
"==",
"'--'",
":",
"val",
"=",
"arg",
"[",
"2",
":",
"]",
"if",
"val",
"not",
"in",
"exp",
":",
"unknown",
".",
"append",
"(",
"arg",
")",
"elif",
"arg",
"[",
":",
"1",
"]",
"==",
"'-'",
":",
"val",
"=",
"arg",
"[",
"1",
":",
"]",
"if",
"val",
"not",
"in",
"exp",
":",
"unknown",
".",
"append",
"(",
"arg",
")",
"if",
"'-h'",
"in",
"unknown",
"or",
"'--help'",
"in",
"unknown",
":",
"return",
"[",
"]",
"return",
"unknown"
] | Return any unknown args. | [
"Return",
"any",
"unknown",
"args",
"."
] | python | train |
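
A behaviour sketch of the private helper above, assuming it is in scope; note that the presence of -h/--help suppresses all unknown-argument reporting:

    exp = {'taxid', 'obo', 'o'}
    print(_chk_docunknown(['--taxid', '9606', '--bogus', '-o'], exp))  # ['--bogus']
    print(_chk_docunknown(['--bogus', '--help'], exp))                 # [] (help short-circuits)
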
trendels/braceexpand | braceexpand.py | https://github.com/trendels/braceexpand/blob/c17aa2f2b0d9f016a8115163d94acc9a147d54c2/braceexpand.py#L23-L93 | def braceexpand(pattern, escape=True):
"""braceexpand(pattern) -> iterator over generated strings
Returns an iterator over the strings resulting from brace expansion
of pattern. This function implements Brace Expansion as described in
bash(1), with the following limitations:
* A pattern containing unbalanced braces will raise an
UnbalancedBracesError exception. In bash, unbalanced braces will either
be partly expanded or ignored.
* A mixed-case character range like '{Z..a}' or '{a..Z}' will not
include the characters '[]^_`' between 'Z' and 'a'.
When escape is True (the default), characters in pattern can be
prefixed with a backslash to cause them not to be interpreted as
special characters for brace expansion (such as '{', '}', ',').
To pass through a literal backslash, double it ('\\\\').
When escape is False, backslashes in pattern have no special
meaning and will be preserved in the output.
Examples:
>>> from braceexpand import braceexpand
# Integer range
>>> list(braceexpand('item{1..3}'))
['item1', 'item2', 'item3']
# Character range
>>> list(braceexpand('{a..c}'))
['a', 'b', 'c']
# Sequence
>>> list(braceexpand('index.html{,.backup}'))
['index.html', 'index.html.backup']
# Nested patterns
>>> list(braceexpand('python{2.{5..7},3.{2,3}}'))
['python2.5', 'python2.6', 'python2.7', 'python3.2', 'python3.3']
# Prefixing an integer with zero causes all numbers to be padded to
# the same width.
>>> list(braceexpand('{07..10}'))
['07', '08', '09', '10']
# An optional increment can be specified for ranges.
>>> list(braceexpand('{a..g..2}'))
['a', 'c', 'e', 'g']
# Ranges can go in both directions.
>>> list(braceexpand('{4..1}'))
['4', '3', '2', '1']
# Unbalanced braces raise an exception.
>>> list(braceexpand('{1{2,3}'))
Traceback (most recent call last):
...
UnbalancedBracesError: Unbalanced braces: '{1{2,3}'
# By default, the backslash is the escape character.
>>> list(braceexpand(r'{1\{2,3}'))
['1{2', '3']
# Setting 'escape' to False disables backslash escaping.
>>> list(braceexpand(r'\{1,2}', escape=False))
['\\\\1', '\\\\2']
"""
return (_flatten(t, escape) for t in parse_pattern(pattern, escape)) | [
"def",
"braceexpand",
"(",
"pattern",
",",
"escape",
"=",
"True",
")",
":",
"return",
"(",
"_flatten",
"(",
"t",
",",
"escape",
")",
"for",
"t",
"in",
"parse_pattern",
"(",
"pattern",
",",
"escape",
")",
")"
] | braceexpand(pattern) -> iterator over generated strings
Returns an iterator over the strings resulting from brace expansion
of pattern. This function implements Brace Expansion as described in
bash(1), with the following limitations:
* A pattern containing unbalanced braces will raise an
UnbalancedBracesError exception. In bash, unbalanced braces will either
be partly expanded or ignored.
* A mixed-case character range like '{Z..a}' or '{a..Z}' will not
include the characters '[]^_`' between 'Z' and 'a'.
When escape is True (the default), characters in pattern can be
prefixed with a backslash to cause them not to be interpreted as
special characters for brace expansion (such as '{', '}', ',').
To pass through a literal backslash, double it ('\\\\').
When escape is False, backslashes in pattern have no special
meaning and will be preserved in the output.
Examples:
>>> from braceexpand import braceexpand
# Integer range
>>> list(braceexpand('item{1..3}'))
['item1', 'item2', 'item3']
# Character range
>>> list(braceexpand('{a..c}'))
['a', 'b', 'c']
# Sequence
>>> list(braceexpand('index.html{,.backup}'))
['index.html', 'index.html.backup']
# Nested patterns
>>> list(braceexpand('python{2.{5..7},3.{2,3}}'))
['python2.5', 'python2.6', 'python2.7', 'python3.2', 'python3.3']
# Prefixing an integer with zero causes all numbers to be padded to
# the same width.
>>> list(braceexpand('{07..10}'))
['07', '08', '09', '10']
# An optional increment can be specified for ranges.
>>> list(braceexpand('{a..g..2}'))
['a', 'c', 'e', 'g']
# Ranges can go in both directions.
>>> list(braceexpand('{4..1}'))
['4', '3', '2', '1']
# Unbalanced braces raise an exception.
>>> list(braceexpand('{1{2,3}'))
Traceback (most recent call last):
...
UnbalancedBracesError: Unbalanced braces: '{1{2,3}'
# By default, the backslash is the escape character.
>>> list(braceexpand(r'{1\{2,3}'))
['1{2', '3']
# Setting 'escape' to False disables backslash escaping.
>>> list(braceexpand(r'\{1,2}', escape=False))
['\\\\1', '\\\\2'] | [
"braceexpand",
"(",
"pattern",
")",
"-",
">",
"iterator",
"over",
"generated",
"strings"
] | python | train |
annoviko/pyclustering | pyclustering/cluster/agglomerative.py | https://github.com/annoviko/pyclustering/blob/98aa0dd89fd36f701668fb1eb29c8fb5662bf7d0/pyclustering/cluster/agglomerative.py#L194-L213 | def __merge_similar_clusters(self):
"""!
@brief Merges the most similar clusters in line with link type.
"""
if (self.__similarity == type_link.AVERAGE_LINK):
self.__merge_by_average_link();
elif (self.__similarity == type_link.CENTROID_LINK):
self.__merge_by_centroid_link();
elif (self.__similarity == type_link.COMPLETE_LINK):
self.__merge_by_complete_link();
elif (self.__similarity == type_link.SINGLE_LINK):
self.__merge_by_signle_link();
else:
raise NameError('Not supported similarity is used'); | [
"def",
"__merge_similar_clusters",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"__similarity",
"==",
"type_link",
".",
"AVERAGE_LINK",
")",
":",
"self",
".",
"__merge_by_average_link",
"(",
")",
"elif",
"(",
"self",
".",
"__similarity",
"==",
"type_link",
".",
"CENTROID_LINK",
")",
":",
"self",
".",
"__merge_by_centroid_link",
"(",
")",
"elif",
"(",
"self",
".",
"__similarity",
"==",
"type_link",
".",
"COMPLETE_LINK",
")",
":",
"self",
".",
"__merge_by_complete_link",
"(",
")",
"elif",
"(",
"self",
".",
"__similarity",
"==",
"type_link",
".",
"SINGLE_LINK",
")",
":",
"self",
".",
"__merge_by_signle_link",
"(",
")",
"else",
":",
"raise",
"NameError",
"(",
"'Not supported similarity is used'",
")"
] | !
@brief Merges the most similar clusters in line with link type. | [
"!"
] | python | valid |
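
A sketch of the public agglomerative API that ultimately dispatches through the private method above; the sample data is illustrative:

    from pyclustering.cluster.agglomerative import agglomerative, type_link

    sample = [[1.0, 1.0], [1.2, 0.9], [5.0, 5.1], [5.2, 4.9]]
    instance = agglomerative(sample, 2, type_link.SINGLE_LINK)
    instance.process()
    print(instance.get_clusters())  # e.g. [[0, 1], [2, 3]]
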
tensorflow/tensor2tensor | tensor2tensor/rl/trainer_model_based.py | https://github.com/tensorflow/tensor2tensor/blob/272500b6efe353aeb638d2745ed56e519462ca31/tensor2tensor/rl/trainer_model_based.py#L253-L378 | def training_loop(hparams, output_dir, report_fn=None, report_metric=None):
"""Run the main training loop."""
if report_fn:
assert report_metric is not None
# Directories
subdirectories = [
"data", "tmp", "world_model", ("world_model", "debug_videos"),
"policy", "eval_metrics"
]
directories = setup_directories(output_dir, subdirectories)
epoch = -1
data_dir = directories["data"]
env = rl_utils.setup_env(
hparams, batch_size=hparams.real_batch_size,
max_num_noops=hparams.max_num_noops,
rl_env_max_episode_steps=hparams.rl_env_max_episode_steps
)
env.start_new_epoch(epoch, data_dir)
if hparams.wm_policy_param_sharing:
policy_model_dir = directories["world_model"]
else:
policy_model_dir = directories["policy"]
learner = rl_utils.LEARNERS[hparams.base_algo](
hparams.frame_stack_size, policy_model_dir,
policy_model_dir, hparams.epochs
)
# Timing log function
log_relative_time = make_relative_timing_fn()
# Per-epoch state
epoch_metrics = []
metrics = {}
# Collect data from the real environment.
policy_model_dir = directories["policy"]
tf.logging.info("Initial training of the policy in real environment.")
train_agent_real_env(env, learner, hparams, epoch)
metrics["mean_reward/train/clipped"] = rl_utils.compute_mean_reward(
env.current_epoch_rollouts(), clipped=True
)
tf.logging.info("Mean training reward (initial): {}".format(
metrics["mean_reward/train/clipped"]
))
env.generate_data(data_dir)
eval_metrics_writer = tf.summary.FileWriter(
directories["eval_metrics"]
)
world_model_steps_num = 0
for epoch in range(hparams.epochs):
log = make_log_fn(epoch, log_relative_time)
# Train world model
log("Training world model")
world_model_steps_num = train_world_model(
env, data_dir, directories["world_model"], hparams,
world_model_steps_num, epoch
)
# Train agent
log("Training policy in simulated environment.")
train_agent(env, learner, directories["world_model"], hparams, epoch)
env.start_new_epoch(epoch, data_dir)
# Train agent on real env (short)
log("Training policy in real environment.")
train_agent_real_env(env, learner, hparams, epoch)
if hparams.stop_loop_early:
return 0.0
env.generate_data(data_dir)
metrics = load_metrics(directories["eval_metrics"], epoch)
if metrics:
# Skip eval if metrics have already been written for this epoch. Otherwise
# we'd overwrite them with wrong data.
log("Metrics found for this epoch, skipping evaluation.")
else:
metrics["mean_reward/train/clipped"] = rl_utils.compute_mean_reward(
env.current_epoch_rollouts(), clipped=True
)
log("Mean training reward: {}".format(
metrics["mean_reward/train/clipped"]
))
eval_metrics = rl_utils.evaluate_all_configs(hparams, policy_model_dir)
log("Agent eval metrics:\n{}".format(pprint.pformat(eval_metrics)))
metrics.update(eval_metrics)
if hparams.eval_world_model:
debug_video_path = os.path.join(
directories["world_model", "debug_videos"],
"{}.avi".format(env.current_epoch)
)
wm_metrics = rl_utils.evaluate_world_model(
env, hparams, directories["world_model"], debug_video_path
)
log("World model eval metrics:\n{}".format(pprint.pformat(wm_metrics)))
metrics.update(wm_metrics)
rl_utils.summarize_metrics(eval_metrics_writer, metrics, epoch)
# Report metrics
if report_fn:
if report_metric == "mean_reward":
metric_name = rl_utils.get_metric_name(
sampling_temp=hparams.eval_sampling_temps[0],
max_num_noops=hparams.eval_max_num_noops,
clipped=False
)
report_fn(eval_metrics[metric_name], epoch)
else:
report_fn(eval_metrics[report_metric], epoch)
epoch_metrics.append(metrics)
# Return the evaluation metrics from the final epoch
return epoch_metrics[-1] | [
"def",
"training_loop",
"(",
"hparams",
",",
"output_dir",
",",
"report_fn",
"=",
"None",
",",
"report_metric",
"=",
"None",
")",
":",
"if",
"report_fn",
":",
"assert",
"report_metric",
"is",
"not",
"None",
"# Directories",
"subdirectories",
"=",
"[",
"\"data\"",
",",
"\"tmp\"",
",",
"\"world_model\"",
",",
"(",
"\"world_model\"",
",",
"\"debug_videos\"",
")",
",",
"\"policy\"",
",",
"\"eval_metrics\"",
"]",
"directories",
"=",
"setup_directories",
"(",
"output_dir",
",",
"subdirectories",
")",
"epoch",
"=",
"-",
"1",
"data_dir",
"=",
"directories",
"[",
"\"data\"",
"]",
"env",
"=",
"rl_utils",
".",
"setup_env",
"(",
"hparams",
",",
"batch_size",
"=",
"hparams",
".",
"real_batch_size",
",",
"max_num_noops",
"=",
"hparams",
".",
"max_num_noops",
",",
"rl_env_max_episode_steps",
"=",
"hparams",
".",
"rl_env_max_episode_steps",
")",
"env",
".",
"start_new_epoch",
"(",
"epoch",
",",
"data_dir",
")",
"if",
"hparams",
".",
"wm_policy_param_sharing",
":",
"policy_model_dir",
"=",
"directories",
"[",
"\"world_model\"",
"]",
"else",
":",
"policy_model_dir",
"=",
"directories",
"[",
"\"policy\"",
"]",
"learner",
"=",
"rl_utils",
".",
"LEARNERS",
"[",
"hparams",
".",
"base_algo",
"]",
"(",
"hparams",
".",
"frame_stack_size",
",",
"policy_model_dir",
",",
"policy_model_dir",
",",
"hparams",
".",
"epochs",
")",
"# Timing log function",
"log_relative_time",
"=",
"make_relative_timing_fn",
"(",
")",
"# Per-epoch state",
"epoch_metrics",
"=",
"[",
"]",
"metrics",
"=",
"{",
"}",
"# Collect data from the real environment.",
"policy_model_dir",
"=",
"directories",
"[",
"\"policy\"",
"]",
"tf",
".",
"logging",
".",
"info",
"(",
"\"Initial training of the policy in real environment.\"",
")",
"train_agent_real_env",
"(",
"env",
",",
"learner",
",",
"hparams",
",",
"epoch",
")",
"metrics",
"[",
"\"mean_reward/train/clipped\"",
"]",
"=",
"rl_utils",
".",
"compute_mean_reward",
"(",
"env",
".",
"current_epoch_rollouts",
"(",
")",
",",
"clipped",
"=",
"True",
")",
"tf",
".",
"logging",
".",
"info",
"(",
"\"Mean training reward (initial): {}\"",
".",
"format",
"(",
"metrics",
"[",
"\"mean_reward/train/clipped\"",
"]",
")",
")",
"env",
".",
"generate_data",
"(",
"data_dir",
")",
"eval_metrics_writer",
"=",
"tf",
".",
"summary",
".",
"FileWriter",
"(",
"directories",
"[",
"\"eval_metrics\"",
"]",
")",
"world_model_steps_num",
"=",
"0",
"for",
"epoch",
"in",
"range",
"(",
"hparams",
".",
"epochs",
")",
":",
"log",
"=",
"make_log_fn",
"(",
"epoch",
",",
"log_relative_time",
")",
"# Train world model",
"log",
"(",
"\"Training world model\"",
")",
"world_model_steps_num",
"=",
"train_world_model",
"(",
"env",
",",
"data_dir",
",",
"directories",
"[",
"\"world_model\"",
"]",
",",
"hparams",
",",
"world_model_steps_num",
",",
"epoch",
")",
"# Train agent",
"log",
"(",
"\"Training policy in simulated environment.\"",
")",
"train_agent",
"(",
"env",
",",
"learner",
",",
"directories",
"[",
"\"world_model\"",
"]",
",",
"hparams",
",",
"epoch",
")",
"env",
".",
"start_new_epoch",
"(",
"epoch",
",",
"data_dir",
")",
"# Train agent on real env (short)",
"log",
"(",
"\"Training policy in real environment.\"",
")",
"train_agent_real_env",
"(",
"env",
",",
"learner",
",",
"hparams",
",",
"epoch",
")",
"if",
"hparams",
".",
"stop_loop_early",
":",
"return",
"0.0",
"env",
".",
"generate_data",
"(",
"data_dir",
")",
"metrics",
"=",
"load_metrics",
"(",
"directories",
"[",
"\"eval_metrics\"",
"]",
",",
"epoch",
")",
"if",
"metrics",
":",
"# Skip eval if metrics have already been written for this epoch. Otherwise",
"# we'd overwrite them with wrong data.",
"log",
"(",
"\"Metrics found for this epoch, skipping evaluation.\"",
")",
"else",
":",
"metrics",
"[",
"\"mean_reward/train/clipped\"",
"]",
"=",
"rl_utils",
".",
"compute_mean_reward",
"(",
"env",
".",
"current_epoch_rollouts",
"(",
")",
",",
"clipped",
"=",
"True",
")",
"log",
"(",
"\"Mean training reward: {}\"",
".",
"format",
"(",
"metrics",
"[",
"\"mean_reward/train/clipped\"",
"]",
")",
")",
"eval_metrics",
"=",
"rl_utils",
".",
"evaluate_all_configs",
"(",
"hparams",
",",
"policy_model_dir",
")",
"log",
"(",
"\"Agent eval metrics:\\n{}\"",
".",
"format",
"(",
"pprint",
".",
"pformat",
"(",
"eval_metrics",
")",
")",
")",
"metrics",
".",
"update",
"(",
"eval_metrics",
")",
"if",
"hparams",
".",
"eval_world_model",
":",
"debug_video_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"directories",
"[",
"\"world_model\"",
",",
"\"debug_videos\"",
"]",
",",
"\"{}.avi\"",
".",
"format",
"(",
"env",
".",
"current_epoch",
")",
")",
"wm_metrics",
"=",
"rl_utils",
".",
"evaluate_world_model",
"(",
"env",
",",
"hparams",
",",
"directories",
"[",
"\"world_model\"",
"]",
",",
"debug_video_path",
")",
"log",
"(",
"\"World model eval metrics:\\n{}\"",
".",
"format",
"(",
"pprint",
".",
"pformat",
"(",
"wm_metrics",
")",
")",
")",
"metrics",
".",
"update",
"(",
"wm_metrics",
")",
"rl_utils",
".",
"summarize_metrics",
"(",
"eval_metrics_writer",
",",
"metrics",
",",
"epoch",
")",
"# Report metrics",
"if",
"report_fn",
":",
"if",
"report_metric",
"==",
"\"mean_reward\"",
":",
"metric_name",
"=",
"rl_utils",
".",
"get_metric_name",
"(",
"sampling_temp",
"=",
"hparams",
".",
"eval_sampling_temps",
"[",
"0",
"]",
",",
"max_num_noops",
"=",
"hparams",
".",
"eval_max_num_noops",
",",
"clipped",
"=",
"False",
")",
"report_fn",
"(",
"eval_metrics",
"[",
"metric_name",
"]",
",",
"epoch",
")",
"else",
":",
"report_fn",
"(",
"eval_metrics",
"[",
"report_metric",
"]",
",",
"epoch",
")",
"epoch_metrics",
".",
"append",
"(",
"metrics",
")",
"# Return the evaluation metrics from the final epoch",
"return",
"epoch_metrics",
"[",
"-",
"1",
"]"
] | Run the main training loop. | [
"Run",
"the",
"main",
"training",
"loop",
"."
] | python | train |
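
A hedged sketch of invoking the loop; the hparams-set name 'rlmb_base' and the output path are assumptions, only the registry lookup pattern is standard tensor2tensor usage:

    from tensor2tensor.utils import registry
    from tensor2tensor.rl.trainer_model_based import training_loop

    hparams = registry.hparams('rlmb_base')  # assumed hparams-set name
    final_metrics = training_loop(hparams, output_dir='/tmp/rlmb_run')
    print(final_metrics.get('mean_reward/train/clipped'))
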
RJT1990/pyflux | pyflux/families/poisson.py | https://github.com/RJT1990/pyflux/blob/297f2afc2095acd97c12e827dd500e8ea5da0c0f/pyflux/families/poisson.py#L216-L240 | def markov_blanket(y, mean, scale, shape, skewness):
""" Markov blanket for the Poisson distribution
Parameters
----------
y : np.ndarray
univariate time series
mean : np.ndarray
array of location parameters for the Poisson distribution
scale : float
scale parameter for the Poisson distribution
shape : float
tail thickness parameter for the Poisson distribution
skewness : float
skewness parameter for the Poisson distribution
Returns
----------
- Markov blanket of the Poisson family
"""
return ss.poisson.logpmf(y, mean) | [
"def",
"markov_blanket",
"(",
"y",
",",
"mean",
",",
"scale",
",",
"shape",
",",
"skewness",
")",
":",
"return",
"ss",
".",
"poisson",
".",
"logpmf",
"(",
"y",
",",
"mean",
")"
] | Markov blanket for the Poisson distribution
Parameters
----------
y : np.ndarray
univariate time series
mean : np.ndarray
array of location parameters for the Poisson distribution
scale : float
scale parameter for the Poisson distribution
shape : float
tail thickness parameter for the Poisson distribution
skewness : float
skewness parameter for the Poisson distribution
Returns
----------
- Markov blanket of the Poisson family | [
"Markov",
"blanket",
"for",
"the",
"Poisson",
"distribution"
] | python | train |
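
The Poisson markov blanket above is a thin wrapper over scipy; a self-contained check (scale, shape and skewness are ignored for this family):

    import numpy as np
    import scipy.stats as ss

    y = np.array([0, 1, 2, 3])
    mean = np.full(4, 1.5)
    print(ss.poisson.logpmf(y, mean))  # identical to markov_blanket(y, mean, 0, 0, 0)
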
googledatalab/pydatalab | google/datalab/storage/_api.py | https://github.com/googledatalab/pydatalab/blob/d9031901d5bca22fe0d5925d204e6698df9852e1/google/datalab/storage/_api.py#L250-L264 | def objects_patch(self, bucket, key, info):
"""Updates the metadata associated with an object.
Args:
bucket: the name of the bucket containing the object.
key: the key of the object being updated.
info: the metadata to update.
Returns:
A parsed object information dictionary.
Raises:
Exception if there is an error performing the operation.
"""
url = Api._ENDPOINT + (Api._OBJECT_PATH % (bucket, Api._escape_key(key)))
return google.datalab.utils.Http.request(url, method='PATCH', data=info,
credentials=self._credentials) | [
"def",
"objects_patch",
"(",
"self",
",",
"bucket",
",",
"key",
",",
"info",
")",
":",
"url",
"=",
"Api",
".",
"_ENDPOINT",
"+",
"(",
"Api",
".",
"_OBJECT_PATH",
"%",
"(",
"bucket",
",",
"Api",
".",
"_escape_key",
"(",
"key",
")",
")",
")",
"return",
"google",
".",
"datalab",
".",
"utils",
".",
"Http",
".",
"request",
"(",
"url",
",",
"method",
"=",
"'PATCH'",
",",
"data",
"=",
"info",
",",
"credentials",
"=",
"self",
".",
"_credentials",
")"
] | Updates the metadata associated with an object.
Args:
bucket: the name of the bucket containing the object.
key: the key of the object being updated.
info: the metadata to update.
Returns:
A parsed object information dictionary.
Raises:
Exception if there is an error performing the operation. | [
"Updates",
"the",
"metadata",
"associated",
"with",
"an",
"object",
"."
] | python | train |
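
A hedged usage sketch; `api` is assumed to be an already-authenticated Api instance, and the metadata keys follow the GCS objects resource (assumption, not from the record):

    info = {'contentType': 'text/csv',
            'metadata': {'owner': 'analytics'}}  # custom key/value metadata
    updated = api.objects_patch('my-bucket', 'reports/2019.csv', info)
    print(updated.get('updated'))  # GCS echoes back the patched object resource
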
iwanbk/nyamuk | nyamuk/base_nyamuk.py | https://github.com/iwanbk/nyamuk/blob/ac4c6028de288a4c8e0b332ae16eae889deb643d/nyamuk/base_nyamuk.py#L139-L230 | def packet_read(self):
"""Read packet from network."""
bytes_received = 0
if self.sock == NC.INVALID_SOCKET:
return NC.ERR_NO_CONN
if self.in_packet.command == 0:
ba_data, errnum, errmsg = nyamuk_net.read(self.sock, 1)
if errnum == 0 and len(ba_data) == 1:
bytes_received += 1
byte = ba_data[0]
self.in_packet.command = byte
if self.as_broker:
if self.bridge is None and self.state == NC.CS_NEW and (byte & 0xF0) != NC.CMD_CONNECT:
print "RETURN ERR_PROTOCOL"
return NC.ERR_PROTOCOL, bytes_received
else:
if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:
return NC.ERR_SUCCESS, bytes_received
elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:
return NC.ERR_CONN_LOST, bytes_received
else:
evt = event.EventNeterr(errnum, errmsg)
self.push_event(evt)
return NC.ERR_UNKNOWN, bytes_received
if not self.in_packet.have_remaining:
loop_flag = True
while loop_flag:
ba_data, errnum, errmsg = nyamuk_net.read(self.sock, 1)
if errnum == 0 and len(ba_data) == 1:
byte = ba_data[0]
bytes_received += 1
self.in_packet.remaining_count += 1
if self.in_packet.remaining_count > 4:
return NC.ERR_PROTOCOL, bytes_received
self.in_packet.remaining_length += (byte & 127) * self.in_packet.remaining_mult
self.in_packet.remaining_mult *= 128
else:
if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:
return NC.ERR_SUCCESS, bytes_received
elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:
return NC.ERR_CONN_LOST, bytes_received
else:
evt = event.EventNeterr(errnum, errmsg)
self.push_event(evt)
return NC.ERR_UNKNOWN, bytes_received
if (byte & 128) == 0:
loop_flag = False
if self.in_packet.remaining_length > 0:
self.in_packet.payload = bytearray(self.in_packet.remaining_length)
if self.in_packet.payload is None:
return NC.ERR_NO_MEM, bytes_received
self.in_packet.to_process = self.in_packet.remaining_length
self.in_packet.have_remaining = True
if self.in_packet.to_process > 0:
ba_data, errnum, errmsg = nyamuk_net.read(self.sock, self.in_packet.to_process)
if errnum == 0 and len(ba_data) > 0:
readlen = len(ba_data)
bytes_received += readlen
for idx in xrange(0, readlen):
self.in_packet.payload[self.in_packet.pos] = ba_data[idx]
self.in_packet.pos += 1
self.in_packet.to_process -= 1
else:
if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:
return NC.ERR_SUCCESS, bytes_received
elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:
return NC.ERR_CONN_LOST, bytes_received
else:
evt = event.EventNeterr(errnum, errmsg)
self.push_event(evt)
return NC.ERR_UNKNOWN, bytes_received
#all data for this packet is read
self.in_packet.pos = 0
ret = self.packet_handle()
self.in_packet.packet_cleanup()
self.last_msg_in = time.time()
return ret, bytes_received | [
"def",
"packet_read",
"(",
"self",
")",
":",
"bytes_received",
"=",
"0",
"if",
"self",
".",
"sock",
"==",
"NC",
".",
"INVALID_SOCKET",
":",
"return",
"NC",
".",
"ERR_NO_CONN",
"if",
"self",
".",
"in_packet",
".",
"command",
"==",
"0",
":",
"ba_data",
",",
"errnum",
",",
"errmsg",
"=",
"nyamuk_net",
".",
"read",
"(",
"self",
".",
"sock",
",",
"1",
")",
"if",
"errnum",
"==",
"0",
"and",
"len",
"(",
"ba_data",
")",
"==",
"1",
":",
"bytes_received",
"+=",
"1",
"byte",
"=",
"ba_data",
"[",
"0",
"]",
"self",
".",
"in_packet",
".",
"command",
"=",
"byte",
"if",
"self",
".",
"as_broker",
":",
"if",
"self",
".",
"bridge",
"is",
"None",
"and",
"self",
".",
"state",
"==",
"NC",
".",
"CS_NEW",
"and",
"(",
"byte",
"&",
"0xF0",
")",
"!=",
"NC",
".",
"CMD_CONNECT",
":",
"print",
"\"RETURN ERR_PROTOCOL\"",
"return",
"NC",
".",
"ERR_PROTOCOL",
",",
"bytes_received",
"else",
":",
"if",
"errnum",
"==",
"errno",
".",
"EAGAIN",
"or",
"errnum",
"==",
"errno",
".",
"EWOULDBLOCK",
":",
"return",
"NC",
".",
"ERR_SUCCESS",
",",
"bytes_received",
"elif",
"errnum",
"==",
"0",
"and",
"len",
"(",
"ba_data",
")",
"==",
"0",
"or",
"errnum",
"==",
"errno",
".",
"ECONNRESET",
":",
"return",
"NC",
".",
"ERR_CONN_LOST",
",",
"bytes_received",
"else",
":",
"evt",
"=",
"event",
".",
"EventNeterr",
"(",
"errnum",
",",
"errmsg",
")",
"self",
".",
"push_event",
"(",
"evt",
")",
"return",
"NC",
".",
"ERR_UNKNOWN",
",",
"bytes_received",
"if",
"not",
"self",
".",
"in_packet",
".",
"have_remaining",
":",
"loop_flag",
"=",
"True",
"while",
"loop_flag",
":",
"ba_data",
",",
"errnum",
",",
"errmsg",
"=",
"nyamuk_net",
".",
"read",
"(",
"self",
".",
"sock",
",",
"1",
")",
"if",
"errnum",
"==",
"0",
"and",
"len",
"(",
"ba_data",
")",
"==",
"1",
":",
"byte",
"=",
"ba_data",
"[",
"0",
"]",
"bytes_received",
"+=",
"1",
"self",
".",
"in_packet",
".",
"remaining_count",
"+=",
"1",
"if",
"self",
".",
"in_packet",
".",
"remaining_count",
">",
"4",
":",
"return",
"NC",
".",
"ERR_PROTOCOL",
",",
"bytes_received",
"self",
".",
"in_packet",
".",
"remaining_length",
"+=",
"(",
"byte",
"&",
"127",
")",
"*",
"self",
".",
"in_packet",
".",
"remaining_mult",
"self",
".",
"in_packet",
".",
"remaining_mult",
"*=",
"128",
"else",
":",
"if",
"errnum",
"==",
"errno",
".",
"EAGAIN",
"or",
"errnum",
"==",
"errno",
".",
"EWOULDBLOCK",
":",
"return",
"NC",
".",
"ERR_SUCCESS",
",",
"bytes_received",
"elif",
"errnum",
"==",
"0",
"and",
"len",
"(",
"ba_data",
")",
"==",
"0",
"or",
"errnum",
"==",
"errno",
".",
"ECONNRESET",
":",
"return",
"NC",
".",
"ERR_CONN_LOST",
",",
"bytes_received",
"else",
":",
"evt",
"=",
"event",
".",
"EventNeterr",
"(",
"errnum",
",",
"errmsg",
")",
"self",
".",
"push_event",
"(",
"evt",
")",
"return",
"NC",
".",
"ERR_UNKNOWN",
",",
"bytes_received",
"if",
"(",
"byte",
"&",
"128",
")",
"==",
"0",
":",
"loop_flag",
"=",
"False",
"if",
"self",
".",
"in_packet",
".",
"remaining_length",
">",
"0",
":",
"self",
".",
"in_packet",
".",
"payload",
"=",
"bytearray",
"(",
"self",
".",
"in_packet",
".",
"remaining_length",
")",
"if",
"self",
".",
"in_packet",
".",
"payload",
"is",
"None",
":",
"return",
"NC",
".",
"ERR_NO_MEM",
",",
"bytes_received",
"self",
".",
"in_packet",
".",
"to_process",
"=",
"self",
".",
"in_packet",
".",
"remaining_length",
"self",
".",
"in_packet",
".",
"have_remaining",
"=",
"True",
"if",
"self",
".",
"in_packet",
".",
"to_process",
">",
"0",
":",
"ba_data",
",",
"errnum",
",",
"errmsg",
"=",
"nyamuk_net",
".",
"read",
"(",
"self",
".",
"sock",
",",
"self",
".",
"in_packet",
".",
"to_process",
")",
"if",
"errnum",
"==",
"0",
"and",
"len",
"(",
"ba_data",
")",
">",
"0",
":",
"readlen",
"=",
"len",
"(",
"ba_data",
")",
"bytes_received",
"+=",
"readlen",
"for",
"idx",
"in",
"xrange",
"(",
"0",
",",
"readlen",
")",
":",
"self",
".",
"in_packet",
".",
"payload",
"[",
"self",
".",
"in_packet",
".",
"pos",
"]",
"=",
"ba_data",
"[",
"idx",
"]",
"self",
".",
"in_packet",
".",
"pos",
"+=",
"1",
"self",
".",
"in_packet",
".",
"to_process",
"-=",
"1",
"else",
":",
"if",
"errnum",
"==",
"errno",
".",
"EAGAIN",
"or",
"errnum",
"==",
"errno",
".",
"EWOULDBLOCK",
":",
"return",
"NC",
".",
"ERR_SUCCESS",
",",
"bytes_received",
"elif",
"errnum",
"==",
"0",
"and",
"len",
"(",
"ba_data",
")",
"==",
"0",
"or",
"errnum",
"==",
"errno",
".",
"ECONNRESET",
":",
"return",
"NC",
".",
"ERR_CONN_LOST",
",",
"bytes_received",
"else",
":",
"evt",
"=",
"event",
".",
"EventNeterr",
"(",
"errnum",
",",
"errmsg",
")",
"self",
".",
"push_event",
"(",
"evt",
")",
"return",
"NC",
".",
"ERR_UNKNOWN",
",",
"bytes_received",
"#all data for this packet is read",
"self",
".",
"in_packet",
".",
"pos",
"=",
"0",
"ret",
"=",
"self",
".",
"packet_handle",
"(",
")",
"self",
".",
"in_packet",
".",
"packet_cleanup",
"(",
")",
"self",
".",
"last_msg_in",
"=",
"time",
".",
"time",
"(",
")",
"return",
"ret",
",",
"bytes_received"
] | Read packet from network. | [
"Read",
"packet",
"from",
"network",
"."
] | python | train |
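
A hedged read-loop sketch around packet_read; `client` stands in for a connected BaseNyamuk-derived object, and the NC import path is an assumption:

    from nyamuk import nyamuk_const as NC  # import path is an assumption

    while True:
        ret, nbytes = client.packet_read()
        if ret != NC.ERR_SUCCESS:
            break  # ERR_CONN_LOST / ERR_PROTOCOL etc. end the session
        # ERR_SUCCESS with nbytes == 0 just means the socket would block
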
incf-nidash/nidmresults | nidmresults/objects/modelfitting.py | https://github.com/incf-nidash/nidmresults/blob/438f7cce6abc4a4379b629bd76f4d427891e033f/nidmresults/objects/modelfitting.py#L160-L168 | def export(self, nidm_version, export_dir):
"""
Create prov entities and activities.
"""
self.add_attributes((
(PROV['type'], self.type),
(NIDM_GROUP_NAME, self.group_name),
(NIDM_NUMBER_OF_SUBJECTS, self.num_subjects),
(PROV['label'], self.label))) | [
"def",
"export",
"(",
"self",
",",
"nidm_version",
",",
"export_dir",
")",
":",
"self",
".",
"add_attributes",
"(",
"(",
"(",
"PROV",
"[",
"'type'",
"]",
",",
"self",
".",
"type",
")",
",",
"(",
"NIDM_GROUP_NAME",
",",
"self",
".",
"group_name",
")",
",",
"(",
"NIDM_NUMBER_OF_SUBJECTS",
",",
"self",
".",
"num_subjects",
")",
",",
"(",
"PROV",
"[",
"'label'",
"]",
",",
"self",
".",
"label",
")",
")",
")"
] | Create prov entities and activities. | [
"Create",
"prov",
"entities",
"and",
"activities",
"."
] | python | train |
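
A heavily hedged sketch; the owning class name (Group) and its constructor arguments are inferred from the attributes used above, not confirmed by the record:

    group = Group(group_name='patients', num_subjects=21)  # hypothetical ctor
    group.export(nidm_version='1.3.0', export_dir='/tmp/nidm')
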
mastro35/flows | flows/FlowsManager.py | https://github.com/mastro35/flows/blob/05e488385673a69597b5b39c7728795aa4d5eb18/flows/FlowsManager.py#L271-L282 | def _start_message_fetcher(self):
"""
Start the message fetcher (called from coroutine)
"""
Global.LOGGER.debug('starting the message fetcher')
event_loop = asyncio.get_event_loop()
try:
Global.LOGGER.debug('entering event loop for message fetcher coroutine')
event_loop.run_until_complete(self.message_fetcher_coroutine(event_loop))
finally:
Global.LOGGER.debug('closing the event loop')
event_loop.close() | [
"def",
"_start_message_fetcher",
"(",
"self",
")",
":",
"Global",
".",
"LOGGER",
".",
"debug",
"(",
"'starting the message fetcher'",
")",
"event_loop",
"=",
"asyncio",
".",
"get_event_loop",
"(",
")",
"try",
":",
"Global",
".",
"LOGGER",
".",
"debug",
"(",
"'entering event loop for message fetcher coroutine'",
")",
"event_loop",
".",
"run_until_complete",
"(",
"self",
".",
"message_fetcher_coroutine",
"(",
"event_loop",
")",
")",
"finally",
":",
"Global",
".",
"LOGGER",
".",
"debug",
"(",
"'closing the event loop'",
")",
"event_loop",
".",
"close",
"(",
")"
] | Start the message fetcher (called from coroutine) | [
"Start",
"the",
"message",
"fetcher",
"(",
"called",
"from",
"coroutine",
")"
] | python | train |
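
A self-contained sketch of the same run-until-complete pattern, with a stub coroutine standing in for the real message fetcher:

    import asyncio

    async def message_fetcher_coroutine(loop):
        await asyncio.sleep(0.1)  # stand-in for the real fetch loop

    loop = asyncio.new_event_loop()
    try:
        loop.run_until_complete(message_fetcher_coroutine(loop))
    finally:
        loop.close()
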
pypa/pipenv | pipenv/vendor/distlib/_backport/tarfile.py | https://github.com/pypa/pipenv/blob/cae8d76c210b9777e90aab76e9c4b0e53bb19cde/pipenv/vendor/distlib/_backport/tarfile.py#L2372-L2392 | def chown(self, tarinfo, targetpath):
"""Set owner of targetpath according to tarinfo.
"""
if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
# We have to be root to do so.
try:
g = grp.getgrnam(tarinfo.gname)[2]
except KeyError:
g = tarinfo.gid
try:
u = pwd.getpwnam(tarinfo.uname)[2]
except KeyError:
u = tarinfo.uid
try:
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
else:
if sys.platform != "os2emx":
os.chown(targetpath, u, g)
except EnvironmentError as e:
raise ExtractError("could not change owner") | [
"def",
"chown",
"(",
"self",
",",
"tarinfo",
",",
"targetpath",
")",
":",
"if",
"pwd",
"and",
"hasattr",
"(",
"os",
",",
"\"geteuid\"",
")",
"and",
"os",
".",
"geteuid",
"(",
")",
"==",
"0",
":",
"# We have to be root to do so.",
"try",
":",
"g",
"=",
"grp",
".",
"getgrnam",
"(",
"tarinfo",
".",
"gname",
")",
"[",
"2",
"]",
"except",
"KeyError",
":",
"g",
"=",
"tarinfo",
".",
"gid",
"try",
":",
"u",
"=",
"pwd",
".",
"getpwnam",
"(",
"tarinfo",
".",
"uname",
")",
"[",
"2",
"]",
"except",
"KeyError",
":",
"u",
"=",
"tarinfo",
".",
"uid",
"try",
":",
"if",
"tarinfo",
".",
"issym",
"(",
")",
"and",
"hasattr",
"(",
"os",
",",
"\"lchown\"",
")",
":",
"os",
".",
"lchown",
"(",
"targetpath",
",",
"u",
",",
"g",
")",
"else",
":",
"if",
"sys",
".",
"platform",
"!=",
"\"os2emx\"",
":",
"os",
".",
"chown",
"(",
"targetpath",
",",
"u",
",",
"g",
")",
"except",
"EnvironmentError",
"as",
"e",
":",
"raise",
"ExtractError",
"(",
"\"could not change owner\"",
")"
] | Set owner of targetpath according to tarinfo. | [
"Set",
"owner",
"of",
"targetpath",
"according",
"to",
"tarinfo",
"."
] | python | train |
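
chown is invoked internally once per extracted member; a sketch using the stdlib tarfile (the module this backport mirrors), noting that ownership is only restored when running as root:

    import tarfile

    with tarfile.open('archive.tar.gz') as tar:
        tar.extractall(path='/tmp/out')  # calls self.chown(...) for each member
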
rigetti/pyquil | pyquil/device.py | https://github.com/rigetti/pyquil/blob/ec98e453084b0037d69d8c3245f6822a5422593d/pyquil/device.py#L467-L478 | def get_isa(self, oneq_type='Xhalves', twoq_type='CZ') -> ISA:
"""
Construct an ISA suitable for targeting by compilation.
This will raise an exception if the requested ISA is not supported by the device.
:param oneq_type: The family of one-qubit gates to target
:param twoq_type: The family of two-qubit gates to target
"""
qubits = [Qubit(id=q.id, type=oneq_type, dead=q.dead) for q in self._isa.qubits]
edges = [Edge(targets=e.targets, type=twoq_type, dead=e.dead) for e in self._isa.edges]
return ISA(qubits, edges) | [
"def",
"get_isa",
"(",
"self",
",",
"oneq_type",
"=",
"'Xhalves'",
",",
"twoq_type",
"=",
"'CZ'",
")",
"->",
"ISA",
":",
"qubits",
"=",
"[",
"Qubit",
"(",
"id",
"=",
"q",
".",
"id",
",",
"type",
"=",
"oneq_type",
",",
"dead",
"=",
"q",
".",
"dead",
")",
"for",
"q",
"in",
"self",
".",
"_isa",
".",
"qubits",
"]",
"edges",
"=",
"[",
"Edge",
"(",
"targets",
"=",
"e",
".",
"targets",
",",
"type",
"=",
"twoq_type",
",",
"dead",
"=",
"e",
".",
"dead",
")",
"for",
"e",
"in",
"self",
".",
"_isa",
".",
"edges",
"]",
"return",
"ISA",
"(",
"qubits",
",",
"edges",
")"
] | Construct an ISA suitable for targeting by compilation.
This will raise an exception if the requested ISA is not supported by the device.
:param oneq_type: The family of one-qubit gates to target
:param twoq_type: The family of two-qubit gates to target | [
"Construct",
"an",
"ISA",
"suitable",
"for",
"targeting",
"by",
"compilation",
"."
] | python | train |
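
A hedged sketch; reaching a device via get_qc is the usual entry point in this pyquil generation (an assumption, as is the backend name):

    from pyquil import get_qc

    qc = get_qc('9q-square-qvm')
    isa = qc.device.get_isa(oneq_type='Xhalves', twoq_type='CZ')
    print(len(isa.qubits), len(isa.edges))
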
OCHA-DAP/hdx-data-freshness | src/hdx/freshness/retry.py | https://github.com/OCHA-DAP/hdx-data-freshness/blob/991152a5ec4c4a980a5378411db060b13730cad3/src/hdx/freshness/retry.py#L31-L106 | async def send_http(session, method, url, *,
retries=1,
interval=1,
backoff=2,
http_status_codes_to_retry=HTTP_STATUS_CODES_TO_RETRY,
fn=lambda x:x,
**kwargs):
"""
Sends an HTTP request and implements a retry logic.
Arguments:
session (obj): A client aiohttp session object
method (str): Method to use
url (str): URL for the request
retries (int): Number of times to retry in case of failure
interval (float): Time to wait before retries
backoff (int): Multiply interval by this factor after each failure
http_status_codes_to_retry (List[int]): List of status codes to retry
fn (Callable[[x], x]): Function to call on successful connection
"""
backoff_interval = interval
raised_exc = None
attempt = 0
if method not in ['get', 'patch', 'post']:
raise ValueError
if retries == -1: # -1 means retry indefinitely
attempt = -1
elif retries == 0: # Zero means don't retry
attempt = 1
else: # any other value means retry N times
attempt = retries + 1
while attempt != 0:
if raised_exc:
logger.error('Caught "%s" url:%s method:%s, remaining tries %s, '
'sleeping %.2fsecs', raised_exc, method.upper(), url,
attempt, backoff_interval)
await asyncio.sleep(backoff_interval)
# bump interval for the next possible attempt
backoff_interval *= backoff
# logger.info('sending %s %s with %s', method.upper(), url, kwargs)
try:
async with await getattr(session, method)(url, **kwargs) as response:
if response.status == 200:
return await fn(response)
elif response.status in http_status_codes_to_retry:
logger.error(
'Received invalid response code:%s error:%s'
' response:%s url:%s', response.status, '', response.reason, url)
raise aiohttp.ClientResponseError(
code=response.status, message=response.reason, request_info=response.request_info,
history=response.history)
else:
raise FailedRequest(
code=response.status, message='Non-retryable response code',
raised='aiohttp.ClientResponseError', url=url)
except aiohttp.ClientError as exc:
try:
code = exc.code
except AttributeError:
code = ''
raised_exc = FailedRequest(code=code, message=exc,
raised='%s.%s' % (exc.__class__.__module__, exc.__class__.__qualname__), url=url)
except asyncio.TimeoutError as exc:
raised_exc = FailedRequest(code='', message='asyncio.TimeoutError',
raised='%s.%s' % (exc.__class__.__module__, exc.__class__.__qualname__), url=url)
else:
raised_exc = None
break
attempt -= 1
if raised_exc:
raise raised_exc | [
"async",
"def",
"send_http",
"(",
"session",
",",
"method",
",",
"url",
",",
"*",
",",
"retries",
"=",
"1",
",",
"interval",
"=",
"1",
",",
"backoff",
"=",
"2",
",",
"http_status_codes_to_retry",
"=",
"HTTP_STATUS_CODES_TO_RETRY",
",",
"fn",
"=",
"lambda",
"x",
":",
"x",
",",
"*",
"*",
"kwargs",
")",
":",
"backoff_interval",
"=",
"interval",
"raised_exc",
"=",
"None",
"attempt",
"=",
"0",
"if",
"method",
"not",
"in",
"[",
"'get'",
",",
"'patch'",
",",
"'post'",
"]",
":",
"raise",
"ValueError",
"if",
"retries",
"==",
"-",
"1",
":",
"# -1 means retry indefinitely",
"attempt",
"=",
"-",
"1",
"elif",
"retries",
"==",
"0",
":",
"# Zero means don't retry",
"attempt",
"=",
"1",
"else",
":",
"# any other value means retry N times",
"attempt",
"=",
"retries",
"+",
"1",
"while",
"attempt",
"!=",
"0",
":",
"if",
"raised_exc",
":",
"logger",
".",
"error",
"(",
"'Caught \"%s\" url:%s method:%s, remaining tries %s, '",
"'sleeping %.2fsecs'",
",",
"raised_exc",
",",
"method",
".",
"upper",
"(",
")",
",",
"url",
",",
"attempt",
",",
"backoff_interval",
")",
"await",
"asyncio",
".",
"sleep",
"(",
"backoff_interval",
")",
"# bump interval for the next possible attempt",
"backoff_interval",
"*=",
"backoff",
"# logger.info('sending %s %s with %s', method.upper(), url, kwargs)",
"try",
":",
"async",
"with",
"await",
"getattr",
"(",
"session",
",",
"method",
")",
"(",
"url",
",",
"*",
"*",
"kwargs",
")",
"as",
"response",
":",
"if",
"response",
".",
"status",
"==",
"200",
":",
"return",
"await",
"fn",
"(",
"response",
")",
"elif",
"response",
".",
"status",
"in",
"http_status_codes_to_retry",
":",
"logger",
".",
"error",
"(",
"'Received invalid response code:%s error:%s'",
"' response:%s url:%s'",
",",
"response",
".",
"status",
",",
"''",
",",
"response",
".",
"reason",
",",
"url",
")",
"raise",
"aiohttp",
".",
"ClientResponseError",
"(",
"code",
"=",
"response",
".",
"status",
",",
"message",
"=",
"response",
".",
"reason",
",",
"request_info",
"=",
"response",
".",
"request_info",
",",
"history",
"=",
"response",
".",
"history",
")",
"else",
":",
"raise",
"FailedRequest",
"(",
"code",
"=",
"response",
".",
"status",
",",
"message",
"=",
"'Non-retryable response code'",
",",
"raised",
"=",
"'aiohttp.ClientResponseError'",
",",
"url",
"=",
"url",
")",
"except",
"aiohttp",
".",
"ClientError",
"as",
"exc",
":",
"try",
":",
"code",
"=",
"exc",
".",
"code",
"except",
"AttributeError",
":",
"code",
"=",
"''",
"raised_exc",
"=",
"FailedRequest",
"(",
"code",
"=",
"code",
",",
"message",
"=",
"exc",
",",
"raised",
"=",
"'%s.%s'",
"%",
"(",
"exc",
".",
"__class__",
".",
"__module__",
",",
"exc",
".",
"__class__",
".",
"__qualname__",
")",
",",
"url",
"=",
"url",
")",
"except",
"asyncio",
".",
"TimeoutError",
"as",
"exc",
":",
"raised_exc",
"=",
"FailedRequest",
"(",
"code",
"=",
"''",
",",
"message",
"=",
"'asyncio.TimeoutError'",
",",
"raised",
"=",
"'%s.%s'",
"%",
"(",
"exc",
".",
"__class__",
".",
"__module__",
",",
"exc",
".",
"__class__",
".",
"__qualname__",
")",
",",
"url",
"=",
"url",
")",
"else",
":",
"raised_exc",
"=",
"None",
"break",
"attempt",
"-=",
"1",
"if",
"raised_exc",
":",
"raise",
"raised_exc"
] | Sends an HTTP request and implements a retry logic.
Arguments:
session (obj): A client aiohttp session object
method (str): Method to use
url (str): URL for the request
retries (int): Number of times to retry in case of failure
interval (float): Time to wait before retries
backoff (int): Multiply interval by this factor after each failure
http_status_codes_to_retry (List[int]): List of status codes to retry
fn (Callable[[x], x]): Function to call on successful connection | [
"Sends",
"a",
"HTTP",
"request",
"and",
"implements",
"a",
"retry",
"logic",
"."
] | python | train |
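
A usage sketch of the retry helper above, assuming it is importable; note that fn must return an awaitable, because the helper awaits its result:

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            body = await send_http(session, 'get', 'https://example.com',
                                   retries=3, interval=0.5,
                                   fn=lambda resp: resp.text())
            print(body[:80])

    asyncio.get_event_loop().run_until_complete(main())
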
Qiskit/qiskit-terra | qiskit/quantum_info/operators/channel/superop.py | https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/quantum_info/operators/channel/superop.py#L268-L283 | def multiply(self, other):
"""Return the QuantumChannel self + other.
Args:
other (complex): a complex number.
Returns:
SuperOp: the scalar multiplication other * self as a SuperOp object.
Raises:
QiskitError: if other is not a valid scalar.
"""
if not isinstance(other, Number):
raise QiskitError("other is not a number")
return SuperOp(other * self._data, self.input_dims(),
self.output_dims()) | [
"def",
"multiply",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"Number",
")",
":",
"raise",
"QiskitError",
"(",
"\"other is not a number\"",
")",
"return",
"SuperOp",
"(",
"other",
"*",
"self",
".",
"_data",
",",
"self",
".",
"input_dims",
"(",
")",
",",
"self",
".",
"output_dims",
"(",
")",
")"
] | Return the QuantumChannel self * other.
Args:
other (complex): a complex number.
Returns:
SuperOp: the scalar multiplication other * self as a SuperOp object.
Raises:
QiskitError: if other is not a valid scalar. | [
"Return",
"the",
"QuantumChannel",
"self",
"+",
"other",
"."
] | python | test |
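
A sketch scaling an identity channel; the import path mirrors this terra layout (an assumption for other versions):

    import numpy as np
    from qiskit.quantum_info.operators.channel import SuperOp

    chan = SuperOp(np.eye(4))      # identity channel on one qubit
    scaled = chan.multiply(0.5)
    print(np.allclose(scaled.data, 0.5 * np.eye(4)))  # True
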
Microsoft/nni | src/sdk/pynni/nni/curvefitting_assessor/curvefunctions.py | https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/curvefitting_assessor/curvefunctions.py#L174-L191 | def mmf(x, alpha, beta, kappa, delta):
"""Morgan-Mercer-Flodin
http://www.pisces-conservation.com/growthhelp/index.html?morgan_mercer_floden.htm
Parameters
----------
x: int
alpha: float
beta: float
kappa: float
delta: float
Returns
-------
float
alpha - (alpha - beta) / (1. + (kappa * x)**delta)
"""
return alpha - (alpha - beta) / (1. + (kappa * x)**delta) | [
"def",
"mmf",
"(",
"x",
",",
"alpha",
",",
"beta",
",",
"kappa",
",",
"delta",
")",
":",
"return",
"alpha",
"-",
"(",
"alpha",
"-",
"beta",
")",
"/",
"(",
"1.",
"+",
"(",
"kappa",
"*",
"x",
")",
"**",
"delta",
")"
] | Morgan-Mercer-Flodin
http://www.pisces-conservation.com/growthhelp/index.html?morgan_mercer_floden.htm
Parameters
----------
x: int
alpha: float
beta: float
kappa: float
delta: float
Returns
-------
float
alpha - (alpha - beta) / (1. + (kappa * x)**delta) | [
"Morgan",
"-",
"Mercer",
"-",
"Flodin",
"http",
":",
"//",
"www",
".",
"pisces",
"-",
"conservation",
".",
"com",
"/",
"growthhelp",
"/",
"index",
".",
"html?morgan_mercer_floden",
".",
"htm"
] | python | train |
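
A self-contained check of the curve above, assuming mmf is in scope: it starts at beta for x = 0 and saturates toward alpha as x grows:

    for x in (0, 1, 10, 100, 1000):
        print(x, mmf(x, alpha=0.95, beta=0.2, kappa=0.05, delta=1.2))
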
apache/incubator-mxnet | example/gluon/dc_gan/dcgan.py | https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/example/gluon/dc_gan/dcgan.py#L52-L69 | def visual(title, X, name):
"""Image visualization and preservation
:param title: title
:param X: images to visualized
:param name: saved picture`s name
:return:
"""
assert len(X.shape) == 4
X = X.transpose((0, 2, 3, 1))
X = np.clip((X - np.min(X))*(255.0/(np.max(X) - np.min(X))), 0, 255).astype(np.uint8)
n = np.ceil(np.sqrt(X.shape[0]))
buff = np.zeros((int(n*X.shape[1]), int(n*X.shape[2]), int(X.shape[3])), dtype=np.uint8)
for i, img in enumerate(X):
fill_buf(buff, i, img, X.shape[1:3])
buff = buff[:, :, ::-1]
plt.imshow(buff)
plt.title(title)
plt.savefig(name) | [
"def",
"visual",
"(",
"title",
",",
"X",
",",
"name",
")",
":",
"assert",
"len",
"(",
"X",
".",
"shape",
")",
"==",
"4",
"X",
"=",
"X",
".",
"transpose",
"(",
"(",
"0",
",",
"2",
",",
"3",
",",
"1",
")",
")",
"X",
"=",
"np",
".",
"clip",
"(",
"(",
"X",
"-",
"np",
".",
"min",
"(",
"X",
")",
")",
"*",
"(",
"255.0",
"/",
"(",
"np",
".",
"max",
"(",
"X",
")",
"-",
"np",
".",
"min",
"(",
"X",
")",
")",
")",
",",
"0",
",",
"255",
")",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"n",
"=",
"np",
".",
"ceil",
"(",
"np",
".",
"sqrt",
"(",
"X",
".",
"shape",
"[",
"0",
"]",
")",
")",
"buff",
"=",
"np",
".",
"zeros",
"(",
"(",
"int",
"(",
"n",
"*",
"X",
".",
"shape",
"[",
"1",
"]",
")",
",",
"int",
"(",
"n",
"*",
"X",
".",
"shape",
"[",
"2",
"]",
")",
",",
"int",
"(",
"X",
".",
"shape",
"[",
"3",
"]",
")",
")",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
"for",
"i",
",",
"img",
"in",
"enumerate",
"(",
"X",
")",
":",
"fill_buf",
"(",
"buff",
",",
"i",
",",
"img",
",",
"X",
".",
"shape",
"[",
"1",
":",
"3",
"]",
")",
"buff",
"=",
"buff",
"[",
":",
",",
":",
",",
":",
":",
"-",
"1",
"]",
"plt",
".",
"imshow",
"(",
"buff",
")",
"plt",
".",
"title",
"(",
"title",
")",
"plt",
".",
"savefig",
"(",
"name",
")"
] | Image visualization and preservation
:param title: title
:param X: images to visualize
:param name: saved picture`s name
:return: | [
"Image",
"visualization",
"and",
"preservation",
":",
"param",
"title",
":",
"title",
":",
"param",
"X",
":",
"images",
"to",
"visualized",
":",
"param",
"name",
":",
"saved",
"picture",
"s",
"name",
":",
"return",
":"
] | python | train |
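The core of visual() above is the rescale-to-uint8 step and the grid-size computation; a plotting-free sketch with a random NCHW batch standing in for generator output, assuming NumPy only.

import numpy as np

X = np.random.randn(4, 3, 8, 8)                 # stand-in NCHW batch
X = X.transpose((0, 2, 3, 1))                   # NHWC for display
X = np.clip((X - np.min(X)) * (255.0 / (np.max(X) - np.min(X))), 0, 255).astype(np.uint8)
n = int(np.ceil(np.sqrt(X.shape[0])))           # side of the square tile grid
print(X.dtype, X.min(), X.max(), n)             # uint8 0 255 2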
markovmodel/PyEMMA | pyemma/coordinates/pipelines.py | https://github.com/markovmodel/PyEMMA/blob/5c3124398217de05ba5ce9c8fb01519222481ab8/pyemma/coordinates/pipelines.py#L223-L254 | def save_dtrajs(self, prefix='', output_dir='.',
output_format='ascii', extension='.dtraj'):
r"""Saves calculated discrete trajectories. Filenames are taken from
given reader. If data comes from memory dtrajs are written to a default
filename.
Parameters
----------
prefix : str
prepend prefix to filenames.
output_dir : str (optional)
save files to this directory. Defaults to current working directory.
output_format : str
if format is 'ascii' dtrajs will be written as csv files, otherwise
they will be written as NumPy .npy files.
extension : str
file extension to append (eg. '.itraj')
"""
clustering = self._chain[-1]
reader = self._chain[0]
from pyemma.coordinates.clustering.interface import AbstractClustering
assert isinstance(clustering, AbstractClustering)
trajfiles = None
if isinstance(reader, FeatureReader):
trajfiles = reader.filenames
clustering.save_dtrajs(
trajfiles, prefix, output_dir, output_format, extension) | [
"def",
"save_dtrajs",
"(",
"self",
",",
"prefix",
"=",
"''",
",",
"output_dir",
"=",
"'.'",
",",
"output_format",
"=",
"'ascii'",
",",
"extension",
"=",
"'.dtraj'",
")",
":",
"clustering",
"=",
"self",
".",
"_chain",
"[",
"-",
"1",
"]",
"reader",
"=",
"self",
".",
"_chain",
"[",
"0",
"]",
"from",
"pyemma",
".",
"coordinates",
".",
"clustering",
".",
"interface",
"import",
"AbstractClustering",
"assert",
"isinstance",
"(",
"clustering",
",",
"AbstractClustering",
")",
"trajfiles",
"=",
"None",
"if",
"isinstance",
"(",
"reader",
",",
"FeatureReader",
")",
":",
"trajfiles",
"=",
"reader",
".",
"filenames",
"clustering",
".",
"save_dtrajs",
"(",
"trajfiles",
",",
"prefix",
",",
"output_dir",
",",
"output_format",
",",
"extension",
")"
] | r"""Saves calculated discrete trajectories. Filenames are taken from
given reader. If data comes from memory dtrajs are written to a default
filename.
Parameters
----------
prefix : str
prepend prefix to filenames.
output_dir : str (optional)
save files to this directory. Defaults to current working directory.
output_format : str
if format is 'ascii' dtrajs will be written as csv files, otherwise
they will be written as NumPy .npy files.
extension : str
file extension to append (eg. '.itraj') | [
"r",
"Saves",
"calculated",
"discrete",
"trajectories",
".",
"Filenames",
"are",
"taken",
"from",
"given",
"reader",
".",
"If",
"data",
"comes",
"from",
"memory",
"dtrajs",
"are",
"written",
"to",
"a",
"default",
"filename",
"."
] | python | train |
Qiskit/qiskit-terra | qiskit/tools/monitor/backend_overview.py | https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/tools/monitor/backend_overview.py#L42-L121 | def backend_monitor(backend):
"""Monitor a single IBMQ backend.
Args:
backend (IBMQBackend): Backend to monitor.
Raises:
QiskitError: Input is not an IBMQ backend.
"""
if not isinstance(backend, IBMQBackend):
raise QiskitError('Input variable is not of type IBMQBackend.')
config = backend.configuration().to_dict()
status = backend.status().to_dict()
config_dict = {**status, **config}
if not config['simulator']:
props = backend.properties().to_dict()
print(backend.name())
print('='*len(backend.name()))
print('Configuration')
print('-'*13)
offset = ' '
upper_list = ['n_qubits', 'operational',
'status_msg', 'pending_jobs',
'basis_gates', 'local', 'simulator']
lower_list = list(set(config_dict.keys()).difference(upper_list))
# Remove gates because they are in a different tab
lower_list.remove('gates')
for item in upper_list+lower_list:
print(offset+item+':', config_dict[item])
# Stop here if simulator
if config['simulator']:
return
print()
qubit_header = 'Qubits [Name / Freq / T1 / T2 / U1 err / U2 err / U3 err / Readout err]'
print(qubit_header)
print('-'*len(qubit_header))
sep = ' / '
for qub in range(len(props['qubits'])):
name = 'Q%s' % qub
qubit_data = props['qubits'][qub]
gate_data = props['gates'][3*qub:3*qub+3]
t1_info = qubit_data[0]
t2_info = qubit_data[1]
freq_info = qubit_data[2]
readout_info = qubit_data[3]
freq = str(round(freq_info['value'], 5))+' '+freq_info['unit']
T1 = str(round(t1_info['value'], # pylint: disable=invalid-name
5))+' ' + t1_info['unit']
T2 = str(round(t2_info['value'], # pylint: disable=invalid-name
5))+' ' + t2_info['unit']
# pylint: disable=invalid-name
U1 = str(round(gate_data[0]['parameters'][0]['value'], 5))
# pylint: disable=invalid-name
U2 = str(round(gate_data[1]['parameters'][0]['value'], 5))
# pylint: disable=invalid-name
U3 = str(round(gate_data[2]['parameters'][0]['value'], 5))
readout_error = str(round(readout_info['value'], 5))
qstr = sep.join([name, freq, T1, T2, U1, U2, U3, readout_error])
print(offset+qstr)
print()
multi_qubit_gates = props['gates'][3*config['n_qubits']:]
multi_header = 'Multi-Qubit Gates [Name / Type / Gate Error]'
print(multi_header)
print('-'*len(multi_header))
for gate in multi_qubit_gates:
name = gate['name']
ttype = gate['gate']
error = str(round(gate['parameters'][0]['value'], 5))
mstr = sep.join([name, ttype, error])
print(offset+mstr) | [
"def",
"backend_monitor",
"(",
"backend",
")",
":",
"if",
"not",
"isinstance",
"(",
"backend",
",",
"IBMQBackend",
")",
":",
"raise",
"QiskitError",
"(",
"'Input variable is not of type IBMQBackend.'",
")",
"config",
"=",
"backend",
".",
"configuration",
"(",
")",
".",
"to_dict",
"(",
")",
"status",
"=",
"backend",
".",
"status",
"(",
")",
".",
"to_dict",
"(",
")",
"config_dict",
"=",
"{",
"*",
"*",
"status",
",",
"*",
"*",
"config",
"}",
"if",
"not",
"config",
"[",
"'simulator'",
"]",
":",
"props",
"=",
"backend",
".",
"properties",
"(",
")",
".",
"to_dict",
"(",
")",
"print",
"(",
"backend",
".",
"name",
"(",
")",
")",
"print",
"(",
"'='",
"*",
"len",
"(",
"backend",
".",
"name",
"(",
")",
")",
")",
"print",
"(",
"'Configuration'",
")",
"print",
"(",
"'-'",
"*",
"13",
")",
"offset",
"=",
"' '",
"upper_list",
"=",
"[",
"'n_qubits'",
",",
"'operational'",
",",
"'status_msg'",
",",
"'pending_jobs'",
",",
"'basis_gates'",
",",
"'local'",
",",
"'simulator'",
"]",
"lower_list",
"=",
"list",
"(",
"set",
"(",
"config_dict",
".",
"keys",
"(",
")",
")",
".",
"difference",
"(",
"upper_list",
")",
")",
"# Remove gates because they are in a different tab",
"lower_list",
".",
"remove",
"(",
"'gates'",
")",
"for",
"item",
"in",
"upper_list",
"+",
"lower_list",
":",
"print",
"(",
"offset",
"+",
"item",
"+",
"':'",
",",
"config_dict",
"[",
"item",
"]",
")",
"# Stop here if simulator",
"if",
"config",
"[",
"'simulator'",
"]",
":",
"return",
"print",
"(",
")",
"qubit_header",
"=",
"'Qubits [Name / Freq / T1 / T2 / U1 err / U2 err / U3 err / Readout err]'",
"print",
"(",
"qubit_header",
")",
"print",
"(",
"'-'",
"*",
"len",
"(",
"qubit_header",
")",
")",
"sep",
"=",
"' / '",
"for",
"qub",
"in",
"range",
"(",
"len",
"(",
"props",
"[",
"'qubits'",
"]",
")",
")",
":",
"name",
"=",
"'Q%s'",
"%",
"qub",
"qubit_data",
"=",
"props",
"[",
"'qubits'",
"]",
"[",
"qub",
"]",
"gate_data",
"=",
"props",
"[",
"'gates'",
"]",
"[",
"3",
"*",
"qub",
":",
"3",
"*",
"qub",
"+",
"3",
"]",
"t1_info",
"=",
"qubit_data",
"[",
"0",
"]",
"t2_info",
"=",
"qubit_data",
"[",
"1",
"]",
"freq_info",
"=",
"qubit_data",
"[",
"2",
"]",
"readout_info",
"=",
"qubit_data",
"[",
"3",
"]",
"freq",
"=",
"str",
"(",
"round",
"(",
"freq_info",
"[",
"'value'",
"]",
",",
"5",
")",
")",
"+",
"' '",
"+",
"freq_info",
"[",
"'unit'",
"]",
"T1",
"=",
"str",
"(",
"round",
"(",
"t1_info",
"[",
"'value'",
"]",
",",
"# pylint: disable=invalid-name",
"5",
")",
")",
"+",
"' '",
"+",
"t1_info",
"[",
"'unit'",
"]",
"T2",
"=",
"str",
"(",
"round",
"(",
"t2_info",
"[",
"'value'",
"]",
",",
"# pylint: disable=invalid-name",
"5",
")",
")",
"+",
"' '",
"+",
"t2_info",
"[",
"'unit'",
"]",
"# pylint: disable=invalid-name",
"U1",
"=",
"str",
"(",
"round",
"(",
"gate_data",
"[",
"0",
"]",
"[",
"'parameters'",
"]",
"[",
"0",
"]",
"[",
"'value'",
"]",
",",
"5",
")",
")",
"# pylint: disable=invalid-name",
"U2",
"=",
"str",
"(",
"round",
"(",
"gate_data",
"[",
"1",
"]",
"[",
"'parameters'",
"]",
"[",
"0",
"]",
"[",
"'value'",
"]",
",",
"5",
")",
")",
"# pylint: disable=invalid-name",
"U3",
"=",
"str",
"(",
"round",
"(",
"gate_data",
"[",
"2",
"]",
"[",
"'parameters'",
"]",
"[",
"0",
"]",
"[",
"'value'",
"]",
",",
"5",
")",
")",
"readout_error",
"=",
"str",
"(",
"round",
"(",
"readout_info",
"[",
"'value'",
"]",
",",
"5",
")",
")",
"qstr",
"=",
"sep",
".",
"join",
"(",
"[",
"name",
",",
"freq",
",",
"T1",
",",
"T2",
",",
"U1",
",",
"U2",
",",
"U3",
",",
"readout_error",
"]",
")",
"print",
"(",
"offset",
"+",
"qstr",
")",
"print",
"(",
")",
"multi_qubit_gates",
"=",
"props",
"[",
"'gates'",
"]",
"[",
"3",
"*",
"config",
"[",
"'n_qubits'",
"]",
":",
"]",
"multi_header",
"=",
"'Multi-Qubit Gates [Name / Type / Gate Error]'",
"print",
"(",
"multi_header",
")",
"print",
"(",
"'-'",
"*",
"len",
"(",
"multi_header",
")",
")",
"for",
"gate",
"in",
"multi_qubit_gates",
":",
"name",
"=",
"gate",
"[",
"'name'",
"]",
"ttype",
"=",
"gate",
"[",
"'gate'",
"]",
"error",
"=",
"str",
"(",
"round",
"(",
"gate",
"[",
"'parameters'",
"]",
"[",
"0",
"]",
"[",
"'value'",
"]",
",",
"5",
")",
")",
"mstr",
"=",
"sep",
".",
"join",
"(",
"[",
"name",
",",
"ttype",
",",
"error",
"]",
")",
"print",
"(",
"offset",
"+",
"mstr",
")"
] | Monitor a single IBMQ backend.
Args:
backend (IBMQBackend): Backend to monitor.
Raises:
QiskitError: Input is not an IBMQ backend. | [
"Monitor",
"a",
"single",
"IBMQ",
"backend",
"."
] | python | test |
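The configuration section of the monitor above is a dict merge with a fixed-first key ordering; the pattern can be exercised without qiskit using invented status/config payloads (only the key names mirrored from the source are real fields).

status = {'operational': True, 'pending_jobs': 3, 'status_msg': 'active'}
config = {'n_qubits': 5, 'simulator': False, 'memory': True}
config_dict = {**status, **config}   # later dict wins on duplicate keys

upper_list = ['n_qubits', 'operational', 'status_msg', 'pending_jobs', 'simulator']
lower_list = sorted(set(config_dict) - set(upper_list))
for item in upper_list + lower_list:
    print('    ' + item + ':', config_dict[item])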
MSchnei/pyprf_feature | pyprf_feature/analysis/model_creation_utils.py | https://github.com/MSchnei/pyprf_feature/blob/49004ede7ae1ddee07a30afe9ce3e2776750805c/pyprf_feature/analysis/model_creation_utils.py#L665-L699 | def fnd_unq_rws(A, return_index=False, return_inverse=False):
"""Find unique rows in 2D array.
Parameters
----------
A : 2d numpy array
Array for which unique rows should be identified.
return_index : bool
Bool to decide whether I is returned.
return_inverse : bool
Bool to decide whether J is returned.
Returns
-------
B : 1d numpy array,
Unique rows
I: 1d numpy array, only returned if return_index is True
B = A[I,:]
J: 2d numpy array, only returned if return_inverse is True
A = B[J,:]
"""
A = np.require(A, requirements='C')
assert A.ndim == 2, "array must be 2-dim'l"
B = np.unique(A.view([('', A.dtype)]*A.shape[1]),
return_index=return_index,
return_inverse=return_inverse)
if return_index or return_inverse:
return (B[0].view(A.dtype).reshape((-1, A.shape[1]), order='C'),) \
+ B[1:]
else:
return B.view(A.dtype).reshape((-1, A.shape[1]), order='C') | [
"def",
"fnd_unq_rws",
"(",
"A",
",",
"return_index",
"=",
"False",
",",
"return_inverse",
"=",
"False",
")",
":",
"A",
"=",
"np",
".",
"require",
"(",
"A",
",",
"requirements",
"=",
"'C'",
")",
"assert",
"A",
".",
"ndim",
"==",
"2",
",",
"\"array must be 2-dim'l\"",
"B",
"=",
"np",
".",
"unique",
"(",
"A",
".",
"view",
"(",
"[",
"(",
"''",
",",
"A",
".",
"dtype",
")",
"]",
"*",
"A",
".",
"shape",
"[",
"1",
"]",
")",
",",
"return_index",
"=",
"return_index",
",",
"return_inverse",
"=",
"return_inverse",
")",
"if",
"return_index",
"or",
"return_inverse",
":",
"return",
"(",
"B",
"[",
"0",
"]",
".",
"view",
"(",
"A",
".",
"dtype",
")",
".",
"reshape",
"(",
"(",
"-",
"1",
",",
"A",
".",
"shape",
"[",
"1",
"]",
")",
",",
"order",
"=",
"'C'",
")",
",",
")",
"+",
"B",
"[",
"1",
":",
"]",
"else",
":",
"return",
"B",
".",
"view",
"(",
"A",
".",
"dtype",
")",
".",
"reshape",
"(",
"(",
"-",
"1",
",",
"A",
".",
"shape",
"[",
"1",
"]",
")",
",",
"order",
"=",
"'C'",
")"
] | Find unique rows in 2D array.
Parameters
----------
A : 2d numpy array
Array for which unique rows should be identified.
return_index : bool
Bool to decide whether I is returned.
return_inverse : bool
Bool to decide whether J is returned.
Returns
-------
B : 1d numpy array,
Unique rows
I: 1d numpy array, only returned if return_index is True
B = A[I,:]
J: 2d numpy array, only returned if return_inverse is True
A = B[J,:] | [
"Find",
"unique",
"rows",
"in",
"2D",
"array",
"."
] | python | train |
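A small end-to-end check of the structured-view trick above; the reshape of the inverse indices guards against NumPy versions where np.unique returns them non-flat.

import numpy as np

A = np.require(np.array([[1, 2], [3, 4], [1, 2]]), requirements='C')
B, J = np.unique(A.view([('', A.dtype)] * A.shape[1]), return_inverse=True)
B = B.view(A.dtype).reshape((-1, A.shape[1]), order='C')
J = np.asarray(J).reshape(-1)          # flatten for portability across NumPy versions
print(B)                               # [[1 2] [3 4]]
print(np.array_equal(A, B[J]))         # True: A = B[J, :]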
hubo1016/vlcp | vlcp/service/connection/tcpserver.py | https://github.com/hubo1016/vlcp/blob/239055229ec93a99cc7e15208075724ccf543bd1/vlcp/service/connection/tcpserver.py#L162-L172 | async def startlisten(self, vhost = None):
'''
Start listen on current servers
:param vhost: return only servers of vhost if specified. '' to return only default servers.
None for all servers.
'''
servers = self.getservers(vhost)
for s in servers:
await s.startlisten()
return len(servers) | [
"async",
"def",
"startlisten",
"(",
"self",
",",
"vhost",
"=",
"None",
")",
":",
"servers",
"=",
"self",
".",
"getservers",
"(",
"vhost",
")",
"for",
"s",
"in",
"servers",
":",
"await",
"s",
".",
"startlisten",
"(",
")",
"return",
"len",
"(",
"servers",
")"
] | Start listen on current servers
:param vhost: return only servers of vhost if specified. '' to return only default servers.
None for all servers. | [
"Start",
"listen",
"on",
"current",
"servers",
":",
"param",
"vhost",
":",
"return",
"only",
"servers",
"of",
"vhost",
"if",
"specified",
".",
"to",
"return",
"only",
"default",
"servers",
".",
"None",
"for",
"all",
"servers",
"."
] | python | train |
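The coroutine above awaits each server sequentially and returns the count; the same shape with asyncio alone and stub server objects:

import asyncio

class StubServer:
    def __init__(self, name):
        self.name = name
    async def startlisten(self):
        print('listening:', self.name)

async def startlisten(servers):
    for s in servers:                  # sequential awaits, as in the entry above
        await s.startlisten()
    return len(servers)

print(asyncio.run(startlisten([StubServer('a'), StubServer('b')])))   # 2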
nugget/python-insteonplm | insteonplm/messages/standardReceive.py | https://github.com/nugget/python-insteonplm/blob/65548041f1b0729ae1ae904443dd81b0c6cbf1bf/insteonplm/messages/standardReceive.py#L43-L49 | def from_raw_message(cls, rawmessage):
"""Create message from a raw byte stream."""
return StandardReceive(rawmessage[2:5],
rawmessage[5:8],
{'cmd1': rawmessage[9],
'cmd2': rawmessage[10]},
flags=rawmessage[8]) | [
"def",
"from_raw_message",
"(",
"cls",
",",
"rawmessage",
")",
":",
"return",
"StandardReceive",
"(",
"rawmessage",
"[",
"2",
":",
"5",
"]",
",",
"rawmessage",
"[",
"5",
":",
"8",
"]",
",",
"{",
"'cmd1'",
":",
"rawmessage",
"[",
"9",
"]",
",",
"'cmd2'",
":",
"rawmessage",
"[",
"10",
"]",
"}",
",",
"flags",
"=",
"rawmessage",
"[",
"8",
"]",
")"
] | Create message from a raw byte stream. | [
"Create",
"message",
"from",
"a",
"raw",
"byte",
"stream",
"."
] | python | train |
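The slicing offsets above (2:5, 5:8, 8, 9, 10) can be verified against a hand-built 11-byte frame; every constant here is invented purely to make the slices visible, not a real Insteon message.

raw = bytes([0x02, 0x50,            # 0-1: header, not used by the slices
             0xAA, 0xBB, 0xCC,      # 2:5 from-address
             0x11, 0x22, 0x33,      # 5:8 to-address
             0x0F,                  # 8: flags
             0x11, 0xFF])           # 9, 10: cmd1, cmd2

print(raw[2:5].hex(), raw[5:8].hex(), hex(raw[8]), hex(raw[9]), hex(raw[10]))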
bertrandvidal/parse_this | parse_this/core.py | https://github.com/bertrandvidal/parse_this/blob/aa2e3737f19642300ef1ca65cae21c90049718a2/parse_this/core.py#L264-L299 | def _get_parser_call_method(func):
"""Returns the method that is linked to the 'call' method of the parser
Args:
func: the decorated function
Raises:
ParseThisError if the decorated method is __init__, __init__ can
only be decorated in a class decorated by parse_class
"""
func_name = func.__name__
parser = func.parser
def inner_call(instance=None, args=None):
"""This is method attached to <parser>.call.
Args:
instance: the instance of the parser
args: arguments to be parsed
"""
_LOG.debug("Calling %s.parser.call", func_name)
# Defer this check in the method call so that __init__ can be
# decorated in class decorated with parse_class
if func_name == "__init__":
raise ParseThisError(("To use 'create_parser' on the"
"'__init__' you need to decorate the "
"class with '@parse_class'"))
namespace = parser.parse_args(_get_args_to_parse(args, sys.argv))
if instance is None:
# If instance is None we are probably decorating a function not a
# method and don't need the instance
args_name = _get_args_name_from_parser(parser)
return _call(func, args_name, namespace)
return _call_method_from_namespace(instance, func_name, namespace)
return inner_call | [
"def",
"_get_parser_call_method",
"(",
"func",
")",
":",
"func_name",
"=",
"func",
".",
"__name__",
"parser",
"=",
"func",
".",
"parser",
"def",
"inner_call",
"(",
"instance",
"=",
"None",
",",
"args",
"=",
"None",
")",
":",
"\"\"\"This is method attached to <parser>.call.\n\n Args:\n instance: the instance of the parser\n args: arguments to be parsed\n \"\"\"",
"_LOG",
".",
"debug",
"(",
"\"Calling %s.parser.call\"",
",",
"func_name",
")",
"# Defer this check in the method call so that __init__ can be",
"# decorated in class decorated with parse_class",
"if",
"func_name",
"==",
"\"__init__\"",
":",
"raise",
"ParseThisError",
"(",
"(",
"\"To use 'create_parser' on the\"",
"\"'__init__' you need to decorate the \"",
"\"class with '@parse_class'\"",
")",
")",
"namespace",
"=",
"parser",
".",
"parse_args",
"(",
"_get_args_to_parse",
"(",
"args",
",",
"sys",
".",
"argv",
")",
")",
"if",
"instance",
"is",
"None",
":",
"# If instance is None we are probably decorating a function not a",
"# method and don't need the instance",
"args_name",
"=",
"_get_args_name_from_parser",
"(",
"parser",
")",
"return",
"_call",
"(",
"func",
",",
"args_name",
",",
"namespace",
")",
"return",
"_call_method_from_namespace",
"(",
"instance",
",",
"func_name",
",",
"namespace",
")",
"return",
"inner_call"
] | Returns the method that is linked to the 'call' method of the parser
Args:
func: the decorated function
Raises:
ParseThisError if the decorated method is __init__, __init__ can
only be decorated in a class decorated by parse_class | [
"Returns",
"the",
"method",
"that",
"is",
"linked",
"to",
"the",
"call",
"method",
"of",
"the",
"parser"
] | python | train |
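The essential move above is returning a closure that defers parsing until call time; a stripped-down analogue with plain argparse (the __init__ guard and instance dispatch are omitted).

import argparse

def get_parser_call_method(func, parser):
    def inner_call(args=None):
        namespace = parser.parse_args(args or [])   # parsing deferred to the call
        return func(**vars(namespace))
    return inner_call

parser = argparse.ArgumentParser()
parser.add_argument('--n', type=int, default=2)
call = get_parser_call_method(lambda n: n * n, parser)
print(call(['--n', '5']))   # 25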
xtrementl/focus | focus/daemon.py | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/daemon.py#L219-L226 | def _reg_sighandlers(self):
""" Registers signal handlers to this class.
"""
# SIGCHLD, so we shutdown when any of the child processes exit
_handler = lambda signo, frame: self.shutdown()
signal.signal(signal.SIGCHLD, _handler)
signal.signal(signal.SIGTERM, _handler) | [
"def",
"_reg_sighandlers",
"(",
"self",
")",
":",
"# SIGCHLD, so we shutdown when any of the child processes exit",
"_handler",
"=",
"lambda",
"signo",
",",
"frame",
":",
"self",
".",
"shutdown",
"(",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGCHLD",
",",
"_handler",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"_handler",
")"
] | Registers signal handlers to this class. | [
"Registers",
"signal",
"handlers",
"to",
"this",
"class",
"."
] | python | train |
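The registration above can be reproduced standalone; SIGCHLD only exists on POSIX, so this sketch guards it, and nothing here actually sends a signal.

import signal

def shutdown():
    print('shutting down')

_handler = lambda signo, frame: shutdown()
signal.signal(signal.SIGTERM, _handler)
if hasattr(signal, 'SIGCHLD'):              # POSIX-only signal
    signal.signal(signal.SIGCHLD, _handler)
print(signal.getsignal(signal.SIGTERM) is _handler)   # True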
hyperledger/indy-plenum | stp_core/crypto/nacl_wrappers.py | https://github.com/hyperledger/indy-plenum/blob/dcd144e238af7f17a869ffc9412f13dc488b7020/stp_core/crypto/nacl_wrappers.py#L86-L108 | def verify(self, smessage, signature=None, encoder=encoding.RawEncoder):
"""
Verifies the signature of a signed message, returning the message
if it has not been tampered with else raising
:class:`~ValueError`.
:param smessage: [:class:`bytes`] Either the original messaged or a
signature and message concated together.
:param signature: [:class:`bytes`] If an unsigned message is given for
smessage then the detached signature must be provded.
:param encoder: A class that is able to decode the secret message and
signature.
:rtype: :class:`bytes`
"""
if signature is not None:
# If we were given the message and signature separately, combine
# them.
smessage = signature + smessage
# Decode the signed message
smessage = encoder.decode(smessage)
return libnacl.crypto_sign_open(smessage, self._key) | [
"def",
"verify",
"(",
"self",
",",
"smessage",
",",
"signature",
"=",
"None",
",",
"encoder",
"=",
"encoding",
".",
"RawEncoder",
")",
":",
"if",
"signature",
"is",
"not",
"None",
":",
"# If we were given the message and signature separately, combine",
"# them.",
"smessage",
"=",
"signature",
"+",
"smessage",
"# Decode the signed message",
"smessage",
"=",
"encoder",
".",
"decode",
"(",
"smessage",
")",
"return",
"libnacl",
".",
"crypto_sign_open",
"(",
"smessage",
",",
"self",
".",
"_key",
")"
] | Verifies the signature of a signed message, returning the message
if it has not been tampered with else raising
:class:`~ValueError`.
:param smessage: [:class:`bytes`] Either the original message or a
signature and message concatenated together.
:param signature: [:class:`bytes`] If an unsigned message is given for
smessage then the detached signature must be provided.
:param encoder: A class that is able to decode the secret message and
signature.
:rtype: :class:`bytes` | [
"Verifies",
"the",
"signature",
"of",
"a",
"signed",
"message",
"returning",
"the",
"message",
"if",
"it",
"has",
"not",
"been",
"tampered",
"with",
"else",
"raising",
":",
"class",
":",
"~ValueError",
"."
] | python | train |
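All the wrapper above adds to crypto_sign_open is re-attaching a detached signature in front of the message; that step is plain byte concatenation and can be shown without any NaCl dependency (the 64-byte signature here is a dummy).

signature = b'S' * 64            # Ed25519 detached signatures are 64 bytes
message = b'hello world'

smessage = signature + message   # what verify() rebuilds before crypto_sign_open
assert smessage[:64] == signature and smessage[64:] == message
print(len(smessage))             # 75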
KelSolaar/Umbra | umbra/ui/widgets/basic_QPlainTextEdit.py | https://github.com/KelSolaar/Umbra/blob/66f45f08d9d723787f1191989f8b0dda84b412ce/umbra/ui/widgets/basic_QPlainTextEdit.py#L898-L911 | def go_to_line(self, line):
"""
Moves the text cursor to given line.
:param line: Line to go to.
:type line: int
:return: Method success.
:rtype: bool
"""
cursor = self.textCursor()
cursor.setPosition(self.document().findBlockByNumber(line - 1).position())
self.setTextCursor(cursor)
return True | [
"def",
"go_to_line",
"(",
"self",
",",
"line",
")",
":",
"cursor",
"=",
"self",
".",
"textCursor",
"(",
")",
"cursor",
".",
"setPosition",
"(",
"self",
".",
"document",
"(",
")",
".",
"findBlockByNumber",
"(",
"line",
"-",
"1",
")",
".",
"position",
"(",
")",
")",
"self",
".",
"setTextCursor",
"(",
"cursor",
")",
"return",
"True"
] | Moves the text cursor to given line.
:param line: Line to go to.
:type line: int
:return: Method success.
:rtype: bool | [
"Moves",
"the",
"text",
"cursor",
"to",
"given",
"line",
"."
] | python | train |
secure-systems-lab/securesystemslib | securesystemslib/pyca_crypto_keys.py | https://github.com/secure-systems-lab/securesystemslib/blob/beb3109d5bb462e5a60eed88fb40ed1167bd354e/securesystemslib/pyca_crypto_keys.py#L649-L739 | def encrypt_key(key_object, password):
"""
<Purpose>
Return a string containing 'key_object' in encrypted form. Encrypted
strings may be safely saved to a file. The corresponding decrypt_key()
function can be applied to the encrypted string to restore the original key
object. 'key_object' is a TUF key (e.g., RSAKEY_SCHEMA,
ED25519KEY_SCHEMA). This function calls the pyca/cryptography library to
perform the encryption and derive a suitable encryption key.
Whereas an encrypted PEM file uses the Triple Data Encryption Algorithm
(3DES), the Cipher-block chaining (CBC) mode of operation, and the Password
Based Key Derivation Function 1 (PBKF1) + MD5 to strengthen 'password',
encrypted TUF keys use AES-256-CTR-Mode and passwords strengthened with
PBKDF2-HMAC-SHA256 (100K iterations by default, but may be overridden in
'settings.PBKDF2_ITERATIONS' by the user).
http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
http://en.wikipedia.org/wiki/CTR_mode#Counter_.28CTR.29
https://en.wikipedia.org/wiki/PBKDF2
>>> ed25519_key = {'keytype': 'ed25519', \
'scheme': 'ed25519', \
'keyid': \
'd62247f817883f593cf6c66a5a55292488d457bcf638ae03207dbbba9dbe457d', \
'keyval': {'public': \
'74addb5ad544a4306b34741bc1175a3613a8d7dc69ff64724243efdec0e301ad', \
'private': \
'1f26964cc8d4f7ee5f3c5da2fbb7ab35811169573ac367b860a537e47789f8c4'}}
>>> passphrase = 'secret'
>>> encrypted_key = encrypt_key(ed25519_key, passphrase)
>>> securesystemslib.formats.ENCRYPTEDKEY_SCHEMA.matches(encrypted_key.encode('utf-8'))
True
<Arguments>
key_object:
The TUF key object that should contain the private portion of the ED25519
key.
password:
The password, or passphrase, to encrypt the private part of the RSA
key. 'password' is not used directly as the encryption key, a stronger
encryption key is derived from it.
<Exceptions>
securesystemslib.exceptions.FormatError, if any of the arguments are
improperly formatted or 'key_object' does not contain the private portion
of the key.
securesystemslib.exceptions.CryptoError, if an Ed25519 key in encrypted TUF
format cannot be created.
<Side Effects>
pyca/Cryptography cryptographic operations called to perform the actual
encryption of 'key_object'. 'password' used to derive a suitable
encryption key.
<Returns>
An encrypted string in 'securesystemslib.formats.ENCRYPTEDKEY_SCHEMA' format.
"""
# Do the arguments have the correct format?
# Ensure the arguments have the appropriate number of objects and object
# types, and that all dict keys are properly named.
# Raise 'securesystemslib.exceptions.FormatError' if the check fails.
securesystemslib.formats.ANYKEY_SCHEMA.check_match(key_object)
# Does 'password' have the correct format?
securesystemslib.formats.PASSWORD_SCHEMA.check_match(password)
# Ensure the private portion of the key is included in 'key_object'.
if 'private' not in key_object['keyval'] or not key_object['keyval']['private']:
raise securesystemslib.exceptions.FormatError('Key object does not contain'
' a private part.')
# Derive a key (i.e., an appropriate encryption key and not the
# user's password) from the given 'password'. Strengthen 'password' with
# PBKDF2-HMAC-SHA256 (100K iterations by default, but may be overridden in
# 'settings.PBKDF2_ITERATIONS' by the user).
salt, iterations, derived_key = _generate_derived_key(password)
# Store the derived key info in a dictionary, the object expected
# by the non-public _encrypt() routine.
derived_key_information = {'salt': salt, 'iterations': iterations,
'derived_key': derived_key}
# Convert the key object to json string format and encrypt it with the
# derived key.
encrypted_key = _encrypt(json.dumps(key_object), derived_key_information)
return encrypted_key | [
"def",
"encrypt_key",
"(",
"key_object",
",",
"password",
")",
":",
"# Do the arguments have the correct format?",
"# Ensure the arguments have the appropriate number of objects and object",
"# types, and that all dict keys are properly named.",
"# Raise 'securesystemslib.exceptions.FormatError' if the check fails.",
"securesystemslib",
".",
"formats",
".",
"ANYKEY_SCHEMA",
".",
"check_match",
"(",
"key_object",
")",
"# Does 'password' have the correct format?",
"securesystemslib",
".",
"formats",
".",
"PASSWORD_SCHEMA",
".",
"check_match",
"(",
"password",
")",
"# Ensure the private portion of the key is included in 'key_object'.",
"if",
"'private'",
"not",
"in",
"key_object",
"[",
"'keyval'",
"]",
"or",
"not",
"key_object",
"[",
"'keyval'",
"]",
"[",
"'private'",
"]",
":",
"raise",
"securesystemslib",
".",
"exceptions",
".",
"FormatError",
"(",
"'Key object does not contain'",
"' a private part.'",
")",
"# Derive a key (i.e., an appropriate encryption key and not the",
"# user's password) from the given 'password'. Strengthen 'password' with",
"# PBKDF2-HMAC-SHA256 (100K iterations by default, but may be overriden in",
"# 'settings.PBKDF2_ITERATIONS' by the user).",
"salt",
",",
"iterations",
",",
"derived_key",
"=",
"_generate_derived_key",
"(",
"password",
")",
"# Store the derived key info in a dictionary, the object expected",
"# by the non-public _encrypt() routine.",
"derived_key_information",
"=",
"{",
"'salt'",
":",
"salt",
",",
"'iterations'",
":",
"iterations",
",",
"'derived_key'",
":",
"derived_key",
"}",
"# Convert the key object to json string format and encrypt it with the",
"# derived key.",
"encrypted_key",
"=",
"_encrypt",
"(",
"json",
".",
"dumps",
"(",
"key_object",
")",
",",
"derived_key_information",
")",
"return",
"encrypted_key"
] | <Purpose>
Return a string containing 'key_object' in encrypted form. Encrypted
strings may be safely saved to a file. The corresponding decrypt_key()
function can be applied to the encrypted string to restore the original key
object. 'key_object' is a TUF key (e.g., RSAKEY_SCHEMA,
ED25519KEY_SCHEMA). This function calls the pyca/cryptography library to
perform the encryption and derive a suitable encryption key.
Whereas an encrypted PEM file uses the Triple Data Encryption Algorithm
(3DES), the Cipher-block chaining (CBC) mode of operation, and the Password
Based Key Derivation Function 1 (PBKF1) + MD5 to strengthen 'password',
encrypted TUF keys use AES-256-CTR-Mode and passwords strengthened with
PBKDF2-HMAC-SHA256 (100K iterations by default, but may be overridden in
'settings.PBKDF2_ITERATIONS' by the user).
http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
http://en.wikipedia.org/wiki/CTR_mode#Counter_.28CTR.29
https://en.wikipedia.org/wiki/PBKDF2
>>> ed25519_key = {'keytype': 'ed25519', \
'scheme': 'ed25519', \
'keyid': \
'd62247f817883f593cf6c66a5a55292488d457bcf638ae03207dbbba9dbe457d', \
'keyval': {'public': \
'74addb5ad544a4306b34741bc1175a3613a8d7dc69ff64724243efdec0e301ad', \
'private': \
'1f26964cc8d4f7ee5f3c5da2fbb7ab35811169573ac367b860a537e47789f8c4'}}
>>> passphrase = 'secret'
>>> encrypted_key = encrypt_key(ed25519_key, passphrase)
>>> securesystemslib.formats.ENCRYPTEDKEY_SCHEMA.matches(encrypted_key.encode('utf-8'))
True
<Arguments>
key_object:
The TUF key object that should contain the private portion of the ED25519
key.
password:
The password, or passphrase, to encrypt the private part of the RSA
key. 'password' is not used directly as the encryption key, a stronger
encryption key is derived from it.
<Exceptions>
securesystemslib.exceptions.FormatError, if any of the arguments are
improperly formatted or 'key_object' does not contain the private portion
of the key.
securesystemslib.exceptions.CryptoError, if an Ed25519 key in encrypted TUF
format cannot be created.
<Side Effects>
pyca/Cryptography cryptographic operations called to perform the actual
encryption of 'key_object'. 'password' used to derive a suitable
encryption key.
<Returns>
An encrypted string in 'securesystemslib.formats.ENCRYPTEDKEY_SCHEMA' format. | [
"<Purpose",
">",
"Return",
"a",
"string",
"containing",
"key_object",
"in",
"encrypted",
"form",
".",
"Encrypted",
"strings",
"may",
"be",
"safely",
"saved",
"to",
"a",
"file",
".",
"The",
"corresponding",
"decrypt_key",
"()",
"function",
"can",
"be",
"applied",
"to",
"the",
"encrypted",
"string",
"to",
"restore",
"the",
"original",
"key",
"object",
".",
"key_object",
"is",
"a",
"TUF",
"key",
"(",
"e",
".",
"g",
".",
"RSAKEY_SCHEMA",
"ED25519KEY_SCHEMA",
")",
".",
"This",
"function",
"calls",
"the",
"pyca",
"/",
"cryptography",
"library",
"to",
"perform",
"the",
"encryption",
"and",
"derive",
"a",
"suitable",
"encryption",
"key",
"."
] | python | train |
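The password-strengthening step described above maps onto the standard library directly; a sketch of PBKDF2-HMAC-SHA256 at the 100K default iteration count from the docstring, with a random per-call salt (the dict mirrors the derived_key_information shape in the source).

import hashlib, json, os

password = 'secret'
salt = os.urandom(16)
iterations = 100_000
derived_key = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, iterations)

derived_key_information = {'salt': salt.hex(),
                           'iterations': iterations,
                           'derived_key': derived_key.hex()}
print(len(derived_key), derived_key_information['iterations'])   # 32 100000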
cloud9ers/gurumate | environment/lib/python2.7/site-packages/IPython/core/pylabtools.py | https://github.com/cloud9ers/gurumate/blob/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e/environment/lib/python2.7/site-packages/IPython/core/pylabtools.py#L53-L77 | def getfigs(*fig_nums):
"""Get a list of matplotlib figures by figure numbers.
If no arguments are given, all available figures are returned. If the
argument list contains references to invalid figures, a warning is printed
but the function continues processing further figures.
Parameters
----------
figs : tuple
A tuple of ints giving the figure numbers of the figures to return.
"""
from matplotlib._pylab_helpers import Gcf
if not fig_nums:
fig_managers = Gcf.get_all_fig_managers()
return [fm.canvas.figure for fm in fig_managers]
else:
figs = []
for num in fig_nums:
f = Gcf.figs.get(num)
if f is None:
print('Warning: figure %s not available.' % num)
else:
figs.append(f.canvas.figure)
return figs | [
"def",
"getfigs",
"(",
"*",
"fig_nums",
")",
":",
"from",
"matplotlib",
".",
"_pylab_helpers",
"import",
"Gcf",
"if",
"not",
"fig_nums",
":",
"fig_managers",
"=",
"Gcf",
".",
"get_all_fig_managers",
"(",
")",
"return",
"[",
"fm",
".",
"canvas",
".",
"figure",
"for",
"fm",
"in",
"fig_managers",
"]",
"else",
":",
"figs",
"=",
"[",
"]",
"for",
"num",
"in",
"fig_nums",
":",
"f",
"=",
"Gcf",
".",
"figs",
".",
"get",
"(",
"num",
")",
"if",
"f",
"is",
"None",
":",
"print",
"(",
"'Warning: figure %s not available.'",
"%",
"num",
")",
"else",
":",
"figs",
".",
"append",
"(",
"f",
".",
"canvas",
".",
"figure",
")",
"return",
"figs"
] | Get a list of matplotlib figures by figure numbers.
If no arguments are given, all available figures are returned. If the
argument list contains references to invalid figures, a warning is printed
but the function continues processing further figures.
Parameters
----------
figs : tuple
A tuple of ints giving the figure numbers of the figures to return. | [
"Get",
"a",
"list",
"of",
"matplotlib",
"figures",
"by",
"figure",
"numbers",
"."
] | python | test |
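A quick exercise of the Gcf lookup above, assuming matplotlib is installed; the Agg backend keeps it headless, and the figure numbers are whatever pyplot assigns.

import matplotlib
matplotlib.use('Agg')                          # headless backend for the demo
import matplotlib.pyplot as plt
from matplotlib._pylab_helpers import Gcf

plt.figure(); plt.figure()
print([fm.canvas.figure.number for fm in Gcf.get_all_fig_managers()])   # e.g. [1, 2]
print(Gcf.figs.get(999) is None)               # unknown numbers come back None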
krukas/Trionyx | trionyx/renderer.py | https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/renderer.py#L62-L65 | def render_value(self, value, **options):
"""Render value"""
renderer = self.renderers.get(type(value), lambda value, **options: value)
return renderer(value, **options) | [
"def",
"render_value",
"(",
"self",
",",
"value",
",",
"*",
"*",
"options",
")",
":",
"renderer",
"=",
"self",
".",
"renderers",
".",
"get",
"(",
"type",
"(",
"value",
")",
",",
"lambda",
"value",
",",
"*",
"*",
"options",
":",
"value",
")",
"return",
"renderer",
"(",
"value",
",",
"*",
"*",
"options",
")"
] | Render value | [
"Render",
"value"
] | python | train |
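The dispatch above is a dict keyed on type() with an identity fallback; the same pattern standalone, with two invented renderers:

renderers = {
    bool: lambda value, **options: 'yes' if value else 'no',
    float: lambda value, **options: '{:.2f}'.format(value),
}

def render_value(value, **options):
    renderer = renderers.get(type(value), lambda value, **options: value)
    return renderer(value, **options)

print(render_value(True), render_value(3.14159), render_value('as-is'))
# yes 3.14 as-is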
spyder-ide/spyder | spyder/utils/qthelpers.py | https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/utils/qthelpers.py#L190-L213 | def create_toolbutton(parent, text=None, shortcut=None, icon=None, tip=None,
toggled=None, triggered=None,
autoraise=True, text_beside_icon=False):
"""Create a QToolButton"""
button = QToolButton(parent)
if text is not None:
button.setText(text)
if icon is not None:
if is_text_string(icon):
icon = get_icon(icon)
button.setIcon(icon)
if text is not None or tip is not None:
button.setToolTip(text if tip is None else tip)
if text_beside_icon:
button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
button.setAutoRaise(autoraise)
if triggered is not None:
button.clicked.connect(triggered)
if toggled is not None:
button.toggled.connect(toggled)
button.setCheckable(True)
if shortcut is not None:
button.setShortcut(shortcut)
return button | [
"def",
"create_toolbutton",
"(",
"parent",
",",
"text",
"=",
"None",
",",
"shortcut",
"=",
"None",
",",
"icon",
"=",
"None",
",",
"tip",
"=",
"None",
",",
"toggled",
"=",
"None",
",",
"triggered",
"=",
"None",
",",
"autoraise",
"=",
"True",
",",
"text_beside_icon",
"=",
"False",
")",
":",
"button",
"=",
"QToolButton",
"(",
"parent",
")",
"if",
"text",
"is",
"not",
"None",
":",
"button",
".",
"setText",
"(",
"text",
")",
"if",
"icon",
"is",
"not",
"None",
":",
"if",
"is_text_string",
"(",
"icon",
")",
":",
"icon",
"=",
"get_icon",
"(",
"icon",
")",
"button",
".",
"setIcon",
"(",
"icon",
")",
"if",
"text",
"is",
"not",
"None",
"or",
"tip",
"is",
"not",
"None",
":",
"button",
".",
"setToolTip",
"(",
"text",
"if",
"tip",
"is",
"None",
"else",
"tip",
")",
"if",
"text_beside_icon",
":",
"button",
".",
"setToolButtonStyle",
"(",
"Qt",
".",
"ToolButtonTextBesideIcon",
")",
"button",
".",
"setAutoRaise",
"(",
"autoraise",
")",
"if",
"triggered",
"is",
"not",
"None",
":",
"button",
".",
"clicked",
".",
"connect",
"(",
"triggered",
")",
"if",
"toggled",
"is",
"not",
"None",
":",
"button",
".",
"toggled",
".",
"connect",
"(",
"toggled",
")",
"button",
".",
"setCheckable",
"(",
"True",
")",
"if",
"shortcut",
"is",
"not",
"None",
":",
"button",
".",
"setShortcut",
"(",
"shortcut",
")",
"return",
"button"
] | Create a QToolButton | [
"Create",
"a",
"QToolButton"
] | python | train |
nerdvegas/rez | src/rez/package_filter.py | https://github.com/nerdvegas/rez/blob/1d3b846d53b5b5404edfe8ddb9083f9ceec8c5e7/src/rez/package_filter.py#L49-L64 | def iter_packages(self, name, range_=None, paths=None):
"""Same as iter_packages in packages.py, but also applies this filter.
Args:
name (str): Name of the package, eg 'maya'.
range_ (VersionRange or str): If provided, limits the versions returned
to those in `range_`.
paths (list of str, optional): paths to search for packages, defaults
to `config.packages_path`.
Returns:
`Package` iterator.
"""
for package in iter_packages(name, range_, paths):
if not self.excludes(package):
yield package | [
"def",
"iter_packages",
"(",
"self",
",",
"name",
",",
"range_",
"=",
"None",
",",
"paths",
"=",
"None",
")",
":",
"for",
"package",
"in",
"iter_packages",
"(",
"name",
",",
"range_",
",",
"paths",
")",
":",
"if",
"not",
"self",
".",
"excludes",
"(",
"package",
")",
":",
"yield",
"package"
] | Same as iter_packages in packages.py, but also applies this filter.
Args:
name (str): Name of the package, eg 'maya'.
range_ (VersionRange or str): If provided, limits the versions returned
to those in `range_`.
paths (list of str, optional): paths to search for packages, defaults
to `config.packages_path`.
Returns:
`Package` iterator. | [
"Same",
"as",
"iter_packages",
"in",
"packages",
".",
"py",
"but",
"also",
"applies",
"this",
"filter",
"."
] | python | train |
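The filter above is a generator gated by an excludes() predicate; a dependency-free analogue over a list of package names shows the same control flow.

class NameFilter:
    def __init__(self, blocked):
        self.blocked = set(blocked)
    def excludes(self, package):
        return package in self.blocked
    def iter_packages(self, packages):
        for package in packages:       # lazily yield whatever is not excluded
            if not self.excludes(package):
                yield package

flt = NameFilter(blocked=['maya-2018'])
print(list(flt.iter_packages(['maya-2018', 'maya-2019'])))   # ['maya-2019']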
qubell/contrib-python-qubell-client | qubell/api/private/organization.py | https://github.com/qubell/contrib-python-qubell-client/blob/4586ea11d5103c2ff9607d3ed922b5a0991b8845/qubell/api/private/organization.py#L184-L188 | def get_application(self, id=None, name=None):
""" Get application object by name or id.
"""
log.info("Picking application: %s (%s)" % (name, id))
return self.applications[id or name] | [
"def",
"get_application",
"(",
"self",
",",
"id",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"log",
".",
"info",
"(",
"\"Picking application: %s (%s)\"",
"%",
"(",
"name",
",",
"id",
")",
")",
"return",
"self",
".",
"applications",
"[",
"id",
"or",
"name",
"]"
] | Get application object by name or id. | [
"Get",
"application",
"object",
"by",
"name",
"or",
"id",
"."
] | python | train |
spotify/luigi | luigi/contrib/batch.py | https://github.com/spotify/luigi/blob/c5eca1c3c3ee2a7eb612486192a0da146710a1e9/luigi/contrib/batch.py#L115-L130 | def get_job_status(self, job_id):
"""Retrieve task statuses from ECS API
:param job_id (str): AWS Batch job uuid
Returns one of {SUBMITTED|PENDING|RUNNABLE|STARTING|RUNNING|SUCCEEDED|FAILED}
"""
response = self._client.describe_jobs(jobs=[job_id])
# Error checking
status_code = response['ResponseMetadata']['HTTPStatusCode']
if status_code != 200:
msg = 'Job status request received status code {0}:\n{1}'
raise Exception(msg.format(status_code, response))
return response['jobs'][0]['status'] | [
"def",
"get_job_status",
"(",
"self",
",",
"job_id",
")",
":",
"response",
"=",
"self",
".",
"_client",
".",
"describe_jobs",
"(",
"jobs",
"=",
"[",
"job_id",
"]",
")",
"# Error checking",
"status_code",
"=",
"response",
"[",
"'ResponseMetadata'",
"]",
"[",
"'HTTPStatusCode'",
"]",
"if",
"status_code",
"!=",
"200",
":",
"msg",
"=",
"'Job status request received status code {0}:\\n{1}'",
"raise",
"Exception",
"(",
"msg",
".",
"format",
"(",
"status_code",
",",
"response",
")",
")",
"return",
"response",
"[",
"'jobs'",
"]",
"[",
"0",
"]",
"[",
"'status'",
"]"
] | Retrieve task statuses from ECS API
:param job_id (str): AWS Batch job uuid
Returns one of {SUBMITTED|PENDING|RUNNABLE|STARTING|RUNNING|SUCCEEDED|FAILED} | [
"Retrieve",
"task",
"statuses",
"from",
"ECS",
"API"
] | python | train |
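Because the method above only inspects the response dict, its error path can be exercised without AWS at all; this fake describe_jobs payload carries just the fields it reads.

response = {'ResponseMetadata': {'HTTPStatusCode': 200},
            'jobs': [{'status': 'RUNNABLE'}]}

status_code = response['ResponseMetadata']['HTTPStatusCode']
if status_code != 200:
    raise Exception('Job status request received status code {0}:\n{1}'
                    .format(status_code, response))
print(response['jobs'][0]['status'])   # RUNNABLE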
saltstack/salt | salt/thorium/file.py | https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/thorium/file.py#L51-L86 | def save(name, filter=False):
'''
Save the register to <salt cachedir>/thorium/saves/<name>, or to an
absolute path.
If an absolute path is specified, then the directory will be created
non-recursively if it doesn't exist.
USAGE:
.. code-block:: yaml
foo:
file.save
/tmp/foo:
file.save
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if name.startswith('/'):
tgt_dir = os.path.dirname(name)
fn_ = name
else:
tgt_dir = os.path.join(__opts__['cachedir'], 'thorium', 'saves')
fn_ = os.path.join(tgt_dir, name)
if not os.path.isdir(tgt_dir):
os.makedirs(tgt_dir)
with salt.utils.files.fopen(fn_, 'w+') as fp_:
if filter is True:
salt.utils.json.dump(salt.utils.data.simple_types_filter(__reg__), fp_)
else:
salt.utils.json.dump(__reg__, fp_)
return ret | [
"def",
"save",
"(",
"name",
",",
"filter",
"=",
"False",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'comment'",
":",
"''",
",",
"'result'",
":",
"True",
"}",
"if",
"name",
".",
"startswith",
"(",
"'/'",
")",
":",
"tgt_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"name",
")",
"fn_",
"=",
"name",
"else",
":",
"tgt_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"__opts__",
"[",
"'cachedir'",
"]",
",",
"'thorium'",
",",
"'saves'",
")",
"fn_",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tgt_dir",
",",
"name",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"tgt_dir",
")",
":",
"os",
".",
"makedirs",
"(",
"tgt_dir",
")",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"fn_",
",",
"'w+'",
")",
"as",
"fp_",
":",
"if",
"filter",
"is",
"True",
":",
"salt",
".",
"utils",
".",
"json",
".",
"dump",
"(",
"salt",
".",
"utils",
".",
"data",
".",
"simple_types_filter",
"(",
"__reg__",
")",
",",
"fp_",
")",
"else",
":",
"salt",
".",
"utils",
".",
"json",
".",
"dump",
"(",
"__reg__",
",",
"fp_",
")",
"return",
"ret"
] | Save the register to <salt cachedir>/thorium/saves/<name>, or to an
absolute path.
If an absolute path is specified, then the directory will be created
non-recursively if it doesn't exist.
USAGE:
.. code-block:: yaml
foo:
file.save
/tmp/foo:
file.save | [
"Save",
"the",
"register",
"to",
"<salt",
"cachedir",
">",
"/",
"thorium",
"/",
"saves",
"/",
"<name",
">",
"or",
"to",
"an",
"absolute",
"path",
"."
] | python | train |
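The path handling above (absolute names used verbatim, otherwise a cache subdirectory) plus the JSON dump is all standard library; a sketch with an invented register and a temp directory standing in for cachedir.

import json, os, tempfile

__reg__ = {'counter': 3, 'status': 'ok'}       # stand-in for Thorium's register
cachedir = tempfile.mkdtemp()

name = 'foo'                                    # relative, so it lands under the cache
tgt_dir = os.path.join(cachedir, 'thorium', 'saves')
os.makedirs(tgt_dir, exist_ok=True)
with open(os.path.join(tgt_dir, name), 'w+') as fp_:
    json.dump(__reg__, fp_)
print(open(os.path.join(tgt_dir, name)).read())   # {"counter": 3, "status": "ok"}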
bslatkin/dpxdt | dpxdt/server/work_queue_handlers.py | https://github.com/bslatkin/dpxdt/blob/9f860de1731021d99253670429e5f2157e1f6297/dpxdt/server/work_queue_handlers.py#L59-L78 | def handle_lease(queue_name):
"""Leases a task from a queue."""
owner = request.form.get('owner', request.remote_addr, type=str)
try:
task_list = work_queue.lease(
queue_name,
owner,
request.form.get('count', 1, type=int),
request.form.get('timeout', 60, type=int))
except work_queue.Error, e:
return utils.jsonify_error(e)
if not task_list:
return flask.jsonify(tasks=[])
db.session.commit()
task_ids = [t['task_id'] for t in task_list]
logging.debug('Task leased: queue=%r, task_ids=%r, owner=%r',
queue_name, task_ids, owner)
return flask.jsonify(tasks=task_list) | [
"def",
"handle_lease",
"(",
"queue_name",
")",
":",
"owner",
"=",
"request",
".",
"form",
".",
"get",
"(",
"'owner'",
",",
"request",
".",
"remote_addr",
",",
"type",
"=",
"str",
")",
"try",
":",
"task_list",
"=",
"work_queue",
".",
"lease",
"(",
"queue_name",
",",
"owner",
",",
"request",
".",
"form",
".",
"get",
"(",
"'count'",
",",
"1",
",",
"type",
"=",
"int",
")",
",",
"request",
".",
"form",
".",
"get",
"(",
"'timeout'",
",",
"60",
",",
"type",
"=",
"int",
")",
")",
"except",
"work_queue",
".",
"Error",
",",
"e",
":",
"return",
"utils",
".",
"jsonify_error",
"(",
"e",
")",
"if",
"not",
"task_list",
":",
"return",
"flask",
".",
"jsonify",
"(",
"tasks",
"=",
"[",
"]",
")",
"db",
".",
"session",
".",
"commit",
"(",
")",
"task_ids",
"=",
"[",
"t",
"[",
"'task_id'",
"]",
"for",
"t",
"in",
"task_list",
"]",
"logging",
".",
"debug",
"(",
"'Task leased: queue=%r, task_ids=%r, owner=%r'",
",",
"queue_name",
",",
"task_ids",
",",
"owner",
")",
"return",
"flask",
".",
"jsonify",
"(",
"tasks",
"=",
"task_list",
")"
] | Leases a task from a queue. | [
"Leases",
"a",
"task",
"from",
"a",
"queue",
"."
] | python | train |
saulpw/visidata | visidata/clipboard.py | https://github.com/saulpw/visidata/blob/32771e0cea6c24fc7902683d14558391395c591f/visidata/clipboard.py#L79-L90 | def copy(self, value):
'Copy a cell to the system clipboard.'
with tempfile.NamedTemporaryFile() as temp:
with open(temp.name, 'w', encoding=options.encoding) as fp:
fp.write(str(value))
p = subprocess.Popen(
self.command,
stdin=open(temp.name, 'r', encoding=options.encoding),
stdout=subprocess.DEVNULL)
p.communicate() | [
"def",
"copy",
"(",
"self",
",",
"value",
")",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
")",
"as",
"temp",
":",
"with",
"open",
"(",
"temp",
".",
"name",
",",
"'w'",
",",
"encoding",
"=",
"options",
".",
"encoding",
")",
"as",
"fp",
":",
"fp",
".",
"write",
"(",
"str",
"(",
"value",
")",
")",
"p",
"=",
"subprocess",
".",
"Popen",
"(",
"self",
".",
"command",
",",
"stdin",
"=",
"open",
"(",
"temp",
".",
"name",
",",
"'r'",
",",
"encoding",
"=",
"options",
".",
"encoding",
")",
",",
"stdout",
"=",
"subprocess",
".",
"DEVNULL",
")",
"p",
".",
"communicate",
"(",
")"
] | Copy a cell to the system clipboard. | [
"Copy",
"a",
"cell",
"to",
"the",
"system",
"clipboard",
"."
] | python | train |
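The copy above is write-to-tempfile, then pipe the file into an external tool; on a POSIX box the same plumbing runs with cat as a harmless stand-in for a real clipboard command such as xclip or pbcopy.

import subprocess, tempfile

value = 42
command = ['cat']                     # stand-in for the configured clipboard tool
with tempfile.NamedTemporaryFile('w', delete=False) as temp:
    temp.write(str(value))
p = subprocess.Popen(command, stdin=open(temp.name, 'r'),
                     stdout=subprocess.DEVNULL)
p.communicate()
print(p.returncode)                   # 0 on success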
inveniosoftware/invenio-files-rest | invenio_files_rest/models.py | https://github.com/inveniosoftware/invenio-files-rest/blob/59a950da61cc8d5882a03c6fde6db2e2ed10befd/invenio_files_rest/models.py#L1006-L1024 | def set_location(self, uri, size, checksum, storage_class=None):
"""Set only URI location of for object.
Useful to link files on externally controlled storage. If a file
instance has already been set, this methods raises an
``FileInstanceAlreadySetError`` exception.
:param uri: Full URI to object (which can be interpreted by the storage
interface).
:param size: Size of file.
:param checksum: Checksum of file.
:param storage_class: Storage class where file is stored ()
"""
self.file = FileInstance()
self.file.set_uri(
uri, size, checksum, storage_class=storage_class
)
db.session.add(self.file)
return self | [
"def",
"set_location",
"(",
"self",
",",
"uri",
",",
"size",
",",
"checksum",
",",
"storage_class",
"=",
"None",
")",
":",
"self",
".",
"file",
"=",
"FileInstance",
"(",
")",
"self",
".",
"file",
".",
"set_uri",
"(",
"uri",
",",
"size",
",",
"checksum",
",",
"storage_class",
"=",
"storage_class",
")",
"db",
".",
"session",
".",
"add",
"(",
"self",
".",
"file",
")",
"return",
"self"
] | Set only URI location for object.
Useful to link files on externally controlled storage. If a file
instance has already been set, this methods raises an
``FileInstanceAlreadySetError`` exception.
:param uri: Full URI to object (which can be interpreted by the storage
interface).
:param size: Size of file.
:param checksum: Checksum of file.
:param storage_class: Storage class where file is stored. | [
"Set",
"only",
"URI",
"location",
"of",
"for",
"object",
"."
] | python | train |
BerkeleyAutomation/perception | perception/image.py | https://github.com/BerkeleyAutomation/perception/blob/03d9b37dd6b66896cdfe173905c9413c8c3c5df6/perception/image.py#L517-L540 | def min_images(images):
"""Create a min Image from a list of Images.
Parameters
----------
:obj:`list` of :obj:`Image`
A list of Image objects.
Returns
-------
:obj:`Image`
A new Image of the same type whose data is the min of all of
the images' data.
"""
images_data = np.array([image.data for image in images])
images_data[images_data == 0] = np.inf
min_image_data = np.min(images_data, axis=0)
min_image_data[min_image_data == np.inf] = 0.0
an_image = images[0]
return type(an_image)(
min_image_data.astype(
an_image.data.dtype),
an_image.frame) | [
"def",
"min_images",
"(",
"images",
")",
":",
"images_data",
"=",
"np",
".",
"array",
"(",
"[",
"image",
".",
"data",
"for",
"image",
"in",
"images",
"]",
")",
"images_data",
"[",
"images_data",
"==",
"0",
"]",
"=",
"np",
".",
"inf",
"min_image_data",
"=",
"np",
".",
"min",
"(",
"images_data",
",",
"axis",
"=",
"0",
")",
"min_image_data",
"[",
"min_image_data",
"==",
"np",
".",
"inf",
"]",
"=",
"0.0",
"an_image",
"=",
"images",
"[",
"0",
"]",
"return",
"type",
"(",
"an_image",
")",
"(",
"min_image_data",
".",
"astype",
"(",
"an_image",
".",
"data",
".",
"dtype",
")",
",",
"an_image",
".",
"frame",
")"
] | Create a min Image from a list of Images.
Parameters
----------
:obj:`list` of :obj:`Image`
A list of Image objects.
Returns
-------
:obj:`Image`
A new Image of the same type whose data is the min of all of
the images' data. | [
"Create",
"a",
"min",
"Image",
"from",
"a",
"list",
"of",
"Images",
"."
] | python | train |
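The zero-masking min above works on bare arrays too; a two-image NumPy check where 0 marks missing pixels:

import numpy as np

a = np.array([[2., 0.], [5., 1.]])
b = np.array([[3., 4.], [0., 2.]])

images_data = np.array([a, b])
images_data[images_data == 0] = np.inf    # missing pixels never win the min
out = np.min(images_data, axis=0)
out[out == np.inf] = 0.0                  # pixels missing everywhere stay 0
print(out)                                # [[2. 4.] [5. 1.]]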
CalebBell/thermo | thermo/chemical.py | https://github.com/CalebBell/thermo/blob/3857ed023a3e64fd3039a32d53576c24990ef1c3/thermo/chemical.py#L1182-L1197 | def charge(self):
r'''Charge of a chemical, computed with RDKit from a chemical's SMILES.
If RDKit is not available, holds None.
Examples
--------
>>> Chemical('sodium ion').charge
1
'''
try:
if not self.rdkitmol:
return charge_from_formula(self.formula)
else:
return Chem.GetFormalCharge(self.rdkitmol)
except:
return charge_from_formula(self.formula) | [
"def",
"charge",
"(",
"self",
")",
":",
"try",
":",
"if",
"not",
"self",
".",
"rdkitmol",
":",
"return",
"charge_from_formula",
"(",
"self",
".",
"formula",
")",
"else",
":",
"return",
"Chem",
".",
"GetFormalCharge",
"(",
"self",
".",
"rdkitmol",
")",
"except",
":",
"return",
"charge_from_formula",
"(",
"self",
".",
"formula",
")"
] | r'''Charge of a chemical, computed with RDKit from a chemical's SMILES.
If RDKit is not available, holds None.
Examples
--------
>>> Chemical('sodium ion').charge
1 | [
"r",
"Charge",
"of",
"a",
"chemical",
"computed",
"with",
"RDKit",
"from",
"a",
"chemical",
"s",
"SMILES",
".",
"If",
"RDKit",
"is",
"not",
"available",
"holds",
"None",
"."
] | python | valid |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L169-L185 | def tree_to_xml(self):
"""Traverses treeview and generates a ElementTree object"""
# Need to remove filter or hidden items will not be saved.
self.filter_remove(remember=True)
tree = self.treeview
root = ET.Element('interface')
items = tree.get_children()
for item in items:
node = self.tree_node_to_xml('', item)
root.append(node)
# restore filter
self.filter_restore()
return ET.ElementTree(root) | [
"def",
"tree_to_xml",
"(",
"self",
")",
":",
"# Need to remove filter or hidden items will not be saved.",
"self",
".",
"filter_remove",
"(",
"remember",
"=",
"True",
")",
"tree",
"=",
"self",
".",
"treeview",
"root",
"=",
"ET",
".",
"Element",
"(",
"'interface'",
")",
"items",
"=",
"tree",
".",
"get_children",
"(",
")",
"for",
"item",
"in",
"items",
":",
"node",
"=",
"self",
".",
"tree_node_to_xml",
"(",
"''",
",",
"item",
")",
"root",
".",
"append",
"(",
"node",
")",
"# restore filter",
"self",
".",
"filter_restore",
"(",
")",
"return",
"ET",
".",
"ElementTree",
"(",
"root",
")"
] | Traverses treeview and generates a ElementTree object | [
"Traverses",
"treeview",
"and",
"generates",
"a",
"ElementTree",
"object"
] | python | train |
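The assembly above is an 'interface' root collecting one node per top-level item; with stub nodes in place of the treeview traversal, it is plain ElementTree.

import xml.etree.ElementTree as ET

root = ET.Element('interface')
for widget_id in ('frame1', 'button1'):    # stand-ins for treeview children
    node = ET.Element('object')
    node.set('id', widget_id)
    root.append(node)

tree = ET.ElementTree(root)
print(ET.tostring(root).decode())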
rocky/python-xdis | xdis/main.py | https://github.com/rocky/python-xdis/blob/46a2902ae8f5d8eee495eed67ac0690fd545453d/xdis/main.py#L157-L222 | def disco_loop_asm_format(opc, version, co, real_out,
fn_name_map, all_fns):
"""Produces disassembly in a format more conducive to
automatic assembly by producing inner modules before they are
used by outer ones. Since this is recursive, we'll
use more stack space at runtime.
"""
if version < 3.0:
co = code2compat(co)
else:
co = code3compat(co)
co_name = co.co_name
mapped_name = fn_name_map.get(co_name, co_name)
new_consts = []
for c in co.co_consts:
if iscode(c):
if version < 3.0:
c_compat = code2compat(c)
else:
c_compat = code3compat(c)
disco_loop_asm_format(opc, version, c_compat, real_out,
fn_name_map, all_fns)
m = re.match(".* object <(.+)> at", str(c))
if m:
basename = m.group(1)
if basename != 'module':
mapped_name = code_uniquify(basename, c.co_code)
c_compat.co_name = mapped_name
c_compat.freeze()
new_consts.append(c_compat)
else:
new_consts.append(c)
pass
co.co_consts = new_consts
m = re.match("^<(.+)>$", co.co_name)
if m or co_name in all_fns:
if co_name in all_fns:
basename = co_name
else:
basename = m.group(1)
if basename != 'module':
mapped_name = code_uniquify(basename, co.co_code)
co_name = mapped_name
assert mapped_name not in fn_name_map
fn_name_map[mapped_name] = basename
co.co_name = mapped_name
pass
elif co_name in fn_name_map:
# FIXME: better would be a hash of the co_code
mapped_name = code_uniquify(co_name, co.co_code)
fn_name_map[mapped_name] = co_name
co.co_name = mapped_name
pass
co = co.freeze()
all_fns.add(co_name)
if co.co_name != '<module>' or co.co_filename:
real_out.write("\n" + format_code_info(co, version, mapped_name) + "\n")
bytecode = Bytecode(co, opc, dup_lines=True)
real_out.write(bytecode.dis(asm_format=True) + "\n") | [
"def",
"disco_loop_asm_format",
"(",
"opc",
",",
"version",
",",
"co",
",",
"real_out",
",",
"fn_name_map",
",",
"all_fns",
")",
":",
"if",
"version",
"<",
"3.0",
":",
"co",
"=",
"code2compat",
"(",
"co",
")",
"else",
":",
"co",
"=",
"code3compat",
"(",
"co",
")",
"co_name",
"=",
"co",
".",
"co_name",
"mapped_name",
"=",
"fn_name_map",
".",
"get",
"(",
"co_name",
",",
"co_name",
")",
"new_consts",
"=",
"[",
"]",
"for",
"c",
"in",
"co",
".",
"co_consts",
":",
"if",
"iscode",
"(",
"c",
")",
":",
"if",
"version",
"<",
"3.0",
":",
"c_compat",
"=",
"code2compat",
"(",
"c",
")",
"else",
":",
"c_compat",
"=",
"code3compat",
"(",
"c",
")",
"disco_loop_asm_format",
"(",
"opc",
",",
"version",
",",
"c_compat",
",",
"real_out",
",",
"fn_name_map",
",",
"all_fns",
")",
"m",
"=",
"re",
".",
"match",
"(",
"\".* object <(.+)> at\"",
",",
"str",
"(",
"c",
")",
")",
"if",
"m",
":",
"basename",
"=",
"m",
".",
"group",
"(",
"1",
")",
"if",
"basename",
"!=",
"'module'",
":",
"mapped_name",
"=",
"code_uniquify",
"(",
"basename",
",",
"c",
".",
"co_code",
")",
"c_compat",
".",
"co_name",
"=",
"mapped_name",
"c_compat",
".",
"freeze",
"(",
")",
"new_consts",
".",
"append",
"(",
"c_compat",
")",
"else",
":",
"new_consts",
".",
"append",
"(",
"c",
")",
"pass",
"co",
".",
"co_consts",
"=",
"new_consts",
"m",
"=",
"re",
".",
"match",
"(",
"\"^<(.+)>$\"",
",",
"co",
".",
"co_name",
")",
"if",
"m",
"or",
"co_name",
"in",
"all_fns",
":",
"if",
"co_name",
"in",
"all_fns",
":",
"basename",
"=",
"co_name",
"else",
":",
"basename",
"=",
"m",
".",
"group",
"(",
"1",
")",
"if",
"basename",
"!=",
"'module'",
":",
"mapped_name",
"=",
"code_uniquify",
"(",
"basename",
",",
"co",
".",
"co_code",
")",
"co_name",
"=",
"mapped_name",
"assert",
"mapped_name",
"not",
"in",
"fn_name_map",
"fn_name_map",
"[",
"mapped_name",
"]",
"=",
"basename",
"co",
".",
"co_name",
"=",
"mapped_name",
"pass",
"elif",
"co_name",
"in",
"fn_name_map",
":",
"# FIXME: better would be a hash of the co_code",
"mapped_name",
"=",
"code_uniquify",
"(",
"co_name",
",",
"co",
".",
"co_code",
")",
"fn_name_map",
"[",
"mapped_name",
"]",
"=",
"co_name",
"co",
".",
"co_name",
"=",
"mapped_name",
"pass",
"co",
"=",
"co",
".",
"freeze",
"(",
")",
"all_fns",
".",
"add",
"(",
"co_name",
")",
"if",
"co",
".",
"co_name",
"!=",
"'<module>'",
"or",
"co",
".",
"co_filename",
":",
"real_out",
".",
"write",
"(",
"\"\\n\"",
"+",
"format_code_info",
"(",
"co",
",",
"version",
",",
"mapped_name",
")",
"+",
"\"\\n\"",
")",
"bytecode",
"=",
"Bytecode",
"(",
"co",
",",
"opc",
",",
"dup_lines",
"=",
"True",
")",
"real_out",
".",
"write",
"(",
"bytecode",
".",
"dis",
"(",
"asm_format",
"=",
"True",
")",
"+",
"\"\\n\"",
")"
] | Produces disassembly in a format more conducive to
automatic assembly by producing inner modules before they are
used by outer ones. Since this is recursive, we'll
use more stack space at runtime. | [
"Produces",
"disassembly",
"in",
"a",
"format",
"more",
"conducive",
"to",
"automatic",
"assembly",
"by",
"producing",
"inner",
"modules",
"before",
"they",
"are",
"used",
"by",
"outer",
"ones",
".",
"Since",
"this",
"is",
"recusive",
"we",
"ll",
"use",
"more",
"stack",
"space",
"at",
"runtime",
"."
] | python | train |
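The inner-before-outer ordering above comes from recursing into co_consts before emitting the current code object; the walk can be reproduced with the interpreter's own compile(), leaving out all of the xdis renaming machinery.

import types

def walk(co, depth=0):
    for c in co.co_consts:
        if isinstance(c, types.CodeType):   # the iscode() check
            walk(c, depth + 1)              # recurse first: inner code prints first
    print('  ' * depth + co.co_name)

walk(compile("def f():\n    def g(): pass\n    return g\n", '<demo>', 'exec'))
# prints g, then f, then <module>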
sassoftware/saspy | saspy/sasiostdio.py | https://github.com/sassoftware/saspy/blob/e433f71990f249d3a6c3db323ceb11cb2d462cf9/saspy/sasiostdio.py#L537-L737 | def submit(self, code: str, results: str ="html", prompt: dict = None) -> dict:
'''
This method is used to submit any SAS code. It returns the Log and Listing as a python dictionary.
code - the SAS statements you want to execute
results - format of results, HTML is default, TEXT is the alternative
prompt - dict of names:flags to prompt for; create macro variables (used in submitted code), then keep or delete
The keys are the names of the macro variables and the boolean flag is to either hide what you type and delete
the macros, or show what you type and keep the macros (they will still be available later)
for example (what you type for pw will not be displayed, user and dsname will):
results = sas.submit(
"""
libname tera teradata server=teracop1 user=&user pw=&pw;
proc print data=tera.&dsname (obs=10); run;
""" ,
prompt = {'user': False, 'pw': True, 'dsname': False}
)
Returns - a Dict containing two keys:values, [LOG, LST]. LOG is text and LST is 'results' (HTML or TEXT)
NOTE: to view HTML results in the ipykernel, issue: from IPython.display import HTML and use HTML() instead of print()
i.e.: results = sas.submit("data a; x=1; run; proc print; run;")
print(results['LOG'])
HTML(results['LST'])
'''
prompt = prompt if prompt is not None else {}
odsopen = b"ods listing close;ods "+self.sascfg.output.encode()+ \
b" (id=saspy_internal) file=stdout options(bitmap_mode='inline') device=svg style="+self._sb.HTML_Style.encode()+ \
b"; ods graphics on / outputfmt=png;\n"
odsclose = b"ods "+self.sascfg.output.encode()+b" (id=saspy_internal) close;ods listing;\n"
ods = True;
mj = b";*\';*\";*/;"
lstf = b''
logf = b''
bail = False
eof = 5
bc = False
done = False
logn = self._logcnt()
logcodei = "%put E3969440A681A24088859985" + logn + ";"
logcodeo = b"\nE3969440A681A24088859985" + logn.encode()
pcodei = ''
pcodeiv = ''
pcodeo = ''
if self.pid == None:
self._sb.SASpid = None
print("No SAS process attached. SAS process has terminated unexpectedly.")
return dict(LOG="No SAS process attached. SAS process has terminated unexpectedly.", LST='')
rc = os.waitid(os.P_PID, self.pid, os.WEXITED | os.WNOHANG)
if rc != None:
self.pid = None
self._sb.SASpid = None
return dict(LOG='SAS process has terminated unexpectedly. Pid State= '+str(rc), LST='')
# to cover the possibility of an _asubmit w/ lst output not read; no known cases now; used to be __flushlst__()
# removing this and adding comment in _asubmit to use _getlst[txt] so this will never be necessary; delete later
#while(len(self.stdout.read1(4096)) > 0):
# continue
if results.upper() != "HTML":
ods = False
if len(prompt):
pcodei += 'options nosource nonotes;\n'
pcodeo += 'options nosource nonotes;\n'
for key in prompt:
gotit = False
while not gotit:
var = self.sascfg._prompt('Please enter value for macro variable '+key+' ', pw=prompt[key])
if var is None:
raise KeyboardInterrupt
if len(var) > 0:
gotit = True
else:
print("Sorry, didn't get a value for that variable.")
if prompt[key]:
pcodei += '%let '+key+'='+var+';\n'
pcodeo += '%symdel '+key+';\n'
else:
pcodeiv += '%let '+key+'='+var+';\n'
pcodei += 'options source notes;\n'
pcodeo += 'options source notes;\n'
if ods:
self.stdin.write(odsopen)
pgm = mj+b'\n'+pcodei.encode(self.sascfg.encoding)+pcodeiv.encode(self.sascfg.encoding)
pgm += code.encode(self.sascfg.encoding)+b'\n'+pcodeo.encode(self.sascfg.encoding)+b'\n'+mj
out = self.stdin.write(pgm)
if ods:
self.stdin.write(odsclose)
out = self.stdin.write(b'\n'+logcodei.encode(self.sascfg.encoding)+b'\n')
self.stdin.flush()
while not done:
try:
while True:
rc = os.waitid(os.P_PID, self.pid, os.WEXITED | os.WNOHANG)
if rc is not None:
log = b''
try:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log
self._log += logf.decode(self.sascfg.encoding, errors='replace')
except:
pass
self.pid = None
self._sb.SASpid = None
return dict(LOG='SAS process has terminated unexpectedly. Pid State= ' +
str(rc)+'\n'+logf.decode(self.sascfg.encoding, errors='replace'), LST='')
if bail:
eof -= 1
if eof < 0:
break
if ods:
lst = self.stdout.read1(4096)
else:
lst = self.stdout.read1(4096)
if len(lst) > 0:
lstf += lst
else:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log
if logf.count(logcodeo) >= 1:
bail = True
if not bail and bc:
self.stdin.write(odsclose+logcodei.encode(self.sascfg.encoding) + b'\n')
self.stdin.flush()
bc = False
done = True
except (ConnectionResetError):
log = ''
try:
log = self.stderr.read1(4096)
if len(log) > 0:
logf += log
self._log += logf.decode(self.sascfg.encoding, errors='replace')
except:
pass
rc = 0
rc = os.waitpid(self.pid, 0)
self.pid = None
self._sb.SASpid = None
log = logf.partition(logcodeo)[0]+b'\nConnection Reset: SAS process has terminated unexpectedly. Pid State= '+str(rc).encode()+b'\n'+logf
return dict(LOG=log.decode(self.sascfg.encoding, errors='replace'), LST='')
except (KeyboardInterrupt, SystemExit):
print('Exception caught!')
ll = self._breakprompt(logcodeo)
if ll.get('ABORT', False):
return ll
logf += ll['LOG']
lstf += ll['LST']
bc = ll['BC']
if not bc:
print('Exception handled :)\n')
else:
print('Exception ignored, continuing to process...\n')
self.stdin.write(odsclose+logcodei.encode(self.sascfg.encoding)+b'\n')
self.stdin.flush()
if ods:
try:
lstf = lstf.decode()
except UnicodeDecodeError:
try:
lstf = lstf.decode(self.sascfg.encoding)
except UnicodeDecodeError:
lstf = lstf.decode(errors='replace')
else:
lstf = lstf.decode(self.sascfg.encoding, errors='replace')
logf = logf.decode(self.sascfg.encoding, errors='replace')
trip = lstf.rpartition("/*]]>*/")
if len(trip[1]) > 0 and len(trip[2]) < 100:
lstf = ''
self._log += logf
final = logf.partition(logcodei)
z = final[0].rpartition(chr(10))
prev = '%08d' % (self._log_cnt - 1)
zz = z[0].rpartition("\nE3969440A681A24088859985" + prev +'\n')
logd = zz[2].replace(mj.decode(self.sascfg.encoding), '')
lstd = lstf.replace(chr(12), chr(10)).replace('<body class="c body">',
'<body class="l body">').replace("font-size: x-small;",
"font-size: normal;")
return dict(LOG=logd, LST=lstd) | [
"def",
"submit",
"(",
"self",
",",
"code",
":",
"str",
",",
"results",
":",
"str",
"=",
"\"html\"",
",",
"prompt",
":",
"dict",
"=",
"None",
")",
"->",
"dict",
":",
"prompt",
"=",
"prompt",
"if",
"prompt",
"is",
"not",
"None",
"else",
"{",
"}",
"odsopen",
"=",
"b\"ods listing close;ods \"",
"+",
"self",
".",
"sascfg",
".",
"output",
".",
"encode",
"(",
")",
"+",
"b\" (id=saspy_internal) file=stdout options(bitmap_mode='inline') device=svg style=\"",
"+",
"self",
".",
"_sb",
".",
"HTML_Style",
".",
"encode",
"(",
")",
"+",
"b\"; ods graphics on / outputfmt=png;\\n\"",
"odsclose",
"=",
"b\"ods \"",
"+",
"self",
".",
"sascfg",
".",
"output",
".",
"encode",
"(",
")",
"+",
"b\" (id=saspy_internal) close;ods listing;\\n\"",
"ods",
"=",
"True",
"mj",
"=",
"b\";*\\';*\\\";*/;\"",
"lstf",
"=",
"b''",
"logf",
"=",
"b''",
"bail",
"=",
"False",
"eof",
"=",
"5",
"bc",
"=",
"False",
"done",
"=",
"False",
"logn",
"=",
"self",
".",
"_logcnt",
"(",
")",
"logcodei",
"=",
"\"%put E3969440A681A24088859985\"",
"+",
"logn",
"+",
"\";\"",
"logcodeo",
"=",
"b\"\\nE3969440A681A24088859985\"",
"+",
"logn",
".",
"encode",
"(",
")",
"pcodei",
"=",
"''",
"pcodeiv",
"=",
"''",
"pcodeo",
"=",
"''",
"if",
"self",
".",
"pid",
"==",
"None",
":",
"self",
".",
"_sb",
".",
"SASpid",
"=",
"None",
"print",
"(",
"\"No SAS process attached. SAS process has terminated unexpectedly.\"",
")",
"return",
"dict",
"(",
"LOG",
"=",
"\"No SAS process attached. SAS process has terminated unexpectedly.\"",
",",
"LST",
"=",
"''",
")",
"rc",
"=",
"os",
".",
"waitid",
"(",
"os",
".",
"P_PID",
",",
"self",
".",
"pid",
",",
"os",
".",
"WEXITED",
"|",
"os",
".",
"WNOHANG",
")",
"if",
"rc",
"!=",
"None",
":",
"self",
".",
"pid",
"=",
"None",
"self",
".",
"_sb",
".",
"SASpid",
"=",
"None",
"return",
"dict",
"(",
"LOG",
"=",
"'SAS process has terminated unexpectedly. Pid State= '",
"+",
"str",
"(",
"rc",
")",
",",
"LST",
"=",
"''",
")",
"# to cover the possibility of an _asubmit w/ lst output not read; no known cases now; used to be __flushlst__()",
"# removing this and adding comment in _asubmit to use _getlst[txt] so this will never be necessary; delete later",
"#while(len(self.stdout.read1(4096)) > 0):",
"# continue",
"if",
"results",
".",
"upper",
"(",
")",
"!=",
"\"HTML\"",
":",
"ods",
"=",
"False",
"if",
"len",
"(",
"prompt",
")",
":",
"pcodei",
"+=",
"'options nosource nonotes;\\n'",
"pcodeo",
"+=",
"'options nosource nonotes;\\n'",
"for",
"key",
"in",
"prompt",
":",
"gotit",
"=",
"False",
"while",
"not",
"gotit",
":",
"var",
"=",
"self",
".",
"sascfg",
".",
"_prompt",
"(",
"'Please enter value for macro variable '",
"+",
"key",
"+",
"' '",
",",
"pw",
"=",
"prompt",
"[",
"key",
"]",
")",
"if",
"var",
"is",
"None",
":",
"raise",
"KeyboardInterrupt",
"if",
"len",
"(",
"var",
")",
">",
"0",
":",
"gotit",
"=",
"True",
"else",
":",
"print",
"(",
"\"Sorry, didn't get a value for that variable.\"",
")",
"if",
"prompt",
"[",
"key",
"]",
":",
"pcodei",
"+=",
"'%let '",
"+",
"key",
"+",
"'='",
"+",
"var",
"+",
"';\\n'",
"pcodeo",
"+=",
"'%symdel '",
"+",
"key",
"+",
"';\\n'",
"else",
":",
"pcodeiv",
"+=",
"'%let '",
"+",
"key",
"+",
"'='",
"+",
"var",
"+",
"';\\n'",
"pcodei",
"+=",
"'options source notes;\\n'",
"pcodeo",
"+=",
"'options source notes;\\n'",
"if",
"ods",
":",
"self",
".",
"stdin",
".",
"write",
"(",
"odsopen",
")",
"pgm",
"=",
"mj",
"+",
"b'\\n'",
"+",
"pcodei",
".",
"encode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"+",
"pcodeiv",
".",
"encode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"pgm",
"+=",
"code",
".",
"encode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"+",
"b'\\n'",
"+",
"pcodeo",
".",
"encode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"+",
"b'\\n'",
"+",
"mj",
"out",
"=",
"self",
".",
"stdin",
".",
"write",
"(",
"pgm",
")",
"if",
"ods",
":",
"self",
".",
"stdin",
".",
"write",
"(",
"odsclose",
")",
"out",
"=",
"self",
".",
"stdin",
".",
"write",
"(",
"b'\\n'",
"+",
"logcodei",
".",
"encode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"+",
"b'\\n'",
")",
"self",
".",
"stdin",
".",
"flush",
"(",
")",
"while",
"not",
"done",
":",
"try",
":",
"while",
"True",
":",
"rc",
"=",
"os",
".",
"waitid",
"(",
"os",
".",
"P_PID",
",",
"self",
".",
"pid",
",",
"os",
".",
"WEXITED",
"|",
"os",
".",
"WNOHANG",
")",
"if",
"rc",
"is",
"not",
"None",
":",
"log",
"=",
"b''",
"try",
":",
"log",
"=",
"self",
".",
"stderr",
".",
"read1",
"(",
"4096",
")",
"if",
"len",
"(",
"log",
")",
">",
"0",
":",
"logf",
"+=",
"log",
"self",
".",
"_log",
"+=",
"logf",
".",
"decode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
",",
"errors",
"=",
"'replace'",
")",
"except",
":",
"pass",
"self",
".",
"pid",
"=",
"None",
"self",
".",
"_sb",
".",
"SASpid",
"=",
"None",
"return",
"dict",
"(",
"LOG",
"=",
"'SAS process has terminated unexpectedly. Pid State= '",
"+",
"str",
"(",
"rc",
")",
"+",
"'\\n'",
"+",
"logf",
".",
"decode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
",",
"errors",
"=",
"'replace'",
")",
",",
"LST",
"=",
"''",
")",
"if",
"bail",
":",
"eof",
"-=",
"1",
"if",
"eof",
"<",
"0",
":",
"break",
"if",
"ods",
":",
"lst",
"=",
"self",
".",
"stdout",
".",
"read1",
"(",
"4096",
")",
"else",
":",
"lst",
"=",
"self",
".",
"stdout",
".",
"read1",
"(",
"4096",
")",
"if",
"len",
"(",
"lst",
")",
">",
"0",
":",
"lstf",
"+=",
"lst",
"else",
":",
"log",
"=",
"self",
".",
"stderr",
".",
"read1",
"(",
"4096",
")",
"if",
"len",
"(",
"log",
")",
">",
"0",
":",
"logf",
"+=",
"log",
"if",
"logf",
".",
"count",
"(",
"logcodeo",
")",
">=",
"1",
":",
"bail",
"=",
"True",
"if",
"not",
"bail",
"and",
"bc",
":",
"self",
".",
"stdin",
".",
"write",
"(",
"odsclose",
"+",
"logcodei",
".",
"encode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"+",
"b'\\n'",
")",
"self",
".",
"stdin",
".",
"flush",
"(",
")",
"bc",
"=",
"False",
"done",
"=",
"True",
"except",
"(",
"ConnectionResetError",
")",
":",
"log",
"=",
"''",
"try",
":",
"log",
"=",
"self",
".",
"stderr",
".",
"read1",
"(",
"4096",
")",
"if",
"len",
"(",
"log",
")",
">",
"0",
":",
"logf",
"+=",
"log",
"self",
".",
"_log",
"+=",
"logf",
".",
"decode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
",",
"errors",
"=",
"'replace'",
")",
"except",
":",
"pass",
"rc",
"=",
"0",
"rc",
"=",
"os",
".",
"waitpid",
"(",
"self",
".",
"pid",
",",
"0",
")",
"self",
".",
"pid",
"=",
"None",
"self",
".",
"_sb",
".",
"SASpid",
"=",
"None",
"log",
"=",
"logf",
".",
"partition",
"(",
"logcodeo",
")",
"[",
"0",
"]",
"+",
"b'\\nConnection Reset: SAS process has terminated unexpectedly. Pid State= '",
"+",
"str",
"(",
"rc",
")",
".",
"encode",
"(",
")",
"+",
"b'\\n'",
"+",
"logf",
"return",
"dict",
"(",
"LOG",
"=",
"log",
".",
"encode",
"(",
")",
",",
"LST",
"=",
"''",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"print",
"(",
"'Exception caught!'",
")",
"ll",
"=",
"self",
".",
"_breakprompt",
"(",
"logcodeo",
")",
"if",
"ll",
".",
"get",
"(",
"'ABORT'",
",",
"False",
")",
":",
"return",
"ll",
"logf",
"+=",
"ll",
"[",
"'LOG'",
"]",
"lstf",
"+=",
"ll",
"[",
"'LST'",
"]",
"bc",
"=",
"ll",
"[",
"'BC'",
"]",
"if",
"not",
"bc",
":",
"print",
"(",
"'Exception handled :)\\n'",
")",
"else",
":",
"print",
"(",
"'Exception ignored, continuing to process...\\n'",
")",
"self",
".",
"stdin",
".",
"write",
"(",
"odsclose",
"+",
"logcodei",
".",
"encode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"+",
"b'\\n'",
")",
"self",
".",
"stdin",
".",
"flush",
"(",
")",
"if",
"ods",
":",
"try",
":",
"lstf",
"=",
"lstf",
".",
"decode",
"(",
")",
"except",
"UnicodeDecodeError",
":",
"try",
":",
"lstf",
"=",
"lstf",
".",
"decode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
"except",
"UnicodeDecodeError",
":",
"lstf",
"=",
"lstf",
".",
"decode",
"(",
"errors",
"=",
"'replace'",
")",
"else",
":",
"lstf",
"=",
"lstf",
".",
"decode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
",",
"errors",
"=",
"'replace'",
")",
"logf",
"=",
"logf",
".",
"decode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
",",
"errors",
"=",
"'replace'",
")",
"trip",
"=",
"lstf",
".",
"rpartition",
"(",
"\"/*]]>*/\"",
")",
"if",
"len",
"(",
"trip",
"[",
"1",
"]",
")",
">",
"0",
"and",
"len",
"(",
"trip",
"[",
"2",
"]",
")",
"<",
"100",
":",
"lstf",
"=",
"''",
"self",
".",
"_log",
"+=",
"logf",
"final",
"=",
"logf",
".",
"partition",
"(",
"logcodei",
")",
"z",
"=",
"final",
"[",
"0",
"]",
".",
"rpartition",
"(",
"chr",
"(",
"10",
")",
")",
"prev",
"=",
"'%08d'",
"%",
"(",
"self",
".",
"_log_cnt",
"-",
"1",
")",
"zz",
"=",
"z",
"[",
"0",
"]",
".",
"rpartition",
"(",
"\"\\nE3969440A681A24088859985\"",
"+",
"prev",
"+",
"'\\n'",
")",
"logd",
"=",
"zz",
"[",
"2",
"]",
".",
"replace",
"(",
"mj",
".",
"decode",
"(",
"self",
".",
"sascfg",
".",
"encoding",
")",
",",
"''",
")",
"lstd",
"=",
"lstf",
".",
"replace",
"(",
"chr",
"(",
"12",
")",
",",
"chr",
"(",
"10",
")",
")",
".",
"replace",
"(",
"'<body class=\"c body\">'",
",",
"'<body class=\"l body\">'",
")",
".",
"replace",
"(",
"\"font-size: x-small;\"",
",",
"\"font-size: normal;\"",
")",
"return",
"dict",
"(",
"LOG",
"=",
"logd",
",",
"LST",
"=",
"lstd",
")"
] | This method is used to submit any SAS code. It returns the Log and Listing as a python dictionary.
code - the SAS statements you want to execute
results - format of results, HTML is default, TEXT is the alternative
prompt - dict of names:flags to prompt for; create macro variables (used in submitted code), then keep or delete
The keys are the names of the macro variables and the boolean flag is to either hide what you type and delete
the macros, or show what you type and keep the macros (they will still be available later)
for example (what you type for pw will not be displayed, user and dsname will):
results = sas.submit(
"""
libname tera teradata server=teracop1 user=&user pw=&pw;
proc print data=tera.&dsname (obs=10); run;
""" ,
prompt = {'user': False, 'pw': True, 'dsname': False}
)
Returns - a Dict containing two keys:values, [LOG, LST]. LOG is text and LST is 'results' (HTML or TEXT)
NOTE: to view HTML results in the ipykernel, issue: from IPython.display import HTML and use HTML() instead of print()
i.e.: results = sas.submit("data a; x=1; run; proc print; run;")
print(results['LOG'])
HTML(results['LST']) | [
"This",
"method",
"is",
"used",
"to",
"submit",
"any",
"SAS",
"code",
".",
"It",
"returns",
"the",
"Log",
"and",
"Listing",
"as",
"a",
"python",
"dictionary",
".",
"code",
"-",
"the",
"SAS",
"statements",
"you",
"want",
"to",
"execute",
"results",
"-",
"format",
"of",
"results",
"HTML",
"is",
"default",
"TEXT",
"is",
"the",
"alternative",
"prompt",
"-",
"dict",
"of",
"names",
":",
"flags",
"to",
"prompt",
"for",
";",
"create",
"macro",
"variables",
"(",
"used",
"in",
"submitted",
"code",
")",
"then",
"keep",
"or",
"delete",
"The",
"keys",
"are",
"the",
"names",
"of",
"the",
"macro",
"variables",
"and",
"the",
"boolean",
"flag",
"is",
"to",
"either",
"hide",
"what",
"you",
"type",
"and",
"delete",
"the",
"macros",
"or",
"show",
"what",
"you",
"type",
"and",
"keep",
"the",
"macros",
"(",
"they",
"will",
"still",
"be",
"available",
"later",
")",
"for",
"example",
"(",
"what",
"you",
"type",
"for",
"pw",
"will",
"not",
"be",
"displayed",
"user",
"and",
"dsname",
"will",
")",
":"
] | python | train |
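
A short usage sketch for the `submit` method in the record above. The session setup is an assumption: `SASsession()` with no arguments relies on a locally configured saspy profile, which the record does not show.

```python
import saspy

# Assumes a working local SAS configuration; the default profile is
# an assumption, not part of the record above.
sas = saspy.SASsession()

results = sas.submit(
    "data a; x = 1; run; proc print data=a; run;",
    results="TEXT",  # plain-text listing instead of the HTML default
)
print(results["LOG"])  # the SAS log (always text)
print(results["LST"])  # the listing, text here because results="TEXT"
```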
angr/claripy | claripy/vsa/strided_interval.py | https://github.com/angr/claripy/blob/4ed61924880af1ea8fb778047d896ec0156412a6/claripy/vsa/strided_interval.py#L594-L618 | def _unsigned_bounds(self):
"""
Get lower bound and upper bound for `self` in unsigned arithmetic.
:return: a list of (lower_bound, upper_bound) tuples.
"""
ssplit = self._ssplit()
if len(ssplit) == 1:
lb = ssplit[0].lower_bound
ub = ssplit[0].upper_bound
return [ (lb, ub) ]
elif len(ssplit) == 2:
# ssplit[0] is on the left hemisphere, and ssplit[1] is on the right hemisphere
lb_1 = ssplit[0].lower_bound
ub_1 = ssplit[0].upper_bound
lb_2 = ssplit[1].lower_bound
ub_2 = ssplit[1].upper_bound
return [ (lb_1, ub_1), (lb_2, ub_2) ]
else:
raise Exception('WTF') | [
"def",
"_unsigned_bounds",
"(",
"self",
")",
":",
"ssplit",
"=",
"self",
".",
"_ssplit",
"(",
")",
"if",
"len",
"(",
"ssplit",
")",
"==",
"1",
":",
"lb",
"=",
"ssplit",
"[",
"0",
"]",
".",
"lower_bound",
"ub",
"=",
"ssplit",
"[",
"0",
"]",
".",
"upper_bound",
"return",
"[",
"(",
"lb",
",",
"ub",
")",
"]",
"elif",
"len",
"(",
"ssplit",
")",
"==",
"2",
":",
"# ssplit[0] is on the left hemisphere, and ssplit[1] is on the right hemisphere",
"lb_1",
"=",
"ssplit",
"[",
"0",
"]",
".",
"lower_bound",
"ub_1",
"=",
"ssplit",
"[",
"0",
"]",
".",
"upper_bound",
"lb_2",
"=",
"ssplit",
"[",
"1",
"]",
".",
"lower_bound",
"ub_2",
"=",
"ssplit",
"[",
"1",
"]",
".",
"upper_bound",
"return",
"[",
"(",
"lb_1",
",",
"ub_1",
")",
",",
"(",
"lb_2",
",",
"ub_2",
")",
"]",
"else",
":",
"raise",
"Exception",
"(",
"'WTF'",
")"
] | Get lower bound and upper bound for `self` in unsigned arithmetic.
:return: a list of (lower_bound, upper_bound) tuples. | [
"Get",
"lower",
"bound",
"and",
"upper",
"bound",
"for",
"self",
"in",
"unsigned",
"arithmetic",
"."
] | python | train |
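
To make the hemisphere split concrete, here is a hedged sketch of the same idea over plain integer intervals; claripy's `StridedInterval` also tracks a stride, which this toy version omits:

```python
def unsigned_bounds(lo, hi, bits):
    """Unsigned bounds of a signed interval [lo, hi] in `bits`-bit
    two's complement; splits at the sign boundary when it is crossed."""
    mask = (1 << bits) - 1
    if lo >= 0 or hi < 0:
        # Entirely within one hemisphere: a single unsigned range.
        return [(lo & mask, hi & mask)]
    # Crosses zero: [0, hi] stays low, [lo, -1] wraps to the top.
    return [(0, hi), (lo & mask, mask)]

print(unsigned_bounds(5, 9, 8))    # [(5, 9)]
print(unsigned_bounds(-2, 3, 8))   # [(0, 3), (254, 255)]
```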
dpkp/kafka-python | kafka/cluster.py | https://github.com/dpkp/kafka-python/blob/f6a8a38937688ea2cc5dc13d3d1039493be5c9b5/kafka/cluster.py#L213-L222 | def failed_update(self, exception):
"""Update cluster state given a failed MetadataRequest."""
f = None
with self._lock:
if self._future:
f = self._future
self._future = None
if f:
f.failure(exception)
self._last_refresh_ms = time.time() * 1000 | [
"def",
"failed_update",
"(",
"self",
",",
"exception",
")",
":",
"f",
"=",
"None",
"with",
"self",
".",
"_lock",
":",
"if",
"self",
".",
"_future",
":",
"f",
"=",
"self",
".",
"_future",
"self",
".",
"_future",
"=",
"None",
"if",
"f",
":",
"f",
".",
"failure",
"(",
"exception",
")",
"self",
".",
"_last_refresh_ms",
"=",
"time",
".",
"time",
"(",
")",
"*",
"1000"
] | Update cluster state given a failed MetadataRequest. | [
"Update",
"cluster",
"state",
"given",
"a",
"failed",
"MetadataRequest",
"."
] | python | train |
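
The record above detaches the pending future under the lock and fails it afterwards. A self-contained sketch of that pattern, with a toy `Future` standing in for `kafka.future.Future` (an assumption; only `failure` and errbacks are modeled):

```python
import threading
import time

class Future:
    """Toy stand-in for kafka.future.Future; only errbacks are modeled."""
    def __init__(self):
        self._errbacks = []
    def add_errback(self, fn):
        self._errbacks.append(fn)
    def failure(self, exception):
        for fn in self._errbacks:
            fn(exception)

class ClusterMetadata:
    def __init__(self):
        self._lock = threading.Lock()
        self._future = Future()
        self._last_refresh_ms = 0

    def failed_update(self, exception):
        # Same shape as the record: detach the pending future under the
        # lock, then fail it outside so errbacks never run while locked.
        f = None
        with self._lock:
            if self._future:
                f = self._future
                self._future = None
        if f:
            f.failure(exception)
        self._last_refresh_ms = time.time() * 1000

c = ClusterMetadata()
c._future.add_errback(lambda e: print("metadata refresh failed:", e))
c.failed_update(RuntimeError("broker unreachable"))
```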
AN3223/fpbox | fpbox/funcs.py | https://github.com/AN3223/fpbox/blob/d3b88fa6d68b7673c58edf46c89a552a9aedd162/fpbox/funcs.py#L81-L85 | def lazy_reverse_binmap(f, xs):
"""
Same as lazy_binmap, except the parameters are flipped for the binary function
"""
return (f(y, x) for x, y in zip(xs, xs[1:])) | [
"def",
"lazy_reverse_binmap",
"(",
"f",
",",
"xs",
")",
":",
"return",
"(",
"f",
"(",
"y",
",",
"x",
")",
"for",
"x",
",",
"y",
"in",
"zip",
"(",
"xs",
",",
"xs",
"[",
"1",
":",
"]",
")",
")"
] | Same as lazy_binmap, except the parameters are flipped for the binary function | [
"Same",
"as",
"lazy_binmap",
"except",
"the",
"parameters",
"are",
"flipped",
"for",
"the",
"binary",
"function"
] | python | train |
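
A usage sketch for the record above. The import path is inferred from the record's module layout and should be treated as an assumption:

```python
import operator

# Import path taken from the record's fpbox/funcs.py layout; treat it as
# an assumption if the package re-exports the function elsewhere.
from fpbox.funcs import lazy_reverse_binmap

xs = [1, 4, 9, 16]
# f(y, x) with operator.sub yields successive differences (next - current).
print(list(lazy_reverse_binmap(operator.sub, xs)))  # [3, 5, 7]
```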
google/grr | grr/server/grr_response_server/flow.py | https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/flow.py#L189-L210 | def GetOutputPluginStates(output_plugins, source=None, token=None):
"""Initializes state for a list of output plugins."""
output_plugins_states = []
for plugin_descriptor in output_plugins:
plugin_class = plugin_descriptor.GetPluginClass()
try:
_, plugin_state = plugin_class.CreatePluginAndDefaultState(
source_urn=source, args=plugin_descriptor.plugin_args, token=token)
except Exception as e: # pylint: disable=broad-except
raise ValueError("Plugin %s failed to initialize (%s)" %
(plugin_class, e))
# TODO(amoser): Those do not need to be inside the state, they
# could be part of the plugin descriptor.
plugin_state["logs"] = []
plugin_state["errors"] = []
output_plugins_states.append(
rdf_flow_runner.OutputPluginState(
plugin_state=plugin_state, plugin_descriptor=plugin_descriptor))
return output_plugins_states | [
"def",
"GetOutputPluginStates",
"(",
"output_plugins",
",",
"source",
"=",
"None",
",",
"token",
"=",
"None",
")",
":",
"output_plugins_states",
"=",
"[",
"]",
"for",
"plugin_descriptor",
"in",
"output_plugins",
":",
"plugin_class",
"=",
"plugin_descriptor",
".",
"GetPluginClass",
"(",
")",
"try",
":",
"_",
",",
"plugin_state",
"=",
"plugin_class",
".",
"CreatePluginAndDefaultState",
"(",
"source_urn",
"=",
"source",
",",
"args",
"=",
"plugin_descriptor",
".",
"plugin_args",
",",
"token",
"=",
"token",
")",
"except",
"Exception",
"as",
"e",
":",
"# pylint: disable=broad-except",
"raise",
"ValueError",
"(",
"\"Plugin %s failed to initialize (%s)\"",
"%",
"(",
"plugin_class",
",",
"e",
")",
")",
"# TODO(amoser): Those do not need to be inside the state, they",
"# could be part of the plugin descriptor.",
"plugin_state",
"[",
"\"logs\"",
"]",
"=",
"[",
"]",
"plugin_state",
"[",
"\"errors\"",
"]",
"=",
"[",
"]",
"output_plugins_states",
".",
"append",
"(",
"rdf_flow_runner",
".",
"OutputPluginState",
"(",
"plugin_state",
"=",
"plugin_state",
",",
"plugin_descriptor",
"=",
"plugin_descriptor",
")",
")",
"return",
"output_plugins_states"
] | Initializes state for a list of output plugins. | [
"Initializes",
"state",
"for",
"a",
"list",
"of",
"output",
"plugins",
"."
] | python | train |
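
A hedged sketch of the initialization loop above with the GRR-specific types replaced by stand-ins; `create_default_state` is a hypothetical hook, not the real `CreatePluginAndDefaultState` API:

```python
class PluginInitError(ValueError):
    pass

def build_plugin_states(plugin_classes, source=None):
    """Build one state per plugin, wrap failures with the offending
    class, and seed the per-plugin logs/errors lists as the record does."""
    states = []
    for plugin_class in plugin_classes:
        try:
            state = plugin_class.create_default_state(source)  # hypothetical hook
        except Exception as e:
            raise PluginInitError(
                "Plugin %s failed to initialize (%s)" % (plugin_class, e))
        state["logs"] = []
        state["errors"] = []
        states.append({"class": plugin_class, "state": state})
    return states

class CsvPlugin:  # hypothetical plugin, not a GRR class
    @staticmethod
    def create_default_state(source):
        return {"source": source}

print(build_plugin_states([CsvPlugin], source="hunt:123"))
```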
Mic92/python-mpd2 | mpd/base.py | https://github.com/Mic92/python-mpd2/blob/fc2782009915d9b642ceef6e4d3b52fa6168998b/mpd/base.py#L133-L159 | def mpd_command_provider(cls):
"""Decorator hooking up registered MPD commands to concrete client
implementation.
A class using this decorator must inherit from ``MPDClientBase`` and
implement it's ``add_command`` function.
"""
def collect(cls, callbacks=dict()):
"""Collect MPD command callbacks from given class.
Searches class __dict__ on given class and all its bases for functions
which have been decorated with @mpd_commands and returns a dict
containing callback name as keys and
(callback, callback implementing class) tuples as values.
"""
for name, ob in cls.__dict__.items():
if hasattr(ob, "mpd_commands") and name not in callbacks:
callbacks[name] = (ob, cls)
for base in cls.__bases__:
callbacks = collect(base, callbacks)
return callbacks
for name, value in collect(cls).items():
callback, from_ = value
for command in callback.mpd_commands:
cls.add_command(command, callback)
return cls | [
"def",
"mpd_command_provider",
"(",
"cls",
")",
":",
"def",
"collect",
"(",
"cls",
",",
"callbacks",
"=",
"dict",
"(",
")",
")",
":",
"\"\"\"Collect MPD command callbacks from given class.\n\n Searches class __dict__ on given class and all it's bases for functions\n which have been decorated with @mpd_commands and returns a dict\n containing callback name as keys and\n (callback, callback implementing class) tuples as values.\n \"\"\"",
"for",
"name",
",",
"ob",
"in",
"cls",
".",
"__dict__",
".",
"items",
"(",
")",
":",
"if",
"hasattr",
"(",
"ob",
",",
"\"mpd_commands\"",
")",
"and",
"name",
"not",
"in",
"callbacks",
":",
"callbacks",
"[",
"name",
"]",
"=",
"(",
"ob",
",",
"cls",
")",
"for",
"base",
"in",
"cls",
".",
"__bases__",
":",
"callbacks",
"=",
"collect",
"(",
"base",
",",
"callbacks",
")",
"return",
"callbacks",
"for",
"name",
",",
"value",
"in",
"collect",
"(",
"cls",
")",
".",
"items",
"(",
")",
":",
"callback",
",",
"from_",
"=",
"value",
"for",
"command",
"in",
"callback",
".",
"mpd_commands",
":",
"cls",
".",
"add_command",
"(",
"command",
",",
"callback",
")",
"return",
"cls"
] | Decorator hooking up registered MPD commands to concrete client
implementation.
A class using this decorator must inherit from ``MPDClientBase`` and
implement its ``add_command`` function. | [
"Decorator",
"hooking",
"up",
"registered",
"MPD",
"commands",
"to",
"concrete",
"client",
"implementation",
"."
] | python | train |
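
A minimal self-contained sketch of the registration pattern above. `commands` and `command_provider` are hypothetical stand-ins for `@mpd_commands` and `mpd_command_provider`, and the class-level `registry` dict stands in for the real `add_command` hook:

```python
def commands(*names):
    """Hypothetical stand-in for @mpd_commands: tag the callback."""
    def deco(fn):
        fn.mpd_commands = names
        return fn
    return deco

def command_provider(cls):
    """Walk the class and its bases, as the record does, collecting
    every method tagged with command names (None-guarded default
    instead of the record's `callbacks=dict()` idiom)."""
    def collect(klass, callbacks=None):
        callbacks = {} if callbacks is None else callbacks
        for name, ob in klass.__dict__.items():
            if hasattr(ob, "mpd_commands") and name not in callbacks:
                callbacks[name] = ob
        for base in klass.__bases__:
            collect(base, callbacks)
        return callbacks

    for callback in collect(cls).values():
        for command in callback.mpd_commands:
            cls.registry[command] = callback
    return cls

@command_provider
class Client:
    registry = {}

    @commands("play", "pause")
    def _toggle(self, *args):
        return "toggled"

print(sorted(Client.registry))  # ['pause', 'play']
```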
globus/globus-cli | globus_cli/commands/endpoint/update.py | https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/update.py#L17-L41 | def endpoint_update(**kwargs):
"""
Executor for `globus endpoint update`
"""
# validate params. Requires a get call to check the endpoint type
client = get_client()
endpoint_id = kwargs.pop("endpoint_id")
get_res = client.get_endpoint(endpoint_id)
if get_res["host_endpoint_id"]:
endpoint_type = "shared"
elif get_res["is_globus_connect"]:
endpoint_type = "personal"
elif get_res["s3_url"]:
endpoint_type = "s3"
else:
endpoint_type = "server"
validate_endpoint_create_and_update_params(
endpoint_type, get_res["subscription_id"], kwargs
)
# make the update
ep_doc = assemble_generic_doc("endpoint", **kwargs)
res = client.update_endpoint(endpoint_id, ep_doc)
formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message") | [
"def",
"endpoint_update",
"(",
"*",
"*",
"kwargs",
")",
":",
"# validate params. Requires a get call to check the endpoint type",
"client",
"=",
"get_client",
"(",
")",
"endpoint_id",
"=",
"kwargs",
".",
"pop",
"(",
"\"endpoint_id\"",
")",
"get_res",
"=",
"client",
".",
"get_endpoint",
"(",
"endpoint_id",
")",
"if",
"get_res",
"[",
"\"host_endpoint_id\"",
"]",
":",
"endpoint_type",
"=",
"\"shared\"",
"elif",
"get_res",
"[",
"\"is_globus_connect\"",
"]",
":",
"endpoint_type",
"=",
"\"personal\"",
"elif",
"get_res",
"[",
"\"s3_url\"",
"]",
":",
"endpoint_type",
"=",
"\"s3\"",
"else",
":",
"endpoint_type",
"=",
"\"server\"",
"validate_endpoint_create_and_update_params",
"(",
"endpoint_type",
",",
"get_res",
"[",
"\"subscription_id\"",
"]",
",",
"kwargs",
")",
"# make the update",
"ep_doc",
"=",
"assemble_generic_doc",
"(",
"\"endpoint\"",
",",
"*",
"*",
"kwargs",
")",
"res",
"=",
"client",
".",
"update_endpoint",
"(",
"endpoint_id",
",",
"ep_doc",
")",
"formatted_print",
"(",
"res",
",",
"text_format",
"=",
"FORMAT_TEXT_RAW",
",",
"response_key",
"=",
"\"message\"",
")"
] | Executor for `globus endpoint update` | [
"Executor",
"for",
"globus",
"endpoint",
"update"
] | python | train |
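
The endpoint-type branch is the crux of the executor above. As a standalone sketch over the GET response document (field names taken from the record):

```python
def classify_endpoint(doc):
    """Classify an endpoint document the way the record's update
    executor does, checking the same fields in the same order."""
    if doc.get("host_endpoint_id"):
        return "shared"
    if doc.get("is_globus_connect"):
        return "personal"
    if doc.get("s3_url"):
        return "s3"
    return "server"

print(classify_endpoint({"is_globus_connect": True}))  # personal
print(classify_endpoint({}))                           # server
```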