id (int32, 0–252k) | repo (string, 7–55 chars) | path (string, 4–127 chars) | func_name (string, 1–88 chars) | original_string (string, 75–19.8k chars) | language (1 class: python) | code (string, 75–19.8k chars) | code_tokens (sequence) | docstring (string, 3–17.3k chars) | docstring_tokens (sequence) | sha (string, 40 chars) | url (string, 87–242 chars) |
---|---|---|---|---|---|---|---|---|---|---|---|
251,100 | openpermissions/perch | perch/user.py | Token.valid | def valid(cls, token, **kwargs):
"""
Check if a token exists and has not expired
:param token: the token
:return: bool
"""
try:
token = yield cls.get(token)
except couch.NotFound:
raise Return(False)
raise Return(token.ttl >= datetime.utcnow()) | python | def valid(cls, token, **kwargs):
"""
Check if a token exists and has not expired
:param token: the token
:return: bool
"""
try:
token = yield cls.get(token)
except couch.NotFound:
raise Return(False)
raise Return(token.ttl >= datetime.utcnow()) | [
"def",
"valid",
"(",
"cls",
",",
"token",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"token",
"=",
"yield",
"cls",
".",
"get",
"(",
"token",
")",
"except",
"couch",
".",
"NotFound",
":",
"raise",
"Return",
"(",
"False",
")",
"raise",
"Return",
"(",
"token",
".",
"ttl",
">=",
"datetime",
".",
"utcnow",
"(",
")",
")"
] | Check if a token exists and has not expired
:param token: the token
:return: bool | [
"Check",
"if",
"a",
"token",
"exists",
"and",
"has",
"not",
"expired"
] | 36d78994133918f3c52c187f19e50132960a0156 | https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/user.py#L320-L332 |
251,101 | stevepeak/inquiry | inquiry/garden.py | Garden.plant | def plant(self, *seeds, **arguments):
"""Applys seeds and arguments
to the garden for use during the harvest
"""
map(self._clean, seeds)
self.network_kwargs.update(arguments) | python | def plant(self, *seeds, **arguments):
"""Applys seeds and arguments
to the garden for use during the harvest
"""
map(self._clean, seeds)
self.network_kwargs.update(arguments) | [
"def",
"plant",
"(",
"self",
",",
"*",
"seeds",
",",
"*",
"*",
"arguments",
")",
":",
"map",
"(",
"self",
".",
"_clean",
",",
"seeds",
")",
"self",
".",
"network_kwargs",
".",
"update",
"(",
"arguments",
")"
] | Applys seeds and arguments
to the garden for use during the harvest | [
"Applys",
"seeds",
"and",
"arguments",
"to",
"the",
"garden",
"for",
"use",
"during",
"the",
"harvest"
] | f6ea435c302560ba19985b5d4ce2c97e2f321508 | https://github.com/stevepeak/inquiry/blob/f6ea435c302560ba19985b5d4ce2c97e2f321508/inquiry/garden.py#L85-L90 |
251,102 | stevepeak/inquiry | inquiry/garden.py | Garden._clean | def _clean(self, seed):
"""Takes a seed and applies it to the garden
"""
seed = deepcopy(seed)
# inherit any other figures
self._inherit(*array(get(seed, 'inherit', [])))
# merge the seed arguments
if '&arguments' in seed:
self.arguments = merge(self.arguments, seed.pop('&arguments'))
elif 'arguments' in seed:
self.arguments = seed.pop('arguments')
# append the seed
self.seeds.append(seed) | python | def _clean(self, seed):
"""Takes a seed and applies it to the garden
"""
seed = deepcopy(seed)
# inherit any other figures
self._inherit(*array(get(seed, 'inherit', [])))
# merge the seed arguments
if '&arguments' in seed:
self.arguments = merge(self.arguments, seed.pop('&arguments'))
elif 'arguments' in seed:
self.arguments = seed.pop('arguments')
# append the seed
self.seeds.append(seed) | [
"def",
"_clean",
"(",
"self",
",",
"seed",
")",
":",
"seed",
"=",
"deepcopy",
"(",
"seed",
")",
"# inherit any other figures",
"self",
".",
"_inherit",
"(",
"*",
"array",
"(",
"get",
"(",
"seed",
",",
"'inherit'",
",",
"[",
"]",
")",
")",
")",
"# merge the seed arguments",
"if",
"'&arguments'",
"in",
"seed",
":",
"self",
".",
"arguments",
"=",
"merge",
"(",
"self",
".",
"arguments",
",",
"seed",
".",
"pop",
"(",
"'&arguments'",
")",
")",
"elif",
"'arguments'",
"in",
"seed",
":",
"self",
".",
"arguments",
"=",
"seed",
".",
"pop",
"(",
"'arguments'",
")",
"# append the seed",
"self",
".",
"seeds",
".",
"append",
"(",
"seed",
")"
] | Takes a seed and applies it to the garden | [
"Takes",
"a",
"seed",
"and",
"applies",
"it",
"to",
"the",
"garden"
] | f6ea435c302560ba19985b5d4ce2c97e2f321508 | https://github.com/stevepeak/inquiry/blob/f6ea435c302560ba19985b5d4ce2c97e2f321508/inquiry/garden.py#L524-L537 |
251,103 | udoprog/mimeprovider | mimeprovider/__init__.py | MimeProvider._generate_base_mimetypes | def _generate_base_mimetypes(self):
"""
Generate the base mimetypes as described by non customized document
types.
"""
for t in self.type_instances:
if t.custom_mime:
continue
yield t.mime, (t, None, None) | python | def _generate_base_mimetypes(self):
"""
Generate the base mimetypes as described by non customized document
types.
"""
for t in self.type_instances:
if t.custom_mime:
continue
yield t.mime, (t, None, None) | [
"def",
"_generate_base_mimetypes",
"(",
"self",
")",
":",
"for",
"t",
"in",
"self",
".",
"type_instances",
":",
"if",
"t",
".",
"custom_mime",
":",
"continue",
"yield",
"t",
".",
"mime",
",",
"(",
"t",
",",
"None",
",",
"None",
")"
] | Generate the base mimetypes as described by non customized document
types. | [
"Generate",
"the",
"base",
"mimetypes",
"as",
"described",
"by",
"non",
"customized",
"document",
"types",
"."
] | 5acd61eb0ef813b4a2eb6bbe75d07af1e11847a4 | https://github.com/udoprog/mimeprovider/blob/5acd61eb0ef813b4a2eb6bbe75d07af1e11847a4/mimeprovider/__init__.py#L80-L89 |
251,104 | ulf1/oxyba | oxyba/crossvalidation_stats.py | crossvalidation_stats | def crossvalidation_stats(errors1, errors2):
"""Paired difference test
of the CV errors of two models
Parameters:
-----------
errors1 : ndarray
The CV errors model 1
errors2 : ndarray
The CV errors model 2
Returns:
--------
pvalue : float
Two-sided P-value if the differences between err1 and err2
are significant
tscore : float
t-statistics
se : float
Standard Error of the CV-Error-Difference
mu : float
The average difference between err1 and err2
"""
# load modules
import numpy as np
import scipy.stats
import warnings
# Number of blocks
K = errors1.shape[0]
# display warnings
if K < 30:
warnings.warn((
"The number of blocks is K<30 what is insufficient "
"for conducting a t-Test to compare both models! "
"K=40 is suggested."))
# difference between errors
delta = errors1 - errors2
# the average difference
mu = np.mean(delta)
# Standard Error of the CV-Error-Difference
# se = np.sqrt(np.sum((delta-np.mean(delta))**2)/K)
se = np.std(delta)
# t-statistics
tscore = mu / se
# Two-sided P-value
pvalue = scipy.stats.t.sf(np.abs(tscore), K - 1) * 2
# done
return pvalue, tscore, se, mu | python | def crossvalidation_stats(errors1, errors2):
"""Paired difference test
of the CV errors of two models
Parameters:
-----------
errors1 : ndarray
The CV errors model 1
errors2 : ndarray
The CV errors model 2
Returns:
--------
pvalue : float
Two-sided P-value if the differences between err1 and err2
are significant
tscore : float
t-statistics
se : float
Standard Error of the CV-Error-Difference
mu : float
The average difference between err1 and err2
"""
# load modules
import numpy as np
import scipy.stats
import warnings
# Number of blocks
K = errors1.shape[0]
# display warnings
if K < 30:
warnings.warn((
"The number of blocks is K<30 what is insufficient "
"for conducting a t-Test to compare both models! "
"K=40 is suggested."))
# difference between errors
delta = errors1 - errors2
# the average difference
mu = np.mean(delta)
# Standard Error of the CV-Error-Difference
# se = np.sqrt(np.sum((delta-np.mean(delta))**2)/K)
se = np.std(delta)
# t-statistics
tscore = mu / se
# Two-sided P-value
pvalue = scipy.stats.t.sf(np.abs(tscore), K - 1) * 2
# done
return pvalue, tscore, se, mu | [
"def",
"crossvalidation_stats",
"(",
"errors1",
",",
"errors2",
")",
":",
"# load modules",
"import",
"numpy",
"as",
"np",
"import",
"scipy",
".",
"stats",
"import",
"warnings",
"# Number of blocks",
"K",
"=",
"errors1",
".",
"shape",
"[",
"0",
"]",
"# display warnings",
"if",
"K",
"<",
"30",
":",
"warnings",
".",
"warn",
"(",
"(",
"\"The number of blocks is K<30 what is insufficient \"",
"\"for conducting a t-Test to compare both models! \"",
"\"K=40 is suggested.\"",
")",
")",
"# difference between errors",
"delta",
"=",
"errors1",
"-",
"errors2",
"# the average difference",
"mu",
"=",
"np",
".",
"mean",
"(",
"delta",
")",
"# Standard Error of the CV-Error-Difference",
"# se = np.sqrt(np.sum((delta-np.mean(delta))**2)/K)",
"se",
"=",
"np",
".",
"std",
"(",
"delta",
")",
"# t-statistics",
"tscore",
"=",
"mu",
"/",
"se",
"# Two-sided P-value",
"pvalue",
"=",
"scipy",
".",
"stats",
".",
"t",
".",
"sf",
"(",
"np",
".",
"abs",
"(",
"tscore",
")",
",",
"K",
"-",
"1",
")",
"*",
"2",
"# done",
"return",
"pvalue",
",",
"tscore",
",",
"se",
",",
"mu"
] | Paired difference test
of the CV errors of two models
Parameters:
-----------
errors1 : ndarray
The CV errors model 1
errors2 : ndarray
The CV errors model 2
Returns:
--------
pvalue : float
Two-sided P-value if the differences between err1 and err2
are significant
tscore : float
t-statistics
se : float
Standard Error of the CV-Error-Difference
mu : float
The average difference between err1 and err2 | [
"Paired",
"difference",
"test",
"of",
"the",
"CV",
"errors",
"of",
"two",
"models"
] | b3043116050de275124365cb11e7df91fb40169d | https://github.com/ulf1/oxyba/blob/b3043116050de275124365cb11e7df91fb40169d/oxyba/crossvalidation_stats.py#L2-L62 |
251,105 | jmgilman/Neolib | neolib/pyamf/remoting/gateway/wsgi.py | WSGIGateway.badRequestMethod | def badRequestMethod(self, environ, start_response):
"""
Return HTTP 400 Bad Request.
"""
response = "400 Bad Request\n\nTo access this PyAMF gateway you " \
"must use POST requests (%s received)" % environ['REQUEST_METHOD']
start_response('400 Bad Request', [
('Content-Type', 'text/plain'),
('Content-Length', str(len(response))),
('Server', gateway.SERVER_NAME),
])
return [response] | python | def badRequestMethod(self, environ, start_response):
"""
Return HTTP 400 Bad Request.
"""
response = "400 Bad Request\n\nTo access this PyAMF gateway you " \
"must use POST requests (%s received)" % environ['REQUEST_METHOD']
start_response('400 Bad Request', [
('Content-Type', 'text/plain'),
('Content-Length', str(len(response))),
('Server', gateway.SERVER_NAME),
])
return [response] | [
"def",
"badRequestMethod",
"(",
"self",
",",
"environ",
",",
"start_response",
")",
":",
"response",
"=",
"\"400 Bad Request\\n\\nTo access this PyAMF gateway you \"",
"\"must use POST requests (%s received)\"",
"%",
"environ",
"[",
"'REQUEST_METHOD'",
"]",
"start_response",
"(",
"'400 Bad Request'",
",",
"[",
"(",
"'Content-Type'",
",",
"'text/plain'",
")",
",",
"(",
"'Content-Length'",
",",
"str",
"(",
"len",
"(",
"response",
")",
")",
")",
",",
"(",
"'Server'",
",",
"gateway",
".",
"SERVER_NAME",
")",
",",
"]",
")",
"return",
"[",
"response",
"]"
] | Return HTTP 400 Bad Request. | [
"Return",
"HTTP",
"400",
"Bad",
"Request",
"."
] | 228fafeaed0f3195676137732384a14820ae285c | https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/remoting/gateway/wsgi.py#L55-L68 |
251,106 | dcolish/Refugee | refugee/manager.py | MigrationManager.collect | def collect(self):
"""
Walks self.migration_home and load all potential migration modules
"""
for root, dirname, files in walk(self.migration_home):
for file_name in file_filter(files, "*.py"):
file_name = file_name.replace('.py', '')
file = None
try:
if file_name == '__init__':
continue
file, pathname, description = find_module(
file_name, [root])
load_module(file_name, file, pathname, description)
finally:
if file is not None:
file.close() | python | def collect(self):
"""
Walks self.migration_home and load all potential migration modules
"""
for root, dirname, files in walk(self.migration_home):
for file_name in file_filter(files, "*.py"):
file_name = file_name.replace('.py', '')
file = None
try:
if file_name == '__init__':
continue
file, pathname, description = find_module(
file_name, [root])
load_module(file_name, file, pathname, description)
finally:
if file is not None:
file.close() | [
"def",
"collect",
"(",
"self",
")",
":",
"for",
"root",
",",
"dirname",
",",
"files",
"in",
"walk",
"(",
"self",
".",
"migration_home",
")",
":",
"for",
"file_name",
"in",
"file_filter",
"(",
"files",
",",
"\"*.py\"",
")",
":",
"file_name",
"=",
"file_name",
".",
"replace",
"(",
"'.py'",
",",
"''",
")",
"file",
"=",
"None",
"try",
":",
"if",
"file_name",
"==",
"'__init__'",
":",
"continue",
"file",
",",
"pathname",
",",
"description",
"=",
"find_module",
"(",
"file_name",
",",
"[",
"root",
"]",
")",
"load_module",
"(",
"file_name",
",",
"file",
",",
"pathname",
",",
"description",
")",
"finally",
":",
"if",
"file",
"is",
"not",
"None",
":",
"file",
".",
"close",
"(",
")"
] | Walks self.migration_home and load all potential migration modules | [
"Walks",
"self",
".",
"migration_home",
"and",
"load",
"all",
"potential",
"migration",
"modules"
] | b98391cb3127d09b15b59c7c25dab07a968062fa | https://github.com/dcolish/Refugee/blob/b98391cb3127d09b15b59c7c25dab07a968062fa/refugee/manager.py#L55-L71 |
251,107 | dcolish/Refugee | refugee/manager.py | MigrationManager.run_all | def run_all(self, direction):
"""
Runs all registered migrations
:param direction: Can be on of two values, UP or DOWN
"""
for key in sorted(migration_registry.keys):
self.run(key, direction) | python | def run_all(self, direction):
"""
Runs all registered migrations
:param direction: Can be on of two values, UP or DOWN
"""
for key in sorted(migration_registry.keys):
self.run(key, direction) | [
"def",
"run_all",
"(",
"self",
",",
"direction",
")",
":",
"for",
"key",
"in",
"sorted",
"(",
"migration_registry",
".",
"keys",
")",
":",
"self",
".",
"run",
"(",
"key",
",",
"direction",
")"
] | Runs all registered migrations
:param direction: Can be on of two values, UP or DOWN | [
"Runs",
"all",
"registered",
"migrations"
] | b98391cb3127d09b15b59c7c25dab07a968062fa | https://github.com/dcolish/Refugee/blob/b98391cb3127d09b15b59c7c25dab07a968062fa/refugee/manager.py#L120-L127 |
251,108 | dcolish/Refugee | refugee/manager.py | MigrationManager.run | def run(self, migration_name, direction):
"""
Asserts an engine is configured and runs the registered migration in the
given direction
:param migration_name: key to a registered class in the
`migration_registry`
:param direction: Can be on of two values, UP or DOWN
"""
if not self.engine:
raise AttributeError("No engine configured for MigrationManager")
connection = self.engine.connect()
trans = connection.begin()
try:
migration = migration_registry[migration_name]()
if migration.preflight():
trans = connection.begin()
if direction == Direction.UP:
migration.up(connection)
elif direction == Direction.DOWN:
migration.down(connection)
else:
raise UnknowDirectionError
if migration.check():
trans.commit()
else:
raise MigrationError("Migration failed consistency checks")
except Exception, e:
trans.rollback()
#XXX:dc: do more to introspect why we failed
raise e | python | def run(self, migration_name, direction):
"""
Asserts an engine is configured and runs the registered migration in the
given direction
:param migration_name: key to a registered class in the
`migration_registry`
:param direction: Can be on of two values, UP or DOWN
"""
if not self.engine:
raise AttributeError("No engine configured for MigrationManager")
connection = self.engine.connect()
trans = connection.begin()
try:
migration = migration_registry[migration_name]()
if migration.preflight():
trans = connection.begin()
if direction == Direction.UP:
migration.up(connection)
elif direction == Direction.DOWN:
migration.down(connection)
else:
raise UnknowDirectionError
if migration.check():
trans.commit()
else:
raise MigrationError("Migration failed consistency checks")
except Exception, e:
trans.rollback()
#XXX:dc: do more to introspect why we failed
raise e | [
"def",
"run",
"(",
"self",
",",
"migration_name",
",",
"direction",
")",
":",
"if",
"not",
"self",
".",
"engine",
":",
"raise",
"AttributeError",
"(",
"\"No engine configured for MigrationManager\"",
")",
"connection",
"=",
"self",
".",
"engine",
".",
"connect",
"(",
")",
"trans",
"=",
"connection",
".",
"begin",
"(",
")",
"try",
":",
"migration",
"=",
"migration_registry",
"[",
"migration_name",
"]",
"(",
")",
"if",
"migration",
".",
"preflight",
"(",
")",
":",
"trans",
"=",
"connection",
".",
"begin",
"(",
")",
"if",
"direction",
"==",
"Direction",
".",
"UP",
":",
"migration",
".",
"up",
"(",
"connection",
")",
"elif",
"direction",
"==",
"Direction",
".",
"DOWN",
":",
"migration",
".",
"down",
"(",
"connection",
")",
"else",
":",
"raise",
"UnknowDirectionError",
"if",
"migration",
".",
"check",
"(",
")",
":",
"trans",
".",
"commit",
"(",
")",
"else",
":",
"raise",
"MigrationError",
"(",
"\"Migration failed consistency checks\"",
")",
"except",
"Exception",
",",
"e",
":",
"trans",
".",
"rollback",
"(",
")",
"#XXX:dc: do more to introspect why we failed",
"raise",
"e"
] | Asserts an engine is configured and runs the registered migration in the
given direction
:param migration_name: key to a registered class in the
`migration_registry`
:param direction: Can be on of two values, UP or DOWN | [
"Asserts",
"an",
"engine",
"is",
"configured",
"and",
"runs",
"the",
"registered",
"migration",
"in",
"the",
"given",
"direction"
] | b98391cb3127d09b15b59c7c25dab07a968062fa | https://github.com/dcolish/Refugee/blob/b98391cb3127d09b15b59c7c25dab07a968062fa/refugee/manager.py#L129-L163 |
251,109 | nivardus/kclboot | kclboot/maven_jar.py | MavenJar.maven_url | def maven_url(self):
'''
Download-URL from Maven
'''
return '{prefix}/{path}/{artifact}/{version}/{filename}'.format(
prefix = MAVEN_PREFIX,
path = '/'.join(self.group.split('.')),
artifact = self.artifact,
version = self.version,
filename = self.filename) | python | def maven_url(self):
'''
Download-URL from Maven
'''
return '{prefix}/{path}/{artifact}/{version}/{filename}'.format(
prefix = MAVEN_PREFIX,
path = '/'.join(self.group.split('.')),
artifact = self.artifact,
version = self.version,
filename = self.filename) | [
"def",
"maven_url",
"(",
"self",
")",
":",
"return",
"'{prefix}/{path}/{artifact}/{version}/{filename}'",
".",
"format",
"(",
"prefix",
"=",
"MAVEN_PREFIX",
",",
"path",
"=",
"'/'",
".",
"join",
"(",
"self",
".",
"group",
".",
"split",
"(",
"'.'",
")",
")",
",",
"artifact",
"=",
"self",
".",
"artifact",
",",
"version",
"=",
"self",
".",
"version",
",",
"filename",
"=",
"self",
".",
"filename",
")"
] | Download-URL from Maven | [
"Download",
"-",
"URL",
"from",
"Maven"
] | aee054d9186938bec51f19e9ed8deed6ac6fe492 | https://github.com/nivardus/kclboot/blob/aee054d9186938bec51f19e9ed8deed6ac6fe492/kclboot/maven_jar.py#L28-L37 |
251,110 | nivardus/kclboot | kclboot/maven_jar.py | MavenJar.download_to | def download_to(self, folder):
'''
Download into a folder
'''
urlretrieve(self.maven_url, os.path.join(folder, self.filename)) | python | def download_to(self, folder):
'''
Download into a folder
'''
urlretrieve(self.maven_url, os.path.join(folder, self.filename)) | [
"def",
"download_to",
"(",
"self",
",",
"folder",
")",
":",
"urlretrieve",
"(",
"self",
".",
"maven_url",
",",
"os",
".",
"path",
".",
"join",
"(",
"folder",
",",
"self",
".",
"filename",
")",
")"
] | Download into a folder | [
"Download",
"into",
"a",
"folder"
] | aee054d9186938bec51f19e9ed8deed6ac6fe492 | https://github.com/nivardus/kclboot/blob/aee054d9186938bec51f19e9ed8deed6ac6fe492/kclboot/maven_jar.py#L39-L43 |
251,111 | insilicolife/micti | MICTI/radarPlot.py | radar_factory | def radar_factory(num_vars, frame='circle'):
"""Create a radar chart with `num_vars` axes.
This function creates a RadarAxes projection and registers it.
Parameters
----------
num_vars : int
Number of variables for radar chart.
frame : {'circle' | 'polygon'}
Shape of frame surrounding axes.
"""
# calculate evenly-spaced axis angles
theta = np.linspace(0, 2*np.pi, num_vars, endpoint=False)
# rotate theta such that the first axis is at the top
theta += np.pi/2
def draw_poly_patch(self):
verts = unit_poly_verts(theta)
return plt.Polygon(verts, closed=True, edgecolor='k')
def draw_circle_patch(self):
# unit circle centered on (0.5, 0.5)
return plt.Circle((0.5, 0.5), 0.5)
patch_dict = {'polygon': draw_poly_patch, 'circle': draw_circle_patch}
if frame not in patch_dict:
raise ValueError('unknown value for `frame`: %s' % frame)
class RadarAxes(PolarAxes):
name = 'radar'
# use 1 line segment to connect specified points
RESOLUTION = 1
# define draw_frame method
draw_patch = patch_dict[frame]
def fill(self, *args, **kwargs):
"""Override fill so that line is closed by default"""
closed = kwargs.pop('closed', True)
return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)
def plot(self, *args, **kwargs):
"""Override plot so that line is closed by default"""
lines = super(RadarAxes, self).plot(*args, **kwargs)
for line in lines:
self._close_line(line)
def _close_line(self, line):
x, y = line.get_data()
# FIXME: markers at x[0], y[0] get doubled-up
if x[0] != x[-1]:
x = np.concatenate((x, [x[0]]))
y = np.concatenate((y, [y[0]]))
line.set_data(x, y)
def set_varlabels(self, labels):
self.set_thetagrids(np.degrees(theta), labels)
def _gen_axes_patch(self):
return self.draw_patch()
def _gen_axes_spines(self):
if frame == 'circle':
return PolarAxes._gen_axes_spines(self)
# The following is a hack to get the spines (i.e. the axes frame)
# to draw correctly for a polygon frame.
# spine_type must be 'left', 'right', 'top', 'bottom', or `circle`.
spine_type = 'circle'
verts = unit_poly_verts(theta)
# close off polygon by repeating first vertex
verts.append(verts[0])
path = Path(verts)
spine = Spine(self, spine_type, path)
spine.set_transform(self.transAxes)
return {'polar': spine}
register_projection(RadarAxes)
return theta | python | def radar_factory(num_vars, frame='circle'):
"""Create a radar chart with `num_vars` axes.
This function creates a RadarAxes projection and registers it.
Parameters
----------
num_vars : int
Number of variables for radar chart.
frame : {'circle' | 'polygon'}
Shape of frame surrounding axes.
"""
# calculate evenly-spaced axis angles
theta = np.linspace(0, 2*np.pi, num_vars, endpoint=False)
# rotate theta such that the first axis is at the top
theta += np.pi/2
def draw_poly_patch(self):
verts = unit_poly_verts(theta)
return plt.Polygon(verts, closed=True, edgecolor='k')
def draw_circle_patch(self):
# unit circle centered on (0.5, 0.5)
return plt.Circle((0.5, 0.5), 0.5)
patch_dict = {'polygon': draw_poly_patch, 'circle': draw_circle_patch}
if frame not in patch_dict:
raise ValueError('unknown value for `frame`: %s' % frame)
class RadarAxes(PolarAxes):
name = 'radar'
# use 1 line segment to connect specified points
RESOLUTION = 1
# define draw_frame method
draw_patch = patch_dict[frame]
def fill(self, *args, **kwargs):
"""Override fill so that line is closed by default"""
closed = kwargs.pop('closed', True)
return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)
def plot(self, *args, **kwargs):
"""Override plot so that line is closed by default"""
lines = super(RadarAxes, self).plot(*args, **kwargs)
for line in lines:
self._close_line(line)
def _close_line(self, line):
x, y = line.get_data()
# FIXME: markers at x[0], y[0] get doubled-up
if x[0] != x[-1]:
x = np.concatenate((x, [x[0]]))
y = np.concatenate((y, [y[0]]))
line.set_data(x, y)
def set_varlabels(self, labels):
self.set_thetagrids(np.degrees(theta), labels)
def _gen_axes_patch(self):
return self.draw_patch()
def _gen_axes_spines(self):
if frame == 'circle':
return PolarAxes._gen_axes_spines(self)
# The following is a hack to get the spines (i.e. the axes frame)
# to draw correctly for a polygon frame.
# spine_type must be 'left', 'right', 'top', 'bottom', or `circle`.
spine_type = 'circle'
verts = unit_poly_verts(theta)
# close off polygon by repeating first vertex
verts.append(verts[0])
path = Path(verts)
spine = Spine(self, spine_type, path)
spine.set_transform(self.transAxes)
return {'polar': spine}
register_projection(RadarAxes)
return theta | [
"def",
"radar_factory",
"(",
"num_vars",
",",
"frame",
"=",
"'circle'",
")",
":",
"# calculate evenly-spaced axis angles",
"theta",
"=",
"np",
".",
"linspace",
"(",
"0",
",",
"2",
"*",
"np",
".",
"pi",
",",
"num_vars",
",",
"endpoint",
"=",
"False",
")",
"# rotate theta such that the first axis is at the top",
"theta",
"+=",
"np",
".",
"pi",
"/",
"2",
"def",
"draw_poly_patch",
"(",
"self",
")",
":",
"verts",
"=",
"unit_poly_verts",
"(",
"theta",
")",
"return",
"plt",
".",
"Polygon",
"(",
"verts",
",",
"closed",
"=",
"True",
",",
"edgecolor",
"=",
"'k'",
")",
"def",
"draw_circle_patch",
"(",
"self",
")",
":",
"# unit circle centered on (0.5, 0.5)",
"return",
"plt",
".",
"Circle",
"(",
"(",
"0.5",
",",
"0.5",
")",
",",
"0.5",
")",
"patch_dict",
"=",
"{",
"'polygon'",
":",
"draw_poly_patch",
",",
"'circle'",
":",
"draw_circle_patch",
"}",
"if",
"frame",
"not",
"in",
"patch_dict",
":",
"raise",
"ValueError",
"(",
"'unknown value for `frame`: %s'",
"%",
"frame",
")",
"class",
"RadarAxes",
"(",
"PolarAxes",
")",
":",
"name",
"=",
"'radar'",
"# use 1 line segment to connect specified points",
"RESOLUTION",
"=",
"1",
"# define draw_frame method",
"draw_patch",
"=",
"patch_dict",
"[",
"frame",
"]",
"def",
"fill",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Override fill so that line is closed by default\"\"\"",
"closed",
"=",
"kwargs",
".",
"pop",
"(",
"'closed'",
",",
"True",
")",
"return",
"super",
"(",
"RadarAxes",
",",
"self",
")",
".",
"fill",
"(",
"closed",
"=",
"closed",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"def",
"plot",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Override plot so that line is closed by default\"\"\"",
"lines",
"=",
"super",
"(",
"RadarAxes",
",",
"self",
")",
".",
"plot",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"for",
"line",
"in",
"lines",
":",
"self",
".",
"_close_line",
"(",
"line",
")",
"def",
"_close_line",
"(",
"self",
",",
"line",
")",
":",
"x",
",",
"y",
"=",
"line",
".",
"get_data",
"(",
")",
"# FIXME: markers at x[0], y[0] get doubled-up",
"if",
"x",
"[",
"0",
"]",
"!=",
"x",
"[",
"-",
"1",
"]",
":",
"x",
"=",
"np",
".",
"concatenate",
"(",
"(",
"x",
",",
"[",
"x",
"[",
"0",
"]",
"]",
")",
")",
"y",
"=",
"np",
".",
"concatenate",
"(",
"(",
"y",
",",
"[",
"y",
"[",
"0",
"]",
"]",
")",
")",
"line",
".",
"set_data",
"(",
"x",
",",
"y",
")",
"def",
"set_varlabels",
"(",
"self",
",",
"labels",
")",
":",
"self",
".",
"set_thetagrids",
"(",
"np",
".",
"degrees",
"(",
"theta",
")",
",",
"labels",
")",
"def",
"_gen_axes_patch",
"(",
"self",
")",
":",
"return",
"self",
".",
"draw_patch",
"(",
")",
"def",
"_gen_axes_spines",
"(",
"self",
")",
":",
"if",
"frame",
"==",
"'circle'",
":",
"return",
"PolarAxes",
".",
"_gen_axes_spines",
"(",
"self",
")",
"# The following is a hack to get the spines (i.e. the axes frame)",
"# to draw correctly for a polygon frame.",
"# spine_type must be 'left', 'right', 'top', 'bottom', or `circle`.",
"spine_type",
"=",
"'circle'",
"verts",
"=",
"unit_poly_verts",
"(",
"theta",
")",
"# close off polygon by repeating first vertex",
"verts",
".",
"append",
"(",
"verts",
"[",
"0",
"]",
")",
"path",
"=",
"Path",
"(",
"verts",
")",
"spine",
"=",
"Spine",
"(",
"self",
",",
"spine_type",
",",
"path",
")",
"spine",
".",
"set_transform",
"(",
"self",
".",
"transAxes",
")",
"return",
"{",
"'polar'",
":",
"spine",
"}",
"register_projection",
"(",
"RadarAxes",
")",
"return",
"theta"
] | Create a radar chart with `num_vars` axes.
This function creates a RadarAxes projection and registers it.
Parameters
----------
num_vars : int
Number of variables for radar chart.
frame : {'circle' | 'polygon'}
Shape of frame surrounding axes. | [
"Create",
"a",
"radar",
"chart",
"with",
"num_vars",
"axes",
"."
] | f12f46724295b57c4859e6acf7eab580fc355eb1 | https://github.com/insilicolife/micti/blob/f12f46724295b57c4859e6acf7eab580fc355eb1/MICTI/radarPlot.py#L9-L90 |
251,112 | insilicolife/micti | MICTI/radarPlot.py | unit_poly_verts | def unit_poly_verts(theta):
"""Return vertices of polygon for subplot axes.
This polygon is circumscribed by a unit circle centered at (0.5, 0.5)
"""
x0, y0, r = [0.5] * 3
verts = [(r*np.cos(t) + x0, r*np.sin(t) + y0) for t in theta]
return verts | python | def unit_poly_verts(theta):
"""Return vertices of polygon for subplot axes.
This polygon is circumscribed by a unit circle centered at (0.5, 0.5)
"""
x0, y0, r = [0.5] * 3
verts = [(r*np.cos(t) + x0, r*np.sin(t) + y0) for t in theta]
return verts | [
"def",
"unit_poly_verts",
"(",
"theta",
")",
":",
"x0",
",",
"y0",
",",
"r",
"=",
"[",
"0.5",
"]",
"*",
"3",
"verts",
"=",
"[",
"(",
"r",
"*",
"np",
".",
"cos",
"(",
"t",
")",
"+",
"x0",
",",
"r",
"*",
"np",
".",
"sin",
"(",
"t",
")",
"+",
"y0",
")",
"for",
"t",
"in",
"theta",
"]",
"return",
"verts"
] | Return vertices of polygon for subplot axes.
This polygon is circumscribed by a unit circle centered at (0.5, 0.5) | [
"Return",
"vertices",
"of",
"polygon",
"for",
"subplot",
"axes",
"."
] | f12f46724295b57c4859e6acf7eab580fc355eb1 | https://github.com/insilicolife/micti/blob/f12f46724295b57c4859e6acf7eab580fc355eb1/MICTI/radarPlot.py#L93-L100 |
251,113 | alfred82santa/dirty-loader | dirty_loader/factories.py | register_logging_factories | def register_logging_factories(loader):
"""
Registers default factories for logging standard package.
:param loader: Loader where you want register default logging factories
"""
loader.register_factory(logging.Logger, LoggerFactory)
loader.register_factory(logging.Handler, LoggingHandlerFactory) | python | def register_logging_factories(loader):
"""
Registers default factories for logging standard package.
:param loader: Loader where you want register default logging factories
"""
loader.register_factory(logging.Logger, LoggerFactory)
loader.register_factory(logging.Handler, LoggingHandlerFactory) | [
"def",
"register_logging_factories",
"(",
"loader",
")",
":",
"loader",
".",
"register_factory",
"(",
"logging",
".",
"Logger",
",",
"LoggerFactory",
")",
"loader",
".",
"register_factory",
"(",
"logging",
".",
"Handler",
",",
"LoggingHandlerFactory",
")"
] | Registers default factories for logging standard package.
:param loader: Loader where you want register default logging factories | [
"Registers",
"default",
"factories",
"for",
"logging",
"standard",
"package",
"."
] | 0d7895e3c84a0c197d804ce31305c5cba4c512e4 | https://github.com/alfred82santa/dirty-loader/blob/0d7895e3c84a0c197d804ce31305c5cba4c512e4/dirty_loader/factories.py#L91-L98 |
251,114 | w1ll1am23/pubnubsub-handler | pubnubsubhandler.py | PubNubSubscriptionHandler.add_subscription | def add_subscription(self, channel, callback_function):
"""
Add a channel to subscribe to and a callback function to
run when the channel receives an update.
If channel already exists, create a new "subscription"
and append another callback function.
Args:
channel (str): The channel to add a subscription too.
callback_function (func): The function to run on an
update to the passed in channel.
"""
if channel not in CHANNELS:
CHANNELS.append(channel)
SUBSCRIPTIONS[channel] = [callback_function]
else:
SUBSCRIPTIONS[channel].append(callback_function)
# If a channel gets added after subscription has already been called
# call subscribe on the individual channel, here.
if self._subscribed:
_LOGGER.info("New channel added after main subscribe call.")
self._pubnub.subscribe().channels(channel).execute() | python | def add_subscription(self, channel, callback_function):
"""
Add a channel to subscribe to and a callback function to
run when the channel receives an update.
If channel already exists, create a new "subscription"
and append another callback function.
Args:
channel (str): The channel to add a subscription too.
callback_function (func): The function to run on an
update to the passed in channel.
"""
if channel not in CHANNELS:
CHANNELS.append(channel)
SUBSCRIPTIONS[channel] = [callback_function]
else:
SUBSCRIPTIONS[channel].append(callback_function)
# If a channel gets added after subscription has already been called
# call subscribe on the individual channel, here.
if self._subscribed:
_LOGGER.info("New channel added after main subscribe call.")
self._pubnub.subscribe().channels(channel).execute() | [
"def",
"add_subscription",
"(",
"self",
",",
"channel",
",",
"callback_function",
")",
":",
"if",
"channel",
"not",
"in",
"CHANNELS",
":",
"CHANNELS",
".",
"append",
"(",
"channel",
")",
"SUBSCRIPTIONS",
"[",
"channel",
"]",
"=",
"[",
"callback_function",
"]",
"else",
":",
"SUBSCRIPTIONS",
"[",
"channel",
"]",
".",
"append",
"(",
"callback_function",
")",
"# If a channel gets added after subscription has already been called",
"# call subscribe on the individual channel, here.",
"if",
"self",
".",
"_subscribed",
":",
"_LOGGER",
".",
"info",
"(",
"\"New channel added after main subscribe call.\"",
")",
"self",
".",
"_pubnub",
".",
"subscribe",
"(",
")",
".",
"channels",
"(",
"channel",
")",
".",
"execute",
"(",
")"
] | Add a channel to subscribe to and a callback function to
run when the channel receives an update.
If channel already exists, create a new "subscription"
and append another callback function.
Args:
channel (str): The channel to add a subscription too.
callback_function (func): The function to run on an
update to the passed in channel. | [
"Add",
"a",
"channel",
"to",
"subscribe",
"to",
"and",
"a",
"callback",
"function",
"to",
"run",
"when",
"the",
"channel",
"receives",
"an",
"update",
".",
"If",
"channel",
"already",
"exists",
"create",
"a",
"new",
"subscription",
"and",
"append",
"another",
"callback",
"function",
"."
] | 0283c191d6042727f55a748f69a485d751f4cacb | https://github.com/w1ll1am23/pubnubsub-handler/blob/0283c191d6042727f55a748f69a485d751f4cacb/pubnubsubhandler.py#L56-L77 |
251,115 | w1ll1am23/pubnubsub-handler | pubnubsubhandler.py | PubNubSubscriptionHandler._run_keep_alive | def _run_keep_alive(self):
"""
Start a new thread timer to keep the keep_alive_function running
every keep_alive seconds.
"""
threading.Timer(self._keep_alive, self._run_keep_alive).start()
_LOGGER.info("Polling the API")
# This may or may not return something
self._keep_alive_function() | python | def _run_keep_alive(self):
"""
Start a new thread timer to keep the keep_alive_function running
every keep_alive seconds.
"""
threading.Timer(self._keep_alive, self._run_keep_alive).start()
_LOGGER.info("Polling the API")
# This may or may not return something
self._keep_alive_function() | [
"def",
"_run_keep_alive",
"(",
"self",
")",
":",
"threading",
".",
"Timer",
"(",
"self",
".",
"_keep_alive",
",",
"self",
".",
"_run_keep_alive",
")",
".",
"start",
"(",
")",
"_LOGGER",
".",
"info",
"(",
"\"Polling the API\"",
")",
"# This may or may not return something",
"self",
".",
"_keep_alive_function",
"(",
")"
] | Start a new thread timer to keep the keep_alive_function running
every keep_alive seconds. | [
"Start",
"a",
"new",
"thread",
"timer",
"to",
"keep",
"the",
"keep_alive_function",
"running",
"every",
"keep_alive",
"seconds",
"."
] | 0283c191d6042727f55a748f69a485d751f4cacb | https://github.com/w1ll1am23/pubnubsub-handler/blob/0283c191d6042727f55a748f69a485d751f4cacb/pubnubsubhandler.py#L87-L95 |
251,116 | w1ll1am23/pubnubsub-handler | pubnubsubhandler.py | PubNubSubscriptionHandler.unsubscribe | def unsubscribe(self):
"""
Completly stop all pubnub operations.
"""
_LOGGER.info("PubNub unsubscribing")
self._pubnub.unsubscribe_all()
self._pubnub.stop()
self._pubnub = None | python | def unsubscribe(self):
"""
Completly stop all pubnub operations.
"""
_LOGGER.info("PubNub unsubscribing")
self._pubnub.unsubscribe_all()
self._pubnub.stop()
self._pubnub = None | [
"def",
"unsubscribe",
"(",
"self",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"PubNub unsubscribing\"",
")",
"self",
".",
"_pubnub",
".",
"unsubscribe_all",
"(",
")",
"self",
".",
"_pubnub",
".",
"stop",
"(",
")",
"self",
".",
"_pubnub",
"=",
"None"
] | Completly stop all pubnub operations. | [
"Completly",
"stop",
"all",
"pubnub",
"operations",
"."
] | 0283c191d6042727f55a748f69a485d751f4cacb | https://github.com/w1ll1am23/pubnubsub-handler/blob/0283c191d6042727f55a748f69a485d751f4cacb/pubnubsubhandler.py#L97-L104 |
251,117 | w1ll1am23/pubnubsub-handler | pubnubsubhandler.py | PubNubSubscriptionHandler._subscribe | def _subscribe(self):
"""
Start the subscription to the channel list.
If self._keep_alive_function isn't None start timer thread to
run self._keep_alive_function every self._keep_alive amount of seconds.
"""
_LOGGER.info("PubNub subscribing")
self._pubnub.subscribe().channels(CHANNELS).execute()
if self._keep_alive_function is not None:
threading.Timer(self._keep_alive, self._run_keep_alive).start()
self._subscribed = True | python | def _subscribe(self):
"""
Start the subscription to the channel list.
If self._keep_alive_function isn't None start timer thread to
run self._keep_alive_function every self._keep_alive amount of seconds.
"""
_LOGGER.info("PubNub subscribing")
self._pubnub.subscribe().channels(CHANNELS).execute()
if self._keep_alive_function is not None:
threading.Timer(self._keep_alive, self._run_keep_alive).start()
self._subscribed = True | [
"def",
"_subscribe",
"(",
"self",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"PubNub subscribing\"",
")",
"self",
".",
"_pubnub",
".",
"subscribe",
"(",
")",
".",
"channels",
"(",
"CHANNELS",
")",
".",
"execute",
"(",
")",
"if",
"self",
".",
"_keep_alive_function",
"is",
"not",
"None",
":",
"threading",
".",
"Timer",
"(",
"self",
".",
"_keep_alive",
",",
"self",
".",
"_run_keep_alive",
")",
".",
"start",
"(",
")",
"self",
".",
"_subscribed",
"=",
"True"
] | Start the subscription to the channel list.
If self._keep_alive_function isn't None start timer thread to
run self._keep_alive_function every self._keep_alive amount of seconds. | [
"Start",
"the",
"subscription",
"to",
"the",
"channel",
"list",
".",
"If",
"self",
".",
"_keep_alive_function",
"isn",
"t",
"None",
"start",
"timer",
"thread",
"to",
"run",
"self",
".",
"_keep_alive_function",
"every",
"self",
".",
"_keep_alive",
"amount",
"of",
"seconds",
"."
] | 0283c191d6042727f55a748f69a485d751f4cacb | https://github.com/w1ll1am23/pubnubsub-handler/blob/0283c191d6042727f55a748f69a485d751f4cacb/pubnubsubhandler.py#L106-L116 |
251,118 | w1ll1am23/pubnubsub-handler | pubnubsubhandler.py | PubNubSubCallback.status | def status(self, pubnub, status):
"""
Things to do on different status updates.
"""
if status.operation == PNOperationType.PNSubscribeOperation \
or status.operation == PNOperationType.PNUnsubscribeOperation:
if status.category == PNStatusCategory.PNConnectedCategory:
# This is expected for a subscribe, this means there is no error or issue whatsoever
_LOGGER.info("PubNub connected")
elif status.category == PNStatusCategory.PNReconnectedCategory:
# This usually occurs if subscribe temporarily fails but reconnects. This means
# there was an error but there is no longer any issue
_LOGGER.info("PubNub reconnected")
elif status.category == PNStatusCategory.PNDisconnectedCategory:
# This is the expected category for an unsubscribe. This means there
# was no error in unsubscribing from everything
_LOGGER.info("PubNub unsubscribed")
elif status.category == PNStatusCategory.PNUnexpectedDisconnectCategory:
# This is usually an issue with the internet connection, this is an error, handle appropriately
# retry will be called automatically
_LOGGER.info("PubNub disconnected (lost internet?)")
else:
# This is usually an issue with the internet connection, this is an error, handle appropriately
# retry will be called automatically
_LOGGER.info("PubNub disconnected (lost internet?)")
elif status.operation == PNOperationType.PNHeartbeatOperation:
# Heartbeat operations can in fact have errors, so it is important to check first for an error.
# For more information on how to configure heartbeat notifications through the status
# PNObjectEventListener callback, consult <link to the PNCONFIGURATION heartbeart config>
if status.is_error():
# There was an error with the heartbeat operation, handle here
_LOGGER.info("PubNub failed heartbeat")
else:
# Heartbeat operation was successful
_LOGGER.info("PubNub heartbeat")
else:
pass | python | def status(self, pubnub, status):
"""
Things to do on different status updates.
"""
if status.operation == PNOperationType.PNSubscribeOperation \
or status.operation == PNOperationType.PNUnsubscribeOperation:
if status.category == PNStatusCategory.PNConnectedCategory:
# This is expected for a subscribe, this means there is no error or issue whatsoever
_LOGGER.info("PubNub connected")
elif status.category == PNStatusCategory.PNReconnectedCategory:
# This usually occurs if subscribe temporarily fails but reconnects. This means
# there was an error but there is no longer any issue
_LOGGER.info("PubNub reconnected")
elif status.category == PNStatusCategory.PNDisconnectedCategory:
# This is the expected category for an unsubscribe. This means there
# was no error in unsubscribing from everything
_LOGGER.info("PubNub unsubscribed")
elif status.category == PNStatusCategory.PNUnexpectedDisconnectCategory:
# This is usually an issue with the internet connection, this is an error, handle appropriately
# retry will be called automatically
_LOGGER.info("PubNub disconnected (lost internet?)")
else:
# This is usually an issue with the internet connection, this is an error, handle appropriately
# retry will be called automatically
_LOGGER.info("PubNub disconnected (lost internet?)")
elif status.operation == PNOperationType.PNHeartbeatOperation:
# Heartbeat operations can in fact have errors, so it is important to check first for an error.
# For more information on how to configure heartbeat notifications through the status
# PNObjectEventListener callback, consult <link to the PNCONFIGURATION heartbeart config>
if status.is_error():
# There was an error with the heartbeat operation, handle here
_LOGGER.info("PubNub failed heartbeat")
else:
# Heartbeat operation was successful
_LOGGER.info("PubNub heartbeat")
else:
pass | [
"def",
"status",
"(",
"self",
",",
"pubnub",
",",
"status",
")",
":",
"if",
"status",
".",
"operation",
"==",
"PNOperationType",
".",
"PNSubscribeOperation",
"or",
"status",
".",
"operation",
"==",
"PNOperationType",
".",
"PNUnsubscribeOperation",
":",
"if",
"status",
".",
"category",
"==",
"PNStatusCategory",
".",
"PNConnectedCategory",
":",
"# This is expected for a subscribe, this means there is no error or issue whatsoever",
"_LOGGER",
".",
"info",
"(",
"\"PubNub connected\"",
")",
"elif",
"status",
".",
"category",
"==",
"PNStatusCategory",
".",
"PNReconnectedCategory",
":",
"# This usually occurs if subscribe temporarily fails but reconnects. This means",
"# there was an error but there is no longer any issue",
"_LOGGER",
".",
"info",
"(",
"\"PubNub reconnected\"",
")",
"elif",
"status",
".",
"category",
"==",
"PNStatusCategory",
".",
"PNDisconnectedCategory",
":",
"# This is the expected category for an unsubscribe. This means there",
"# was no error in unsubscribing from everything",
"_LOGGER",
".",
"info",
"(",
"\"PubNub unsubscribed\"",
")",
"elif",
"status",
".",
"category",
"==",
"PNStatusCategory",
".",
"PNUnexpectedDisconnectCategory",
":",
"# This is usually an issue with the internet connection, this is an error, handle appropriately",
"# retry will be called automatically",
"_LOGGER",
".",
"info",
"(",
"\"PubNub disconnected (lost internet?)\"",
")",
"else",
":",
"# This is usually an issue with the internet connection, this is an error, handle appropriately",
"# retry will be called automatically",
"_LOGGER",
".",
"info",
"(",
"\"PubNub disconnected (lost internet?)\"",
")",
"elif",
"status",
".",
"operation",
"==",
"PNOperationType",
".",
"PNHeartbeatOperation",
":",
"# Heartbeat operations can in fact have errors, so it is important to check first for an error.",
"# For more information on how to configure heartbeat notifications through the status",
"# PNObjectEventListener callback, consult <link to the PNCONFIGURATION heartbeart config>",
"if",
"status",
".",
"is_error",
"(",
")",
":",
"# There was an error with the heartbeat operation, handle here",
"_LOGGER",
".",
"info",
"(",
"\"PubNub failed heartbeat\"",
")",
"else",
":",
"# Heartbeat operation was successful",
"_LOGGER",
".",
"info",
"(",
"\"PubNub heartbeat\"",
")",
"else",
":",
"pass"
] | Things to do on different status updates. | [
"Things",
"to",
"do",
"on",
"different",
"status",
"updates",
"."
] | 0283c191d6042727f55a748f69a485d751f4cacb | https://github.com/w1ll1am23/pubnubsub-handler/blob/0283c191d6042727f55a748f69a485d751f4cacb/pubnubsubhandler.py#L124-L160 |
251,119 | dstufft/crust | crust/query.py | Query.clone | def clone(self, klass=None, memo=None, **kwargs):
"""
Creates a copy of the current instance. The 'kwargs' parameter can be
used by clients to update attributes after copying has taken place.
"""
obj = Empty()
obj.__class__ = klass or self.__class__
obj.resource = self.resource
obj.filters = self.filters.copy()
obj.order_by = self.order_by
obj.low_mark = self.low_mark
obj.high_mark = self.high_mark
obj.__dict__.update(kwargs)
return obj | python | def clone(self, klass=None, memo=None, **kwargs):
"""
Creates a copy of the current instance. The 'kwargs' parameter can be
used by clients to update attributes after copying has taken place.
"""
obj = Empty()
obj.__class__ = klass or self.__class__
obj.resource = self.resource
obj.filters = self.filters.copy()
obj.order_by = self.order_by
obj.low_mark = self.low_mark
obj.high_mark = self.high_mark
obj.__dict__.update(kwargs)
return obj | [
"def",
"clone",
"(",
"self",
",",
"klass",
"=",
"None",
",",
"memo",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"obj",
"=",
"Empty",
"(",
")",
"obj",
".",
"__class__",
"=",
"klass",
"or",
"self",
".",
"__class__",
"obj",
".",
"resource",
"=",
"self",
".",
"resource",
"obj",
".",
"filters",
"=",
"self",
".",
"filters",
".",
"copy",
"(",
")",
"obj",
".",
"order_by",
"=",
"self",
".",
"order_by",
"obj",
".",
"low_mark",
"=",
"self",
".",
"low_mark",
"obj",
".",
"high_mark",
"=",
"self",
".",
"high_mark",
"obj",
".",
"__dict__",
".",
"update",
"(",
"kwargs",
")",
"return",
"obj"
] | Creates a copy of the current instance. The 'kwargs' parameter can be
used by clients to update attributes after copying has taken place. | [
"Creates",
"a",
"copy",
"of",
"the",
"current",
"instance",
".",
"The",
"kwargs",
"parameter",
"can",
"be",
"used",
"by",
"clients",
"to",
"update",
"attributes",
"after",
"copying",
"has",
"taken",
"place",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/query.py#L35-L53 |
251,120 | dstufft/crust | crust/query.py | Query.results | def results(self, limit=100):
"""
Yields the results from the API, efficiently handling the pagination and
properly passing all paramaters.
"""
limited = True if self.high_mark is not None else False
rmax = self.high_mark - self.low_mark if limited else None
rnum = 0
params = self.get_params()
params["offset"] = self.low_mark
params["limit"] = limit
while not limited and rmax is None or rnum < rmax:
if limited or rmax is not None:
rleft = rmax - rnum
params["limit"] = rleft if rleft < limit else limit
r = self.resource._meta.api.http_resource("GET", self.resource._meta.resource_name, params=params)
data = self.resource._meta.api.resource_deserialize(r.text)
if not limited:
rmax = data["meta"]["total_count"]
if data["meta"]["total_count"] < rmax:
rmax = data["meta"]["total_count"]
params["offset"] = data["meta"]["offset"] + data["meta"]["limit"]
for item in data["objects"]:
rnum += 1
yield item | python | def results(self, limit=100):
"""
Yields the results from the API, efficiently handling the pagination and
properly passing all paramaters.
"""
limited = True if self.high_mark is not None else False
rmax = self.high_mark - self.low_mark if limited else None
rnum = 0
params = self.get_params()
params["offset"] = self.low_mark
params["limit"] = limit
while not limited and rmax is None or rnum < rmax:
if limited or rmax is not None:
rleft = rmax - rnum
params["limit"] = rleft if rleft < limit else limit
r = self.resource._meta.api.http_resource("GET", self.resource._meta.resource_name, params=params)
data = self.resource._meta.api.resource_deserialize(r.text)
if not limited:
rmax = data["meta"]["total_count"]
if data["meta"]["total_count"] < rmax:
rmax = data["meta"]["total_count"]
params["offset"] = data["meta"]["offset"] + data["meta"]["limit"]
for item in data["objects"]:
rnum += 1
yield item | [
"def",
"results",
"(",
"self",
",",
"limit",
"=",
"100",
")",
":",
"limited",
"=",
"True",
"if",
"self",
".",
"high_mark",
"is",
"not",
"None",
"else",
"False",
"rmax",
"=",
"self",
".",
"high_mark",
"-",
"self",
".",
"low_mark",
"if",
"limited",
"else",
"None",
"rnum",
"=",
"0",
"params",
"=",
"self",
".",
"get_params",
"(",
")",
"params",
"[",
"\"offset\"",
"]",
"=",
"self",
".",
"low_mark",
"params",
"[",
"\"limit\"",
"]",
"=",
"limit",
"while",
"not",
"limited",
"and",
"rmax",
"is",
"None",
"or",
"rnum",
"<",
"rmax",
":",
"if",
"limited",
"or",
"rmax",
"is",
"not",
"None",
":",
"rleft",
"=",
"rmax",
"-",
"rnum",
"params",
"[",
"\"limit\"",
"]",
"=",
"rleft",
"if",
"rleft",
"<",
"limit",
"else",
"limit",
"r",
"=",
"self",
".",
"resource",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"GET\"",
",",
"self",
".",
"resource",
".",
"_meta",
".",
"resource_name",
",",
"params",
"=",
"params",
")",
"data",
"=",
"self",
".",
"resource",
".",
"_meta",
".",
"api",
".",
"resource_deserialize",
"(",
"r",
".",
"text",
")",
"if",
"not",
"limited",
":",
"rmax",
"=",
"data",
"[",
"\"meta\"",
"]",
"[",
"\"total_count\"",
"]",
"if",
"data",
"[",
"\"meta\"",
"]",
"[",
"\"total_count\"",
"]",
"<",
"rmax",
":",
"rmax",
"=",
"data",
"[",
"\"meta\"",
"]",
"[",
"\"total_count\"",
"]",
"params",
"[",
"\"offset\"",
"]",
"=",
"data",
"[",
"\"meta\"",
"]",
"[",
"\"offset\"",
"]",
"+",
"data",
"[",
"\"meta\"",
"]",
"[",
"\"limit\"",
"]",
"for",
"item",
"in",
"data",
"[",
"\"objects\"",
"]",
":",
"rnum",
"+=",
"1",
"yield",
"item"
] | Yields the results from the API, efficiently handling the pagination and
properly passing all paramaters. | [
"Yields",
"the",
"results",
"from",
"the",
"API",
"efficiently",
"handling",
"the",
"pagination",
"and",
"properly",
"passing",
"all",
"paramaters",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/query.py#L101-L132 |
251,121 | dstufft/crust | crust/query.py | Query.delete | def delete(self):
"""
Deletes the results of this query, it first fetches all the items to be
deletes and then issues a PATCH against the list uri of the resource.
"""
uris = [obj["resource_uri"] for obj in self.results()]
data = self.resource._meta.api.resource_serialize({"objects": [], "deleted_objects": uris})
self.resource._meta.api.http_resource("PATCH", self.resource._meta.resource_name, data=data)
return len(uris) | python | def delete(self):
"""
Deletes the results of this query, it first fetches all the items to be
deletes and then issues a PATCH against the list uri of the resource.
"""
uris = [obj["resource_uri"] for obj in self.results()]
data = self.resource._meta.api.resource_serialize({"objects": [], "deleted_objects": uris})
self.resource._meta.api.http_resource("PATCH", self.resource._meta.resource_name, data=data)
return len(uris) | [
"def",
"delete",
"(",
"self",
")",
":",
"uris",
"=",
"[",
"obj",
"[",
"\"resource_uri\"",
"]",
"for",
"obj",
"in",
"self",
".",
"results",
"(",
")",
"]",
"data",
"=",
"self",
".",
"resource",
".",
"_meta",
".",
"api",
".",
"resource_serialize",
"(",
"{",
"\"objects\"",
":",
"[",
"]",
",",
"\"deleted_objects\"",
":",
"uris",
"}",
")",
"self",
".",
"resource",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"PATCH\"",
",",
"self",
".",
"resource",
".",
"_meta",
".",
"resource_name",
",",
"data",
"=",
"data",
")",
"return",
"len",
"(",
"uris",
")"
] | Deletes the results of this query, it first fetches all the items to be
deletes and then issues a PATCH against the list uri of the resource. | [
"Deletes",
"the",
"results",
"of",
"this",
"query",
"it",
"first",
"fetches",
"all",
"the",
"items",
"to",
"be",
"deletes",
"and",
"then",
"issues",
"a",
"PATCH",
"against",
"the",
"list",
"uri",
"of",
"the",
"resource",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/query.py#L134-L143 |
251,122 | dstufft/crust | crust/query.py | Query.get_count | def get_count(self):
"""
Gets the total_count using the current filter constraints.
"""
params = self.get_params()
params["offset"] = self.low_mark
params["limit"] = 1
r = self.resource._meta.api.http_resource("GET", self.resource._meta.resource_name, params=params)
data = self.resource._meta.api.resource_deserialize(r.text)
number = data["meta"]["total_count"]
# Apply offset and limit constraints manually, since using limit/offset
# in the API doesn't change the total_count output.
number = max(0, number - self.low_mark)
if self.high_mark is not None:
number = min(number, self.high_mark - self.low_mark)
return number | python | def get_count(self):
"""
Gets the total_count using the current filter constraints.
"""
params = self.get_params()
params["offset"] = self.low_mark
params["limit"] = 1
r = self.resource._meta.api.http_resource("GET", self.resource._meta.resource_name, params=params)
data = self.resource._meta.api.resource_deserialize(r.text)
number = data["meta"]["total_count"]
# Apply offset and limit constraints manually, since using limit/offset
# in the API doesn't change the total_count output.
number = max(0, number - self.low_mark)
if self.high_mark is not None:
number = min(number, self.high_mark - self.low_mark)
return number | [
"def",
"get_count",
"(",
"self",
")",
":",
"params",
"=",
"self",
".",
"get_params",
"(",
")",
"params",
"[",
"\"offset\"",
"]",
"=",
"self",
".",
"low_mark",
"params",
"[",
"\"limit\"",
"]",
"=",
"1",
"r",
"=",
"self",
".",
"resource",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"GET\"",
",",
"self",
".",
"resource",
".",
"_meta",
".",
"resource_name",
",",
"params",
"=",
"params",
")",
"data",
"=",
"self",
".",
"resource",
".",
"_meta",
".",
"api",
".",
"resource_deserialize",
"(",
"r",
".",
"text",
")",
"number",
"=",
"data",
"[",
"\"meta\"",
"]",
"[",
"\"total_count\"",
"]",
"# Apply offset and limit constraints manually, since using limit/offset",
"# in the API doesn't change the total_count output.",
"number",
"=",
"max",
"(",
"0",
",",
"number",
"-",
"self",
".",
"low_mark",
")",
"if",
"self",
".",
"high_mark",
"is",
"not",
"None",
":",
"number",
"=",
"min",
"(",
"number",
",",
"self",
".",
"high_mark",
"-",
"self",
".",
"low_mark",
")",
"return",
"number"
] | Gets the total_count using the current filter constraints. | [
"Gets",
"the",
"total_count",
"using",
"the",
"current",
"filter",
"constraints",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/query.py#L157-L176 |
251,123 | dstufft/crust | crust/query.py | QuerySet.iterator | def iterator(self):
"""
An iterator over the results from applying this QuerySet to the api.
"""
for item in self.query.results():
obj = self.resource(**item)
yield obj | python | def iterator(self):
"""
An iterator over the results from applying this QuerySet to the api.
"""
for item in self.query.results():
obj = self.resource(**item)
yield obj | [
"def",
"iterator",
"(",
"self",
")",
":",
"for",
"item",
"in",
"self",
".",
"query",
".",
"results",
"(",
")",
":",
"obj",
"=",
"self",
".",
"resource",
"(",
"*",
"*",
"item",
")",
"yield",
"obj"
] | An iterator over the results from applying this QuerySet to the api. | [
"An",
"iterator",
"over",
"the",
"results",
"from",
"applying",
"this",
"QuerySet",
"to",
"the",
"api",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/query.py#L335-L343 |
251,124 | dstufft/crust | crust/query.py | QuerySet.count | def count(self):
"""
Returns the number of records as an integer.
If the QuerySet is already fully cached this simply returns the length
of the cached results set to avoid an api call.
"""
if self._result_cache is not None and not self._iter:
return len(self._result_cache)
return self.query.get_count() | python | def count(self):
"""
Returns the number of records as an integer.
If the QuerySet is already fully cached this simply returns the length
of the cached results set to avoid an api call.
"""
if self._result_cache is not None and not self._iter:
return len(self._result_cache)
return self.query.get_count() | [
"def",
"count",
"(",
"self",
")",
":",
"if",
"self",
".",
"_result_cache",
"is",
"not",
"None",
"and",
"not",
"self",
".",
"_iter",
":",
"return",
"len",
"(",
"self",
".",
"_result_cache",
")",
"return",
"self",
".",
"query",
".",
"get_count",
"(",
")"
] | Returns the number of records as an integer.
If the QuerySet is already fully cached this simply returns the length
of the cached results set to avoid an api call. | [
"Returns",
"the",
"number",
"of",
"records",
"as",
"an",
"integer",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/query.py#L345-L355 |
251,125 | dstufft/crust | crust/query.py | QuerySet.create | def create(self, **kwargs):
"""
Creates a new object with the given kwargs, saving it to the api
and returning the created object.
"""
obj = self.resource(**kwargs)
obj.save(force_insert=True)
return obj | python | def create(self, **kwargs):
"""
Creates a new object with the given kwargs, saving it to the api
and returning the created object.
"""
obj = self.resource(**kwargs)
obj.save(force_insert=True)
return obj | [
"def",
"create",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"obj",
"=",
"self",
".",
"resource",
"(",
"*",
"*",
"kwargs",
")",
"obj",
".",
"save",
"(",
"force_insert",
"=",
"True",
")",
"return",
"obj"
] | Creates a new object with the given kwargs, saving it to the api
and returning the created object. | [
"Creates",
"a",
"new",
"object",
"with",
"the",
"given",
"kwargs",
"saving",
"it",
"to",
"the",
"api",
"and",
"returning",
"the",
"created",
"object",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/query.py#L382-L389 |
251,126 | treycucco/bidon | bidon/util/__init__.py | exclude | def exclude(source, keys, *, transform=None):
"""Returns a dictionary excluding keys from a source dictionary.
:source: a dictionary
  :keys: a set of keys, or a predicate function that accepts a key
:transform: a function that transforms the values
"""
check = keys if callable(keys) else lambda key: key in keys
return {key: transform(source[key]) if transform else source[key]
for key in source if not check(key)} | python | def exclude(source, keys, *, transform=None):
"""Returns a dictionary excluding keys from a source dictionary.
:source: a dictionary
  :keys: a set of keys, or a predicate function that accepts a key
:transform: a function that transforms the values
"""
check = keys if callable(keys) else lambda key: key in keys
return {key: transform(source[key]) if transform else source[key]
for key in source if not check(key)} | [
"def",
"exclude",
"(",
"source",
",",
"keys",
",",
"*",
",",
"transform",
"=",
"None",
")",
":",
"check",
"=",
"keys",
"if",
"callable",
"(",
"keys",
")",
"else",
"lambda",
"key",
":",
"key",
"in",
"keys",
"return",
"{",
"key",
":",
"transform",
"(",
"source",
"[",
"key",
"]",
")",
"if",
"transform",
"else",
"source",
"[",
"key",
"]",
"for",
"key",
"in",
"source",
"if",
"not",
"check",
"(",
"key",
")",
"}"
] | Returns a dictionary excluding keys from a source dictionary.
:source: a dictionary
  :keys: a set of keys, or a predicate function that accepts a key
:transform: a function that transforms the values | [
"Returns",
"a",
"dictionary",
"excluding",
"keys",
"from",
"a",
"source",
"dictionary",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L32-L41 |
251,127 | treycucco/bidon | bidon/util/__init__.py | pick | def pick(source, keys, *, transform=None):
"""Returns a dictionary including only specified keys from a source dictionary.
:source: a dictionary
  :keys: a set of keys, or a predicate function that accepts a key
:transform: a function that transforms the values
"""
check = keys if callable(keys) else lambda key: key in keys
return {key: transform(source[key]) if transform else source[key]
for key in source if check(key)} | python | def pick(source, keys, *, transform=None):
"""Returns a dictionary including only specified keys from a source dictionary.
:source: a dictionary
  :keys: a set of keys, or a predicate function that accepts a key
:transform: a function that transforms the values
"""
check = keys if callable(keys) else lambda key: key in keys
return {key: transform(source[key]) if transform else source[key]
for key in source if check(key)} | [
"def",
"pick",
"(",
"source",
",",
"keys",
",",
"*",
",",
"transform",
"=",
"None",
")",
":",
"check",
"=",
"keys",
"if",
"callable",
"(",
"keys",
")",
"else",
"lambda",
"key",
":",
"key",
"in",
"keys",
"return",
"{",
"key",
":",
"transform",
"(",
"source",
"[",
"key",
"]",
")",
"if",
"transform",
"else",
"source",
"[",
"key",
"]",
"for",
"key",
"in",
"source",
"if",
"check",
"(",
"key",
")",
"}"
] | Returns a dictionary including only specified keys from a source dictionary.
:source: a dictionary
  :keys: a set of keys, or a predicate function that accepts a key
:transform: a function that transforms the values | [
"Returns",
"a",
"dictionary",
"including",
"only",
"specified",
"keys",
"from",
"a",
"source",
"dictionary",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L44-L53 |
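A short usage sketch for the exclude and pick helpers shown in the two records above; the sample row, the str.upper transform, and the predicate are invented for illustration.

def exclude(source, keys, *, transform=None):
    # copied from the exclude record above
    check = keys if callable(keys) else lambda key: key in keys
    return {key: transform(source[key]) if transform else source[key]
            for key in source if not check(key)}

def pick(source, keys, *, transform=None):
    # copied from the pick record above
    check = keys if callable(keys) else lambda key: key in keys
    return {key: transform(source[key]) if transform else source[key]
            for key in source if check(key)}

row = {"id": 1, "name": "ada", "password": "secret"}   # hypothetical record

print(exclude(row, {"password"}))                      # {'id': 1, 'name': 'ada'}
print(pick(row, {"name"}, transform=str.upper))        # {'name': 'ADA'}
print(pick(row, lambda k: k.startswith("n")))          # {'name': 'ada'}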
251,128 | treycucco/bidon | bidon/util/__init__.py | json_default | def json_default(obj):
"""Convert an object to JSON, via the defaults set with register_json_default.
:obj: the object to convert
"""
for default in _JSON_DEFAULTS:
if default[0](obj):
return default[1](obj)
raise TypeError(repr(obj) + " is not JSON serializable") | python | def json_default(obj):
"""Convert an object to JSON, via the defaults set with register_json_default.
:obj: the object to convert
"""
for default in _JSON_DEFAULTS:
if default[0](obj):
return default[1](obj)
raise TypeError(repr(obj) + " is not JSON serializable") | [
"def",
"json_default",
"(",
"obj",
")",
":",
"for",
"default",
"in",
"_JSON_DEFAULTS",
":",
"if",
"default",
"[",
"0",
"]",
"(",
"obj",
")",
":",
"return",
"default",
"[",
"1",
"]",
"(",
"obj",
")",
"raise",
"TypeError",
"(",
"repr",
"(",
"obj",
")",
"+",
"\" is not JSON serializable\"",
")"
] | Convert an object to JSON, via the defaults set with register_json_default.
:obj: the object to convert | [
"Convert",
"an",
"object",
"to",
"JSON",
"via",
"the",
"defaults",
"set",
"with",
"register_json_default",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L67-L75 |
251,129 | treycucco/bidon | bidon/util/__init__.py | to_json | def to_json(obj, pretty=False):
"""Converts an object to JSON, using the defaults specified in register_json_default.
:obj: the object to convert to JSON
:pretty: if True, extra whitespace is added to make the output easier to read
"""
sort_keys = False
indent = None
separators = (",", ":")
if isinstance(pretty, tuple):
sort_keys, indent, separators = pretty
elif pretty is True:
sort_keys = True
indent = 2
separators = (", ", ": ")
return json.dumps(obj, sort_keys=sort_keys, indent=indent, separators=separators,
default=json_default) | python | def to_json(obj, pretty=False):
"""Converts an object to JSON, using the defaults specified in register_json_default.
:obj: the object to convert to JSON
:pretty: if True, extra whitespace is added to make the output easier to read
"""
sort_keys = False
indent = None
separators = (",", ":")
if isinstance(pretty, tuple):
sort_keys, indent, separators = pretty
elif pretty is True:
sort_keys = True
indent = 2
separators = (", ", ": ")
return json.dumps(obj, sort_keys=sort_keys, indent=indent, separators=separators,
default=json_default) | [
"def",
"to_json",
"(",
"obj",
",",
"pretty",
"=",
"False",
")",
":",
"sort_keys",
"=",
"False",
"indent",
"=",
"None",
"separators",
"=",
"(",
"\",\"",
",",
"\":\"",
")",
"if",
"isinstance",
"(",
"pretty",
",",
"tuple",
")",
":",
"sort_keys",
",",
"indent",
",",
"separators",
"=",
"pretty",
"elif",
"pretty",
"is",
"True",
":",
"sort_keys",
"=",
"True",
"indent",
"=",
"2",
"separators",
"=",
"(",
"\", \"",
",",
"\": \"",
")",
"return",
"json",
".",
"dumps",
"(",
"obj",
",",
"sort_keys",
"=",
"sort_keys",
",",
"indent",
"=",
"indent",
",",
"separators",
"=",
"separators",
",",
"default",
"=",
"json_default",
")"
] | Converts an object to JSON, using the defaults specified in register_json_default.
:obj: the object to convert to JSON
:pretty: if True, extra whitespace is added to make the output easier to read | [
"Converts",
"an",
"object",
"to",
"JSON",
"using",
"the",
"defaults",
"specified",
"in",
"register_json_default",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L78-L96 |
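The json_default/to_json pair above reads handlers from a module-level _JSON_DEFAULTS list of (predicate, converter) tuples; register_json_default is referenced but not included in these records, so the sketch below appends to the list directly as a stand-in for it.

import json
from datetime import datetime

_JSON_DEFAULTS = []   # (predicate, converter) pairs, normally filled by register_json_default

def json_default(obj):
    # copied from the json_default record above
    for default in _JSON_DEFAULTS:
        if default[0](obj):
            return default[1](obj)
    raise TypeError(repr(obj) + " is not JSON serializable")

def to_json(obj, pretty=False):
    # copied from the to_json record above
    sort_keys = False
    indent = None
    separators = (",", ":")
    if isinstance(pretty, tuple):
        sort_keys, indent, separators = pretty
    elif pretty is True:
        sort_keys = True
        indent = 2
        separators = (", ", ": ")
    return json.dumps(obj, sort_keys=sort_keys, indent=indent, separators=separators,
                      default=json_default)

# Teach the encoder about datetimes (hypothetical registration).
_JSON_DEFAULTS.append((lambda o: isinstance(o, datetime), lambda o: o.isoformat()))

print(to_json({"b": 1, "a": datetime(2020, 1, 1)}))         # compact separators
print(to_json({"b": 1, "a": datetime(2020, 1, 1)}, True))   # sorted keys, 2-space indent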
251,130 | treycucco/bidon | bidon/util/__init__.py | has_value | def has_value(obj, name):
"""A flexible method for getting values from objects by name.
returns:
- obj is None: (False, None)
- obj is dict: (name in obj, obj.get(name))
- obj hasattr(name): (True, getattr(obj, name))
- else: (False, None)
:obj: the object to pull values from
:name: the name to use when getting the value
"""
if obj is None:
return (False, None)
elif isinstance(obj, dict):
return (name in obj, obj.get(name))
elif hasattr(obj, name):
return (True, getattr(obj, name))
elif hasattr(obj, "__getitem__") and hasattr(obj, "__contains__") and name in obj:
return (True, obj[name])
else:
return (False, None) | python | def has_value(obj, name):
"""A flexible method for getting values from objects by name.
returns:
- obj is None: (False, None)
- obj is dict: (name in obj, obj.get(name))
- obj hasattr(name): (True, getattr(obj, name))
- else: (False, None)
:obj: the object to pull values from
:name: the name to use when getting the value
"""
if obj is None:
return (False, None)
elif isinstance(obj, dict):
return (name in obj, obj.get(name))
elif hasattr(obj, name):
return (True, getattr(obj, name))
elif hasattr(obj, "__getitem__") and hasattr(obj, "__contains__") and name in obj:
return (True, obj[name])
else:
return (False, None) | [
"def",
"has_value",
"(",
"obj",
",",
"name",
")",
":",
"if",
"obj",
"is",
"None",
":",
"return",
"(",
"False",
",",
"None",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"return",
"(",
"name",
"in",
"obj",
",",
"obj",
".",
"get",
"(",
"name",
")",
")",
"elif",
"hasattr",
"(",
"obj",
",",
"name",
")",
":",
"return",
"(",
"True",
",",
"getattr",
"(",
"obj",
",",
"name",
")",
")",
"elif",
"hasattr",
"(",
"obj",
",",
"\"__getitem__\"",
")",
"and",
"hasattr",
"(",
"obj",
",",
"\"__contains__\"",
")",
"and",
"name",
"in",
"obj",
":",
"return",
"(",
"True",
",",
"obj",
"[",
"name",
"]",
")",
"else",
":",
"return",
"(",
"False",
",",
"None",
")"
] | A flexible method for getting values from objects by name.
returns:
- obj is None: (False, None)
- obj is dict: (name in obj, obj.get(name))
- obj hasattr(name): (True, getattr(obj, name))
- else: (False, None)
:obj: the object to pull values from
:name: the name to use when getting the value | [
"A",
"flexible",
"method",
"for",
"getting",
"values",
"from",
"objects",
"by",
"name",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L99-L120 |
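A quick check of has_value from the record above against its main branches; the Point class is a made-up stand-in for any attribute-bearing object.

def has_value(obj, name):
    # copied from the has_value record above
    if obj is None:
        return (False, None)
    elif isinstance(obj, dict):
        return (name in obj, obj.get(name))
    elif hasattr(obj, name):
        return (True, getattr(obj, name))
    elif hasattr(obj, "__getitem__") and hasattr(obj, "__contains__") and name in obj:
        return (True, obj[name])
    else:
        return (False, None)

class Point:
    x = 3

print(has_value(None, "x"))                 # (False, None)
print(has_value({"x": 1, "y": None}, "y"))  # (True, None) -- key present, value is None
print(has_value(Point(), "x"))              # (True, 3)
print(has_value(Point(), "z"))              # (False, None)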
251,131 | treycucco/bidon | bidon/util/__init__.py | set_value | def set_value(obj, name, value):
"""A flexible method for setting a value on an object.
If the object implements __setitem__ (such as a dict) performs obj[name] = value, else performs
setattr(obj, name, value).
:obj: the object to set the value on
:name: the name to assign the value to
:value: the value to assign
"""
if hasattr(obj, "__setitem__"):
obj[name] = value
else:
setattr(obj, name, value) | python | def set_value(obj, name, value):
"""A flexible method for setting a value on an object.
If the object implements __setitem__ (such as a dict) performs obj[name] = value, else performs
setattr(obj, name, value).
:obj: the object to set the value on
:name: the name to assign the value to
:value: the value to assign
"""
if hasattr(obj, "__setitem__"):
obj[name] = value
else:
setattr(obj, name, value) | [
"def",
"set_value",
"(",
"obj",
",",
"name",
",",
"value",
")",
":",
"if",
"hasattr",
"(",
"obj",
",",
"\"__setitem__\"",
")",
":",
"obj",
"[",
"name",
"]",
"=",
"value",
"else",
":",
"setattr",
"(",
"obj",
",",
"name",
",",
"value",
")"
] | A flexible method for setting a value on an object.
If the object implements __setitem__ (such as a dict) performs obj[name] = value, else performs
setattr(obj, name, value).
:obj: the object to set the value on
:name: the name to assign the value to
:value: the value to assign | [
"A",
"flexible",
"method",
"for",
"setting",
"a",
"value",
"on",
"an",
"object",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L141-L154 |
251,132 | treycucco/bidon | bidon/util/__init__.py | with_defaults | def with_defaults(method, nparams, defaults=None):
"""Call method with nparams positional parameters, all non-specified defaults are passed None.
:method: the method to call
:nparams: the number of parameters the function expects
:defaults: the default values to pass in for the last len(defaults) params
"""
args = [None] * nparams if not defaults else defaults + max(nparams - len(defaults), 0) * [None]
return method(*args) | python | def with_defaults(method, nparams, defaults=None):
"""Call method with nparams positional parameters, all non-specified defaults are passed None.
:method: the method to call
:nparams: the number of parameters the function expects
:defaults: the default values to pass in for the last len(defaults) params
"""
args = [None] * nparams if not defaults else defaults + max(nparams - len(defaults), 0) * [None]
return method(*args) | [
"def",
"with_defaults",
"(",
"method",
",",
"nparams",
",",
"defaults",
"=",
"None",
")",
":",
"args",
"=",
"[",
"None",
"]",
"*",
"nparams",
"if",
"not",
"defaults",
"else",
"defaults",
"+",
"max",
"(",
"nparams",
"-",
"len",
"(",
"defaults",
")",
",",
"0",
")",
"*",
"[",
"None",
"]",
"return",
"method",
"(",
"*",
"args",
")"
] | Call method with nparams positional parameters, all non-specified defaults are passed None.
:method: the method to call
:nparams: the number of parameters the function expects
:defaults: the default values to pass in for the last len(defaults) params | [
"Call",
"method",
"with",
"nparams",
"positional",
"parameters",
"all",
"non",
"-",
"specified",
"defaults",
"are",
"passed",
"None",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L157-L165 |
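Note that, as written, with_defaults fills the leading parameters with the supplied defaults and pads the tail with None. A small demonstration, using a hypothetical three-parameter callable:

def with_defaults(method, nparams, defaults=None):
    # copied from the with_defaults record above
    args = [None] * nparams if not defaults else defaults + max(nparams - len(defaults), 0) * [None]
    return method(*args)

def connect(host, port, timeout):      # invented for the example
    return (host, port, timeout)

print(with_defaults(connect, 3))                       # (None, None, None)
print(with_defaults(connect, 3, ["localhost", 5432]))  # ('localhost', 5432, None)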
251,133 | treycucco/bidon | bidon/util/__init__.py | delegate | def delegate(from_owner, to_owner, methods):
"""Creates methods on from_owner to call through to methods on to_owner.
:from_owner: the object to delegate to
:to_owner: the owner on which to delegate from
:methods: a list of methods to delegate
"""
for method in methods:
_delegate(from_owner, to_owner, method) | python | def delegate(from_owner, to_owner, methods):
"""Creates methods on from_owner to call through to methods on to_owner.
:from_owner: the object to delegate to
:to_owner: the owner on which to delegate from
:methods: a list of methods to delegate
"""
for method in methods:
_delegate(from_owner, to_owner, method) | [
"def",
"delegate",
"(",
"from_owner",
",",
"to_owner",
",",
"methods",
")",
":",
"for",
"method",
"in",
"methods",
":",
"_delegate",
"(",
"from_owner",
",",
"to_owner",
",",
"method",
")"
] | Creates methods on from_owner to call through to methods on to_owner.
:from_owner: the object to delegate to
:to_owner: the owner on which to delegate from
:methods: a list of methods to delegate | [
"Creates",
"methods",
"on",
"from_owner",
"to",
"call",
"through",
"to",
"methods",
"on",
"to_owner",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L177-L185 |
251,134 | treycucco/bidon | bidon/util/__init__.py | _delegate | def _delegate(from_owner, to_owner, method):
"""Creates a method on from_owner to calls through to the same method on to_owner.
:from_owner: the object to delegate to
:to_owner: the owner on which to delegate from
:methods: the method to delegate
"""
dgate = lambda self, *args, **kwargs: getattr(getattr(self, to_owner), method)(*args, **kwargs)
dgate.__name__ = method
dgate.__doc__ = "Delegates to {0}.{1}: {2}".format(to_owner, method, method.__doc__)
setattr(from_owner, method, dgate) | python | def _delegate(from_owner, to_owner, method):
"""Creates a method on from_owner to calls through to the same method on to_owner.
:from_owner: the object to delegate to
:to_owner: the owner on which to delegate from
:methods: the method to delegate
"""
dgate = lambda self, *args, **kwargs: getattr(getattr(self, to_owner), method)(*args, **kwargs)
dgate.__name__ = method
dgate.__doc__ = "Delegates to {0}.{1}: {2}".format(to_owner, method, method.__doc__)
setattr(from_owner, method, dgate) | [
"def",
"_delegate",
"(",
"from_owner",
",",
"to_owner",
",",
"method",
")",
":",
"dgate",
"=",
"lambda",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
":",
"getattr",
"(",
"getattr",
"(",
"self",
",",
"to_owner",
")",
",",
"method",
")",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"dgate",
".",
"__name__",
"=",
"method",
"dgate",
".",
"__doc__",
"=",
"\"Delegates to {0}.{1}: {2}\"",
".",
"format",
"(",
"to_owner",
",",
"method",
",",
"method",
".",
"__doc__",
")",
"setattr",
"(",
"from_owner",
",",
"method",
",",
"dgate",
")"
] | Creates a method on from_owner that calls through to the same method on to_owner.
:from_owner: the object to delegate to
:to_owner: the owner on which to delegate from
:methods: the method to delegate | [
"Creates",
"a",
"method",
"on",
"from_owner",
"to",
"calls",
"through",
"to",
"the",
"same",
"method",
"on",
"to_owner",
"."
] | d9f24596841d0e69e8ac70a1d1a1deecea95e340 | https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/util/__init__.py#L188-L198 |
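A runnable sketch of delegate/_delegate from the two records above, wiring Wrapper.close and Wrapper.flush through to an underlying handle; both classes are invented for the illustration.

def _delegate(from_owner, to_owner, method):
    # copied from the _delegate record above
    dgate = lambda self, *args, **kwargs: getattr(getattr(self, to_owner), method)(*args, **kwargs)
    dgate.__name__ = method
    dgate.__doc__ = "Delegates to {0}.{1}: {2}".format(to_owner, method, method.__doc__)
    setattr(from_owner, method, dgate)

def delegate(from_owner, to_owner, methods):
    # copied from the delegate record above
    for method in methods:
        _delegate(from_owner, to_owner, method)

class Handle:
    def close(self):
        return "closed"
    def flush(self):
        return "flushed"

class Wrapper:
    def __init__(self):
        self.handle = Handle()

delegate(Wrapper, "handle", ["close", "flush"])   # adds pass-through Wrapper.close / Wrapper.flush

w = Wrapper()
print(w.close())   # closed
print(w.flush())   # flushed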
251,135 | opinkerfi/nago | nago/extensions/__init__.py | get_methods | def get_methods(extension_name):
""" Return all methods in extension that have nago_access set """
extension = get_extension(extension_name)
methods = {}
for name, i in inspect.getmembers(extension):
if hasattr(i, 'nago_access'):
api_name = i.nago_name
methods[api_name] = i
return methods | python | def get_methods(extension_name):
""" Return all methods in extension that have nago_access set """
extension = get_extension(extension_name)
methods = {}
for name, i in inspect.getmembers(extension):
if hasattr(i, 'nago_access'):
api_name = i.nago_name
methods[api_name] = i
return methods | [
"def",
"get_methods",
"(",
"extension_name",
")",
":",
"extension",
"=",
"get_extension",
"(",
"extension_name",
")",
"methods",
"=",
"{",
"}",
"for",
"name",
",",
"i",
"in",
"inspect",
".",
"getmembers",
"(",
"extension",
")",
":",
"if",
"hasattr",
"(",
"i",
",",
"'nago_access'",
")",
":",
"api_name",
"=",
"i",
".",
"nago_name",
"methods",
"[",
"api_name",
"]",
"=",
"i",
"return",
"methods"
] | Return all methods in extension that have nago_access set | [
"Return",
"all",
"methods",
"in",
"extension",
"that",
"have",
"nago_access",
"set"
] | 85e1bdd1de0122f56868a483e7599e1b36a439b0 | https://github.com/opinkerfi/nago/blob/85e1bdd1de0122f56868a483e7599e1b36a439b0/nago/extensions/__init__.py#L51-L59 |
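get_methods depends on get_extension, which is not included in these records; the sketch below reproduces only its inspect.getmembers filter against a stand-in extension namespace, to show how the nago_access and nago_name attributes drive the lookup.

import inspect
from types import SimpleNamespace

def ping():
    return "pong"
ping.nago_access = "everyone"   # attributes normally set elsewhere in Nago
ping.nago_name = "ping"

def _helper():                  # no nago_access attribute -> not exposed
    return None

extension = SimpleNamespace(ping=ping, _helper=_helper)   # stand-in for get_extension(...)

methods = {}
for name, member in inspect.getmembers(extension):
    if hasattr(member, "nago_access"):    # the same test get_methods applies
        methods[member.nago_name] = member

print(sorted(methods))   # ['ping']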
251,136 | bramwelt/field | field/__init__.py | column_converter | def column_converter(string):
"""
Converts column arguments to integers.
- Accepts columns in form of INT, or the range INT-INT.
- Returns a list of one or more integers.
"""
column = string.strip(',')
if '-' in column:
column_range = map(int, column.split('-'))
# For decreasing ranges, increment the larger value, reverse the
# passing to range (so it will accept the input), and finally
# reverse the output ([::-1])
if column_range[0] > column_range[1]:
column_range[0] += 1
return [i for i in range(*column_range[::-1])][::-1]
# For normal ranges, increment the larger value.
column_range[1] += 1
return [i for i in range(*column_range)]
if ',' in column:
columns = column.split(',')
return map(int, columns)
return [int(column)] | python | def column_converter(string):
"""
Converts column arguments to integers.
- Accepts columns in form of INT, or the range INT-INT.
- Returns a list of one or more integers.
"""
column = string.strip(',')
if '-' in column:
column_range = map(int, column.split('-'))
# For decreasing ranges, increment the larger value, reverse the
# passing to range (so it will accept the input), and finally
# reverse the output ([::-1])
if column_range[0] > column_range[1]:
column_range[0] += 1
return [i for i in range(*column_range[::-1])][::-1]
# For normal ranges, increment the larger value.
column_range[1] += 1
return [i for i in range(*column_range)]
if ',' in column:
columns = column.split(',')
return map(int, columns)
return [int(column)] | [
"def",
"column_converter",
"(",
"string",
")",
":",
"column",
"=",
"string",
".",
"strip",
"(",
"','",
")",
"if",
"'-'",
"in",
"column",
":",
"column_range",
"=",
"map",
"(",
"int",
",",
"column",
".",
"split",
"(",
"'-'",
")",
")",
"# For decreasing ranges, increment the larger value, reverse the",
"# passing to range (so it will accept the input), and finally",
"# reverse the output ([::-1])",
"if",
"column_range",
"[",
"0",
"]",
">",
"column_range",
"[",
"1",
"]",
":",
"column_range",
"[",
"0",
"]",
"+=",
"1",
"return",
"[",
"i",
"for",
"i",
"in",
"range",
"(",
"*",
"column_range",
"[",
":",
":",
"-",
"1",
"]",
")",
"]",
"[",
":",
":",
"-",
"1",
"]",
"# For normal ranges, increment the larger value.",
"column_range",
"[",
"1",
"]",
"+=",
"1",
"return",
"[",
"i",
"for",
"i",
"in",
"range",
"(",
"*",
"column_range",
")",
"]",
"if",
"','",
"in",
"column",
":",
"columns",
"=",
"column",
".",
"split",
"(",
"','",
")",
"return",
"map",
"(",
"int",
",",
"columns",
")",
"return",
"[",
"int",
"(",
"column",
")",
"]"
] | Converts column arguments to integers.
- Accepts columns in form of INT, or the range INT-INT.
- Returns a list of one or more integers. | [
"Converts",
"column",
"arguments",
"to",
"integers",
"."
] | 05f38170d080fb48e76aa984bf4aa6b3d05ea6dc | https://github.com/bramwelt/field/blob/05f38170d080fb48e76aa984bf4aa6b3d05ea6dc/field/__init__.py#L49-L71 |
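column_converter indexes the result of map(), so the code in the record assumes Python 2, where map returns a list. The copy below wraps the two map() calls in list() purely so the same logic also runs on Python 3; the behaviour shown in the comments is otherwise unchanged.

def column_converter(string):
    column = string.strip(',')
    if '-' in column:
        column_range = list(map(int, column.split('-')))    # list() added for Python 3
        if column_range[0] > column_range[1]:
            column_range[0] += 1
            return [i for i in range(*column_range[::-1])][::-1]
        column_range[1] += 1
        return [i for i in range(*column_range)]
    if ',' in column:
        columns = column.split(',')
        return list(map(int, columns))                       # list() added for Python 3
    return [int(column)]

print(column_converter("3"))      # [3]
print(column_converter("2,5,9"))  # [2, 5, 9]
print(column_converter("3-6"))    # [3, 4, 5, 6]
print(column_converter("6-3"))    # [6, 5, 4, 3]  (a decreasing range comes back reversed)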
251,137 | bramwelt/field | field/__init__.py | check_columns | def check_columns(column, line, columns):
"""
Make sure the column is the minimum between the largest column asked
for and the max column available in the line.
"""
return column <= min(len(line), max(columns)) | python | def check_columns(column, line, columns):
"""
Make sure the column is the minimum between the largest column asked
for and the max column available in the line.
"""
return column <= min(len(line), max(columns)) | [
"def",
"check_columns",
"(",
"column",
",",
"line",
",",
"columns",
")",
":",
"return",
"column",
"<=",
"min",
"(",
"len",
"(",
"line",
")",
",",
"max",
"(",
"columns",
")",
")"
] | Make sure the column is the minimum between the largest column asked
for and the max column available in the line. | [
"Make",
"sure",
"the",
"column",
"is",
"the",
"minimum",
"between",
"the",
"largest",
"column",
"asked",
"for",
"and",
"the",
"max",
"column",
"available",
"in",
"the",
"line",
"."
] | 05f38170d080fb48e76aa984bf4aa6b3d05ea6dc | https://github.com/bramwelt/field/blob/05f38170d080fb48e76aa984bf4aa6b3d05ea6dc/field/__init__.py#L92-L97 |
251,138 | synw/goerr | goerr/__init__.py | Err.panic | def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) | python | def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) | [
"def",
"panic",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"_err",
"(",
"\"fatal\"",
",",
"*",
"args",
")",
"if",
"self",
".",
"test_errs_mode",
"is",
"False",
":",
"# pragma: no cover",
"sys",
".",
"exit",
"(",
"1",
")"
] | Creates a fatal error and exit | [
"Creates",
"a",
"fatal",
"error",
"and",
"exit"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L59-L65 |
251,139 | synw/goerr | goerr/__init__.py | Err.warning | def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error | python | def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error | [
"def",
"warning",
"(",
"self",
",",
"*",
"args",
")",
"->",
"\"Err\"",
":",
"error",
"=",
"self",
".",
"_create_err",
"(",
"\"warning\"",
",",
"*",
"args",
")",
"print",
"(",
"self",
".",
"_errmsg",
"(",
"error",
")",
")",
"return",
"error"
] | Creates a warning message | [
"Creates",
"a",
"warning",
"message"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L67-L73 |
251,140 | synw/goerr | goerr/__init__.py | Err.info | def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error | python | def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error | [
"def",
"info",
"(",
"self",
",",
"*",
"args",
")",
"->",
"\"Err\"",
":",
"error",
"=",
"self",
".",
"_create_err",
"(",
"\"info\"",
",",
"*",
"args",
")",
"print",
"(",
"self",
".",
"_errmsg",
"(",
"error",
")",
")",
"return",
"error"
] | Creates an info message | [
"Creates",
"an",
"info",
"message"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L75-L81 |
251,141 | synw/goerr | goerr/__init__.py | Err.debug | def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error | python | def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error | [
"def",
"debug",
"(",
"self",
",",
"*",
"args",
")",
"->",
"\"Err\"",
":",
"error",
"=",
"self",
".",
"_create_err",
"(",
"\"debug\"",
",",
"*",
"args",
")",
"print",
"(",
"self",
".",
"_errmsg",
"(",
"error",
")",
")",
"return",
"error"
] | Creates a debug message | [
"Creates",
"a",
"debug",
"message"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L83-L89 |
251,142 | synw/goerr | goerr/__init__.py | Err._create_err | def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error | python | def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error | [
"def",
"_create_err",
"(",
"self",
",",
"errclass",
":",
"str",
",",
"*",
"args",
")",
"->",
"\"Err\"",
":",
"error",
"=",
"self",
".",
"_new_err",
"(",
"errclass",
",",
"*",
"args",
")",
"self",
".",
"_add",
"(",
"error",
")",
"return",
"error"
] | Create an error | [
"Create",
"an",
"error"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L91-L97 |
251,143 | synw/goerr | goerr/__init__.py | Err._err | def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error | python | def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error | [
"def",
"_err",
"(",
"self",
",",
"errclass",
":",
"str",
"=",
"\"error\"",
",",
"*",
"args",
")",
"->",
"\"Err\"",
":",
"error",
"=",
"self",
".",
"_new_err",
"(",
"errclass",
",",
"*",
"args",
")",
"if",
"self",
".",
"log_errs",
"is",
"True",
":",
"sep",
"=",
"\" \"",
"if",
"self",
".",
"log_format",
"==",
"\"csv\"",
":",
"sep",
"=",
"\",\"",
"msg",
"=",
"str",
"(",
"datetime",
".",
"now",
"(",
")",
")",
"+",
"sep",
"+",
"self",
".",
"_errmsg",
"(",
"error",
",",
"msgformat",
"=",
"self",
".",
"log_format",
")",
"self",
".",
"logger",
".",
"error",
"(",
"msg",
")",
"print",
"(",
"self",
".",
"_errmsg",
"(",
"error",
")",
")",
"self",
".",
"_add",
"(",
"error",
")",
"return",
"error"
] | Creates an error | [
"Creates",
"an",
"error"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L99-L113 |
251,144 | synw/goerr | goerr/__init__.py | Err._headline | def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg | python | def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg | [
"def",
"_headline",
"(",
"self",
",",
"error",
",",
"i",
":",
"int",
")",
"->",
"str",
":",
"msgs",
"=",
"Msg",
"(",
")",
"# get the error title",
"if",
"error",
".",
"errclass",
"==",
"\"fatal\"",
":",
"msg",
"=",
"msgs",
".",
"fatal",
"(",
"i",
")",
"elif",
"error",
".",
"errclass",
"==",
"\"warning\"",
":",
"msg",
"=",
"msgs",
".",
"warning",
"(",
"i",
")",
"elif",
"error",
".",
"errclass",
"==",
"\"info\"",
":",
"msg",
"=",
"msgs",
".",
"info",
"(",
"i",
")",
"elif",
"error",
".",
"errclass",
"==",
"\"debug\"",
":",
"msg",
"=",
"msgs",
".",
"debug",
"(",
"i",
")",
"elif",
"error",
".",
"errclass",
"==",
"\"via\"",
":",
"msg",
"=",
"msgs",
".",
"via",
"(",
"i",
")",
"else",
":",
"msg",
"=",
"msgs",
".",
"error",
"(",
"i",
")",
"# function name",
"if",
"error",
".",
"function",
"is",
"not",
"None",
":",
"msg",
"+=",
"\" from \"",
"+",
"colors",
".",
"bold",
"(",
"error",
".",
"function",
")",
"if",
"error",
".",
"caller",
"is",
"not",
"None",
":",
"msg",
"+=",
"\" called from \"",
"+",
"colors",
".",
"bold",
"(",
"error",
".",
"caller",
")",
"if",
"error",
".",
"caller_msg",
"is",
"not",
"None",
":",
"msg",
"+=",
"\"\\n\"",
"+",
"error",
".",
"caller_msg",
"if",
"error",
".",
"function",
"is",
"not",
"None",
"and",
"error",
".",
"msg",
"is",
"not",
"None",
":",
"msg",
"+=",
"\": \"",
"else",
":",
"msg",
"=",
"msg",
"+",
"\" \"",
"if",
"error",
".",
"errtype",
"is",
"not",
"None",
":",
"msg",
"+=",
"error",
".",
"errtype",
"+",
"\" : \"",
"if",
"error",
".",
"msg",
"is",
"not",
"None",
":",
"msg",
"+=",
"error",
".",
"msg",
"return",
"msg"
] | Format the error message's headline | [
"Format",
"the",
"error",
"message",
"s",
"headline"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L190-L223 |
251,145 | synw/goerr | goerr/__init__.py | Err._errmsg | def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg | python | def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg | [
"def",
"_errmsg",
"(",
"self",
",",
"error",
":",
"\"Err\"",
",",
"tb",
":",
"bool",
"=",
"False",
",",
"i",
":",
"int",
"=",
"None",
",",
"msgformat",
":",
"str",
"=",
"\"terminal\"",
")",
"->",
"str",
":",
"if",
"msgformat",
"==",
"\"terminal\"",
":",
"msg",
"=",
"self",
".",
"_headline",
"(",
"error",
",",
"i",
")",
"if",
"error",
".",
"ex",
"is",
"not",
"None",
":",
"msg",
"+=",
"\"\\n\"",
"+",
"\"line \"",
"+",
"colors",
".",
"bold",
"(",
"str",
"(",
"error",
".",
"line",
")",
")",
"msg",
"+=",
"\": \"",
"+",
"colors",
".",
"yellow",
"(",
"error",
".",
"code",
")",
"msg",
"+=",
"\"\\n\"",
"+",
"str",
"(",
"error",
".",
"file",
")",
"if",
"self",
".",
"errs_traceback",
"is",
"True",
"or",
"tb",
"is",
"True",
":",
"if",
"error",
".",
"tb",
"is",
"not",
"None",
":",
"msg",
"+=",
"\"\\n\"",
"+",
"error",
".",
"tb",
"elif",
"msgformat",
"==",
"\"csv\"",
":",
"sep",
"=",
"\",\"",
"msg",
"=",
"error",
".",
"msg",
"+",
"sep",
"msg",
"+=",
"str",
"(",
"error",
".",
"line",
")",
"+",
"sep",
"+",
"error",
".",
"code",
"+",
"sep",
"msg",
"+=",
"str",
"(",
"error",
".",
"file",
")",
"elif",
"msgformat",
"==",
"\"text\"",
":",
"sep",
"=",
"\",\"",
"msg",
"=",
"error",
".",
"msg",
"if",
"error",
".",
"ex",
"is",
"not",
"None",
":",
"msg",
"+=",
"sep",
"+",
"str",
"(",
"error",
".",
"line",
")",
"+",
"sep",
"+",
"error",
".",
"code",
"+",
"sep",
"msg",
"+=",
"str",
"(",
"error",
".",
"file",
")",
"+",
"sep",
"if",
"self",
".",
"errs_traceback",
"is",
"True",
"or",
"tb",
"is",
"True",
":",
"if",
"error",
".",
"tb",
"is",
"not",
"None",
":",
"msg",
"+=",
"sep",
"+",
"error",
".",
"tb",
"elif",
"msgformat",
"==",
"\"dict\"",
":",
"msg",
"=",
"{",
"\"date\"",
":",
"datetime",
".",
"now",
"(",
")",
"}",
"if",
"error",
".",
"ex",
"is",
"not",
"None",
":",
"msg",
"[",
"\"msg\"",
"]",
"=",
"error",
".",
"msg",
"msg",
"[",
"\"line\"",
"]",
"=",
"error",
".",
"line",
"msg",
"[",
"\"code\"",
"]",
"=",
"error",
".",
"code",
"msg",
"[",
"\"file\"",
"]",
"=",
"error",
".",
"file",
"if",
"self",
".",
"errs_traceback",
"is",
"True",
"or",
"tb",
"is",
"True",
":",
"if",
"error",
".",
"tb",
"is",
"not",
"None",
":",
"msg",
"[",
"\"traceback\"",
"]",
"=",
"error",
".",
"tb",
"return",
"msg"
] | Get the error message | [
"Get",
"the",
"error",
"message"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L225-L263 |
251,146 | synw/goerr | goerr/__init__.py | Err._print_errs | def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1 | python | def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1 | [
"def",
"_print_errs",
"(",
"self",
")",
":",
"i",
"=",
"0",
"for",
"error",
"in",
"self",
".",
"errors",
":",
"print",
"(",
"self",
".",
"_errmsg",
"(",
"error",
",",
"tb",
"=",
"True",
",",
"i",
"=",
"i",
")",
")",
"# for spacing",
"if",
"self",
".",
"errs_traceback",
"is",
"False",
":",
"print",
"(",
")",
"i",
"+=",
"1"
] | Prints the errors trace with tracebacks | [
"Prints",
"the",
"errors",
"trace",
"with",
"tracebacks"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L271-L281 |
251,147 | synw/goerr | goerr/__init__.py | Err._add | def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error) | python | def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error) | [
"def",
"_add",
"(",
"self",
",",
"error",
":",
"\"Err\"",
")",
":",
"if",
"self",
".",
"trace_errs",
"is",
"True",
":",
"self",
".",
"errors",
".",
"append",
"(",
"error",
")"
] | Adds an error to the trace if required | [
"Adds",
"an",
"error",
"to",
"the",
"trace",
"if",
"required"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L283-L288 |
251,148 | synw/goerr | goerr/__init__.py | Err._get_caller | def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True | python | def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True | [
"def",
"_get_caller",
"(",
"self",
",",
"callers",
":",
"List",
"[",
"str",
"]",
",",
"function",
":",
"str",
")",
"->",
"str",
":",
"is_next",
"=",
"False",
"for",
"c",
"in",
"callers",
":",
"if",
"is_next",
"is",
"True",
":",
"return",
"c",
"if",
"function",
"==",
"c",
":",
"is_next",
"=",
"True"
] | Get the caller function from the provided function | [
"Get",
"the",
"caller",
"function",
"from",
"the",
"provided",
"function"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L290-L299 |
251,149 | synw/goerr | goerr/__init__.py | Err._get_args | def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg | python | def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg | [
"def",
"_get_args",
"(",
"self",
",",
"*",
"args",
")",
"->",
"(",
"Exception",
",",
"str",
")",
":",
"ex",
"=",
"None",
"msg",
"=",
"None",
"for",
"arg",
"in",
"args",
":",
"if",
"isinstance",
"(",
"arg",
",",
"str",
")",
":",
"msg",
"=",
"arg",
"elif",
"isinstance",
"(",
"arg",
",",
"Exception",
")",
":",
"ex",
"=",
"arg",
"return",
"ex",
",",
"msg"
] | Returns exception and message from the provided arguments | [
"Returns",
"exception",
"and",
"message",
"from",
"the",
"provided",
"arguments"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L301-L312 |
251,150 | synw/goerr | goerr/__init__.py | Trace.trace | def trace(self):
"""
Print the errors trace if there are some errors
"""
if len(self.errors) > 0:
numerrs = len(self.errors)
print("========= Trace (" + str(numerrs) + ") =========")
self._print_errs()
self.errors = [] | python | def trace(self):
"""
Print the errors trace if there are some errors
"""
if len(self.errors) > 0:
numerrs = len(self.errors)
print("========= Trace (" + str(numerrs) + ") =========")
self._print_errs()
self.errors = [] | [
"def",
"trace",
"(",
"self",
")",
":",
"if",
"len",
"(",
"self",
".",
"errors",
")",
">",
"0",
":",
"numerrs",
"=",
"len",
"(",
"self",
".",
"errors",
")",
"print",
"(",
"\"========= Trace (\"",
"+",
"str",
"(",
"numerrs",
")",
"+",
"\") =========\"",
")",
"self",
".",
"_print_errs",
"(",
")",
"self",
".",
"errors",
"=",
"[",
"]"
] | Print the errors trace if there are some errors | [
"Print",
"the",
"errors",
"trace",
"if",
"there",
"are",
"some",
"errors"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L331-L339 |
251,151 | synw/goerr | goerr/__init__.py | Trace.via | def via(self, *args):
"""
Creates an empty error to record in the stack
trace
"""
error = None
if len(self.errors) > 0:
error = self._err("via", *args)
return error | python | def via(self, *args):
"""
Creates an empty error to record in the stack
trace
"""
error = None
if len(self.errors) > 0:
error = self._err("via", *args)
return error | [
"def",
"via",
"(",
"self",
",",
"*",
"args",
")",
":",
"error",
"=",
"None",
"if",
"len",
"(",
"self",
".",
"errors",
")",
">",
"0",
":",
"error",
"=",
"self",
".",
"_err",
"(",
"\"via\"",
",",
"*",
"args",
")",
"return",
"error"
] | Creates an empty error to record in the stack
trace | [
"Creates",
"an",
"empty",
"error",
"to",
"record",
"in",
"the",
"stack",
"trace"
] | 08b3809d6715bffe26899a769d96fa5de8573faf | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L341-L349 |
251,152 | gdelnegro/django-translation-server | translation_server/management/commands/make_translation_migrations.py | Command.__create_translation_migration | def __create_translation_migration(self):
""" Create an empty migration """
migrations_dir = os.path.join(self.BASE_DIR, self.app_name + "/migrations/")
dependency_migration = os.path.basename(max(glob.iglob(migrations_dir + '*.py'), key=os.path.getctime)).replace(
".py", "")
"""
If there's no migration before this, which is unlikely to happen, then create a migration without dependencies
"""
if "__init__" in dependency_migration:
dependency_migration = ""
""" Make an empty migration """
call_command('makemigrations', self.app_name, "--empty")
""" Get last migration name and edit it, adding the new code """
last_migration_file = max(glob.iglob(migrations_dir + '*.py'), key=os.path.getctime)
new_lines = self.__create_translation_lines()
translation_type_lines = self.__create_translation_type_lines()
if len(dependency_migration) > 0:
dependency_string = "('%(app_name)s', '%(dependency)s')," % {'app_name': self.app_name,
'dependency': dependency_migration}
else:
dependency_string = ""
try:
if len(new_lines) > 0:
with open(last_migration_file, 'w+') as file:
file.write(self.migration_string % {
'django_version': django.get_version(),
'timestamp': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'translation_strings': "\n".join(new_lines),
'tags_to_remove': ", ".join('"{0}"'.format(tag) for tag in self.updated_translations),
'dependency_string': dependency_string,
'app_name': 'translation_server',
'translation_type_strings': "\n".join(translation_type_lines),
'translation_types_to_remove': ", ".join(
'"{0}"'.format(tag) for tag in self.updated_translation_types),
})
else:
os.remove(last_migration_file)
self.stdout.write(self.style.NOTICE("There was no new translations to make migrations"))
return
except Exception as error:
os.remove(last_migration_file)
raise error
else:
self.__update_translation()
self.stdout.write(self.style.SUCCESS("Translation migration file create successfully"))
return | python | def __create_translation_migration(self):
""" Create an empty migration """
migrations_dir = os.path.join(self.BASE_DIR, self.app_name + "/migrations/")
dependency_migration = os.path.basename(max(glob.iglob(migrations_dir + '*.py'), key=os.path.getctime)).replace(
".py", "")
"""
If there's no migration before this, which is unlikely to happen, then create a migration without dependencies
"""
if "__init__" in dependency_migration:
dependency_migration = ""
""" Make an empty migration """
call_command('makemigrations', self.app_name, "--empty")
""" Get last migration name and edit it, adding the new code """
last_migration_file = max(glob.iglob(migrations_dir + '*.py'), key=os.path.getctime)
new_lines = self.__create_translation_lines()
translation_type_lines = self.__create_translation_type_lines()
if len(dependency_migration) > 0:
dependency_string = "('%(app_name)s', '%(dependency)s')," % {'app_name': self.app_name,
'dependency': dependency_migration}
else:
dependency_string = ""
try:
if len(new_lines) > 0:
with open(last_migration_file, 'w+') as file:
file.write(self.migration_string % {
'django_version': django.get_version(),
'timestamp': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'translation_strings': "\n".join(new_lines),
'tags_to_remove': ", ".join('"{0}"'.format(tag) for tag in self.updated_translations),
'dependency_string': dependency_string,
'app_name': 'translation_server',
'translation_type_strings': "\n".join(translation_type_lines),
'translation_types_to_remove': ", ".join(
'"{0}"'.format(tag) for tag in self.updated_translation_types),
})
else:
os.remove(last_migration_file)
self.stdout.write(self.style.NOTICE("There was no new translations to make migrations"))
return
except Exception as error:
os.remove(last_migration_file)
raise error
else:
self.__update_translation()
self.stdout.write(self.style.SUCCESS("Translation migration file create successfully"))
return | [
"def",
"__create_translation_migration",
"(",
"self",
")",
":",
"migrations_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"BASE_DIR",
",",
"self",
".",
"app_name",
"+",
"\"/migrations/\"",
")",
"dependency_migration",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"max",
"(",
"glob",
".",
"iglob",
"(",
"migrations_dir",
"+",
"'*.py'",
")",
",",
"key",
"=",
"os",
".",
"path",
".",
"getctime",
")",
")",
".",
"replace",
"(",
"\".py\"",
",",
"\"\"",
")",
"\"\"\"\n If there's no migration before this, which is unlikely to happen, then create a migration without dependencies\n \"\"\"",
"if",
"\"__init__\"",
"in",
"dependency_migration",
":",
"dependency_migration",
"=",
"\"\"",
"\"\"\" Make an empty migration \"\"\"",
"call_command",
"(",
"'makemigrations'",
",",
"self",
".",
"app_name",
",",
"\"--empty\"",
")",
"\"\"\" Get last migration name and edit it, adding the new code \"\"\"",
"last_migration_file",
"=",
"max",
"(",
"glob",
".",
"iglob",
"(",
"migrations_dir",
"+",
"'*.py'",
")",
",",
"key",
"=",
"os",
".",
"path",
".",
"getctime",
")",
"new_lines",
"=",
"self",
".",
"__create_translation_lines",
"(",
")",
"translation_type_lines",
"=",
"self",
".",
"__create_translation_type_lines",
"(",
")",
"if",
"len",
"(",
"dependency_migration",
")",
">",
"0",
":",
"dependency_string",
"=",
"\"('%(app_name)s', '%(dependency)s'),\"",
"%",
"{",
"'app_name'",
":",
"self",
".",
"app_name",
",",
"'dependency'",
":",
"dependency_migration",
"}",
"else",
":",
"dependency_string",
"=",
"\"\"",
"try",
":",
"if",
"len",
"(",
"new_lines",
")",
">",
"0",
":",
"with",
"open",
"(",
"last_migration_file",
",",
"'w+'",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"self",
".",
"migration_string",
"%",
"{",
"'django_version'",
":",
"django",
".",
"get_version",
"(",
")",
",",
"'timestamp'",
":",
"datetime",
".",
"now",
"(",
")",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S'",
")",
",",
"'translation_strings'",
":",
"\"\\n\"",
".",
"join",
"(",
"new_lines",
")",
",",
"'tags_to_remove'",
":",
"\", \"",
".",
"join",
"(",
"'\"{0}\"'",
".",
"format",
"(",
"tag",
")",
"for",
"tag",
"in",
"self",
".",
"updated_translations",
")",
",",
"'dependency_string'",
":",
"dependency_string",
",",
"'app_name'",
":",
"'translation_server'",
",",
"'translation_type_strings'",
":",
"\"\\n\"",
".",
"join",
"(",
"translation_type_lines",
")",
",",
"'translation_types_to_remove'",
":",
"\", \"",
".",
"join",
"(",
"'\"{0}\"'",
".",
"format",
"(",
"tag",
")",
"for",
"tag",
"in",
"self",
".",
"updated_translation_types",
")",
",",
"}",
")",
"else",
":",
"os",
".",
"remove",
"(",
"last_migration_file",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"style",
".",
"NOTICE",
"(",
"\"There was no new translations to make migrations\"",
")",
")",
"return",
"except",
"Exception",
"as",
"error",
":",
"os",
".",
"remove",
"(",
"last_migration_file",
")",
"raise",
"error",
"else",
":",
"self",
".",
"__update_translation",
"(",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"style",
".",
"SUCCESS",
"(",
"\"Translation migration file create successfully\"",
")",
")",
"return"
] | Create an empty migration | [
"Create",
"an",
"empty",
"migration"
] | 0f9de98d1cb07a42e1d323e20a384074ad28da57 | https://github.com/gdelnegro/django-translation-server/blob/0f9de98d1cb07a42e1d323e20a384074ad28da57/translation_server/management/commands/make_translation_migrations.py#L148-L193 |
251,153 | rinocloud/rinocloud-python | rinocloud/config.py | set_local_path | def set_local_path(directory, create_dir=False):
"""
sets path for local saving of information
if create is true we will create the folder even if it doesnt exist
"""
if not os.path.exists(directory) and create_dir is True:
os.makedirs(directory)
if not os.path.exists(directory) and create_dir is False:
raise AttributeError("Path '%s' does not exist, to make it pass create_dir=True to rinocloud.set_local_path" % directory)
if os.path.isdir(directory):
rinocloud.path = directory
return directory | python | def set_local_path(directory, create_dir=False):
"""
sets path for local saving of information
if create is true we will create the folder even if it doesnt exist
"""
if not os.path.exists(directory) and create_dir is True:
os.makedirs(directory)
if not os.path.exists(directory) and create_dir is False:
raise AttributeError("Path '%s' does not exist, to make it pass create_dir=True to rinocloud.set_local_path" % directory)
if os.path.isdir(directory):
rinocloud.path = directory
return directory | [
"def",
"set_local_path",
"(",
"directory",
",",
"create_dir",
"=",
"False",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"directory",
")",
"and",
"create_dir",
"is",
"True",
":",
"os",
".",
"makedirs",
"(",
"directory",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"directory",
")",
"and",
"create_dir",
"is",
"False",
":",
"raise",
"AttributeError",
"(",
"\"Path '%s' does not exist, to make it pass create_dir=True to rinocloud.set_local_path\"",
"%",
"directory",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"directory",
")",
":",
"rinocloud",
".",
"path",
"=",
"directory",
"return",
"directory"
] | sets path for local saving of information
if create is true we will create the folder even if it doesnt exist | [
"sets",
"path",
"for",
"local",
"saving",
"of",
"information",
"if",
"create",
"is",
"true",
"we",
"will",
"create",
"the",
"folder",
"even",
"if",
"it",
"doesnt",
"exist"
] | 7c4bf994a518f961cffedb7260fc1e4fa1838b38 | https://github.com/rinocloud/rinocloud-python/blob/7c4bf994a518f961cffedb7260fc1e4fa1838b38/rinocloud/config.py#L6-L20 |
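A usage sketch for set_local_path, assuming the rinocloud package is installed; the ./rino-data directory name is arbitrary.

import rinocloud

# Point local saving at ./rino-data, creating the directory if it is missing.
rinocloud.set_local_path("./rino-data", create_dir=True)

# With create_dir left as False, a missing directory raises AttributeError instead.
try:
    rinocloud.set_local_path("./no-such-directory")
except AttributeError as exc:
    print(exc)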
251,154 | kaniblu/klogger | klogger.py | task | def task(name=None, t=INFO, *args, **kwargs):
"""
This decorator modifies current function such that its start, end, and
duration is logged in console. If the task name is not given, it will
attempt to infer it from the function name. Optionally, the decorator
can log information into files.
"""
def c_run(name, f, t, args, kwargs):
def run(*largs, **lkwargs):
thread = __get_current_thread()
old_name = __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY]
__THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = name
r = log(name, f, t, largs, lkwargs, *args, **kwargs)
__THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = old_name
return r
return run
if callable(name):
f = name
name = f.__name__
return c_run(name, f, t, args, kwargs)
if name == None:
def wrapped(f):
name = f.__name__
return c_run(name, f, t, args, kwargs)
return wrapped
else:
return lambda f: c_run(name, f, t, args, kwargs) | python | def task(name=None, t=INFO, *args, **kwargs):
"""
This decorator modifies current function such that its start, end, and
duration is logged in console. If the task name is not given, it will
attempt to infer it from the function name. Optionally, the decorator
can log information into files.
"""
def c_run(name, f, t, args, kwargs):
def run(*largs, **lkwargs):
thread = __get_current_thread()
old_name = __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY]
__THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = name
r = log(name, f, t, largs, lkwargs, *args, **kwargs)
__THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = old_name
return r
return run
if callable(name):
f = name
name = f.__name__
return c_run(name, f, t, args, kwargs)
if name == None:
def wrapped(f):
name = f.__name__
return c_run(name, f, t, args, kwargs)
return wrapped
else:
return lambda f: c_run(name, f, t, args, kwargs) | [
"def",
"task",
"(",
"name",
"=",
"None",
",",
"t",
"=",
"INFO",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"c_run",
"(",
"name",
",",
"f",
",",
"t",
",",
"args",
",",
"kwargs",
")",
":",
"def",
"run",
"(",
"*",
"largs",
",",
"*",
"*",
"lkwargs",
")",
":",
"thread",
"=",
"__get_current_thread",
"(",
")",
"old_name",
"=",
"__THREAD_PARAMS",
"[",
"thread",
"]",
"[",
"__THREAD_PARAMS_FNAME_KEY",
"]",
"__THREAD_PARAMS",
"[",
"thread",
"]",
"[",
"__THREAD_PARAMS_FNAME_KEY",
"]",
"=",
"name",
"r",
"=",
"log",
"(",
"name",
",",
"f",
",",
"t",
",",
"largs",
",",
"lkwargs",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"__THREAD_PARAMS",
"[",
"thread",
"]",
"[",
"__THREAD_PARAMS_FNAME_KEY",
"]",
"=",
"old_name",
"return",
"r",
"return",
"run",
"if",
"callable",
"(",
"name",
")",
":",
"f",
"=",
"name",
"name",
"=",
"f",
".",
"__name__",
"return",
"c_run",
"(",
"name",
",",
"f",
",",
"t",
",",
"args",
",",
"kwargs",
")",
"if",
"name",
"==",
"None",
":",
"def",
"wrapped",
"(",
"f",
")",
":",
"name",
"=",
"f",
".",
"__name__",
"return",
"c_run",
"(",
"name",
",",
"f",
",",
"t",
",",
"args",
",",
"kwargs",
")",
"return",
"wrapped",
"else",
":",
"return",
"lambda",
"f",
":",
"c_run",
"(",
"name",
",",
"f",
",",
"t",
",",
"args",
",",
"kwargs",
")"
] | This decorator modifies current function such that its start, end, and
duration is logged in console. If the task name is not given, it will
attempt to infer it from the function name. Optionally, the decorator
can log information into files. | [
"This",
"decorator",
"modifies",
"current",
"function",
"such",
"that",
"its",
"start",
"end",
"and",
"duration",
"is",
"logged",
"in",
"console",
".",
"If",
"the",
"task",
"name",
"is",
"not",
"given",
"it",
"will",
"attempt",
"to",
"infer",
"it",
"from",
"the",
"function",
"name",
".",
"Optionally",
"the",
"decorator",
"can",
"log",
"information",
"into",
"files",
"."
] | e23075134f2a3aa3e2a044f68eeacedf686969d7 | https://github.com/kaniblu/klogger/blob/e23075134f2a3aa3e2a044f68eeacedf686969d7/klogger.py#L261-L295 |
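A minimal usage sketch for the decorator record above, assuming the module imports as `klogger` and that `task` accepts either a bare function or an explicit name string, as the recorded source suggests; the decorated functions themselves are illustrative only.

import klogger  # module name assumed from the record above

@klogger.task                   # task name inferred from the function name
def build_index():
    return sum(range(1000))

@klogger.task("index-rebuild")  # explicit task name, default INFO level
def rebuild_index():
    return build_index()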
251,155 | kaniblu/klogger | klogger.py | progress_task | def progress_task(name=None, t=INFO, max_value=100, *args, **kwargs):
"""
This decorator extends the basic @task decorator by allowing users to
display some form of progress on the console. The module can receive
an increment in the progress through "tick_progress".
"""
return task(name=name, t=t, init_progress=True, max_value=max_value,
*args, **kwargs) | python | def progress_task(name=None, t=INFO, max_value=100, *args, **kwargs):
"""
This decorator extends the basic @task decorator by allowing users to
display some form of progress on the console. The module can receive
an increment in the progress through "tick_progress".
"""
return task(name=name, t=t, init_progress=True, max_value=max_value,
*args, **kwargs) | [
"def",
"progress_task",
"(",
"name",
"=",
"None",
",",
"t",
"=",
"INFO",
",",
"max_value",
"=",
"100",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"task",
"(",
"name",
"=",
"name",
",",
"t",
"=",
"t",
",",
"init_progress",
"=",
"True",
",",
"max_value",
"=",
"max_value",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | This decorator extends the basic @task decorator by allowing users to
display some form of progress on the console. The module can receive
an increment in the progress through "tick_progress". | [
"This",
"decorator",
"extends",
"the",
"basic"
] | e23075134f2a3aa3e2a044f68eeacedf686969d7 | https://github.com/kaniblu/klogger/blob/e23075134f2a3aa3e2a044f68eeacedf686969d7/klogger.py#L298-L305 |
251,156 | davisd50/sparc.cache | sparc/cache/splunk/area.py | CacheAreaForSplunkKV.current_kv_names | def current_kv_names(self):
"""Return set of string names of current available Splunk KV collections"""
return current_kv_names(self.sci, self.username, self.appname, request=self._request) | python | def current_kv_names(self):
"""Return set of string names of current available Splunk KV collections"""
return current_kv_names(self.sci, self.username, self.appname, request=self._request) | [
"def",
"current_kv_names",
"(",
"self",
")",
":",
"return",
"current_kv_names",
"(",
"self",
".",
"sci",
",",
"self",
".",
"username",
",",
"self",
".",
"appname",
",",
"request",
"=",
"self",
".",
"_request",
")"
] | Return set of string names of current available Splunk KV collections | [
"Return",
"set",
"of",
"string",
"names",
"of",
"current",
"available",
"Splunk",
"KV",
"collections"
] | f2378aad48c368a53820e97b093ace790d4d4121 | https://github.com/davisd50/sparc.cache/blob/f2378aad48c368a53820e97b093ace790d4d4121/sparc/cache/splunk/area.py#L51-L53 |
251,157 | davisd50/sparc.cache | sparc/cache/splunk/area.py | CacheAreaForSplunkKV.get | def get(self, CachableItem):
"""Returns current ICachedItem for ICachableItem or None if not cached"""
cached_item = self.mapper.get(CachableItem)
r = self.request('get',
self.url+"storage/collections/data/"+self.collname+'/'+cached_item.getId(),
data={'output_mode': 'json'})
if r.ok:
# we need to update the object with the values found in the cache area
data = r.json()
for name in self.mapper.mapper:
setattr(cached_item, name, data[name])
return cached_item
return None | python | def get(self, CachableItem):
"""Returns current ICachedItem for ICachableItem or None if not cached"""
cached_item = self.mapper.get(CachableItem)
r = self.request('get',
self.url+"storage/collections/data/"+self.collname+'/'+cached_item.getId(),
data={'output_mode': 'json'})
if r.ok:
# we need to update the object with the values found in the cache area
data = r.json()
for name in self.mapper.mapper:
setattr(cached_item, name, data[name])
return cached_item
return None | [
"def",
"get",
"(",
"self",
",",
"CachableItem",
")",
":",
"cached_item",
"=",
"self",
".",
"mapper",
".",
"get",
"(",
"CachableItem",
")",
"r",
"=",
"self",
".",
"request",
"(",
"'get'",
",",
"self",
".",
"url",
"+",
"\"storage/collections/data/\"",
"+",
"self",
".",
"collname",
"+",
"'/'",
"+",
"cached_item",
".",
"getId",
"(",
")",
",",
"data",
"=",
"{",
"'output_mode'",
":",
"'json'",
"}",
")",
"if",
"r",
".",
"ok",
":",
"# we need to update the object with the values found in the cache area",
"data",
"=",
"r",
".",
"json",
"(",
")",
"for",
"name",
"in",
"self",
".",
"mapper",
".",
"mapper",
":",
"setattr",
"(",
"cached_item",
",",
"name",
",",
"data",
"[",
"name",
"]",
")",
"return",
"cached_item",
"return",
"None"
] | Returns current ICachedItem for ICachableItem or None if not cached | [
"Returns",
"current",
"ICachedItem",
"for",
"ICachableItem",
"or",
"None",
"if",
"not",
"cached"
] | f2378aad48c368a53820e97b093ace790d4d4121 | https://github.com/davisd50/sparc.cache/blob/f2378aad48c368a53820e97b093ace790d4d4121/sparc/cache/splunk/area.py#L94-L106 |
251,158 | davisd50/sparc.cache | sparc/cache/splunk/area.py | CacheAreaForSplunkKV.isDirty | def isDirty(self, CachableItem):
"""True if cached information requires update for ICachableItem"""
_cachedItem = self.get(CachableItem)
if not _cachedItem:
return True
_newCacheItem = self.mapper.get(CachableItem)
return False if _cachedItem == _newCacheItem else True | python | def isDirty(self, CachableItem):
"""True if cached information requires update for ICachableItem"""
_cachedItem = self.get(CachableItem)
if not _cachedItem:
return True
_newCacheItem = self.mapper.get(CachableItem)
return False if _cachedItem == _newCacheItem else True | [
"def",
"isDirty",
"(",
"self",
",",
"CachableItem",
")",
":",
"_cachedItem",
"=",
"self",
".",
"get",
"(",
"CachableItem",
")",
"if",
"not",
"_cachedItem",
":",
"return",
"True",
"_newCacheItem",
"=",
"self",
".",
"mapper",
".",
"get",
"(",
"CachableItem",
")",
"return",
"False",
"if",
"_cachedItem",
"==",
"_newCacheItem",
"else",
"True"
] | True if cached information requires update for ICachableItem | [
"True",
"if",
"cached",
"information",
"requires",
"update",
"for",
"ICachableItem"
] | f2378aad48c368a53820e97b093ace790d4d4121 | https://github.com/davisd50/sparc.cache/blob/f2378aad48c368a53820e97b093ace790d4d4121/sparc/cache/splunk/area.py#L109-L115 |
251,159 | davisd50/sparc.cache | sparc/cache/splunk/area.py | CacheAreaForSplunkKV.cache | def cache(self, CachableItem):
"""Updates caches area with latest item information returning
ICachedItem if cache updates were required.
Issues ICacheObjectCreatedEvent, and ICacheObjectModifiedEvent for
ICacheArea/ICachableItem combo.
"""
_cachedItem = self.get(CachableItem)
if not _cachedItem:
_cachedItem = self.mapper.get(CachableItem)
self._add(_cachedItem)
logger.debug("new cachable item added to Splunk KV cache area {id: %s, type: %s}", str(_cachedItem.getId()), str(_cachedItem.__class__))
notify(CacheObjectCreatedEvent(_cachedItem, self))
return _cachedItem
else:
_newCacheItem = self.mapper.get(CachableItem)
if _cachedItem != _newCacheItem:
logger.debug("Cachable item modified in Splunk KV cache area {id: %s, type: %s}", str(_newCacheItem.getId()), str(_newCacheItem.__class__))
self._update(_newCacheItem)
notify(CacheObjectModifiedEvent(_newCacheItem, self))
return _newCacheItem
return None | python | def cache(self, CachableItem):
"""Updates caches area with latest item information returning
ICachedItem if cache updates were required.
Issues ICacheObjectCreatedEvent, and ICacheObjectModifiedEvent for
ICacheArea/ICachableItem combo.
"""
_cachedItem = self.get(CachableItem)
if not _cachedItem:
_cachedItem = self.mapper.get(CachableItem)
self._add(_cachedItem)
logger.debug("new cachable item added to Splunk KV cache area {id: %s, type: %s}", str(_cachedItem.getId()), str(_cachedItem.__class__))
notify(CacheObjectCreatedEvent(_cachedItem, self))
return _cachedItem
else:
_newCacheItem = self.mapper.get(CachableItem)
if _cachedItem != _newCacheItem:
logger.debug("Cachable item modified in Splunk KV cache area {id: %s, type: %s}", str(_newCacheItem.getId()), str(_newCacheItem.__class__))
self._update(_newCacheItem)
notify(CacheObjectModifiedEvent(_newCacheItem, self))
return _newCacheItem
return None | [
"def",
"cache",
"(",
"self",
",",
"CachableItem",
")",
":",
"_cachedItem",
"=",
"self",
".",
"get",
"(",
"CachableItem",
")",
"if",
"not",
"_cachedItem",
":",
"_cachedItem",
"=",
"self",
".",
"mapper",
".",
"get",
"(",
"CachableItem",
")",
"self",
".",
"_add",
"(",
"_cachedItem",
")",
"logger",
".",
"debug",
"(",
"\"new cachable item added to Splunk KV cache area {id: %s, type: %s}\"",
",",
"str",
"(",
"_cachedItem",
".",
"getId",
"(",
")",
")",
",",
"str",
"(",
"_cachedItem",
".",
"__class__",
")",
")",
"notify",
"(",
"CacheObjectCreatedEvent",
"(",
"_cachedItem",
",",
"self",
")",
")",
"return",
"_cachedItem",
"else",
":",
"_newCacheItem",
"=",
"self",
".",
"mapper",
".",
"get",
"(",
"CachableItem",
")",
"if",
"_cachedItem",
"!=",
"_newCacheItem",
":",
"logger",
".",
"debug",
"(",
"\"Cachable item modified in Splunk KV cache area {id: %s, type: %s}\"",
",",
"str",
"(",
"_newCacheItem",
".",
"getId",
"(",
")",
")",
",",
"str",
"(",
"_newCacheItem",
".",
"__class__",
")",
")",
"self",
".",
"_update",
"(",
"_newCacheItem",
")",
"notify",
"(",
"CacheObjectModifiedEvent",
"(",
"_newCacheItem",
",",
"self",
")",
")",
"return",
"_newCacheItem",
"return",
"None"
] | Updates caches area with latest item information returning
ICachedItem if cache updates were required.
Issues ICacheObjectCreatedEvent, and ICacheObjectModifiedEvent for
ICacheArea/ICachableItem combo. | [
"Updates",
"caches",
"area",
"with",
"latest",
"item",
"information",
"returning",
"ICachedItem",
"if",
"cache",
"updates",
"were",
"required",
"."
] | f2378aad48c368a53820e97b093ace790d4d4121 | https://github.com/davisd50/sparc.cache/blob/f2378aad48c368a53820e97b093ace790d4d4121/sparc/cache/splunk/area.py#L117-L138 |
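A sketch of the intended call pattern for the cache area above; only the methods shown in the record are used, and `area` (an already constructed CacheAreaForSplunkKV) and `item` (an ICachableItem) are placeholders rather than real construction code.

# Hedged sketch -- `area` and `item` are assumed to exist already.
if area.isDirty(item):
    cached = area.cache(item)   # fires CacheObjectCreated/ModifiedEvent as needed
else:
    cached = area.get(item)     # KV store already holds the latest values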
251,160 | eddiejessup/agaro | agaro/run_utils.py | run_model | def run_model(t_output_every, output_dir=None, m=None, force_resume=True,
**iterate_args):
"""Convenience function to combine making a Runner object, and
running it for some time.
Parameters
----------
m: Model
Model to run.
iterate_args:
Arguments to pass to :meth:`Runner.iterate`.
Others:
see :class:`Runner`.
Returns
-------
r: Runner
runner object after it has finished running for the required time.
"""
r = runner.Runner(output_dir, m, force_resume)
print(r)
r.iterate(t_output_every=t_output_every, **iterate_args)
return r | python | def run_model(t_output_every, output_dir=None, m=None, force_resume=True,
**iterate_args):
"""Convenience function to combine making a Runner object, and
running it for some time.
Parameters
----------
m: Model
Model to run.
iterate_args:
Arguments to pass to :meth:`Runner.iterate`.
Others:
see :class:`Runner`.
Returns
-------
r: Runner
runner object after it has finished running for the required time.
"""
r = runner.Runner(output_dir, m, force_resume)
print(r)
r.iterate(t_output_every=t_output_every, **iterate_args)
return r | [
"def",
"run_model",
"(",
"t_output_every",
",",
"output_dir",
"=",
"None",
",",
"m",
"=",
"None",
",",
"force_resume",
"=",
"True",
",",
"*",
"*",
"iterate_args",
")",
":",
"r",
"=",
"runner",
".",
"Runner",
"(",
"output_dir",
",",
"m",
",",
"force_resume",
")",
"print",
"(",
"r",
")",
"r",
".",
"iterate",
"(",
"t_output_every",
"=",
"t_output_every",
",",
"*",
"*",
"iterate_args",
")",
"return",
"r"
] | Convenience function to combine making a Runner object, and
running it for some time.
Parameters
----------
m: Model
Model to run.
iterate_args:
Arguments to pass to :meth:`Runner.iterate`.
Others:
see :class:`Runner`.
Returns
-------
r: Runner
runner object after it has finished running for the required time. | [
"Convenience",
"function",
"to",
"combine",
"making",
"a",
"Runner",
"object",
"and",
"running",
"it",
"for",
"some",
"time",
"."
] | b2feb45d6129d749088c70b3e9290af7ca7c7d33 | https://github.com/eddiejessup/agaro/blob/b2feb45d6129d749088c70b3e9290af7ca7c7d33/agaro/run_utils.py#L7-L29 |
251,161 | eddiejessup/agaro | agaro/run_utils.py | resume_runs | def resume_runs(dirnames, t_output_every, t_upto, parallel=False):
"""Resume many models, and run.
Parameters
----------
dirnames: list[str]
List of output directory paths from which to resume.
output_every: int
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected.
"""
run_model_partial = partial(run_model, t_output_every, force_resume=True,
t_upto=t_upto)
run_func(run_model_partial, dirnames, parallel) | python | def resume_runs(dirnames, t_output_every, t_upto, parallel=False):
"""Resume many models, and run.
Parameters
----------
dirnames: list[str]
List of output directory paths from which to resume.
output_every: int
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected.
"""
run_model_partial = partial(run_model, t_output_every, force_resume=True,
t_upto=t_upto)
run_func(run_model_partial, dirnames, parallel) | [
"def",
"resume_runs",
"(",
"dirnames",
",",
"t_output_every",
",",
"t_upto",
",",
"parallel",
"=",
"False",
")",
":",
"run_model_partial",
"=",
"partial",
"(",
"run_model",
",",
"t_output_every",
",",
"force_resume",
"=",
"True",
",",
"t_upto",
"=",
"t_upto",
")",
"run_func",
"(",
"run_model_partial",
",",
"dirnames",
",",
"parallel",
")"
] | Resume many models, and run.
Parameters
----------
dirnames: list[str]
List of output directory paths from which to resume.
output_every: int
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected. | [
"Resume",
"many",
"models",
"and",
"run",
"."
] | b2feb45d6129d749088c70b3e9290af7ca7c7d33 | https://github.com/eddiejessup/agaro/blob/b2feb45d6129d749088c70b3e9290af7ca7c7d33/agaro/run_utils.py#L32-L50 |
251,162 | eddiejessup/agaro | agaro/run_utils.py | run_kwarg_scan | def run_kwarg_scan(ModelClass, model_kwarg_sets,
t_output_every, t_upto,
force_resume=True, parallel=False):
"""Run many models with the same parameters but variable `field`.
For each `val` in `vals`, a new model will be made, and run up to a time.
The output directory is automatically generated from the model arguments.
Parameters
----------
ModelClass: type
A class or factory function that returns a model object by
calling `ModelClass(model_kwargs)`
model_kwarg_sets: list[dict]
List of argument sets, each of which can instantiate a model.
t_output_every: float
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected.
"""
task_runner = _TaskRunner(ModelClass, t_output_every, t_upto, force_resume)
run_func(task_runner, model_kwarg_sets, parallel) | python | def run_kwarg_scan(ModelClass, model_kwarg_sets,
t_output_every, t_upto,
force_resume=True, parallel=False):
"""Run many models with the same parameters but variable `field`.
For each `val` in `vals`, a new model will be made, and run up to a time.
The output directory is automatically generated from the model arguments.
Parameters
----------
ModelClass: type
A class or factory function that returns a model object by
calling `ModelClass(model_kwargs)`
model_kwarg_sets: list[dict]
List of argument sets, each of which can instantiate a model.
t_output_every: float
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected.
"""
task_runner = _TaskRunner(ModelClass, t_output_every, t_upto, force_resume)
run_func(task_runner, model_kwarg_sets, parallel) | [
"def",
"run_kwarg_scan",
"(",
"ModelClass",
",",
"model_kwarg_sets",
",",
"t_output_every",
",",
"t_upto",
",",
"force_resume",
"=",
"True",
",",
"parallel",
"=",
"False",
")",
":",
"task_runner",
"=",
"_TaskRunner",
"(",
"ModelClass",
",",
"t_output_every",
",",
"t_upto",
",",
"force_resume",
")",
"run_func",
"(",
"task_runner",
",",
"model_kwarg_sets",
",",
"parallel",
")"
] | Run many models with the same parameters but variable `field`.
For each `val` in `vals`, a new model will be made, and run up to a time.
The output directory is automatically generated from the model arguments.
Parameters
----------
ModelClass: type
A class or factory function that returns a model object by
calling `ModelClass(model_kwargs)`
model_kwarg_sets: list[dict]
List of argument sets, each of which can instantiate a model.
t_output_every: float
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected. | [
"Run",
"many",
"models",
"with",
"the",
"same",
"parameters",
"but",
"variable",
"field",
"."
] | b2feb45d6129d749088c70b3e9290af7ca7c7d33 | https://github.com/eddiejessup/agaro/blob/b2feb45d6129d749088c70b3e9290af7ca7c7d33/agaro/run_utils.py#L74-L99 |
251,163 | eddiejessup/agaro | agaro/run_utils.py | run_field_scan | def run_field_scan(ModelClass, model_kwargs, t_output_every, t_upto, field,
vals, force_resume=True, parallel=False):
"""Run many models with a range of parameter sets.
Parameters
----------
ModelClass: callable
A class or factory function that returns a model object by
calling `ModelClass(model_kwargs)`
model_kwargs: dict
See `ModelClass` explanation.
t_output_every: float
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
field: str
The name of the field to be varied, whose values are in `vals`.
vals: array_like
Iterable of values to use to instantiate each Model object.
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected.
"""
model_kwarg_sets = [dict(model_kwargs, field=val) for val in vals]
run_kwarg_scan(ModelClass, model_kwarg_sets,
t_output_every, t_upto, force_resume, parallel) | python | def run_field_scan(ModelClass, model_kwargs, t_output_every, t_upto, field,
vals, force_resume=True, parallel=False):
"""Run many models with a range of parameter sets.
Parameters
----------
ModelClass: callable
A class or factory function that returns a model object by
calling `ModelClass(model_kwargs)`
model_kwargs: dict
See `ModelClass` explanation.
t_output_every: float
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
field: str
The name of the field to be varied, whose values are in `vals`.
vals: array_like
Iterable of values to use to instantiate each Model object.
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected.
"""
model_kwarg_sets = [dict(model_kwargs, field=val) for val in vals]
run_kwarg_scan(ModelClass, model_kwarg_sets,
t_output_every, t_upto, force_resume, parallel) | [
"def",
"run_field_scan",
"(",
"ModelClass",
",",
"model_kwargs",
",",
"t_output_every",
",",
"t_upto",
",",
"field",
",",
"vals",
",",
"force_resume",
"=",
"True",
",",
"parallel",
"=",
"False",
")",
":",
"model_kwarg_sets",
"=",
"[",
"dict",
"(",
"model_kwargs",
",",
"field",
"=",
"val",
")",
"for",
"val",
"in",
"vals",
"]",
"run_kwarg_scan",
"(",
"ModelClass",
",",
"model_kwarg_sets",
",",
"t_output_every",
",",
"t_upto",
",",
"force_resume",
",",
"parallel",
")"
] | Run many models with a range of parameter sets.
Parameters
----------
ModelClass: callable
A class or factory function that returns a model object by
calling `ModelClass(model_kwargs)`
model_kwargs: dict
See `ModelClass` explanation.
t_output_every: float
see :class:`Runner`.
t_upto: float
Run each model until the time is equal to this
field: str
The name of the field to be varied, whose values are in `vals`.
vals: array_like
Iterable of values to use to instantiate each Model object.
parallel: bool
Whether or not to run the models in parallel, using the Multiprocessing
library. If `True`, the number of concurrent tasks will be equal to
one less than the number of available cores detected. | [
"Run",
"many",
"models",
"with",
"a",
"range",
"of",
"parameter",
"sets",
"."
] | b2feb45d6129d749088c70b3e9290af7ca7c7d33 | https://github.com/eddiejessup/agaro/blob/b2feb45d6129d749088c70b3e9290af7ca7c7d33/agaro/run_utils.py#L102-L128 |
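A sketch of how the scan helper above might be invoked; `MyModel` and its keyword arguments are hypothetical stand-ins for a real agaro model class.

class MyModel:
    def __init__(self, **kwargs):      # placeholder model factory
        self.kwargs = kwargs

# One run per value of the varied field, each up to t = 10.0,
# with output written every 0.5 time units.
run_field_scan(MyModel, {'n': 100, 'D': 0.1},
               t_output_every=0.5, t_upto=10.0,
               field='D', vals=[0.01, 0.1, 1.0],
               parallel=False)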
251,164 | EricCrosson/stump | stump/stump.py | configure | def configure(logger=None):
"""Pass stump a logger to use. If no logger is supplied, a basic logger
of level INFO will print to stdout.
"""
global LOGGER
if logger is None:
LOGGER = logging.basicConfig(stream=sys.stdout, level=logging.INFO)
else:
LOGGER = logger | python | def configure(logger=None):
"""Pass stump a logger to use. If no logger is supplied, a basic logger
of level INFO will print to stdout.
"""
global LOGGER
if logger is None:
LOGGER = logging.basicConfig(stream=sys.stdout, level=logging.INFO)
else:
LOGGER = logger | [
"def",
"configure",
"(",
"logger",
"=",
"None",
")",
":",
"global",
"LOGGER",
"if",
"logger",
"is",
"None",
":",
"LOGGER",
"=",
"logging",
".",
"basicConfig",
"(",
"stream",
"=",
"sys",
".",
"stdout",
",",
"level",
"=",
"logging",
".",
"INFO",
")",
"else",
":",
"LOGGER",
"=",
"logger"
] | Pass stump a logger to use. If no logger is supplied, a basic logger
of level INFO will print to stdout. | [
"Pass",
"stump",
"a",
"logger",
"to",
"use",
".",
"If",
"no",
"logger",
"is",
"supplied",
"a",
"basic",
"logger",
"of",
"level",
"INFO",
"will",
"print",
"to",
"stdout",
"."
] | eb4d9f0dbe2642f86d47ca1b5f51fb7801bb09ab | https://github.com/EricCrosson/stump/blob/eb4d9f0dbe2642f86d47ca1b5f51fb7801bb09ab/stump/stump.py#L19-L28 |
251,165 | EricCrosson/stump | stump/stump.py | _stump | def _stump(f, *args, **kwargs):
"""Worker for the common actions of all stump methods, aka the secret
sauce.
*Keyword parameters*
- log :: integer
- Specifies a custom level of logging to pass to the active logger.
- Default: INFO
- print_time :: bool
- Include timestamp in message
- print_return :: bool
- include the return value in the functions exit message
- postfix_only :: bool
- omit the functions entering message
- prefix_only :: bool
- omit the functions exiting message
*Exceptions:*
- IndexError and ValueError
- will be returned if *args contains a string that does not correspond to
a parameter name of the decorated function, or if there are more '{}'s
than there are *args.
"""
global LOGGER
def aux(*xs, **kws):
f_kws = kws.copy()
f_kws.update(dict(zip(inspect.getfullargspec(f).args, xs)))
level = kwargs.get('log', logging.INFO)
post = kwargs.get('postfix_only', False)
pre = kwargs.get('prefix_only', False)
print_return = kwargs.get('print_return', False)
print_time = kwargs.get('print_time', False)
# prepare locals for later uses in string interpolation
fn = f.__name__
timestr = '%s:' % _timestr() if print_time else ''
# get message
try:
message = list(args).pop(0)
timestr = ':' + timestr
except IndexError:
message = fn
fn = ''
# format message
try:
report = '{fn}{timestr}{arg}'.format(**locals(),
arg=message.format(**f_kws))
except KeyError:
report = '{fn}{timestr}{error}'.\
format(**locals(), error='KeyError in decorator usage')
if not post:
LOGGER.log(level, '%s...', report)
try:
ret = f(*xs, **kws)
except Exception as e:
try:
with_message = ' with message %s' % str(e)
if str(e) == '':
raise Exception() # use default value
except:
with_message = ''
LOGGER.log(level, '%s...threw exception %s%s',
report, type(e).__name__, with_message)
raise
if not pre:
if print_return:
LOGGER.log(level, '%s...done (returning %s)', report, ret)
else:
LOGGER.log(level, '%s...done', report)
return ret
return aux | python | def _stump(f, *args, **kwargs):
"""Worker for the common actions of all stump methods, aka the secret
sauce.
*Keyword parameters*
- log :: integer
- Specifies a custom level of logging to pass to the active logger.
- Default: INFO
- print_time :: bool
- Include timestamp in message
- print_return :: bool
- include the return value in the functions exit message
- postfix_only :: bool
- omit the functions entering message
- prefix_only :: bool
- omit the functions exiting message
*Exceptions:*
- IndexError and ValueError
- will be returned if *args contains a string that does not correspond to
a parameter name of the decorated function, or if there are more '{}'s
than there are *args.
"""
global LOGGER
def aux(*xs, **kws):
f_kws = kws.copy()
f_kws.update(dict(zip(inspect.getfullargspec(f).args, xs)))
level = kwargs.get('log', logging.INFO)
post = kwargs.get('postfix_only', False)
pre = kwargs.get('prefix_only', False)
print_return = kwargs.get('print_return', False)
print_time = kwargs.get('print_time', False)
# prepare locals for later uses in string interpolation
fn = f.__name__
timestr = '%s:' % _timestr() if print_time else ''
# get message
try:
message = list(args).pop(0)
timestr = ':' + timestr
except IndexError:
message = fn
fn = ''
# format message
try:
report = '{fn}{timestr}{arg}'.format(**locals(),
arg=message.format(**f_kws))
except KeyError:
report = '{fn}{timestr}{error}'.\
format(**locals(), error='KeyError in decorator usage')
if not post:
LOGGER.log(level, '%s...', report)
try:
ret = f(*xs, **kws)
except Exception as e:
try:
with_message = ' with message %s' % str(e)
if str(e) == '':
raise Exception() # use default value
except:
with_message = ''
LOGGER.log(level, '%s...threw exception %s%s',
report, type(e).__name__, with_message)
raise
if not pre:
if print_return:
LOGGER.log(level, '%s...done (returning %s)', report, ret)
else:
LOGGER.log(level, '%s...done', report)
return ret
return aux | [
"def",
"_stump",
"(",
"f",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"global",
"LOGGER",
"def",
"aux",
"(",
"*",
"xs",
",",
"*",
"*",
"kws",
")",
":",
"f_kws",
"=",
"kws",
".",
"copy",
"(",
")",
"f_kws",
".",
"update",
"(",
"dict",
"(",
"zip",
"(",
"inspect",
".",
"getfullargspec",
"(",
"f",
")",
".",
"args",
",",
"xs",
")",
")",
")",
"level",
"=",
"kwargs",
".",
"get",
"(",
"'log'",
",",
"logging",
".",
"INFO",
")",
"post",
"=",
"kwargs",
".",
"get",
"(",
"'postfix_only'",
",",
"False",
")",
"pre",
"=",
"kwargs",
".",
"get",
"(",
"'prefix_only'",
",",
"False",
")",
"print_return",
"=",
"kwargs",
".",
"get",
"(",
"'print_return'",
",",
"False",
")",
"print_time",
"=",
"kwargs",
".",
"get",
"(",
"'print_time'",
",",
"False",
")",
"# prepare locals for later uses in string interpolation",
"fn",
"=",
"f",
".",
"__name__",
"timestr",
"=",
"'%s:'",
"%",
"_timestr",
"(",
")",
"if",
"print_time",
"else",
"''",
"# get message",
"try",
":",
"message",
"=",
"list",
"(",
"args",
")",
".",
"pop",
"(",
"0",
")",
"timestr",
"=",
"':'",
"+",
"timestr",
"except",
"IndexError",
":",
"message",
"=",
"fn",
"fn",
"=",
"''",
"# format message",
"try",
":",
"report",
"=",
"'{fn}{timestr}{arg}'",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
",",
"arg",
"=",
"message",
".",
"format",
"(",
"*",
"*",
"f_kws",
")",
")",
"except",
"KeyError",
":",
"report",
"=",
"'{fn}{timestr}{error}'",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
",",
"error",
"=",
"'KeyError in decorator usage'",
")",
"if",
"not",
"post",
":",
"LOGGER",
".",
"log",
"(",
"level",
",",
"'%s...'",
",",
"report",
")",
"try",
":",
"ret",
"=",
"f",
"(",
"*",
"xs",
",",
"*",
"*",
"kws",
")",
"except",
"Exception",
"as",
"e",
":",
"try",
":",
"with_message",
"=",
"' with message %s'",
"%",
"str",
"(",
"e",
")",
"if",
"str",
"(",
"e",
")",
"==",
"''",
":",
"raise",
"Exception",
"(",
")",
"# use default value",
"except",
":",
"with_message",
"=",
"''",
"LOGGER",
".",
"log",
"(",
"level",
",",
"'%s...threw exception %s%s'",
",",
"report",
",",
"type",
"(",
"e",
")",
".",
"__name__",
",",
"with_message",
")",
"raise",
"if",
"not",
"pre",
":",
"if",
"print_return",
":",
"LOGGER",
".",
"log",
"(",
"level",
",",
"'%s...done (returning %s)'",
",",
"report",
",",
"ret",
")",
"else",
":",
"LOGGER",
".",
"log",
"(",
"level",
",",
"'%s...done'",
",",
"report",
")",
"return",
"ret",
"return",
"aux"
] | Worker for the common actions of all stump methods, aka the secret
sauce.
*Keyword parameters*
- log :: integer
- Specifies a custom level of logging to pass to the active logger.
- Default: INFO
- print_time :: bool
- Include timestamp in message
- print_return :: bool
- include the return value in the functions exit message
- postfix_only :: bool
- omit the functions entering message
- prefix_only :: bool
- omit the functions exiting message
*Exceptions:*
- IndexError and ValueError
- will be returned if *args contains a string that does not correspond to
a parameter name of the decorated function, or if there are more '{}'s
than there are *args. | [
"Worker",
"for",
"the",
"common",
"actions",
"of",
"all",
"stump",
"methods",
"aka",
"the",
"secret",
"sauce",
"."
] | eb4d9f0dbe2642f86d47ca1b5f51fb7801bb09ab | https://github.com/EricCrosson/stump/blob/eb4d9f0dbe2642f86d47ca1b5f51fb7801bb09ab/stump/stump.py#L526-L602 |
251,166 | uda/djaccount | account/models.py | Account.get_full_name | def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '{first_name} {last_name}'.format(first_name=self.first_name, last_name=self.last_name)
return full_name.strip() | python | def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '{first_name} {last_name}'.format(first_name=self.first_name, last_name=self.last_name)
return full_name.strip() | [
"def",
"get_full_name",
"(",
"self",
")",
":",
"full_name",
"=",
"'{first_name} {last_name}'",
".",
"format",
"(",
"first_name",
"=",
"self",
".",
"first_name",
",",
"last_name",
"=",
"self",
".",
"last_name",
")",
"return",
"full_name",
".",
"strip",
"(",
")"
] | Returns the first_name plus the last_name, with a space in between. | [
"Returns",
"the",
"first_name",
"plus",
"the",
"last_name",
"with",
"a",
"space",
"in",
"between",
"."
] | 3012659ada04008d6c03a191b206c6d218aff836 | https://github.com/uda/djaccount/blob/3012659ada04008d6c03a191b206c6d218aff836/account/models.py#L47-L52 |
251,167 | dustinmm80/healthy | package_utils.py | main | def main():
"""
Main function for this module
"""
sandbox = create_sandbox()
directory = download_package_to_sandbox(
sandbox,
'https://pypi.python.org/packages/source/c/checkmyreqs/checkmyreqs-0.1.6.tar.gz'
)
print(directory)
destroy_sandbox(sandbox) | python | def main():
"""
Main function for this module
"""
sandbox = create_sandbox()
directory = download_package_to_sandbox(
sandbox,
'https://pypi.python.org/packages/source/c/checkmyreqs/checkmyreqs-0.1.6.tar.gz'
)
print(directory)
destroy_sandbox(sandbox) | [
"def",
"main",
"(",
")",
":",
"sandbox",
"=",
"create_sandbox",
"(",
")",
"directory",
"=",
"download_package_to_sandbox",
"(",
"sandbox",
",",
"'https://pypi.python.org/packages/source/c/checkmyreqs/checkmyreqs-0.1.6.tar.gz'",
")",
"print",
"(",
"directory",
")",
"destroy_sandbox",
"(",
"sandbox",
")"
] | Main function for this module | [
"Main",
"function",
"for",
"this",
"module"
] | b59016c3f578ca45b6ce857a2d5c4584b8542288 | https://github.com/dustinmm80/healthy/blob/b59016c3f578ca45b6ce857a2d5c4584b8542288/package_utils.py#L60-L70 |
251,168 | jkenlooper/chill | src/chill/public.py | check_map | def check_map(uri, url_root):
"""
return a tuple of the rule and kw.
"""
# TODO: Building the Map each time this is called seems like it could be more efficient.
result = []
try:
result = db.execute(text(fetch_query_string('select_route_where_dynamic.sql'))).fetchall()
except OperationalError as err:
current_app.logger.error("OperationalError: %s", err)
return (None, None)
if result:
#routes = result.as_dict()
#(routes, col_names) = rowify(result, c.description)
#current_app.logger.debug( [x['rule'] for x in routes] )
rules = map( lambda r: Rule(r['rule'], endpoint='dynamic'), result )
d_map = Map( rules )
map_adapter = d_map.bind(url_root)
#current_app.logger.debug(uri)
try:
(rule, rule_kw) = map_adapter.match(path_info=uri, return_rule=True)
#current_app.logger.debug(rule)
return (str(rule), rule_kw)
except HTTPException:
pass
return (None, {}) | python | def check_map(uri, url_root):
"""
return a tuple of the rule and kw.
"""
# TODO: Building the Map each time this is called seems like it could be more efficient.
result = []
try:
result = db.execute(text(fetch_query_string('select_route_where_dynamic.sql'))).fetchall()
except OperationalError as err:
current_app.logger.error("OperationalError: %s", err)
return (None, None)
if result:
#routes = result.as_dict()
#(routes, col_names) = rowify(result, c.description)
#current_app.logger.debug( [x['rule'] for x in routes] )
rules = map( lambda r: Rule(r['rule'], endpoint='dynamic'), result )
d_map = Map( rules )
map_adapter = d_map.bind(url_root)
#current_app.logger.debug(uri)
try:
(rule, rule_kw) = map_adapter.match(path_info=uri, return_rule=True)
#current_app.logger.debug(rule)
return (str(rule), rule_kw)
except HTTPException:
pass
return (None, {}) | [
"def",
"check_map",
"(",
"uri",
",",
"url_root",
")",
":",
"# TODO: Building the Map each time this is called seems like it could be more effiecent.",
"result",
"=",
"[",
"]",
"try",
":",
"result",
"=",
"db",
".",
"execute",
"(",
"text",
"(",
"fetch_query_string",
"(",
"'select_route_where_dynamic.sql'",
")",
")",
")",
".",
"fetchall",
"(",
")",
"except",
"OperationalError",
"as",
"err",
":",
"current_app",
".",
"logger",
".",
"error",
"(",
"\"OperationalError: %s\"",
",",
"err",
")",
"return",
"(",
"None",
",",
"None",
")",
"if",
"result",
":",
"#routes = result.as_dict()",
"#(routes, col_names) = rowify(result, c.description)",
"#current_app.logger.debug( [x['rule'] for x in routes] )",
"rules",
"=",
"map",
"(",
"lambda",
"r",
":",
"Rule",
"(",
"r",
"[",
"'rule'",
"]",
",",
"endpoint",
"=",
"'dynamic'",
")",
",",
"result",
")",
"d_map",
"=",
"Map",
"(",
"rules",
")",
"map_adapter",
"=",
"d_map",
".",
"bind",
"(",
"url_root",
")",
"#current_app.logger.debug(uri)",
"try",
":",
"(",
"rule",
",",
"rule_kw",
")",
"=",
"map_adapter",
".",
"match",
"(",
"path_info",
"=",
"uri",
",",
"return_rule",
"=",
"True",
")",
"#current_app.logger.debug(rule)",
"return",
"(",
"str",
"(",
"rule",
")",
",",
"rule_kw",
")",
"except",
"HTTPException",
":",
"pass",
"return",
"(",
"None",
",",
"{",
"}",
")"
] | return a tuple of the rule and kw. | [
"return",
"a",
"tuple",
"of",
"the",
"rule",
"and",
"kw",
"."
] | 35360c17c2a3b769ecb5406c6dabcf4cc70bd76f | https://github.com/jkenlooper/chill/blob/35360c17c2a3b769ecb5406c6dabcf4cc70bd76f/src/chill/public.py#L29-L54 |
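The matching step in check_map above is plain werkzeug routing; a standalone sketch of that step, with hypothetical rules in place of the database rows, may make the returned rule/keyword pair clearer.

from werkzeug.routing import Map, Rule
from werkzeug.exceptions import HTTPException

rules = [Rule('/page/<int:page_id>/', endpoint='dynamic'),
         Rule('/about/', endpoint='dynamic')]
adapter = Map(rules).bind('example.com')

try:
    rule, rule_kw = adapter.match(path_info='/page/7/', return_rule=True)
    print(str(rule), rule_kw)   # '/page/<int:page_id>/' {'page_id': 7}
except HTTPException:
    rule, rule_kw = None, {}    # mirrors the (None, {}) fallthrough above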
251,169 | jkenlooper/chill | src/chill/public.py | route_handler | def route_handler(context, content, pargs, kwargs):
"""
Route shortcode works a lot like rendering a page based on the url or
route. This allows inserting in rendered HTML within another page.
Activate it with the 'shortcodes' template filter. Within the content use
the chill route shortcode: "[chill route /path/to/something/]" where the
'[chill' and ']' are the shortcode starting and ending tags. And 'route' is
this route handler that takes one argument which is the url.
"""
(node, rule_kw) = node_from_uri(pargs[0])
if node == None:
return u"<!-- 404 '{0}' -->".format(pargs[0])
rule_kw.update( node )
values = rule_kw
values.update( request.form.to_dict(flat=True) )
values.update( request.args.to_dict(flat=True) )
values['method'] = request.method
noderequest = values.copy()
noderequest.pop('node_id')
noderequest.pop('name')
noderequest.pop('value')
rendered = render_node(node['id'], noderequest=noderequest, **values)
if rendered:
if not isinstance(rendered, (str, unicode, int, float)):
# return a json string
return encoder.encode(rendered)
return rendered
# Nothing to show, so nothing found
return "<!-- 404 '{0}' -->".format(pargs[0]) | python | def route_handler(context, content, pargs, kwargs):
"""
Route shortcode works a lot like rendering a page based on the url or
route. This allows inserting in rendered HTML within another page.
Activate it with the 'shortcodes' template filter. Within the content use
the chill route shortcode: "[chill route /path/to/something/]" where the
'[chill' and ']' are the shortcode starting and ending tags. And 'route' is
this route handler that takes one argument which is the url.
"""
(node, rule_kw) = node_from_uri(pargs[0])
if node == None:
return u"<!-- 404 '{0}' -->".format(pargs[0])
rule_kw.update( node )
values = rule_kw
values.update( request.form.to_dict(flat=True) )
values.update( request.args.to_dict(flat=True) )
values['method'] = request.method
noderequest = values.copy()
noderequest.pop('node_id')
noderequest.pop('name')
noderequest.pop('value')
rendered = render_node(node['id'], noderequest=noderequest, **values)
if rendered:
if not isinstance(rendered, (str, unicode, int, float)):
# return a json string
return encoder.encode(rendered)
return rendered
# Nothing to show, so nothing found
return "<!-- 404 '{0}' -->".format(pargs[0]) | [
"def",
"route_handler",
"(",
"context",
",",
"content",
",",
"pargs",
",",
"kwargs",
")",
":",
"(",
"node",
",",
"rule_kw",
")",
"=",
"node_from_uri",
"(",
"pargs",
"[",
"0",
"]",
")",
"if",
"node",
"==",
"None",
":",
"return",
"u\"<!-- 404 '{0}' -->\"",
".",
"format",
"(",
"pargs",
"[",
"0",
"]",
")",
"rule_kw",
".",
"update",
"(",
"node",
")",
"values",
"=",
"rule_kw",
"values",
".",
"update",
"(",
"request",
".",
"form",
".",
"to_dict",
"(",
"flat",
"=",
"True",
")",
")",
"values",
".",
"update",
"(",
"request",
".",
"args",
".",
"to_dict",
"(",
"flat",
"=",
"True",
")",
")",
"values",
"[",
"'method'",
"]",
"=",
"request",
".",
"method",
"noderequest",
"=",
"values",
".",
"copy",
"(",
")",
"noderequest",
".",
"pop",
"(",
"'node_id'",
")",
"noderequest",
".",
"pop",
"(",
"'name'",
")",
"noderequest",
".",
"pop",
"(",
"'value'",
")",
"rendered",
"=",
"render_node",
"(",
"node",
"[",
"'id'",
"]",
",",
"noderequest",
"=",
"noderequest",
",",
"*",
"*",
"values",
")",
"if",
"rendered",
":",
"if",
"not",
"isinstance",
"(",
"rendered",
",",
"(",
"str",
",",
"unicode",
",",
"int",
",",
"float",
")",
")",
":",
"# return a json string",
"return",
"encoder",
".",
"encode",
"(",
"rendered",
")",
"return",
"rendered",
"# Nothing to show, so nothing found",
"return",
"\"<!-- 404 '{0}' -->\"",
".",
"format",
"(",
"pargs",
"[",
"0",
"]",
")"
] | Route shortcode works a lot like rendering a page based on the url or
route. This allows inserting in rendered HTML within another page.
Activate it with the 'shortcodes' template filter. Within the content use
the chill route shortcode: "[chill route /path/to/something/]" where the
'[chill' and ']' are the shortcode starting and ending tags. And 'route' is
this route handler that takes one argument which is the url. | [
"Route",
"shortcode",
"works",
"a",
"lot",
"like",
"rendering",
"a",
"page",
"based",
"on",
"the",
"url",
"or",
"route",
".",
"This",
"allows",
"inserting",
"in",
"rendered",
"HTML",
"within",
"another",
"page",
"."
] | 35360c17c2a3b769ecb5406c6dabcf4cc70bd76f | https://github.com/jkenlooper/chill/blob/35360c17c2a3b769ecb5406c6dabcf4cc70bd76f/src/chill/public.py#L241-L276 |
251,170 | jkenlooper/chill | src/chill/public.py | page_uri_handler | def page_uri_handler(context, content, pargs, kwargs):
"""
Shortcode for getting the link to internal pages using the flask `url_for`
method.
Activate with 'shortcodes' template filter. Within the content use the
chill page_uri shortcode: "[chill page_uri idofapage]". The argument is the
'uri' for a page that chill uses.
Does not verify the link to see if it's valid.
"""
uri = pargs[0]
return url_for('.page_uri', uri=uri) | python | def page_uri_handler(context, content, pargs, kwargs):
"""
Shortcode for getting the link to internal pages using the flask `url_for`
method.
Activate with 'shortcodes' template filter. Within the content use the
chill page_uri shortcode: "[chill page_uri idofapage]". The argument is the
'uri' for a page that chill uses.
Does not verify the link to see if it's valid.
"""
uri = pargs[0]
return url_for('.page_uri', uri=uri) | [
"def",
"page_uri_handler",
"(",
"context",
",",
"content",
",",
"pargs",
",",
"kwargs",
")",
":",
"uri",
"=",
"pargs",
"[",
"0",
"]",
"return",
"url_for",
"(",
"'.page_uri'",
",",
"uri",
"=",
"uri",
")"
] | Shortcode for getting the link to internal pages using the flask `url_for`
method.
Activate with 'shortcodes' template filter. Within the content use the
chill page_uri shortcode: "[chill page_uri idofapage]". The argument is the
'uri' for a page that chill uses.
Does not verify the link to see if it's valid. | [
"Shortcode",
"for",
"getting",
"the",
"link",
"to",
"internal",
"pages",
"using",
"the",
"flask",
"url_for",
"method",
"."
] | 35360c17c2a3b769ecb5406c6dabcf4cc70bd76f | https://github.com/jkenlooper/chill/blob/35360c17c2a3b769ecb5406c6dabcf4cc70bd76f/src/chill/public.py#L279-L291 |
251,171 | jkenlooper/chill | src/chill/public.py | PageView.get | def get(self, uri=''):
"For sql queries that start with 'SELECT ...'"
(node, rule_kw) = node_from_uri(uri)
if node == None:
abort(404)
rule_kw.update( node )
values = rule_kw
xhr_data = request.get_json()
if xhr_data:
values.update( xhr_data )
values.update( request.form.to_dict(flat=True) )
values.update( request.args.to_dict(flat=True) )
values.update( request.cookies )
values['method'] = request.method
noderequest = values.copy()
noderequest.pop('node_id')
noderequest.pop('name')
noderequest.pop('value')
current_app.logger.debug("get kw: %s", values)
rendered = render_node(node['id'], noderequest=noderequest, **values)
current_app.logger.debug("rendered: %s", rendered)
if rendered:
if not isinstance(rendered, (str, unicode, int, float)):
# return a json string
return encoder.encode(rendered)
return rendered
# Nothing to show, so nothing found
abort(404) | python | def get(self, uri=''):
"For sql queries that start with 'SELECT ...'"
(node, rule_kw) = node_from_uri(uri)
if node == None:
abort(404)
rule_kw.update( node )
values = rule_kw
xhr_data = request.get_json()
if xhr_data:
values.update( xhr_data )
values.update( request.form.to_dict(flat=True) )
values.update( request.args.to_dict(flat=True) )
values.update( request.cookies )
values['method'] = request.method
noderequest = values.copy()
noderequest.pop('node_id')
noderequest.pop('name')
noderequest.pop('value')
current_app.logger.debug("get kw: %s", values)
rendered = render_node(node['id'], noderequest=noderequest, **values)
current_app.logger.debug("rendered: %s", rendered)
if rendered:
if not isinstance(rendered, (str, unicode, int, float)):
# return a json string
return encoder.encode(rendered)
return rendered
# Nothing to show, so nothing found
abort(404) | [
"def",
"get",
"(",
"self",
",",
"uri",
"=",
"''",
")",
":",
"(",
"node",
",",
"rule_kw",
")",
"=",
"node_from_uri",
"(",
"uri",
")",
"if",
"node",
"==",
"None",
":",
"abort",
"(",
"404",
")",
"rule_kw",
".",
"update",
"(",
"node",
")",
"values",
"=",
"rule_kw",
"xhr_data",
"=",
"request",
".",
"get_json",
"(",
")",
"if",
"xhr_data",
":",
"values",
".",
"update",
"(",
"xhr_data",
")",
"values",
".",
"update",
"(",
"request",
".",
"form",
".",
"to_dict",
"(",
"flat",
"=",
"True",
")",
")",
"values",
".",
"update",
"(",
"request",
".",
"args",
".",
"to_dict",
"(",
"flat",
"=",
"True",
")",
")",
"values",
".",
"update",
"(",
"request",
".",
"cookies",
")",
"values",
"[",
"'method'",
"]",
"=",
"request",
".",
"method",
"noderequest",
"=",
"values",
".",
"copy",
"(",
")",
"noderequest",
".",
"pop",
"(",
"'node_id'",
")",
"noderequest",
".",
"pop",
"(",
"'name'",
")",
"noderequest",
".",
"pop",
"(",
"'value'",
")",
"current_app",
".",
"logger",
".",
"debug",
"(",
"\"get kw: %s\"",
",",
"values",
")",
"rendered",
"=",
"render_node",
"(",
"node",
"[",
"'id'",
"]",
",",
"noderequest",
"=",
"noderequest",
",",
"*",
"*",
"values",
")",
"current_app",
".",
"logger",
".",
"debug",
"(",
"\"rendered: %s\"",
",",
"rendered",
")",
"if",
"rendered",
":",
"if",
"not",
"isinstance",
"(",
"rendered",
",",
"(",
"str",
",",
"unicode",
",",
"int",
",",
"float",
")",
")",
":",
"# return a json string",
"return",
"encoder",
".",
"encode",
"(",
"rendered",
")",
"return",
"rendered",
"# Nothing to show, so nothing found",
"abort",
"(",
"404",
")"
] | For sql queries that start with 'SELECT ... | [
"For",
"sql",
"queries",
"that",
"start",
"with",
"SELECT",
"..."
] | 35360c17c2a3b769ecb5406c6dabcf4cc70bd76f | https://github.com/jkenlooper/chill/blob/35360c17c2a3b769ecb5406c6dabcf4cc70bd76f/src/chill/public.py#L123-L153 |
251,172 | jkenlooper/chill | src/chill/public.py | PageView.post | def post(self, uri=''):
"For sql queries that start with 'INSERT ...'"
# get node...
(node, rule_kw) = node_from_uri(uri, method=request.method)
rule_kw.update( node )
values = rule_kw
xhr_data = request.get_json()
if xhr_data:
values.update( xhr_data )
values.update( request.form.to_dict(flat=True) )
values.update( request.args.to_dict(flat=True) )
values['method'] = request.method
# Execute the sql query with the data
_query(node['id'], **values)
response = make_response('ok', 201)
return response | python | def post(self, uri=''):
"For sql queries that start with 'INSERT ...'"
# get node...
(node, rule_kw) = node_from_uri(uri, method=request.method)
rule_kw.update( node )
values = rule_kw
xhr_data = request.get_json()
if xhr_data:
values.update( xhr_data )
values.update( request.form.to_dict(flat=True) )
values.update( request.args.to_dict(flat=True) )
values['method'] = request.method
# Execute the sql query with the data
_query(node['id'], **values)
response = make_response('ok', 201)
return response | [
"def",
"post",
"(",
"self",
",",
"uri",
"=",
"''",
")",
":",
"# get node...",
"(",
"node",
",",
"rule_kw",
")",
"=",
"node_from_uri",
"(",
"uri",
",",
"method",
"=",
"request",
".",
"method",
")",
"rule_kw",
".",
"update",
"(",
"node",
")",
"values",
"=",
"rule_kw",
"xhr_data",
"=",
"request",
".",
"get_json",
"(",
")",
"if",
"xhr_data",
":",
"values",
".",
"update",
"(",
"xhr_data",
")",
"values",
".",
"update",
"(",
"request",
".",
"form",
".",
"to_dict",
"(",
"flat",
"=",
"True",
")",
")",
"values",
".",
"update",
"(",
"request",
".",
"args",
".",
"to_dict",
"(",
"flat",
"=",
"True",
")",
")",
"values",
"[",
"'method'",
"]",
"=",
"request",
".",
"method",
"# Execute the sql query with the data",
"_query",
"(",
"node",
"[",
"'id'",
"]",
",",
"*",
"*",
"values",
")",
"response",
"=",
"make_response",
"(",
"'ok'",
",",
"201",
")",
"return",
"response"
] | For sql queries that start with 'INSERT ... | [
"For",
"sql",
"queries",
"that",
"start",
"with",
"INSERT",
"..."
] | 35360c17c2a3b769ecb5406c6dabcf4cc70bd76f | https://github.com/jkenlooper/chill/blob/35360c17c2a3b769ecb5406c6dabcf4cc70bd76f/src/chill/public.py#L156-L175 |
251,173 | tiany/django-aliyun-storage | aliyunstorage/backends.py | get_aliyun_config | def get_aliyun_config(name, default=None):
'''
Get configuration variable from environment variable or
django settings.py
'''
config = os.environ.get(name, getattr(settings, name, default))
if config is not None:
if isinstance(config, str):
return config.strip()
else:
return config
else:
raise ImproperlyConfigured(
'Can not get config for {} either in environment'
'variable or in settings.py'.format(name)) | python | def get_aliyun_config(name, default=None):
'''
Get configuration variable from environment variable or
django settings.py
'''
config = os.environ.get(name, getattr(settings, name, default))
if config is not None:
if isinstance(config, str):
return config.strip()
else:
return config
else:
raise ImproperlyConfigured(
'Can not get config for {} either in environment'
'variable or in settings.py'.format(name)) | [
"def",
"get_aliyun_config",
"(",
"name",
",",
"default",
"=",
"None",
")",
":",
"config",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"name",
",",
"getattr",
"(",
"settings",
",",
"name",
",",
"default",
")",
")",
"if",
"config",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"config",
",",
"str",
")",
":",
"return",
"config",
".",
"strip",
"(",
")",
"else",
":",
"return",
"config",
"else",
":",
"raise",
"ImproperlyConfigured",
"(",
"'Can not get config for {} either in environment'",
"'variable or in settings.py'",
".",
"format",
"(",
"name",
")",
")"
] | Get configuration variable from environment variable or
django settings.py | [
"Get",
"configuration",
"variable",
"from",
"environment",
"variable",
"or",
"or",
"django",
"settings",
".",
"py"
] | 582760ea05d333d37ff1a3bebff94437d7965a88 | https://github.com/tiany/django-aliyun-storage/blob/582760ea05d333d37ff1a3bebff94437d7965a88/aliyunstorage/backends.py#L18-L32 |
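A short sketch of the lookup order implemented above -- environment variable first, then Django settings, then the supplied default -- using hypothetical setting names.

import os

os.environ['ALIYUN_ACCESS_KEY_ID'] = '  my-key  '
key = get_aliyun_config('ALIYUN_ACCESS_KEY_ID')            # 'my-key' (whitespace stripped)

# Neither the environment nor settings defines this one, so the default is used.
region = get_aliyun_config('ALIYUN_REGION', default='cn-hangzhou')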
251,174 | minhhoit/yacms | yacms/twitter/templatetags/twitter_tags.py | tweets_for | def tweets_for(query_type, args, per_user=None):
"""
Retrieve tweets for a user, list or search term. The optional
``per_user`` arg limits the number of tweets per user, for
example to allow a fair spread of tweets per user for a list.
"""
lookup = {"query_type": query_type, "value": args[0]}
try:
tweets = Tweet.objects.get_for(**lookup)
except TwitterQueryException:
return []
if per_user is not None:
_tweets = defaultdict(list)
for tweet in tweets:
if len(_tweets[tweet.user_name]) < per_user:
_tweets[tweet.user_name].append(tweet)
tweets = sum(_tweets.values(), [])
tweets.sort(key=lambda t: t.created_at, reverse=True)
if len(args) > 1 and str(args[-1]).isdigit():
tweets = tweets[:int(args[-1])]
return tweets | python | def tweets_for(query_type, args, per_user=None):
"""
Retrieve tweets for a user, list or search term. The optional
``per_user`` arg limits the number of tweets per user, for
example to allow a fair spread of tweets per user for a list.
"""
lookup = {"query_type": query_type, "value": args[0]}
try:
tweets = Tweet.objects.get_for(**lookup)
except TwitterQueryException:
return []
if per_user is not None:
_tweets = defaultdict(list)
for tweet in tweets:
if len(_tweets[tweet.user_name]) < per_user:
_tweets[tweet.user_name].append(tweet)
tweets = sum(_tweets.values(), [])
tweets.sort(key=lambda t: t.created_at, reverse=True)
if len(args) > 1 and str(args[-1]).isdigit():
tweets = tweets[:int(args[-1])]
return tweets | [
"def",
"tweets_for",
"(",
"query_type",
",",
"args",
",",
"per_user",
"=",
"None",
")",
":",
"lookup",
"=",
"{",
"\"query_type\"",
":",
"query_type",
",",
"\"value\"",
":",
"args",
"[",
"0",
"]",
"}",
"try",
":",
"tweets",
"=",
"Tweet",
".",
"objects",
".",
"get_for",
"(",
"*",
"*",
"lookup",
")",
"except",
"TwitterQueryException",
":",
"return",
"[",
"]",
"if",
"per_user",
"is",
"not",
"None",
":",
"_tweets",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"tweet",
"in",
"tweets",
":",
"if",
"len",
"(",
"_tweets",
"[",
"tweet",
".",
"user_name",
"]",
")",
"<",
"per_user",
":",
"_tweets",
"[",
"tweet",
".",
"user_name",
"]",
".",
"append",
"(",
"tweet",
")",
"tweets",
"=",
"sum",
"(",
"_tweets",
".",
"values",
"(",
")",
",",
"[",
"]",
")",
"tweets",
".",
"sort",
"(",
"key",
"=",
"lambda",
"t",
":",
"t",
".",
"created_at",
",",
"reverse",
"=",
"True",
")",
"if",
"len",
"(",
"args",
")",
">",
"1",
"and",
"str",
"(",
"args",
"[",
"-",
"1",
"]",
")",
".",
"isdigit",
"(",
")",
":",
"tweets",
"=",
"tweets",
"[",
":",
"int",
"(",
"args",
"[",
"-",
"1",
"]",
")",
"]",
"return",
"tweets"
] | Retrieve tweets for a user, list or search term. The optional
``per_user`` arg limits the number of tweets per user, for
example to allow a fair spread of tweets per user for a list. | [
"Retrieve",
"tweets",
"for",
"a",
"user",
"list",
"or",
"search",
"term",
".",
"The",
"optional",
"per_user",
"arg",
"limits",
"the",
"number",
"of",
"tweets",
"per",
"user",
"for",
"example",
"to",
"allow",
"a",
"fair",
"spread",
"of",
"tweets",
"per",
"user",
"for",
"a",
"list",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/twitter/templatetags/twitter_tags.py#L16-L36 |
251,175 | minhhoit/yacms | yacms/twitter/templatetags/twitter_tags.py | tweets_default | def tweets_default(*args):
"""
Tweets for the default settings.
"""
query_type = settings.TWITTER_DEFAULT_QUERY_TYPE
args = (settings.TWITTER_DEFAULT_QUERY,
settings.TWITTER_DEFAULT_NUM_TWEETS)
per_user = None
if query_type == QUERY_TYPE_LIST:
per_user = 1
return tweets_for(query_type, args, per_user=per_user) | python | def tweets_default(*args):
"""
Tweets for the default settings.
"""
query_type = settings.TWITTER_DEFAULT_QUERY_TYPE
args = (settings.TWITTER_DEFAULT_QUERY,
settings.TWITTER_DEFAULT_NUM_TWEETS)
per_user = None
if query_type == QUERY_TYPE_LIST:
per_user = 1
return tweets_for(query_type, args, per_user=per_user) | [
"def",
"tweets_default",
"(",
"*",
"args",
")",
":",
"query_type",
"=",
"settings",
".",
"TWITTER_DEFAULT_QUERY_TYPE",
"args",
"=",
"(",
"settings",
".",
"TWITTER_DEFAULT_QUERY",
",",
"settings",
".",
"TWITTER_DEFAULT_NUM_TWEETS",
")",
"per_user",
"=",
"None",
"if",
"query_type",
"==",
"QUERY_TYPE_LIST",
":",
"per_user",
"=",
"1",
"return",
"tweets_for",
"(",
"query_type",
",",
"args",
",",
"per_user",
"=",
"per_user",
")"
] | Tweets for the default settings. | [
"Tweets",
"for",
"the",
"default",
"settings",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/twitter/templatetags/twitter_tags.py#L64-L74 |
251,176 | gebn/nibble | nibble/util.py | decode_cli_arg | def decode_cli_arg(arg):
"""
Turn a bytestring provided by `argparse` into unicode.
:param arg: The bytestring to decode.
:return: The argument as a unicode object.
:raises ValueError: If arg is None.
"""
if arg is None:
raise ValueError('Argument cannot be None')
if sys.version_info.major == 3:
# already decoded
return arg
return arg.decode(sys.getfilesystemencoding()) | python | def decode_cli_arg(arg):
"""
Turn a bytestring provided by `argparse` into unicode.
:param arg: The bytestring to decode.
:return: The argument as a unicode object.
:raises ValueError: If arg is None.
"""
if arg is None:
raise ValueError('Argument cannot be None')
if sys.version_info.major == 3:
# already decoded
return arg
return arg.decode(sys.getfilesystemencoding()) | [
"def",
"decode_cli_arg",
"(",
"arg",
")",
":",
"if",
"arg",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Argument cannot be None'",
")",
"if",
"sys",
".",
"version_info",
".",
"major",
"==",
"3",
":",
"# already decoded",
"return",
"arg",
"return",
"arg",
".",
"decode",
"(",
"sys",
".",
"getfilesystemencoding",
"(",
")",
")"
] | Turn a bytestring provided by `argparse` into unicode.
:param arg: The bytestring to decode.
:return: The argument as a unicode object.
:raises ValueError: If arg is None. | [
"Turn",
"a",
"bytestring",
"provided",
"by",
"argparse",
"into",
"unicode",
"."
] | e82a2c43509ed38f3d039040591cc630fa676cb0 | https://github.com/gebn/nibble/blob/e82a2c43509ed38f3d039040591cc630fa676cb0/nibble/util.py#L17-L32 |
251,177 | gebn/nibble | nibble/util.py | log_level_from_vebosity | def log_level_from_vebosity(verbosity):
"""
Get the `logging` module log level from a verbosity.
:param verbosity: The number of times the `-v` option was specified.
:return: The corresponding log level.
"""
if verbosity == 0:
return logging.WARNING
if verbosity == 1:
return logging.INFO
return logging.DEBUG | python | def log_level_from_vebosity(verbosity):
"""
Get the `logging` module log level from a verbosity.
:param verbosity: The number of times the `-v` option was specified.
:return: The corresponding log level.
"""
if verbosity == 0:
return logging.WARNING
if verbosity == 1:
return logging.INFO
return logging.DEBUG | [
"def",
"log_level_from_vebosity",
"(",
"verbosity",
")",
":",
"if",
"verbosity",
"==",
"0",
":",
"return",
"logging",
".",
"WARNING",
"if",
"verbosity",
"==",
"1",
":",
"return",
"logging",
".",
"INFO",
"return",
"logging",
".",
"DEBUG"
] | Get the `logging` module log level from a verbosity.
:param verbosity: The number of times the `-v` option was specified.
:return: The corresponding log level. | [
"Get",
"the",
"logging",
"module",
"log",
"level",
"from",
"a",
"verbosity",
"."
] | e82a2c43509ed38f3d039040591cc630fa676cb0 | https://github.com/gebn/nibble/blob/e82a2c43509ed38f3d039040591cc630fa676cb0/nibble/util.py#L35-L46 |
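A sketch of wiring the helper above to a counted -v flag; only the function's module path comes from the record, the rest is assumed boilerplate.

    import argparse
    import logging
    from nibble.util import log_level_from_vebosity

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='count', default=0)
    args = parser.parse_args()

    # no flag -> WARNING, -v -> INFO, -vv or more -> DEBUG
    logging.basicConfig(level=log_level_from_vebosity(args.verbose))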
251,178 | riquito/richinput | richinput/richinput.py | get_cursor_position | def get_cursor_position():
"""Write an escape sequence to ask for the current cursor position.
Since the result is written on the standard input, this function
should not be used if you expect that your input has been pasted,
because the characters in the buffer would be read before the
answer about the cursor."""
# "cursor position report" in ECMA-48.
it = get_char(u'\x1b[6n')
sequence = consume_escape_sequence(it, next(it))
# sequence format is \x1b[<row>;<col>R
return tuple(int(x) for x in sequence[2:-1].split(u';')) | python | def get_cursor_position():
"""Write an escape sequence to ask for the current cursor position.
Since the result is written on the standard input, this function
should not be used if you expect that your input has been pasted,
because the characters in the buffer would be read before the
answer about the cursor."""
# "cursor position report" in ECMA-48.
it = get_char(u'\x1b[6n')
sequence = consume_escape_sequence(it, next(it))
# sequence format is \x1b[<row>;<col>R
return tuple(int(x) for x in sequence[2:-1].split(u';')) | [
"def",
"get_cursor_position",
"(",
")",
":",
"# \"cursor position report\" in ECMA-48.",
"it",
"=",
"get_char",
"(",
"u'\\x1b[6n'",
")",
"sequence",
"=",
"consume_escape_sequence",
"(",
"it",
",",
"next",
"(",
"it",
")",
")",
"# sequence format is \\x1b[<row>;<col>R",
"return",
"tuple",
"(",
"int",
"(",
"x",
")",
"for",
"x",
"in",
"sequence",
"[",
"2",
":",
"-",
"1",
"]",
".",
"split",
"(",
"u';'",
")",
")"
] | Write an escape sequence to ask for the current cursor position.
Since the result is written on the standard input, this function
should not be used if you expect that your input has been pasted,
because the characters in the buffer would be read before the
answer about the cursor. | [
"Write",
"an",
"escape",
"sequence",
"to",
"ask",
"for",
"the",
"current",
"cursor",
"position",
".",
"Since",
"the",
"result",
"is",
"written",
"on",
"the",
"standard",
"input",
"this",
"function",
"should",
"not",
"be",
"used",
"if",
"you",
"expect",
"that",
"your",
"input",
"has",
"been",
"pasted",
"because",
"the",
"characters",
"in",
"the",
"buffer",
"would",
"be",
"read",
"before",
"the",
"answer",
"about",
"the",
"cursor",
"."
] | 858c6068d80377148b89dcf9107f4e46a2b464d4 | https://github.com/riquito/richinput/blob/858c6068d80377148b89dcf9107f4e46a2b464d4/richinput/richinput.py#L221-L233 |
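A minimal call sketch, assuming richinput is installed and the import path matches the file path in the record; it only makes sense on an interactive ANSI terminal.

    from richinput.richinput import get_cursor_position

    # Avoid calling this when input may have been pasted: the terminal's reply
    # arrives on stdin, as the docstring above warns.
    row, col = get_cursor_position()
    print(row, col)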
251,179 | klen/bottle-jade | bottle_jade.py | Environment.get_template | def get_template(self, path):
""" Load and compile template. """
if self.options['debug'] and self.options['cache_size']:
return self.cache.get(path, self.cache_template(path))
return self.load_template(path) | python | def get_template(self, path):
""" Load and compile template. """
if self.options['debug'] and self.options['cache_size']:
return self.cache.get(path, self.cache_template(path))
return self.load_template(path) | [
"def",
"get_template",
"(",
"self",
",",
"path",
")",
":",
"if",
"self",
".",
"options",
"[",
"'debug'",
"]",
"and",
"self",
".",
"options",
"[",
"'cache_size'",
"]",
":",
"return",
"self",
".",
"cache",
".",
"get",
"(",
"path",
",",
"self",
".",
"cache_template",
"(",
"path",
")",
")",
"return",
"self",
".",
"load_template",
"(",
"path",
")"
] | Load and compile template. | [
"Load",
"and",
"compile",
"template",
"."
] | a8eb3b5d8e741540ea85cbc0c18952fbd68d7476 | https://github.com/klen/bottle-jade/blob/a8eb3b5d8e741540ea85cbc0c18952fbd68d7476/bottle_jade.py#L150-L155 |
251,180 | klmitch/framer | framer/framers.py | StructFramer.decode_length | def decode_length(self, data, state):
"""
Extract and decode a frame length from the data buffer. The
consumed data should be removed from the buffer. If the
length data is incomplete, must raise a ``NoFrames``
exception.
:param data: A ``bytearray`` instance containing the data so
far read.
:param state: An instance of ``FramerState``. If the buffer
contains a partial encoded length, this object
can be used to store state information to allow
the remainder of the length to be read.
:returns: The frame length, as an integer.
"""
# Do we have enough data yet?
if len(data) < self.fmt.size:
raise exc.NoFrames()
# Extract the length
length = self.fmt.unpack(six.binary_type(data[:self.fmt.size]))[0]
del data[:self.fmt.size]
# Return the length
return length | python | def decode_length(self, data, state):
"""
Extract and decode a frame length from the data buffer. The
consumed data should be removed from the buffer. If the
length data is incomplete, must raise a ``NoFrames``
exception.
:param data: A ``bytearray`` instance containing the data so
far read.
:param state: An instance of ``FramerState``. If the buffer
contains a partial encoded length, this object
can be used to store state information to allow
the remainder of the length to be read.
:returns: The frame length, as an integer.
"""
# Do we have enough data yet?
if len(data) < self.fmt.size:
raise exc.NoFrames()
# Extract the length
length = self.fmt.unpack(six.binary_type(data[:self.fmt.size]))[0]
del data[:self.fmt.size]
# Return the length
return length | [
"def",
"decode_length",
"(",
"self",
",",
"data",
",",
"state",
")",
":",
"# Do we have enough data yet?",
"if",
"len",
"(",
"data",
")",
"<",
"self",
".",
"fmt",
".",
"size",
":",
"raise",
"exc",
".",
"NoFrames",
"(",
")",
"# Extract the length",
"length",
"=",
"self",
".",
"fmt",
".",
"unpack",
"(",
"six",
".",
"binary_type",
"(",
"data",
"[",
":",
"self",
".",
"fmt",
".",
"size",
"]",
")",
")",
"[",
"0",
"]",
"del",
"data",
"[",
":",
"self",
".",
"fmt",
".",
"size",
"]",
"# Return the length",
"return",
"length"
] | Extract and decode a frame length from the data buffer. The
consumed data should be removed from the buffer. If the
length data is incomplete, must raise a ``NoFrames``
exception.
:param data: A ``bytearray`` instance containing the data so
far read.
:param state: An instance of ``FramerState``. If the buffer
contains a partial encoded length, this object
can be used to store state information to allow
the remainder of the length to be read.
:returns: The frame length, as an integer. | [
"Extract",
"and",
"decode",
"a",
"frame",
"length",
"from",
"the",
"data",
"buffer",
".",
"The",
"consumed",
"data",
"should",
"be",
"removed",
"from",
"the",
"buffer",
".",
"If",
"the",
"length",
"data",
"is",
"incomplete",
"must",
"raise",
"a",
"NoFrames",
"exception",
"."
] | bd34cee9737793dab61d1d8973930b64bd08acb4 | https://github.com/klmitch/framer/blob/bd34cee9737793dab61d1d8973930b64bd08acb4/framer/framers.py#L600-L626 |
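The record describes length-prefixed framing driven by a struct format; this standalone sketch reproduces the same unpack-and-consume step with the standard library only, and is not the framer package's API.

    import struct

    fmt = struct.Struct('>I')                      # 4-byte big-endian length prefix
    buf = bytearray(fmt.pack(5) + b'hello' + b'extra')

    if len(buf) >= fmt.size:                       # otherwise the framer raises NoFrames
        length = fmt.unpack(bytes(buf[:fmt.size]))[0]
        del buf[:fmt.size]                         # consume the prefix, as above
        frame, buf = bytes(buf[:length]), buf[length:]
        # frame == b'hello', buf == bytearray(b'extra')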
251,181 | openbermuda/ripl | ripl/csv2json.py | Csv2Json.interpret | def interpret(self, infile):
""" Process a file of rest and return json """
# need row headings
data = pandas.read_csv(infile)
# FIXME find the right foo
return json.dumps(data.foo()) | python | def interpret(self, infile):
""" Process a file of rest and return json """
# need row headings
data = pandas.read_csv(infile)
# FIXME find the right foo
return json.dumps(data.foo()) | [
"def",
"interpret",
"(",
"self",
",",
"infile",
")",
":",
"# need row headings",
"data",
"=",
"pandas",
".",
"read_csv",
"(",
"infile",
")",
"# FIXME find the right foo",
"return",
"json",
".",
"dumps",
"(",
"data",
".",
"foo",
"(",
")",
")"
] | Process a file of rest and return json | [
"Process",
"a",
"file",
"of",
"rest",
"and",
"return",
"json"
] | 4886b1a697e4b81c2202db9cb977609e034f8e70 | https://github.com/openbermuda/ripl/blob/4886b1a697e4b81c2202db9cb977609e034f8e70/ripl/csv2json.py#L23-L30 |
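The snippet above still carries its FIXME (pandas has no DataFrame.foo). A working equivalent, assuming the goal is one JSON object per CSV row, might be:

    import pandas

    def interpret(infile):
        # Column names become keys; each row becomes one JSON object.
        return pandas.read_csv(infile).to_json(orient='records')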
251,182 | bsdlp/solutions | solutions/__init__.py | binary_search | def binary_search(data, target, lo=0, hi=None):
"""
Perform binary search on sorted list data for target. Returns int
representing position of target in data.
"""
hi = hi if hi is not None else len(data)
mid = (lo + hi) // 2
if hi < 2 or hi > len(data) or target > data[-1] or target < data[0]:
return -1
if data[mid] > target:
return binary_search(data, target, lo=lo, hi=mid)
elif data[mid] < target:
return binary_search(data, target, lo=(mid + 1), hi=hi)
elif data[mid] == target:
return mid | python | def binary_search(data, target, lo=0, hi=None):
"""
Perform binary search on sorted list data for target. Returns int
representing position of target in data.
"""
hi = hi if hi is not None else len(data)
mid = (lo + hi) // 2
if hi < 2 or hi > len(data) or target > data[-1] or target < data[0]:
return -1
if data[mid] > target:
return binary_search(data, target, lo=lo, hi=mid)
elif data[mid] < target:
return binary_search(data, target, lo=(mid + 1), hi=hi)
elif data[mid] == target:
return mid | [
"def",
"binary_search",
"(",
"data",
",",
"target",
",",
"lo",
"=",
"0",
",",
"hi",
"=",
"None",
")",
":",
"hi",
"=",
"hi",
"if",
"hi",
"is",
"not",
"None",
"else",
"len",
"(",
"data",
")",
"mid",
"=",
"(",
"lo",
"+",
"hi",
")",
"//",
"2",
"if",
"hi",
"<",
"2",
"or",
"hi",
">",
"len",
"(",
"data",
")",
"or",
"target",
">",
"data",
"[",
"-",
"1",
"]",
"or",
"target",
"<",
"data",
"[",
"0",
"]",
":",
"return",
"-",
"1",
"if",
"data",
"[",
"mid",
"]",
">",
"target",
":",
"return",
"binary_search",
"(",
"data",
",",
"target",
",",
"lo",
"=",
"lo",
",",
"hi",
"=",
"mid",
")",
"elif",
"data",
"[",
"mid",
"]",
"<",
"target",
":",
"return",
"binary_search",
"(",
"data",
",",
"target",
",",
"lo",
"=",
"(",
"mid",
"+",
"1",
")",
",",
"hi",
"=",
"hi",
")",
"elif",
"data",
"[",
"mid",
"]",
"==",
"target",
":",
"return",
"mid"
] | Perform binary search on sorted list data for target. Returns int
representing position of target in data. | [
"Perform",
"binary",
"search",
"on",
"sorted",
"list",
"data",
"for",
"target",
".",
"Returns",
"int",
"representing",
"position",
"of",
"target",
"in",
"data",
"."
] | e85daeaab10796d5746fcfa7157e65e5210fa07b | https://github.com/bsdlp/solutions/blob/e85daeaab10796d5746fcfa7157e65e5210fa07b/solutions/__init__.py#L14-L28 |
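A quick usage check for the function above, plus a caveat worth noting.

    data = [1, 3, 5, 7, 9, 11]

    assert binary_search(data, 7) == 3
    assert binary_search(data, 99) == -1      # target outside the data range
    assert binary_search([42], 42) == -1      # inputs shorter than two items are rejected
    # Caveat: a target that is absent but falls inside the range (e.g. 4 here)
    # recurses without terminating, so guard against that case before calling.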
251,183 | gnarlychicken/ticket_auth | ticket_auth/ticket_factory.py | TicketFactory.new | def new(self, user_id, tokens=None, user_data=None, valid_until=None,
client_ip=None, encoding='utf-8'):
"""Creates a new authentication ticket.
Args:
user_id: User id to store in ticket (stored in plain text)
tokens: Optional sequence of token strings to store in the ticket
(stored in plain text).
user_data: Optional user data to store in the ticket (string like
object stored in plain text)
valid_until: Expiration time of ticket as a integer (typically
time.time() + seconds).
client_ip: Optional string or ip_address.IPAddress of the client.
encoding: Optional encoding type that is used when hashing the
strings passed to the function
Returns:
A ticket string that can later be used to identify the user
"""
if valid_until is None:
valid_until = int(time.time()) + TicketFactory._DEFAULT_TIMEOUT
else:
valid_until = int(valid_until)
# Make sure we dont have any exclamations in the user_id
user_id = ulp.quote(user_id)
# Create a comma seperated list of tokens
token_str = ''
if tokens:
# Escape characters in our tokens
token_str = ','.join((ulp.quote(t) for t in tokens))
# Encode our user data (a string)
user_str = '' if not user_data else ulp.quote(user_data)
# Get our address
ip = self._DEFAULT_IP if client_ip is None else ip_address(client_ip)
# Create our digest
data0 = bytes([ip.version]) + ip.packed + pack(">I", valid_until)
data1 = ('\0'.join((user_id, token_str, user_str))).encode(encoding)
digest = self._hexdigest(data0, data1)
# digest + timestamp as an eight character hexadecimal + userid
parts = ('{0}{1:08x}{2}'.format(digest, valid_until, user_id),
token_str, user_str)
return '!'.join(parts) | python | def new(self, user_id, tokens=None, user_data=None, valid_until=None,
client_ip=None, encoding='utf-8'):
"""Creates a new authentication ticket.
Args:
user_id: User id to store in ticket (stored in plain text)
tokens: Optional sequence of token strings to store in the ticket
(stored in plain text).
user_data: Optional user data to store in the ticket (string like
object stored in plain text)
valid_until: Expiration time of ticket as a integer (typically
time.time() + seconds).
client_ip: Optional string or ip_address.IPAddress of the client.
encoding: Optional encoding type that is used when hashing the
strings passed to the function
Returns:
A ticket string that can later be used to identify the user
"""
if valid_until is None:
valid_until = int(time.time()) + TicketFactory._DEFAULT_TIMEOUT
else:
valid_until = int(valid_until)
# Make sure we dont have any exclamations in the user_id
user_id = ulp.quote(user_id)
# Create a comma seperated list of tokens
token_str = ''
if tokens:
# Escape characters in our tokens
token_str = ','.join((ulp.quote(t) for t in tokens))
# Encode our user data (a string)
user_str = '' if not user_data else ulp.quote(user_data)
# Get our address
ip = self._DEFAULT_IP if client_ip is None else ip_address(client_ip)
# Create our digest
data0 = bytes([ip.version]) + ip.packed + pack(">I", valid_until)
data1 = ('\0'.join((user_id, token_str, user_str))).encode(encoding)
digest = self._hexdigest(data0, data1)
# digest + timestamp as an eight character hexadecimal + userid
parts = ('{0}{1:08x}{2}'.format(digest, valid_until, user_id),
token_str, user_str)
return '!'.join(parts) | [
"def",
"new",
"(",
"self",
",",
"user_id",
",",
"tokens",
"=",
"None",
",",
"user_data",
"=",
"None",
",",
"valid_until",
"=",
"None",
",",
"client_ip",
"=",
"None",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"if",
"valid_until",
"is",
"None",
":",
"valid_until",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
"+",
"TicketFactory",
".",
"_DEFAULT_TIMEOUT",
"else",
":",
"valid_until",
"=",
"int",
"(",
"valid_until",
")",
"# Make sure we dont have any exclamations in the user_id",
"user_id",
"=",
"ulp",
".",
"quote",
"(",
"user_id",
")",
"# Create a comma seperated list of tokens",
"token_str",
"=",
"''",
"if",
"tokens",
":",
"# Escape characters in our tokens",
"token_str",
"=",
"','",
".",
"join",
"(",
"(",
"ulp",
".",
"quote",
"(",
"t",
")",
"for",
"t",
"in",
"tokens",
")",
")",
"# Encode our user data (a string)",
"user_str",
"=",
"''",
"if",
"not",
"user_data",
"else",
"ulp",
".",
"quote",
"(",
"user_data",
")",
"# Get our address",
"ip",
"=",
"self",
".",
"_DEFAULT_IP",
"if",
"client_ip",
"is",
"None",
"else",
"ip_address",
"(",
"client_ip",
")",
"# Create our digest",
"data0",
"=",
"bytes",
"(",
"[",
"ip",
".",
"version",
"]",
")",
"+",
"ip",
".",
"packed",
"+",
"pack",
"(",
"\">I\"",
",",
"valid_until",
")",
"data1",
"=",
"(",
"'\\0'",
".",
"join",
"(",
"(",
"user_id",
",",
"token_str",
",",
"user_str",
")",
")",
")",
".",
"encode",
"(",
"encoding",
")",
"digest",
"=",
"self",
".",
"_hexdigest",
"(",
"data0",
",",
"data1",
")",
"# digest + timestamp as an eight character hexadecimal + userid",
"parts",
"=",
"(",
"'{0}{1:08x}{2}'",
".",
"format",
"(",
"digest",
",",
"valid_until",
",",
"user_id",
")",
",",
"token_str",
",",
"user_str",
")",
"return",
"'!'",
".",
"join",
"(",
"parts",
")"
] | Creates a new authentication ticket.
Args:
user_id: User id to store in ticket (stored in plain text)
tokens: Optional sequence of token strings to store in the ticket
(stored in plain text).
user_data: Optional user data to store in the ticket (string like
object stored in plain text)
valid_until: Expiration time of ticket as a integer (typically
time.time() + seconds).
client_ip: Optional string or ip_address.IPAddress of the client.
encoding: Optional encoding type that is used when hashing the
strings passed to the function
Returns:
A ticket string that can later be used to identify the user | [
"Creates",
"a",
"new",
"authentication",
"ticket",
"."
] | 814eaa2cbe9c8dd9f4ded611def85fdd57763f8d | https://github.com/gnarlychicken/ticket_auth/blob/814eaa2cbe9c8dd9f4ded611def85fdd57763f8d/ticket_auth/ticket_factory.py#L34-L81 |
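A usage sketch for new(); 'factory' is assumed to be an already constructed TicketFactory, since the constructor and its secret are not part of this record.

    import time

    # 'factory' is an assumed, pre-built TicketFactory instance.
    ticket = factory.new('alice',
                         tokens=('admin', 'editor'),
                         user_data='theme=dark',
                         valid_until=int(time.time()) + 3600,
                         client_ip='192.0.2.10')
    # Layout: hex digest + 8-hex-digit expiry + user_id, then '!' + tokens + '!' + user_data.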
251,184 | gnarlychicken/ticket_auth | ticket_auth/ticket_factory.py | TicketFactory.validate | def validate(self, ticket, client_ip=None, now=None, encoding='utf-8'):
"""Validates the passed ticket, , raises a TicketError
on failure
Args:
ticket: String value (possibly generated by new function)
client_ip: Optional IPAddress of client, should be passed if the
ip address was passed on ticket creation.
now: Optional (defaults to time.time()) time to use when
validating ticket date
Returns:
Ticket a TicketInfo tuple containing the users authentication details on
success
Raises:
TicketParseError: Invalid ticket format
TicketDigestError: Digest is incorrect (ticket data was modified)
TicketExpired: Ticket has passed expiration date
"""
parts = self.parse(ticket)
# Check if our ticket matches
new_ticket = self.new(*(parts[1:]), client_ip=client_ip, encoding=encoding)
if new_ticket[:self._hash.digest_size * 2] != parts.digest:
raise TicketDigestError(ticket)
if now is None:
now = time.time()
if parts.valid_until <= now:
raise TicketExpired(ticket)
return parts | python | def validate(self, ticket, client_ip=None, now=None, encoding='utf-8'):
"""Validates the passed ticket, , raises a TicketError
on failure
Args:
ticket: String value (possibly generated by new function)
client_ip: Optional IPAddress of client, should be passed if the
ip address was passed on ticket creation.
now: Optional (defaults to time.time()) time to use when
validating ticket date
Returns:
Ticket a TicketInfo tuple containing the users authentication details on
success
Raises:
TicketParseError: Invalid ticket format
TicketDigestError: Digest is incorrect (ticket data was modified)
TicketExpired: Ticket has passed expiration date
"""
parts = self.parse(ticket)
# Check if our ticket matches
new_ticket = self.new(*(parts[1:]), client_ip=client_ip, encoding=encoding)
if new_ticket[:self._hash.digest_size * 2] != parts.digest:
raise TicketDigestError(ticket)
if now is None:
now = time.time()
if parts.valid_until <= now:
raise TicketExpired(ticket)
return parts | [
"def",
"validate",
"(",
"self",
",",
"ticket",
",",
"client_ip",
"=",
"None",
",",
"now",
"=",
"None",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"parts",
"=",
"self",
".",
"parse",
"(",
"ticket",
")",
"# Check if our ticket matches",
"new_ticket",
"=",
"self",
".",
"new",
"(",
"*",
"(",
"parts",
"[",
"1",
":",
"]",
")",
",",
"client_ip",
"=",
"client_ip",
",",
"encoding",
"=",
"encoding",
")",
"if",
"new_ticket",
"[",
":",
"self",
".",
"_hash",
".",
"digest_size",
"*",
"2",
"]",
"!=",
"parts",
".",
"digest",
":",
"raise",
"TicketDigestError",
"(",
"ticket",
")",
"if",
"now",
"is",
"None",
":",
"now",
"=",
"time",
".",
"time",
"(",
")",
"if",
"parts",
".",
"valid_until",
"<=",
"now",
":",
"raise",
"TicketExpired",
"(",
"ticket",
")",
"return",
"parts"
] | Validates the passed ticket, , raises a TicketError
on failure
Args:
ticket: String value (possibly generated by new function)
client_ip: Optional IPAddress of client, should be passed if the
ip address was passed on ticket creation.
now: Optional (defaults to time.time()) time to use when
validating ticket date
Returns:
Ticket a TicketInfo tuple containing the users authentication details on
success
Raises:
TicketParseError: Invalid ticket format
TicketDigestError: Digest is incorrect (ticket data was modified)
TicketExpired: Ticket has passed expiration date | [
"Validates",
"the",
"passed",
"ticket",
"raises",
"a",
"TicketError",
"on",
"failure"
] | 814eaa2cbe9c8dd9f4ded611def85fdd57763f8d | https://github.com/gnarlychicken/ticket_auth/blob/814eaa2cbe9c8dd9f4ded611def85fdd57763f8d/ticket_auth/ticket_factory.py#L83-L118 |
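Continuing the sketch above; the TicketInfo field names are inferred from the TicketInfo construction in parse() further down, and the exception import paths are not shown here, so the catch is deliberately broad.

    try:
        info = factory.validate(ticket, client_ip='192.0.2.10')
        print(info.user_id, info.tokens, info.user_data)
    except Exception:
        # TicketParseError, TicketDigestError or TicketExpired, per the docstring above.
        print('ticket rejected: bad format, bad digest, or expired')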
251,185 | gnarlychicken/ticket_auth | ticket_auth/ticket_factory.py | TicketFactory.parse | def parse(self, ticket):
"""Parses the passed ticket, returning a tuple containing the digest,
user_id, valid_until, tokens, and user_data fields
"""
if len(ticket) < self._min_ticket_size():
raise TicketParseError(ticket, 'Invalid ticket length')
digest_len = self._hash.digest_size * 2
digest = ticket[:digest_len]
try:
time_len = 8
time = int(ticket[digest_len:digest_len + time_len], 16)
except:
raise TicketParseError(ticket, 'Invalid time field')
parts = ticket[digest_len + time_len:].split('!')
if len(parts) != 3:
raise TicketParseError(ticket, 'Missing parts')
user_id = ulp.unquote(parts[0])
tokens = ()
if parts[1]:
tokens = tuple((ulp.unquote(t) for t in parts[1].split(',')))
user_data = ulp.unquote(parts[2])
return TicketInfo(digest, user_id, tokens, user_data, time) | python | def parse(self, ticket):
"""Parses the passed ticket, returning a tuple containing the digest,
user_id, valid_until, tokens, and user_data fields
"""
if len(ticket) < self._min_ticket_size():
raise TicketParseError(ticket, 'Invalid ticket length')
digest_len = self._hash.digest_size * 2
digest = ticket[:digest_len]
try:
time_len = 8
time = int(ticket[digest_len:digest_len + time_len], 16)
except:
raise TicketParseError(ticket, 'Invalid time field')
parts = ticket[digest_len + time_len:].split('!')
if len(parts) != 3:
raise TicketParseError(ticket, 'Missing parts')
user_id = ulp.unquote(parts[0])
tokens = ()
if parts[1]:
tokens = tuple((ulp.unquote(t) for t in parts[1].split(',')))
user_data = ulp.unquote(parts[2])
return TicketInfo(digest, user_id, tokens, user_data, time) | [
"def",
"parse",
"(",
"self",
",",
"ticket",
")",
":",
"if",
"len",
"(",
"ticket",
")",
"<",
"self",
".",
"_min_ticket_size",
"(",
")",
":",
"raise",
"TicketParseError",
"(",
"ticket",
",",
"'Invalid ticket length'",
")",
"digest_len",
"=",
"self",
".",
"_hash",
".",
"digest_size",
"*",
"2",
"digest",
"=",
"ticket",
"[",
":",
"digest_len",
"]",
"try",
":",
"time_len",
"=",
"8",
"time",
"=",
"int",
"(",
"ticket",
"[",
"digest_len",
":",
"digest_len",
"+",
"time_len",
"]",
",",
"16",
")",
"except",
":",
"raise",
"TicketParseError",
"(",
"ticket",
",",
"'Invalid time field'",
")",
"parts",
"=",
"ticket",
"[",
"digest_len",
"+",
"time_len",
":",
"]",
".",
"split",
"(",
"'!'",
")",
"if",
"len",
"(",
"parts",
")",
"!=",
"3",
":",
"raise",
"TicketParseError",
"(",
"ticket",
",",
"'Missing parts'",
")",
"user_id",
"=",
"ulp",
".",
"unquote",
"(",
"parts",
"[",
"0",
"]",
")",
"tokens",
"=",
"(",
")",
"if",
"parts",
"[",
"1",
"]",
":",
"tokens",
"=",
"tuple",
"(",
"(",
"ulp",
".",
"unquote",
"(",
"t",
")",
"for",
"t",
"in",
"parts",
"[",
"1",
"]",
".",
"split",
"(",
"','",
")",
")",
")",
"user_data",
"=",
"ulp",
".",
"unquote",
"(",
"parts",
"[",
"2",
"]",
")",
"return",
"TicketInfo",
"(",
"digest",
",",
"user_id",
",",
"tokens",
",",
"user_data",
",",
"time",
")"
] | Parses the passed ticket, returning a tuple containing the digest,
user_id, valid_until, tokens, and user_data fields | [
"Parses",
"the",
"passed",
"ticket",
"returning",
"a",
"tuple",
"containing",
"the",
"digest",
"user_id",
"valid_until",
"tokens",
"and",
"user_data",
"fields"
] | 814eaa2cbe9c8dd9f4ded611def85fdd57763f8d | https://github.com/gnarlychicken/ticket_auth/blob/814eaa2cbe9c8dd9f4ded611def85fdd57763f8d/ticket_auth/ticket_factory.py#L120-L147 |
251,186 | sbusard/wagoner | wagoner/tree.py | Tree.random_word | def random_word(self, *args, **kwargs):
"""
Return a random word from this tree. The length of the word depends on
the this tree.
:return: a random word from this tree.
args and kwargs are ignored.
"""
word = ""
current = (">", 0)
while current[0] != "<":
choices = self[current]
choice = random_weighted_choice(choices)
current = choice
word += current[0][-1]
return word[:-1] | python | def random_word(self, *args, **kwargs):
"""
Return a random word from this tree. The length of the word depends on
the this tree.
:return: a random word from this tree.
args and kwargs are ignored.
"""
word = ""
current = (">", 0)
while current[0] != "<":
choices = self[current]
choice = random_weighted_choice(choices)
current = choice
word += current[0][-1]
return word[:-1] | [
"def",
"random_word",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"word",
"=",
"\"\"",
"current",
"=",
"(",
"\">\"",
",",
"0",
")",
"while",
"current",
"[",
"0",
"]",
"!=",
"\"<\"",
":",
"choices",
"=",
"self",
"[",
"current",
"]",
"choice",
"=",
"random_weighted_choice",
"(",
"choices",
")",
"current",
"=",
"choice",
"word",
"+=",
"current",
"[",
"0",
"]",
"[",
"-",
"1",
"]",
"return",
"word",
"[",
":",
"-",
"1",
"]"
] | Return a random word from this tree. The length of the word depends on
the this tree.
:return: a random word from this tree.
args and kwargs are ignored. | [
"Return",
"a",
"random",
"word",
"from",
"this",
"tree",
".",
"The",
"length",
"of",
"the",
"word",
"depends",
"on",
"the",
"this",
"tree",
"."
] | 7f83d66bbd0e009e4d4232ffdf319bd5a2a5683b | https://github.com/sbusard/wagoner/blob/7f83d66bbd0e009e4d4232ffdf319bd5a2a5683b/wagoner/tree.py#L112-L128 |
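A toy illustration of the walk performed above, with a hand-built tree and a stand-in for wagoner's random_weighted_choice; real trees are built elsewhere in the package, which this record does not show.

    import random

    def random_weighted_choice(choices):
        # Stand-in helper: pick a key with probability proportional to its weight.
        keys = list(choices)
        return random.choices(keys, weights=[choices[k] for k in keys])[0]

    # Nodes are (text, position) pairs; a node starting with "<" ends the word.
    toy_tree = {
        (">", 0): {("a", 1): 1},
        ("a", 1): {("ab", 2): 2, ("<", 2): 1},
        ("ab", 2): {("<", 3): 1},
    }

    word, current = "", (">", 0)
    while current[0] != "<":
        current = random_weighted_choice(toy_tree[current])
        word += current[0][-1]
    print(word[:-1])                      # prints 'a' or 'ab'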
251,187 | jmgilman/Neolib | neolib/item/UserShopFrontItem.py | UserShopFrontItem.buy | def buy(self):
""" Attempts to purchase a user shop item, returns result
Uses the associated user and buyURL to attempt to purchase the user shop item. Returns
whether or not the item was successfully bought.
Returns
bool - True if successful, false otherwise
"""
# Buy the item
pg = self.usr.getPage("http://www.neopets.com/" + self.buyURL, vars = {'Referer': 'http://www.neopets.com/browseshop.phtml?owner=' + self.owner})
# If it was successful a redirect to the shop is sent
if "(owned by" in pg.content:
return True
elif "does not exist in this shop" in pg.content:
return False
else:
logging.getLogger("neolib.item").exception("Unknown message when attempting to buy user shop item.", {'pg': pg})
return False | python | def buy(self):
""" Attempts to purchase a user shop item, returns result
Uses the associated user and buyURL to attempt to purchase the user shop item. Returns
whether or not the item was successfully bought.
Returns
bool - True if successful, false otherwise
"""
# Buy the item
pg = self.usr.getPage("http://www.neopets.com/" + self.buyURL, vars = {'Referer': 'http://www.neopets.com/browseshop.phtml?owner=' + self.owner})
# If it was successful a redirect to the shop is sent
if "(owned by" in pg.content:
return True
elif "does not exist in this shop" in pg.content:
return False
else:
logging.getLogger("neolib.item").exception("Unknown message when attempting to buy user shop item.", {'pg': pg})
return False | [
"def",
"buy",
"(",
"self",
")",
":",
"# Buy the item",
"pg",
"=",
"self",
".",
"usr",
".",
"getPage",
"(",
"\"http://www.neopets.com/\"",
"+",
"self",
".",
"buyURL",
",",
"vars",
"=",
"{",
"'Referer'",
":",
"'http://www.neopets.com/browseshop.phtml?owner='",
"+",
"self",
".",
"owner",
"}",
")",
"# If it was successful a redirect to the shop is sent",
"if",
"\"(owned by\"",
"in",
"pg",
".",
"content",
":",
"return",
"True",
"elif",
"\"does not exist in this shop\"",
"in",
"pg",
".",
"content",
":",
"return",
"False",
"else",
":",
"logging",
".",
"getLogger",
"(",
"\"neolib.item\"",
")",
".",
"exception",
"(",
"\"Unknown message when attempting to buy user shop item.\"",
",",
"{",
"'pg'",
":",
"pg",
"}",
")",
"return",
"False"
] | Attempts to purchase a user shop item, returns result
Uses the associated user and buyURL to attempt to purchase the user shop item. Returns
whether or not the item was successfully bought.
Returns
bool - True if successful, false otherwise | [
"Attempts",
"to",
"purchase",
"a",
"user",
"shop",
"item",
"returns",
"result",
"Uses",
"the",
"associated",
"user",
"and",
"buyURL",
"to",
"attempt",
"to",
"purchase",
"the",
"user",
"shop",
"item",
".",
"Returns",
"whether",
"or",
"not",
"the",
"item",
"was",
"successfully",
"bought",
".",
"Returns",
"bool",
"-",
"True",
"if",
"successful",
"false",
"otherwise"
] | 228fafeaed0f3195676137732384a14820ae285c | https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/item/UserShopFrontItem.py#L33-L52 |
251,188 | Pringley/spyglass | spyglass/torrent.py | Torrent.fetch | def fetch(self, cache=None):
"""Query the info page to fill in the property cache.
Return a dictionary with the fetched properties and values.
"""
self.reset()
soup = get(self.url).soup
details = soup.find(id="detailsframe")
getdef = lambda s: [elem
for elem in details.find("dt",
text=re.compile(s)).next_siblings
if elem.name == 'dd'][0]
getdefstring = lambda s: getdef(s).string.strip()
info = {
"title": details.find(id="title").string.strip(),
"type": getdefstring("Type:"),
"files": getdefstring("Files:"),
"size": getdefstring("Size:"),
"uploaded": getdefstring("Uploaded:"),
"submitter": getdef("By:").parent.find("a", href=re.compile("user")).string.strip(),
"seeders": getdefstring("Seeders:"),
"leechers": getdefstring("Leechers:"),
"comments": details.find(id="NumComments").string.strip(),
"link": details.find("a", href=re.compile("^magnet\:"))['href'].strip(),
}
if self._use_cache(cache):
self._attrs = info
self._fetched = True
return info | python | def fetch(self, cache=None):
"""Query the info page to fill in the property cache.
Return a dictionary with the fetched properties and values.
"""
self.reset()
soup = get(self.url).soup
details = soup.find(id="detailsframe")
getdef = lambda s: [elem
for elem in details.find("dt",
text=re.compile(s)).next_siblings
if elem.name == 'dd'][0]
getdefstring = lambda s: getdef(s).string.strip()
info = {
"title": details.find(id="title").string.strip(),
"type": getdefstring("Type:"),
"files": getdefstring("Files:"),
"size": getdefstring("Size:"),
"uploaded": getdefstring("Uploaded:"),
"submitter": getdef("By:").parent.find("a", href=re.compile("user")).string.strip(),
"seeders": getdefstring("Seeders:"),
"leechers": getdefstring("Leechers:"),
"comments": details.find(id="NumComments").string.strip(),
"link": details.find("a", href=re.compile("^magnet\:"))['href'].strip(),
}
if self._use_cache(cache):
self._attrs = info
self._fetched = True
return info | [
"def",
"fetch",
"(",
"self",
",",
"cache",
"=",
"None",
")",
":",
"self",
".",
"reset",
"(",
")",
"soup",
"=",
"get",
"(",
"self",
".",
"url",
")",
".",
"soup",
"details",
"=",
"soup",
".",
"find",
"(",
"id",
"=",
"\"detailsframe\"",
")",
"getdef",
"=",
"lambda",
"s",
":",
"[",
"elem",
"for",
"elem",
"in",
"details",
".",
"find",
"(",
"\"dt\"",
",",
"text",
"=",
"re",
".",
"compile",
"(",
"s",
")",
")",
".",
"next_siblings",
"if",
"elem",
".",
"name",
"==",
"'dd'",
"]",
"[",
"0",
"]",
"getdefstring",
"=",
"lambda",
"s",
":",
"getdef",
"(",
"s",
")",
".",
"string",
".",
"strip",
"(",
")",
"info",
"=",
"{",
"\"title\"",
":",
"details",
".",
"find",
"(",
"id",
"=",
"\"title\"",
")",
".",
"string",
".",
"strip",
"(",
")",
",",
"\"type\"",
":",
"getdefstring",
"(",
"\"Type:\"",
")",
",",
"\"files\"",
":",
"getdefstring",
"(",
"\"Files:\"",
")",
",",
"\"size\"",
":",
"getdefstring",
"(",
"\"Size:\"",
")",
",",
"\"uploaded\"",
":",
"getdefstring",
"(",
"\"Uploaded:\"",
")",
",",
"\"submitter\"",
":",
"getdef",
"(",
"\"By:\"",
")",
".",
"parent",
".",
"find",
"(",
"\"a\"",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"\"user\"",
")",
")",
".",
"string",
".",
"strip",
"(",
")",
",",
"\"seeders\"",
":",
"getdefstring",
"(",
"\"Seeders:\"",
")",
",",
"\"leechers\"",
":",
"getdefstring",
"(",
"\"Leechers:\"",
")",
",",
"\"comments\"",
":",
"details",
".",
"find",
"(",
"id",
"=",
"\"NumComments\"",
")",
".",
"string",
".",
"strip",
"(",
")",
",",
"\"link\"",
":",
"details",
".",
"find",
"(",
"\"a\"",
",",
"href",
"=",
"re",
".",
"compile",
"(",
"\"^magnet\\:\"",
")",
")",
"[",
"'href'",
"]",
".",
"strip",
"(",
")",
",",
"}",
"if",
"self",
".",
"_use_cache",
"(",
"cache",
")",
":",
"self",
".",
"_attrs",
"=",
"info",
"self",
".",
"_fetched",
"=",
"True",
"return",
"info"
] | Query the info page to fill in the property cache.
Return a dictionary with the fetched properties and values. | [
"Query",
"the",
"info",
"page",
"to",
"fill",
"in",
"the",
"property",
"cache",
"."
] | 091d74f34837673af936daa9f462ad8216be9916 | https://github.com/Pringley/spyglass/blob/091d74f34837673af936daa9f462ad8216be9916/spyglass/torrent.py#L47-L76 |
251,189 | Pringley/spyglass | spyglass/torrent.py | Torrent.get | def get(self, item, cache=None):
"""Lookup a torrent info property.
If cache is True, check the cache first. If the cache is empty, then
fetch torrent info before returning it.
"""
if item not in self._keys:
raise KeyError(item)
if self._use_cache(cache) and (self._fetched or
item in self._attrs):
return self._attrs[item]
info = self.fetch(cache=cache)
return info[item] | python | def get(self, item, cache=None):
"""Lookup a torrent info property.
If cache is True, check the cache first. If the cache is empty, then
fetch torrent info before returning it.
"""
if item not in self._keys:
raise KeyError(item)
if self._use_cache(cache) and (self._fetched or
item in self._attrs):
return self._attrs[item]
info = self.fetch(cache=cache)
return info[item] | [
"def",
"get",
"(",
"self",
",",
"item",
",",
"cache",
"=",
"None",
")",
":",
"if",
"item",
"not",
"in",
"self",
".",
"_keys",
":",
"raise",
"KeyError",
"(",
"item",
")",
"if",
"self",
".",
"_use_cache",
"(",
"cache",
")",
"and",
"(",
"self",
".",
"_fetched",
"or",
"item",
"in",
"self",
".",
"_attrs",
")",
":",
"return",
"self",
".",
"_attrs",
"[",
"item",
"]",
"info",
"=",
"self",
".",
"fetch",
"(",
"cache",
"=",
"cache",
")",
"return",
"info",
"[",
"item",
"]"
] | Lookup a torrent info property.
If cache is True, check the cache first. If the cache is empty, then
fetch torrent info before returning it. | [
"Lookup",
"a",
"torrent",
"info",
"property",
"."
] | 091d74f34837673af936daa9f462ad8216be9916 | https://github.com/Pringley/spyglass/blob/091d74f34837673af936daa9f462ad8216be9916/spyglass/torrent.py#L78-L91 |
251,190 | Pringley/spyglass | spyglass/torrent.py | Torrent.as_dict | def as_dict(self, cache=None, fetch=True):
"""Return torrent properties as a dictionary.
Set the cache flag to False to disable the cache. On the other hand,
set the fetch flag to False to avoid fetching data if it's not cached.
"""
if not self._fetched and fetch:
info = self.fetch(cache)
elif self._use_cache(cache):
info = self._attrs.copy()
else:
info = {}
info.update(url=self.url)
return info | python | def as_dict(self, cache=None, fetch=True):
"""Return torrent properties as a dictionary.
Set the cache flag to False to disable the cache. On the other hand,
set the fetch flag to False to avoid fetching data if it's not cached.
"""
if not self._fetched and fetch:
info = self.fetch(cache)
elif self._use_cache(cache):
info = self._attrs.copy()
else:
info = {}
info.update(url=self.url)
return info | [
"def",
"as_dict",
"(",
"self",
",",
"cache",
"=",
"None",
",",
"fetch",
"=",
"True",
")",
":",
"if",
"not",
"self",
".",
"_fetched",
"and",
"fetch",
":",
"info",
"=",
"self",
".",
"fetch",
"(",
"cache",
")",
"elif",
"self",
".",
"_use_cache",
"(",
"cache",
")",
":",
"info",
"=",
"self",
".",
"_attrs",
".",
"copy",
"(",
")",
"else",
":",
"info",
"=",
"{",
"}",
"info",
".",
"update",
"(",
"url",
"=",
"self",
".",
"url",
")",
"return",
"info"
] | Return torrent properties as a dictionary.
Set the cache flag to False to disable the cache. On the other hand,
set the fetch flag to False to avoid fetching data if it's not cached. | [
"Return",
"torrent",
"properties",
"as",
"a",
"dictionary",
"."
] | 091d74f34837673af936daa9f462ad8216be9916 | https://github.com/Pringley/spyglass/blob/091d74f34837673af936daa9f462ad8216be9916/spyglass/torrent.py#L114-L128 |
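A usage sketch for the three methods above; 't' is assumed to be an already constructed spyglass Torrent, since construction is not shown in these records.

    # 't' is an assumed, pre-built spyglass Torrent instance.
    info = t.fetch()                    # scrape the details page and fill the cache
    print(info['title'], info['size'], info['seeders'])

    print(t.get('leechers'))            # answered from the cache after fetch()
    print(t.as_dict(fetch=False))       # cached values plus the url, no network call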
251,191 | fedora-infra/fmn.rules | fmn/rules/generic.py | user_filter | def user_filter(config, message, fasnick=None, *args, **kw):
""" A particular user
Use this rule to include messages that are associated with a
specific user.
"""
fasnick = kw.get('fasnick', fasnick)
if fasnick:
return fasnick in fmn.rules.utils.msg2usernames(message, **config) | python | def user_filter(config, message, fasnick=None, *args, **kw):
""" A particular user
Use this rule to include messages that are associated with a
specific user.
"""
fasnick = kw.get('fasnick', fasnick)
if fasnick:
return fasnick in fmn.rules.utils.msg2usernames(message, **config) | [
"def",
"user_filter",
"(",
"config",
",",
"message",
",",
"fasnick",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"fasnick",
"=",
"kw",
".",
"get",
"(",
"'fasnick'",
",",
"fasnick",
")",
"if",
"fasnick",
":",
"return",
"fasnick",
"in",
"fmn",
".",
"rules",
".",
"utils",
".",
"msg2usernames",
"(",
"message",
",",
"*",
"*",
"config",
")"
] | A particular user
Use this rule to include messages that are associated with a
specific user. | [
"A",
"particular",
"user"
] | f9ec790619fcc8b41803077c4dec094e5127fc24 | https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/generic.py#L12-L21 |
251,192 | fedora-infra/fmn.rules | fmn/rules/generic.py | not_user_filter | def not_user_filter(config, message, fasnick=None, *args, **kw):
""" Everything except a particular user
Use this rule to exclude messages that are associated with one or more
users. Specify several users by separating them with a comma ','.
"""
fasnick = kw.get('fasnick', fasnick)
if not fasnick:
return False
fasnick = (fasnick or []) and fasnick.split(',')
valid = True
for nick in fasnick:
if nick.strip() in fmn.rules.utils.msg2usernames(message, **config):
valid = False
break
return valid | python | def not_user_filter(config, message, fasnick=None, *args, **kw):
""" Everything except a particular user
Use this rule to exclude messages that are associated with one or more
users. Specify several users by separating them with a comma ','.
"""
fasnick = kw.get('fasnick', fasnick)
if not fasnick:
return False
fasnick = (fasnick or []) and fasnick.split(',')
valid = True
for nick in fasnick:
if nick.strip() in fmn.rules.utils.msg2usernames(message, **config):
valid = False
break
return valid | [
"def",
"not_user_filter",
"(",
"config",
",",
"message",
",",
"fasnick",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"fasnick",
"=",
"kw",
".",
"get",
"(",
"'fasnick'",
",",
"fasnick",
")",
"if",
"not",
"fasnick",
":",
"return",
"False",
"fasnick",
"=",
"(",
"fasnick",
"or",
"[",
"]",
")",
"and",
"fasnick",
".",
"split",
"(",
"','",
")",
"valid",
"=",
"True",
"for",
"nick",
"in",
"fasnick",
":",
"if",
"nick",
".",
"strip",
"(",
")",
"in",
"fmn",
".",
"rules",
".",
"utils",
".",
"msg2usernames",
"(",
"message",
",",
"*",
"*",
"config",
")",
":",
"valid",
"=",
"False",
"break",
"return",
"valid"
] | Everything except a particular user
Use this rule to exclude messages that are associated with one or more
users. Specify several users by separating them with a comma ','. | [
"Everything",
"except",
"a",
"particular",
"user"
] | f9ec790619fcc8b41803077c4dec094e5127fc24 | https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/generic.py#L26-L44 |
251,193 | fedora-infra/fmn.rules | fmn/rules/generic.py | _get_users_of_group | def _get_users_of_group(config, group):
""" Utility to query fas for users of a group. """
if not group:
return set()
fas = fmn.rules.utils.get_fas(config)
return fmn.rules.utils.get_user_of_group(config, fas, group) | python | def _get_users_of_group(config, group):
""" Utility to query fas for users of a group. """
if not group:
return set()
fas = fmn.rules.utils.get_fas(config)
return fmn.rules.utils.get_user_of_group(config, fas, group) | [
"def",
"_get_users_of_group",
"(",
"config",
",",
"group",
")",
":",
"if",
"not",
"group",
":",
"return",
"set",
"(",
")",
"fas",
"=",
"fmn",
".",
"rules",
".",
"utils",
".",
"get_fas",
"(",
"config",
")",
"return",
"fmn",
".",
"rules",
".",
"utils",
".",
"get_user_of_group",
"(",
"config",
",",
"fas",
",",
"group",
")"
] | Utility to query fas for users of a group. | [
"Utility",
"to",
"query",
"fas",
"for",
"users",
"of",
"a",
"group",
"."
] | f9ec790619fcc8b41803077c4dec094e5127fc24 | https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/generic.py#L47-L52 |
251,194 | fedora-infra/fmn.rules | fmn/rules/generic.py | fas_group_member_filter | def fas_group_member_filter(config, message, group=None, *args, **kw):
""" Messages regarding any member of a FAS group
Use this rule to include messages that have anything to do with **any
user** belonging to a particular fas group. You might want to use this
to monitor the activity of a group for which you are responsible.
"""
if not group:
return False
fasusers = _get_users_of_group(config, group)
msgusers = fmn.rules.utils.msg2usernames(message, **config)
return bool(fasusers.intersection(msgusers)) | python | def fas_group_member_filter(config, message, group=None, *args, **kw):
""" Messages regarding any member of a FAS group
Use this rule to include messages that have anything to do with **any
user** belonging to a particular fas group. You might want to use this
to monitor the activity of a group for which you are responsible.
"""
if not group:
return False
fasusers = _get_users_of_group(config, group)
msgusers = fmn.rules.utils.msg2usernames(message, **config)
return bool(fasusers.intersection(msgusers)) | [
"def",
"fas_group_member_filter",
"(",
"config",
",",
"message",
",",
"group",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"if",
"not",
"group",
":",
"return",
"False",
"fasusers",
"=",
"_get_users_of_group",
"(",
"config",
",",
"group",
")",
"msgusers",
"=",
"fmn",
".",
"rules",
".",
"utils",
".",
"msg2usernames",
"(",
"message",
",",
"*",
"*",
"config",
")",
"return",
"bool",
"(",
"fasusers",
".",
"intersection",
"(",
"msgusers",
")",
")"
] | Messages regarding any member of a FAS group
Use this rule to include messages that have anything to do with **any
user** belonging to a particular fas group. You might want to use this
to monitor the activity of a group for which you are responsible. | [
"Messages",
"regarding",
"any",
"member",
"of",
"a",
"FAS",
"group"
] | f9ec790619fcc8b41803077c4dec094e5127fc24 | https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/generic.py#L56-L67 |
251,195 | fedora-infra/fmn.rules | fmn/rules/generic.py | package_filter | def package_filter(config, message, package=None, *args, **kw):
""" A particular package
Use this rule to include messages that relate to a certain package
(*i.e., nethack*).
"""
package = kw.get('package', package)
if package:
return package in fmn.rules.utils.msg2packages(message, **config) | python | def package_filter(config, message, package=None, *args, **kw):
""" A particular package
Use this rule to include messages that relate to a certain package
(*i.e., nethack*).
"""
package = kw.get('package', package)
if package:
return package in fmn.rules.utils.msg2packages(message, **config) | [
"def",
"package_filter",
"(",
"config",
",",
"message",
",",
"package",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"package",
"=",
"kw",
".",
"get",
"(",
"'package'",
",",
"package",
")",
"if",
"package",
":",
"return",
"package",
"in",
"fmn",
".",
"rules",
".",
"utils",
".",
"msg2packages",
"(",
"message",
",",
"*",
"*",
"config",
")"
] | A particular package
Use this rule to include messages that relate to a certain package
(*i.e., nethack*). | [
"A",
"particular",
"package"
] | f9ec790619fcc8b41803077c4dec094e5127fc24 | https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/generic.py#L152-L161 |
251,196 | fedora-infra/fmn.rules | fmn/rules/generic.py | package_regex_filter | def package_regex_filter(config, message, pattern=None, *args, **kw):
""" All packages matching a regular expression
Use this rule to include messages that relate to packages that match
particular regular expressions
(*i.e., (maven|javapackages-tools|maven-surefire)*).
"""
pattern = kw.get('pattern', pattern)
if pattern:
packages = fmn.rules.utils.msg2packages(message, **config)
regex = fmn.rules.utils.compile_regex(pattern.encode('utf-8'))
return any([regex.search(p.encode('utf-8')) for p in packages]) | python | def package_regex_filter(config, message, pattern=None, *args, **kw):
""" All packages matching a regular expression
Use this rule to include messages that relate to packages that match
particular regular expressions
(*i.e., (maven|javapackages-tools|maven-surefire)*).
"""
pattern = kw.get('pattern', pattern)
if pattern:
packages = fmn.rules.utils.msg2packages(message, **config)
regex = fmn.rules.utils.compile_regex(pattern.encode('utf-8'))
return any([regex.search(p.encode('utf-8')) for p in packages]) | [
"def",
"package_regex_filter",
"(",
"config",
",",
"message",
",",
"pattern",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"pattern",
"=",
"kw",
".",
"get",
"(",
"'pattern'",
",",
"pattern",
")",
"if",
"pattern",
":",
"packages",
"=",
"fmn",
".",
"rules",
".",
"utils",
".",
"msg2packages",
"(",
"message",
",",
"*",
"*",
"config",
")",
"regex",
"=",
"fmn",
".",
"rules",
".",
"utils",
".",
"compile_regex",
"(",
"pattern",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"return",
"any",
"(",
"[",
"regex",
".",
"search",
"(",
"p",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"for",
"p",
"in",
"packages",
"]",
")"
] | All packages matching a regular expression
Use this rule to include messages that relate to packages that match
particular regular expressions
(*i.e., (maven|javapackages-tools|maven-surefire)*). | [
"All",
"packages",
"matching",
"a",
"regular",
"expression"
] | f9ec790619fcc8b41803077c4dec094e5127fc24 | https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/generic.py#L165-L177 |
251,197 | fedora-infra/fmn.rules | fmn/rules/generic.py | regex_filter | def regex_filter(config, message, pattern=None, *args, **kw):
""" All messages matching a regular expression
Use this rule to include messages that bear a certain pattern.
This can be anything that appears anywhere in the message (for instance,
you could combine this with rules for wiki updates or Ask Fedora changes
to alert yourself of activity in your area of expertise).
(*i.e., (beefy miracle)*).
"""
pattern = kw.get('pattern', pattern)
if pattern:
regex = fmn.rules.utils.compile_regex(pattern.encode('utf-8'))
return bool(regex.search(
fedmsg.encoding.dumps(message['msg']).encode('utf-8')
)) | python | def regex_filter(config, message, pattern=None, *args, **kw):
""" All messages matching a regular expression
Use this rule to include messages that bear a certain pattern.
This can be anything that appears anywhere in the message (for instance,
you could combine this with rules for wiki updates or Ask Fedora changes
to alert yourself of activity in your area of expertise).
(*i.e., (beefy miracle)*).
"""
pattern = kw.get('pattern', pattern)
if pattern:
regex = fmn.rules.utils.compile_regex(pattern.encode('utf-8'))
return bool(regex.search(
fedmsg.encoding.dumps(message['msg']).encode('utf-8')
)) | [
"def",
"regex_filter",
"(",
"config",
",",
"message",
",",
"pattern",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"pattern",
"=",
"kw",
".",
"get",
"(",
"'pattern'",
",",
"pattern",
")",
"if",
"pattern",
":",
"regex",
"=",
"fmn",
".",
"rules",
".",
"utils",
".",
"compile_regex",
"(",
"pattern",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"return",
"bool",
"(",
"regex",
".",
"search",
"(",
"fedmsg",
".",
"encoding",
".",
"dumps",
"(",
"message",
"[",
"'msg'",
"]",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
")"
] | All messages matching a regular expression
Use this rule to include messages that bear a certain pattern.
This can be anything that appears anywhere in the message (for instance,
you could combine this with rules for wiki updates or Ask Fedora changes
to alert yourself of activity in your area of expertise).
(*i.e., (beefy miracle)*). | [
"All",
"messages",
"matching",
"a",
"regular",
"expression"
] | f9ec790619fcc8b41803077c4dec094e5127fc24 | https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/generic.py#L181-L197 |
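A call sketch, assuming fmn.rules and fedmsg are installed; the config dict is unused by this particular rule, and the byte-oriented encoding mirrors the Python 2 era code above.

    config = {}
    message = {'msg': {'update': {'title': 'beefy-miracle-2.0-1.fc99'}}}

    # True: the pattern is searched in the JSON-encoded message body.
    print(regex_filter(config, message, pattern='beefy-miracle|nethack'))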
251,198 | kodexlab/reliure | reliure/pipeline.py | Optionable.add_option | def add_option(self, opt_name, otype, hidden=False):
""" Add an option to the object
:param opt_name: option name
:type opt_name: str
:param otype: option type
:type otype: subclass of :class:`.GenericType`
:param hidden: if True the option will be hidden
:type hidden: bool
"""
if self.has_option(opt_name):
raise ValueError("The option is already present !")
opt = ValueOption.FromType(opt_name, otype)
opt.hidden = hidden
self._options[opt_name] = opt | python | def add_option(self, opt_name, otype, hidden=False):
""" Add an option to the object
:param opt_name: option name
:type opt_name: str
:param otype: option type
:type otype: subclass of :class:`.GenericType`
:param hidden: if True the option will be hidden
:type hidden: bool
"""
if self.has_option(opt_name):
raise ValueError("The option is already present !")
opt = ValueOption.FromType(opt_name, otype)
opt.hidden = hidden
self._options[opt_name] = opt | [
"def",
"add_option",
"(",
"self",
",",
"opt_name",
",",
"otype",
",",
"hidden",
"=",
"False",
")",
":",
"if",
"self",
".",
"has_option",
"(",
"opt_name",
")",
":",
"raise",
"ValueError",
"(",
"\"The option is already present !\"",
")",
"opt",
"=",
"ValueOption",
".",
"FromType",
"(",
"opt_name",
",",
"otype",
")",
"opt",
".",
"hidden",
"=",
"hidden",
"self",
".",
"_options",
"[",
"opt_name",
"]",
"=",
"opt"
] | Add an option to the object
:param opt_name: option name
:type opt_name: str
:param otype: option type
:type otype: subclass of :class:`.GenericType`
:param hidden: if True the option will be hidden
:type hidden: bool | [
"Add",
"an",
"option",
"to",
"the",
"object"
] | 0450c7a9254c5c003162738458bbe0c49e777ba5 | https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/pipeline.py#L143-L157 |
251,199 | kodexlab/reliure | reliure/pipeline.py | Optionable.print_options | def print_options(self):
""" print description of the component options
"""
summary = []
for opt_name, opt in self.options.items():
if opt.hidden:
continue
summary.append(opt.summary())
print("\n".join(summary)) | python | def print_options(self):
""" print description of the component options
"""
summary = []
for opt_name, opt in self.options.items():
if opt.hidden:
continue
summary.append(opt.summary())
print("\n".join(summary)) | [
"def",
"print_options",
"(",
"self",
")",
":",
"summary",
"=",
"[",
"]",
"for",
"opt_name",
",",
"opt",
"in",
"self",
".",
"options",
".",
"items",
"(",
")",
":",
"if",
"opt",
".",
"hidden",
":",
"continue",
"summary",
".",
"append",
"(",
"opt",
".",
"summary",
"(",
")",
")",
"print",
"(",
"\"\\n\"",
".",
"join",
"(",
"summary",
")",
")"
] | print description of the component options | [
"print",
"description",
"of",
"the",
"component",
"options"
] | 0450c7a9254c5c003162738458bbe0c49e777ba5 | https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/pipeline.py#L164-L172 |
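A hedged sketch of add_option() and print_options() together; Numeric, its keyword arguments, and the Optionable constructor defaults are assumptions about reliure that these records do not show.

    from reliure.pipeline import Optionable
    from reliure.types import Numeric    # assumed GenericType subclass

    class MyComponent(Optionable):
        def __init__(self):
            super(MyComponent, self).__init__()
            self.add_option("top_k", Numeric(default=10))   # keyword assumed

    MyComponent().print_options()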