id (int32, 0-252k) | repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | code (string, 75-19.8k chars) | language (1 class: python) | sha (string, 40 chars) | url (string, 87-242 chars)
---|---|---|---|---|---|---|---
247,900 | artisanofcode/python-broadway | broadway/whitenoise.py | init_app | def init_app(application):
"""
Initialise an application
Set up whitenoise to handle static files.
"""
config = {k: v for k, v in application.config.items() if k in SCHEMA}
kwargs = {'autorefresh': application.debug}
kwargs.update((k[11:].lower(), v) for k, v in config.items())
instance = whitenoise.WhiteNoise(application.wsgi_app, **kwargs)
instance.add_files(application.static_folder, application.static_url_path)
if not hasattr(application, 'extensions'):
application.extensions = {}
application.extensions['whitenoise'] = instance
application.wsgi_app = instance | python | a051ca5a922ecb38a541df59e8740e2a047d9a4a | https://github.com/artisanofcode/python-broadway/blob/a051ca5a922ecb38a541df59e8740e2a047d9a4a/broadway/whitenoise.py#L38-L59 |
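For context, the row above wires WhiteNoise around an application's WSGI callable. A minimal, hedged sketch of the same wiring with a plain Flask app follows; the `SCHEMA` set and the config key are illustrative assumptions, not part of the dataset row.

```python
# Hedged usage sketch of the init_app() wiring above with a plain Flask app.
# SCHEMA and 'WHITENOISE_MAX_AGE' are illustrative assumptions, not the row's code.
import os

import flask
import whitenoise

SCHEMA = {'WHITENOISE_MAX_AGE'}

app = flask.Flask(__name__)
app.config['WHITENOISE_MAX_AGE'] = 3600

# Same steps as init_app: filter config, strip the 11-character "WHITENOISE_"
# prefix, then wrap the WSGI callable so WhiteNoise serves static files.
config = {k: v for k, v in app.config.items() if k in SCHEMA}
kwargs = {'autorefresh': app.debug}
kwargs.update((k[11:].lower(), v) for k, v in config.items())

instance = whitenoise.WhiteNoise(app.wsgi_app, **kwargs)
if app.static_folder and os.path.isdir(app.static_folder):
    instance.add_files(app.static_folder, app.static_url_path)
app.wsgi_app = instance
```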
247,901 | neuroticnerd/armory | armory/utils/__init__.py | env | def env(key, default=_NOT_PROVIDED, cast=str, force=False, **kwargs):
"""
Retrieve environment variables and specify default and options.
:param key: (required) environment variable name to retrieve
:param default: value to use if the environment var doesn't exist
:param cast: values always come in as strings, cast to this type if needed
:param force: force casting of value even when it may not be needed
:param boolmap: if True use default map, otherwise you can pass custom map
:param sticky: injects default into environment so child processes inherit
NOTE: None can be passed as the default to avoid raising a KeyError
"""
boolmap = kwargs.get('boolmap', None)
sticky = kwargs.get('sticky', False)
value = os.environ.get(key, default)
if value is _NOT_PROVIDED:
raise KeyError(_ENV_ERROR_MSG.format(key))
if sticky and value == default:
try:
os.environ[key] = value
except TypeError:
os.environ[key] = str(value)
if force or (value != default and type(value) != cast):
if cast is bool and boolmap is not None:
value = boolean(value, boolmap=boolmap)
elif cast is bool:
value = boolean(value)
else:
value = cast(value)
return value | python | d37c5ca1dbdd60dddb968e35f0bbe4bc1299dca1 | https://github.com/neuroticnerd/armory/blob/d37c5ca1dbdd60dddb968e35f0bbe4bc1299dca1/armory/utils/__init__.py#L14-L48 |
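The env() helper above is essentially a typed lookup into os.environ. The self-contained sketch below reproduces the core pattern; `_MISSING` and the `EXAMPLE_*` variable names are illustrative, and the boolmap/sticky features of the real helper are deliberately left out.

```python
# Simplified, hedged sketch of the env() pattern above: read an environment
# variable with a default and a cast. Names here are illustrative only.
import os

_MISSING = object()

def env(key, default=_MISSING, cast=str):
    value = os.environ.get(key, default)
    if value is _MISSING:
        raise KeyError('missing required environment variable: {0}'.format(key))
    if value != default and not isinstance(value, cast):
        value = cast(value)  # values arrive as strings, so cast when needed
    return value

os.environ['EXAMPLE_PORT'] = '8080'
print(env('EXAMPLE_PORT', cast=int))         # -> 8080
print(env('EXAMPLE_MISSING', default=None))  # -> None, no KeyError raised
```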
247,902 | n8henrie/urlmon | urlmon/urlmon.py | main | def main(arguments):
"""Parse arguments, request the urls, notify if different."""
formatter_class = argparse.ArgumentDefaultsHelpFormatter
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=formatter_class)
parser.add_argument('infile', help="Input file",
type=argparse.FileType('r'))
parser.add_argument('-o', '--outfile', help="Output file",
default=sys.stdout, type=argparse.FileType('w'))
args = parser.parse_args(arguments)
urls = args.infile.read().splitlines()
api_token = keyring.get_password('pushover', 'api_token')
pushover_user = keyring.get_password('pushover', 'user')
pushover = Pushover(api_token, pushover_user)
for url in urls:
domain = urlparse(url).netloc
urlpath = urlparse(url).path
url_dashes = re.sub(r'/', '-', urlpath)
cache = os.path.expanduser("~/.urlmon-cache")
if not os.path.isdir(cache):
os.mkdir(cache, mode=0o755)
filename = domain + url_dashes + '.html'
filepath = os.path.join(cache, filename)
html = requests.get(url).text
if os.path.isfile(filepath):
with open(filepath) as r:
before = r.read()
if html == before:
logger.info("{} is unchanged".format(url))
else:
msg = "{} changed".format(url)
logger.info(msg)
logger.debug(diff(before, html))
response = pushover.push(msg)
logger.debug("Pushover notification sent: "
"{}".format(response.status_code))
else:
logger.info("New url: {}".format(filename))
with open(filepath, 'w') as w:
w.write(html)
logger.info("Wrote file to cache: {}".format(filename)) | python | def main(arguments):
"""Parse arguments, request the urls, notify if different."""
formatter_class = argparse.ArgumentDefaultsHelpFormatter
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=formatter_class)
parser.add_argument('infile', help="Input file",
type=argparse.FileType('r'))
parser.add_argument('-o', '--outfile', help="Output file",
default=sys.stdout, type=argparse.FileType('w'))
args = parser.parse_args(arguments)
urls = args.infile.read().splitlines()
api_token = keyring.get_password('pushover', 'api_token')
pushover_user = keyring.get_password('pushover', 'user')
pushover = Pushover(api_token, pushover_user)
for url in urls:
domain = urlparse(url).netloc
urlpath = urlparse(url).path
url_dashes = re.sub(r'/', '-', urlpath)
cache = os.path.expanduser("~/.urlmon-cache")
if not os.path.isdir(cache):
os.mkdir(cache, mode=0o755)
filename = domain + url_dashes + '.html'
filepath = os.path.join(cache, filename)
html = requests.get(url).text
if os.path.isfile(filepath):
with open(filepath) as r:
before = r.read()
if html == before:
logger.info("{} is unchanged".format(url))
else:
msg = "{} changed".format(url)
logger.info(msg)
logger.debug(diff(before, html))
response = pushover.push(msg)
logger.debug("Pushover notification sent: "
"{}".format(response.status_code))
else:
logger.info("New url: {}".format(filename))
with open(filepath, 'w') as w:
w.write(html)
logger.info("Wrote file to cache: {}".format(filename)) | [
"def",
"main",
"(",
"arguments",
")",
":",
"formatter_class",
"=",
"argparse",
".",
"ArgumentDefaultsHelpFormatter",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"__doc__",
",",
"formatter_class",
"=",
"formatter_class",
")",
"parser",
".",
"add_argument",
"(",
"'infile'",
",",
"help",
"=",
"\"Input file\"",
",",
"type",
"=",
"argparse",
".",
"FileType",
"(",
"'r'",
")",
")",
"parser",
".",
"add_argument",
"(",
"'-o'",
",",
"'--outfile'",
",",
"help",
"=",
"\"Output file\"",
",",
"default",
"=",
"sys",
".",
"stdout",
",",
"type",
"=",
"argparse",
".",
"FileType",
"(",
"'w'",
")",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"arguments",
")",
"urls",
"=",
"args",
".",
"infile",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"api_token",
"=",
"keyring",
".",
"get_password",
"(",
"'pushover'",
",",
"'api_token'",
")",
"pushover_user",
"=",
"keyring",
".",
"get_password",
"(",
"'pushover'",
",",
"'user'",
")",
"pushover",
"=",
"Pushover",
"(",
"api_token",
",",
"pushover_user",
")",
"for",
"url",
"in",
"urls",
":",
"domain",
"=",
"urlparse",
"(",
"url",
")",
".",
"netloc",
"urlpath",
"=",
"urlparse",
"(",
"url",
")",
".",
"path",
"url_dashes",
"=",
"re",
".",
"sub",
"(",
"r'/'",
",",
"'-'",
",",
"urlpath",
")",
"cache",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~/.urlmon-cache\"",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"cache",
")",
":",
"os",
".",
"mkdir",
"(",
"cache",
",",
"mode",
"=",
"0o755",
")",
"filename",
"=",
"domain",
"+",
"url_dashes",
"+",
"'.html'",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cache",
",",
"filename",
")",
"html",
"=",
"requests",
".",
"get",
"(",
"url",
")",
".",
"text",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"filepath",
")",
":",
"with",
"open",
"(",
"filepath",
")",
"as",
"r",
":",
"before",
"=",
"r",
".",
"read",
"(",
")",
"if",
"html",
"==",
"before",
":",
"logger",
".",
"info",
"(",
"\"{} is unchanged\"",
".",
"format",
"(",
"url",
")",
")",
"else",
":",
"msg",
"=",
"\"{} changed\"",
".",
"format",
"(",
"url",
")",
"logger",
".",
"info",
"(",
"msg",
")",
"logger",
".",
"debug",
"(",
"diff",
"(",
"before",
",",
"html",
")",
")",
"response",
"=",
"pushover",
".",
"push",
"(",
"msg",
")",
"logger",
".",
"debug",
"(",
"\"Pushover notification sent: \"",
"\"{}\"",
".",
"format",
"(",
"response",
".",
"status_code",
")",
")",
"else",
":",
"logger",
".",
"info",
"(",
"\"New url: {}\"",
".",
"format",
"(",
"filename",
")",
")",
"with",
"open",
"(",
"filepath",
",",
"'w'",
")",
"as",
"w",
":",
"w",
".",
"write",
"(",
"html",
")",
"logger",
".",
"info",
"(",
"\"Wrote file to cache: {}\"",
".",
"format",
"(",
"filename",
")",
")"
] | Parse arguments, request the urls, notify if different. | [
"Parse",
"arguments",
"request",
"the",
"urls",
"notify",
"if",
"different",
"."
] | ebd58358843d7414f708c818a5c5a96feadd176f | https://github.com/n8henrie/urlmon/blob/ebd58358843d7414f708c818a5c5a96feadd176f/urlmon/urlmon.py#L101-L148 |
247,903 | n8henrie/urlmon | urlmon/urlmon.py | Pushover.validate | def validate(self):
"""Validate the user and token, returns the Requests response."""
validate_url = "https://api.pushover.net/1/users/validate.json"
payload = {
'token': self.api_token,
'user': self.user,
}
return requests.post(validate_url, data=payload) | python | ebd58358843d7414f708c818a5c5a96feadd176f | https://github.com/n8henrie/urlmon/blob/ebd58358843d7414f708c818a5c5a96feadd176f/urlmon/urlmon.py#L42-L52 |
247,904 | n8henrie/urlmon | urlmon/urlmon.py | Pushover.push | def push(self, message, device=None, title=None, url=None, url_title=None,
priority=None, timestamp=None, sound=None):
"""Pushes the notification, returns the Requests response.
Arguments:
message -- your message
Keyword arguments:
device -- your user's device name to send the message directly to
that device, rather than all of the user's devices
title -- your message's title, otherwise your app's name is used
url -- a supplementary URL to show with your message
url_title -- a title for your supplementary URL, otherwise just the
URL is shown
priority -- send as -1 to always send as a quiet notification, 1
to display as high-priority and bypass the user's quiet hours,
or 2 to also require confirmation from the user
timestamp -- a Unix timestamp of your message's date and time to
display to the user, rather than the time your message is
received by our API
sound -- the name of one of the sounds supported by device clients
to override the user's default sound choice.
"""
api_url = 'https://api.pushover.net/1/messages.json'
payload = {
'token': self.api_token,
'user': self.user,
'message': message,
'device': device,
'title': title,
'url': url,
'url_title': url_title,
'priority': priority,
'timestamp': timestamp,
'sound': sound
}
return requests.post(api_url, params=payload) | python | ebd58358843d7414f708c818a5c5a96feadd176f | https://github.com/n8henrie/urlmon/blob/ebd58358843d7414f708c818a5c5a96feadd176f/urlmon/urlmon.py#L54-L93 |
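For context, here is a minimal, self-contained sketch of the request that push() above builds. The endpoint comes from the row itself; the standalone push() wrapper and its placeholder credentials are illustrative, so fill in real values before expecting a notification to be delivered.

```python
# Hedged usage sketch of the Pushover push() call above. The token and user
# key below are placeholders; a request sent with them will simply be rejected.
import requests

API_URL = 'https://api.pushover.net/1/messages.json'

def push(api_token, user, message, title=None, priority=None):
    # Mirrors Pushover.push: unset optional fields are sent as None.
    payload = {
        'token': api_token,
        'user': user,
        'message': message,
        'title': title,
        'priority': priority,
    }
    return requests.post(API_URL, params=payload)

if __name__ == '__main__':
    # Replace the placeholders with real credentials before running.
    response = push('<api-token>', '<user-key>', 'url changed', priority=1)
    print(response.status_code)
```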
247,905 | cogniteev/docido-python-sdk | docido_sdk/toolbox/edsl.py | kwargsql._get_obj_attr | def _get_obj_attr(cls, obj, path, pos):
"""Resolve one kwargsql expression for a given object and returns
its result.
:param obj: the object to evaluate
:param path: the list of all kwargsql expression, including those
previously evaluated.
:param int pos: provides index of the expression to evaluate in the
`path` parameter.
"""
field = path[pos]
if isinstance(obj, (dict, Mapping)):
return obj[field], pos
elif isinstance(obj, (list, Sequence)):
join_operation = cls.SEQUENCE_OPERATIONS.get(field)
if join_operation is not None:
return (
AnySequenceResult(
cls._sequence_map(obj, path[pos + 1:]),
join_operation
),
len(path) + 1,
)
else:
return obj[int(field)], pos
else:
return getattr(obj, field, None), pos | python | 58ecb6c6f5757fd40c0601657ab18368da7ddf33 | https://github.com/cogniteev/docido-python-sdk/blob/58ecb6c6f5757fd40c0601657ab18368da7ddf33/docido_sdk/toolbox/edsl.py#L245-L271 |
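To make the resolution step above concrete, here is a simplified, standalone illustration of walking a '__'-separated kwargsql path through nested dicts, sequences, and attributes. It omits the any/each sequence operators that _get_obj_attr handles and is not the docido SDK API.

```python
# Simplified illustration of kwargsql-style path resolution. Assumption: the
# real implementation also supports sequence operators ("any"/"each"), omitted here.
def resolve(obj, expression):
    for part in expression.split('__'):
        if isinstance(obj, dict):
            obj = obj[part]                   # mapping lookup
        elif isinstance(obj, (list, tuple)):
            obj = obj[int(part)]              # numeric index into a sequence
        else:
            obj = getattr(obj, part, None)    # fall back to attribute lookup
    return obj

doc = {'author': {'emails': [{'address': 'a@example.com'}]}}
print(resolve(doc, 'author__emails__0__address'))  # -> a@example.com
```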
247,906 | todddeluca/dones | dones.py | get | def get(ns, dburl=None):
'''
Get a default dones object for ns. If no dones object exists for ns yet,
a DbDones object will be created, cached, and returned.
'''
if dburl is None:
dburl = DONES_DB_URL
cache_key = (ns, dburl)
if ns not in DONES_CACHE:
dones_ns = 'dones_{}'.format(ns)
DONES_CACHE[cache_key] = DbDones(ns=dones_ns, dburl=dburl)
return DONES_CACHE[cache_key] | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L26-L39 |
247,907 | todddeluca/dones | dones.py | open_conn | def open_conn(host, db, user, password, retries=0, sleep=0.5):
'''
Return an open mysql db connection using the given credentials. Use
`retries` and `sleep` to be robust to the occassional transient connection
failure.
retries: if an exception when getting the connection, try again at most this many times.
sleep: pause between retries for this many seconds. a float >= 0.
'''
assert retries >= 0
try:
return MySQLdb.connect(host=host, user=user, passwd=password, db=db)
except Exception:
if retries > 0:
time.sleep(sleep)
return open_conn(host, db, user, password, retries - 1, sleep)
else:
raise | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L265-L283 |
247,908 | todddeluca/dones | dones.py | open_url | def open_url(url, retries=0, sleep=0.5):
'''
Open a mysql connection to a url. Note that if your password has
punctuation characters, it might break the parsing of url.
url: A string in the form "mysql://username:[email protected]/database"
'''
return open_conn(retries=retries, sleep=sleep, **parse_url(url)) | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L286-L293 |
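open_url() relies on a parse_url() helper that is not included in this row. Below is a hedged sketch of what such a parser can look like for the documented URL form, with field names chosen to match open_conn's signature; it is an assumption, not the dones module's actual implementation.

```python
# Hedged sketch of the parse_url() step that open_url() relies on.
try:
    from urllib.parse import urlsplit   # Python 3
except ImportError:
    from urlparse import urlsplit       # Python 2

def parse_url(url):
    parts = urlsplit(url)
    return {
        'host': parts.hostname,
        'db': parts.path.lstrip('/'),
        'user': parts.username,
        'password': parts.password,
    }

print(parse_url('mysql://username:password@host.com/database'))
# -> {'host': 'host.com', 'db': 'database', 'user': 'username', 'password': 'password'}
```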
247,909 | todddeluca/dones | dones.py | DbDones._get_k | def _get_k(self):
'''
Accessing self.k indirectly allows for creating the kvstore table
if necessary.
'''
if not self.ready:
self.k.create() # create table if it does not exist.
self.ready = True
return self.k | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L64-L73 |
247,910 | todddeluca/dones | dones.py | FileJSONAppendDones.clear | def clear(self):
'''
Remove all existing done markers and the file used to store the dones.
'''
if os.path.exists(self.path):
os.remove(self.path) | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L170-L175 |
247,911 | todddeluca/dones | dones.py | FileJSONAppendDones.done | def done(self, key):
'''
return True iff key is marked done.
:param key: a json-serializable object.
'''
# key is not done b/c the file does not even exist yet
if not os.path.exists(self.path):
return False
is_done = False
done_line = self._done_line(key)
undone_line = self._undone_line(key)
with open(self.path) as fh:
for line in fh:
if line == done_line:
is_done = True
elif line == undone_line:
is_done = False
return is_done | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L195-L215 |
247,912 | todddeluca/dones | dones.py | FileJSONAppendDones.are_done | def are_done(self, keys):
'''
Return a list of boolean values corresponding to whether or not each
key in keys is marked done. This method can be faster than
individually checking each key, depending on how many keys you
want to check.
:param keys: a list of json-serializable keys
'''
# No keys are done b/c the file does not even exist yet.
if not os.path.exists(self.path):
return [False] * len(keys)
done_lines = set([self._done_line(key) for key in keys])
undone_lines = set([self._undone_line(key) for key in keys])
status = {}
with open(self.path) as fh:
for line in fh:
if line in done_lines:
# extract serialized key
status[line[5:-1]] = True
elif line in undone_lines:
status[line[5:-1]] = False
serialized_keys = [self._serialize(key) for key in keys]
return [status.get(sk, False) for sk in serialized_keys] | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L217-L241 |
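The done()/are_done() methods above scan an append-only log in which the most recent marker for a key wins. FileJSONAppendDones' exact line format is not shown in these rows, so the standalone sketch below uses its own 'done <json>' / 'undone <json>' convention purely to illustrate the idea.

```python
# Standalone sketch of an append-only "dones" log: the last marker written for
# a key decides its state. The line format here is an illustrative assumption.
import json, os, tempfile

def mark(path, key, done=True):
    with open(path, 'a') as fh:
        fh.write(('done ' if done else 'undone ') + json.dumps(key) + '\n')

def is_done(path, key):
    target = json.dumps(key)
    state = False
    if not os.path.exists(path):
        return state
    with open(path) as fh:
        for line in fh:  # later markers override earlier ones
            prefix, _, serialized = line.rstrip('\n').partition(' ')
            if serialized == target:
                state = (prefix == 'done')
    return state

log = os.path.join(tempfile.mkdtemp(), 'dones.log')
mark(log, ['job', 42])
mark(log, ['job', 42], done=False)
print(is_done(log, ['job', 42]))  # -> False, the later "undone" marker wins
```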
247,913 | todddeluca/dones | dones.py | KStore.add | def add(self, key):
'''
add key to the namespace. it is fine to add a key multiple times.
'''
encodedKey = json.dumps(key)
with self.connect() as conn:
with doTransaction(conn):
sql = 'INSERT IGNORE INTO ' + self.table + ' (name) VALUES (%s)'
return insertSQL(conn, sql, args=[encodedKey]) | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L458-L466 |
247,914 | todddeluca/dones | dones.py | KStore.remove | def remove(self, key):
'''
remove key from the namespace. it is fine to remove a key multiple times.
'''
encodedKey = json.dumps(key)
sql = 'DELETE FROM ' + self.table + ' WHERE name = %s'
with self.connect() as conn:
with doTransaction(conn):
return executeSQL(conn, sql, args=[encodedKey]) | python | 6ef56565556987e701fed797a405f0825fe2e15a | https://github.com/todddeluca/dones/blob/6ef56565556987e701fed797a405f0825fe2e15a/dones.py#L468-L476 |
247,915 | daknuett/py_register_machine2 | app/web/model.py | RMServer.load_machine | def load_machine(self, descriptor):
"""
Load a complete register machine.
The descriptor is a map, unspecified values are loaded from the default values.
"""
def get_cfg(name):
if(name in descriptor):
return descriptor[name]
else:
return defaults[name]
self.processor = Processor(width = get_cfg("width"))
self.rom = ROM(get_cfg("rom_size"), get_cfg("rom_width"))
self.processor.register_memory_device(self.rom)
self.registers = []
if(get_cfg("ram_enable")):
self.ram = RAM(get_cfg("ram_size"), get_cfg("ram_width"))
self.processor.register_memory_device(self.ram)
else:
self.ram = None
if(get_cfg("flash_enable")):
self.flash = Flash(get_cfg("flash_size"), get_cfg("flash_width"))
self.processor.register_device(self.flash)
else:
self.flash = None
for register in get_cfg("registers"):
self.processor.add_register(register)
self.registers.append(register)
for command in get_cfg("commands"):
self.processor.register_command(command)
self.processor.setup_done() | python | 599c53cd7576297d0d7a53344ed5d9aa98acc751 | https://github.com/daknuett/py_register_machine2/blob/599c53cd7576297d0d7a53344ed5d9aa98acc751/app/web/model.py#L67-L99 |
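The get_cfg() closure inside load_machine layers the caller's descriptor over module-level defaults. The same behaviour can be had from collections.ChainMap; the default values below are illustrative assumptions, not py_register_machine2's actual defaults.

```python
# Descriptor-over-defaults lookup, equivalent to load_machine's get_cfg().
from collections import ChainMap

defaults = {'width': 64, 'rom_size': 256, 'ram_enable': True}  # illustrative values
descriptor = {'rom_size': 512}

cfg = ChainMap(descriptor, defaults)   # first mapping wins
print(cfg['rom_size'], cfg['width'])   # -> 512 64
```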
247,916 | daknuett/py_register_machine2 | app/web/model.py | RMServer.assemble_rom_code | def assemble_rom_code(self, asm):
"""
assemble the given code and program the ROM
"""
stream = StringIO(asm)
worker = assembler.Assembler(self.processor, stream)
try:
result = worker.assemble()
except BaseException as e:
return e, None
self.rom.program(result)
return None, result | python | 599c53cd7576297d0d7a53344ed5d9aa98acc751 | https://github.com/daknuett/py_register_machine2/blob/599c53cd7576297d0d7a53344ed5d9aa98acc751/app/web/model.py#L105-L116 |
247,917 | daknuett/py_register_machine2 | app/web/model.py | RMServer.assemble_flash_code | def assemble_flash_code(self, asm):
"""
assemble the given code and program the Flash
"""
stream = StringIO(asm)
worker = assembler.Assembler(self.processor, stream)
try:
result = worker.assemble()
except BaseException as e:
return e, None
self.flash.program(result)
return None, result | python | 599c53cd7576297d0d7a53344ed5d9aa98acc751 | https://github.com/daknuett/py_register_machine2/blob/599c53cd7576297d0d7a53344ed5d9aa98acc751/app/web/model.py#L117-L128 |
247,918 | daknuett/py_register_machine2 | app/web/model.py | RMServer.flush_devices | def flush_devices(self):
"""
overwrite the complete memory with zeros
"""
self.rom.program([0 for i in range(self.rom.size)])
self.flash.program([0 for i in range(self.flash.size)])
for i in range(self.ram.size):
self.ram.write(i, 0) | python | 599c53cd7576297d0d7a53344ed5d9aa98acc751 | https://github.com/daknuett/py_register_machine2/blob/599c53cd7576297d0d7a53344ed5d9aa98acc751/app/web/model.py#L153-L160 |
247,919 | daknuett/py_register_machine2 | app/web/model.py | RMServer.get_rom | def get_rom(self, format_ = "nl"):
"""
return a string representations of the rom
"""
rom = [self.rom.read(i) for i in range(self.rom.size)]
return self._format_mem(rom, format_) | python | 599c53cd7576297d0d7a53344ed5d9aa98acc751 | https://github.com/daknuett/py_register_machine2/blob/599c53cd7576297d0d7a53344ed5d9aa98acc751/app/web/model.py#L168-L173 |
247,920 | daknuett/py_register_machine2 | app/web/model.py | RMServer.get_ram | def get_ram(self, format_ = "nl"):
"""
return a string representations of the ram
"""
ram = [self.ram.read(i) for i in range(self.ram.size)]
return self._format_mem(ram, format_) | python | 599c53cd7576297d0d7a53344ed5d9aa98acc751 | https://github.com/daknuett/py_register_machine2/blob/599c53cd7576297d0d7a53344ed5d9aa98acc751/app/web/model.py#L174-L179 |
247,921 | daknuett/py_register_machine2 | app/web/model.py | RMServer.get_flash | def get_flash(self, format_ = "nl"):
"""
return a string representations of the flash
"""
flash = [self.flash.read(i) for i in range(self.flash.size)]
return self._format_mem(flash, format_) | python | 599c53cd7576297d0d7a53344ed5d9aa98acc751 | https://github.com/daknuett/py_register_machine2/blob/599c53cd7576297d0d7a53344ed5d9aa98acc751/app/web/model.py#L180-L185 |
247,922 | soasme/rio-client | rio_client/base.py | Client.emit | def emit(self, action, payload=None, retry=0):
"""Emit action with payload.
:param action: an action slug
:param payload: data, default {}
:param retry: integer, default 0.
:return: information in form of dict.
"""
payload = payload or {}
if retry:
_retry = self.transport.retry(retry)
emit = _retry(self.transport.emit)
else:
emit = self.transport.emit
return emit(action, payload) | python | c6d684c6f9deea5b43f2b05bcaf40714c48b5619 | https://github.com/soasme/rio-client/blob/c6d684c6f9deea5b43f2b05bcaf40714c48b5619/rio_client/base.py#L31-L47 |
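emit() above delegates retrying to self.transport.retry(retry), which returns a decorator. The transport itself is not shown in this row, so the following is a self-contained, hedged sketch of that decorator pattern; the retry() helper and flaky_emit example are illustrative, not the rio-client API.

```python
# Hedged sketch of a transport.retry(n)-style decorator: re-call the wrapped
# function up to `times` extra times before giving up.
import functools, time

def retry(times, delay=0.1):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(times + 1):
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if attempt == times:
                        raise          # retries exhausted, re-raise the last error
                    time.sleep(delay)
        return wrapper
    return decorator

calls = {'n': 0}

@retry(2)
def flaky_emit(action, payload):
    calls['n'] += 1
    if calls['n'] < 3:
        raise RuntimeError('transient failure')
    return {'action': action, 'payload': payload}

print(flaky_emit('user-signup', {'email': 'a@example.com'}))  # succeeds on the 3rd try
```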
247,923 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups.get_upcoming_events | def get_upcoming_events(self):
"""
Get upcoming PythonKC meetup events.
Returns
-------
List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time,
ascending.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'group_urlname': GROUP_URLNAME})
url = '{0}?{1}'.format(EVENTS_URL, query)
data = self._http_get_json(url)
events = data['results']
return [parse_event(event) for event in events] | python | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L60-L84 |
247,924 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups.get_past_events | def get_past_events(self):
"""
Get past PythonKC meetup events.
Returns
-------
List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time,
descending.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
def get_attendees(event):
return [attendee for event_id, attendee in events_attendees
if event_id == event['id']]
def get_photos(event):
return [photo for event_id, photo in events_photos
if event_id == event['id']]
params = {'key': self._api_key,
'group_urlname': GROUP_URLNAME,
'status': 'past',
'desc': 'true'}
if self._num_past_events:
params['page'] = str(self._num_past_events)
query = urllib.urlencode(params)
url = '{0}?{1}'.format(EVENTS_URL, query)
data = self._http_get_json(url)
events = data['results']
event_ids = [event['id'] for event in events]
events_attendees = self.get_events_attendees(event_ids)
events_photos = self.get_events_photos(event_ids)
return [parse_event(event, get_attendees(event), get_photos(event))
for event in events] | python | def get_past_events(self):
"""
Get past PythonKC meetup events.
Returns
-------
List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time,
descending.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
def get_attendees(event):
return [attendee for event_id, attendee in events_attendees
if event_id == event['id']]
def get_photos(event):
return [photo for event_id, photo in events_photos
if event_id == event['id']]
params = {'key': self._api_key,
'group_urlname': GROUP_URLNAME,
'status': 'past',
'desc': 'true'}
if self._num_past_events:
params['page'] = str(self._num_past_events)
query = urllib.urlencode(params)
url = '{0}?{1}'.format(EVENTS_URL, query)
data = self._http_get_json(url)
events = data['results']
event_ids = [event['id'] for event in events]
events_attendees = self.get_events_attendees(event_ids)
events_photos = self.get_events_photos(event_ids)
return [parse_event(event, get_attendees(event), get_photos(event))
for event in events] | [
"def",
"get_past_events",
"(",
"self",
")",
":",
"def",
"get_attendees",
"(",
"event",
")",
":",
"return",
"[",
"attendee",
"for",
"event_id",
",",
"attendee",
"in",
"events_attendees",
"if",
"event_id",
"==",
"event",
"[",
"'id'",
"]",
"]",
"def",
"get_photos",
"(",
"event",
")",
":",
"return",
"[",
"photo",
"for",
"event_id",
",",
"photo",
"in",
"events_photos",
"if",
"event_id",
"==",
"event",
"[",
"'id'",
"]",
"]",
"params",
"=",
"{",
"'key'",
":",
"self",
".",
"_api_key",
",",
"'group_urlname'",
":",
"GROUP_URLNAME",
",",
"'status'",
":",
"'past'",
",",
"'desc'",
":",
"'true'",
"}",
"if",
"self",
".",
"_num_past_events",
":",
"params",
"[",
"'page'",
"]",
"=",
"str",
"(",
"self",
".",
"_num_past_events",
")",
"query",
"=",
"urllib",
".",
"urlencode",
"(",
"params",
")",
"url",
"=",
"'{0}?{1}'",
".",
"format",
"(",
"EVENTS_URL",
",",
"query",
")",
"data",
"=",
"self",
".",
"_http_get_json",
"(",
"url",
")",
"events",
"=",
"data",
"[",
"'results'",
"]",
"event_ids",
"=",
"[",
"event",
"[",
"'id'",
"]",
"for",
"event",
"in",
"events",
"]",
"events_attendees",
"=",
"self",
".",
"get_events_attendees",
"(",
"event_ids",
")",
"events_photos",
"=",
"self",
".",
"get_events_photos",
"(",
"event_ids",
")",
"return",
"[",
"parse_event",
"(",
"event",
",",
"get_attendees",
"(",
"event",
")",
",",
"get_photos",
"(",
"event",
")",
")",
"for",
"event",
"in",
"events",
"]"
] | Get past PythonKC meetup events.
Returns
-------
List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time,
descending.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded | [
"Get",
"past",
"PythonKC",
"meetup",
"events",
"."
] | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L86-L130 |
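A minimal usage sketch for the get_past_events row above. The client's constructor is not shown in this excerpt, so the PythonKCMeetups(api_key=...) call below is an assumption; only the module path, class name and method call are taken from the row itself.

# Sketch only -- the constructor signature is assumed, not confirmed by this row.
from pythonkc_meetups.client import PythonKCMeetups

client = PythonKCMeetups(api_key='YOUR-MEETUP-API-KEY')   # assumed signature
for event in client.get_past_events():
    # each item is a pythonkc_meetups.types.MeetupEvent, newest first
    print(event)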
247,925 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups.get_events_attendees | def get_events_attendees(self, event_ids):
"""
Get the attendees of the identified events.
Parameters
----------
event_ids
List of IDs of events to get attendees for.
Returns
-------
List of tuples of (event id, ``pythonkc_meetups.types.MeetupMember``).
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': ','.join(event_ids)})
url = '{0}?{1}'.format(RSVPS_URL, query)
data = self._http_get_json(url)
rsvps = data['results']
return [(rsvp['event']['id'], parse_member_from_rsvp(rsvp))
for rsvp in rsvps
if rsvp['response'] != "no"] | python | def get_events_attendees(self, event_ids):
"""
Get the attendees of the identified events.
Parameters
----------
event_ids
List of IDs of events to get attendees for.
Returns
-------
List of tuples of (event id, ``pythonkc_meetups.types.MeetupMember``).
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': ','.join(event_ids)})
url = '{0}?{1}'.format(RSVPS_URL, query)
data = self._http_get_json(url)
rsvps = data['results']
return [(rsvp['event']['id'], parse_member_from_rsvp(rsvp))
for rsvp in rsvps
if rsvp['response'] != "no"] | [
"def",
"get_events_attendees",
"(",
"self",
",",
"event_ids",
")",
":",
"query",
"=",
"urllib",
".",
"urlencode",
"(",
"{",
"'key'",
":",
"self",
".",
"_api_key",
",",
"'event_id'",
":",
"','",
".",
"join",
"(",
"event_ids",
")",
"}",
")",
"url",
"=",
"'{0}?{1}'",
".",
"format",
"(",
"RSVPS_URL",
",",
"query",
")",
"data",
"=",
"self",
".",
"_http_get_json",
"(",
"url",
")",
"rsvps",
"=",
"data",
"[",
"'results'",
"]",
"return",
"[",
"(",
"rsvp",
"[",
"'event'",
"]",
"[",
"'id'",
"]",
",",
"parse_member_from_rsvp",
"(",
"rsvp",
")",
")",
"for",
"rsvp",
"in",
"rsvps",
"if",
"rsvp",
"[",
"'response'",
"]",
"!=",
"\"no\"",
"]"
] | Get the attendees of the identified events.
Parameters
----------
event_ids
List of IDs of events to get attendees for.
Returns
-------
List of tuples of (event id, ``pythonkc_meetups.types.MeetupMember``).
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded | [
"Get",
"the",
"attendees",
"of",
"the",
"identified",
"events",
"."
] | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L132-L161 |
247,926 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups.get_event_attendees | def get_event_attendees(self, event_id):
"""
Get the attendees of the identified event.
Parameters
----------
event_id
ID of the event to get attendees for.
Returns
-------
List of ``pythonkc_meetups.types.MeetupMember``.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': event_id})
url = '{0}?{1}'.format(RSVPS_URL, query)
data = self._http_get_json(url)
rsvps = data['results']
return [parse_member_from_rsvp(rsvp) for rsvp in rsvps
if rsvp['response'] != "no"] | python | def get_event_attendees(self, event_id):
"""
Get the attendees of the identified event.
Parameters
----------
event_id
ID of the event to get attendees for.
Returns
-------
List of ``pythonkc_meetups.types.MeetupMember``.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': event_id})
url = '{0}?{1}'.format(RSVPS_URL, query)
data = self._http_get_json(url)
rsvps = data['results']
return [parse_member_from_rsvp(rsvp) for rsvp in rsvps
if rsvp['response'] != "no"] | [
"def",
"get_event_attendees",
"(",
"self",
",",
"event_id",
")",
":",
"query",
"=",
"urllib",
".",
"urlencode",
"(",
"{",
"'key'",
":",
"self",
".",
"_api_key",
",",
"'event_id'",
":",
"event_id",
"}",
")",
"url",
"=",
"'{0}?{1}'",
".",
"format",
"(",
"RSVPS_URL",
",",
"query",
")",
"data",
"=",
"self",
".",
"_http_get_json",
"(",
"url",
")",
"rsvps",
"=",
"data",
"[",
"'results'",
"]",
"return",
"[",
"parse_member_from_rsvp",
"(",
"rsvp",
")",
"for",
"rsvp",
"in",
"rsvps",
"if",
"rsvp",
"[",
"'response'",
"]",
"!=",
"\"no\"",
"]"
] | Get the attendees of the identified event.
Parameters
----------
event_id
ID of the event to get attendees for.
Returns
-------
List of ``pythonkc_meetups.types.MeetupMember``.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded | [
"Get",
"the",
"attendees",
"of",
"the",
"identified",
"event",
"."
] | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L163-L191 |
247,927 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups.get_events_photos | def get_events_photos(self, event_ids):
"""
Get photos for the identified events.
Parameters
----------
event_ids
List of IDs of events to get photos for.
Returns
-------
List of tuples of (event id, ``pythonkc_meetups.types.MeetupPhoto``).
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': ','.join(event_ids)})
url = '{0}?{1}'.format(PHOTOS_URL, query)
data = self._http_get_json(url)
photos = data['results']
return [(photo['photo_album']['event_id'], parse_photo(photo))
for photo in photos] | python | def get_events_photos(self, event_ids):
"""
Get photos for the identified events.
Parameters
----------
event_ids
List of IDs of events to get photos for.
Returns
-------
List of tuples of (event id, ``pythonkc_meetups.types.MeetupPhoto``).
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': ','.join(event_ids)})
url = '{0}?{1}'.format(PHOTOS_URL, query)
data = self._http_get_json(url)
photos = data['results']
return [(photo['photo_album']['event_id'], parse_photo(photo))
for photo in photos] | [
"def",
"get_events_photos",
"(",
"self",
",",
"event_ids",
")",
":",
"query",
"=",
"urllib",
".",
"urlencode",
"(",
"{",
"'key'",
":",
"self",
".",
"_api_key",
",",
"'event_id'",
":",
"','",
".",
"join",
"(",
"event_ids",
")",
"}",
")",
"url",
"=",
"'{0}?{1}'",
".",
"format",
"(",
"PHOTOS_URL",
",",
"query",
")",
"data",
"=",
"self",
".",
"_http_get_json",
"(",
"url",
")",
"photos",
"=",
"data",
"[",
"'results'",
"]",
"return",
"[",
"(",
"photo",
"[",
"'photo_album'",
"]",
"[",
"'event_id'",
"]",
",",
"parse_photo",
"(",
"photo",
")",
")",
"for",
"photo",
"in",
"photos",
"]"
] | Get photos for the identified events.
Parameters
----------
event_ids
List of IDs of events to get photos for.
Returns
-------
List of tuples of (event id, ``pythonkc_meetups.types.MeetupPhoto``).
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded | [
"Get",
"photos",
"for",
"the",
"identified",
"events",
"."
] | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L193-L221 |
247,928 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups.get_event_photos | def get_event_photos(self, event_id):
"""
Get photos for the identified event.
Parameters
----------
event_id
ID of the event to get photos for.
Returns
-------
List of ``pythonkc_meetups.types.MeetupPhoto``.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': event_id})
url = '{0}?{1}'.format(PHOTOS_URL, query)
data = self._http_get_json(url)
photos = data['results']
return [parse_photo(photo) for photo in photos] | python | def get_event_photos(self, event_id):
"""
Get photos for the identified event.
Parameters
----------
event_id
ID of the event to get photos for.
Returns
-------
List of ``pythonkc_meetups.types.MeetupPhoto``.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
query = urllib.urlencode({'key': self._api_key,
'event_id': event_id})
url = '{0}?{1}'.format(PHOTOS_URL, query)
data = self._http_get_json(url)
photos = data['results']
return [parse_photo(photo) for photo in photos] | [
"def",
"get_event_photos",
"(",
"self",
",",
"event_id",
")",
":",
"query",
"=",
"urllib",
".",
"urlencode",
"(",
"{",
"'key'",
":",
"self",
".",
"_api_key",
",",
"'event_id'",
":",
"event_id",
"}",
")",
"url",
"=",
"'{0}?{1}'",
".",
"format",
"(",
"PHOTOS_URL",
",",
"query",
")",
"data",
"=",
"self",
".",
"_http_get_json",
"(",
"url",
")",
"photos",
"=",
"data",
"[",
"'results'",
"]",
"return",
"[",
"parse_photo",
"(",
"photo",
")",
"for",
"photo",
"in",
"photos",
"]"
] | Get photos for the identified event.
Parameters
----------
event_id
ID of the event to get photos for.
Returns
-------
List of ``pythonkc_meetups.types.MeetupPhoto``.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded | [
"Get",
"photos",
"for",
"the",
"identified",
"event",
"."
] | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L223-L250 |
247,929 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups._http_get_json | def _http_get_json(self, url):
"""
Make an HTTP GET request to the specified URL, check that it returned a
JSON response, and returned the data parsed from that response.
Parameters
----------
url
The URL to GET.
Returns
-------
Dictionary of data parsed from a JSON HTTP response.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
response = self._http_get(url)
content_type = response.headers['content-type']
parsed_mimetype = mimeparse.parse_mime_type(content_type)
if parsed_mimetype[1] not in ('json', 'javascript'):
raise PythonKCMeetupsNotJson(content_type)
try:
return json.loads(response.content)
except ValueError as e:
raise PythonKCMeetupsBadJson(e) | python | def _http_get_json(self, url):
"""
Make an HTTP GET request to the specified URL, check that it returned a
JSON response, and returned the data parsed from that response.
Parameters
----------
url
The URL to GET.
Returns
-------
Dictionary of data parsed from a JSON HTTP response.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded
"""
response = self._http_get(url)
content_type = response.headers['content-type']
parsed_mimetype = mimeparse.parse_mime_type(content_type)
if parsed_mimetype[1] not in ('json', 'javascript'):
raise PythonKCMeetupsNotJson(content_type)
try:
return json.loads(response.content)
except ValueError as e:
raise PythonKCMeetupsBadJson(e) | [
"def",
"_http_get_json",
"(",
"self",
",",
"url",
")",
":",
"response",
"=",
"self",
".",
"_http_get",
"(",
"url",
")",
"content_type",
"=",
"response",
".",
"headers",
"[",
"'content-type'",
"]",
"parsed_mimetype",
"=",
"mimeparse",
".",
"parse_mime_type",
"(",
"content_type",
")",
"if",
"parsed_mimetype",
"[",
"1",
"]",
"not",
"in",
"(",
"'json'",
",",
"'javascript'",
")",
":",
"raise",
"PythonKCMeetupsNotJson",
"(",
"content_type",
")",
"try",
":",
"return",
"json",
".",
"loads",
"(",
"response",
".",
"content",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"PythonKCMeetupsBadJson",
"(",
"e",
")"
] | Make an HTTP GET request to the specified URL, check that it returned a
JSON response, and returned the data parsed from that response.
Parameters
----------
url
The URL to GET.
Returns
-------
Dictionary of data parsed from a JSON HTTP response.
Exceptions
----------
* PythonKCMeetupsBadJson
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsNotJson
* PythonKCMeetupsRateLimitExceeded | [
"Make",
"an",
"HTTP",
"GET",
"request",
"to",
"the",
"specified",
"URL",
"check",
"that",
"it",
"returned",
"a",
"JSON",
"response",
"and",
"returned",
"the",
"data",
"parsed",
"from",
"that",
"response",
"."
] | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L252-L285 |
247,930 | pythonkc/pythonkc-meetups | pythonkc_meetups/client.py | PythonKCMeetups._http_get | def _http_get(self, url):
"""
Make an HTTP GET request to the specified URL and return the response.
Retries
-------
The constructor of this class takes an argument specifying the number
of times to retry a GET. The statuses which are retried on are: 408,
500, 502, 503, and 504.
Returns
-------
An HTTP response, containing response headers and content.
Exceptions
----------
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsRateLimitExceeded
"""
for try_number in range(self._http_retries + 1):
response = requests.get(url, timeout=self._http_timeout)
if response.status_code == 200:
return response
if (try_number >= self._http_retries or
response.status_code not in (408, 500, 502, 503, 504)):
if response.status_code >= 500:
raise PythonKCMeetupsMeetupDown(response, response.content)
if response.status_code == 400:
try:
data = json.loads(response.content)
if data.get('code', None) == 'limit':
raise PythonKCMeetupsRateLimitExceeded
except: # Don't lose original error when JSON is bad
pass
raise PythonKCMeetupsBadResponse(response, response.content) | python | def _http_get(self, url):
"""
Make an HTTP GET request to the specified URL and return the response.
Retries
-------
The constructor of this class takes an argument specifying the number
of times to retry a GET. The statuses which are retried on are: 408,
500, 502, 503, and 504.
Returns
-------
An HTTP response, containing response headers and content.
Exceptions
----------
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsRateLimitExceeded
"""
for try_number in range(self._http_retries + 1):
response = requests.get(url, timeout=self._http_timeout)
if response.status_code == 200:
return response
if (try_number >= self._http_retries or
response.status_code not in (408, 500, 502, 503, 504)):
if response.status_code >= 500:
raise PythonKCMeetupsMeetupDown(response, response.content)
if response.status_code == 400:
try:
data = json.loads(response.content)
if data.get('code', None) == 'limit':
raise PythonKCMeetupsRateLimitExceeded
except: # Don't lose original error when JSON is bad
pass
raise PythonKCMeetupsBadResponse(response, response.content) | [
"def",
"_http_get",
"(",
"self",
",",
"url",
")",
":",
"for",
"try_number",
"in",
"range",
"(",
"self",
".",
"_http_retries",
"+",
"1",
")",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"timeout",
"=",
"self",
".",
"_http_timeout",
")",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"return",
"response",
"if",
"(",
"try_number",
">=",
"self",
".",
"_http_retries",
"or",
"response",
".",
"status_code",
"not",
"in",
"(",
"408",
",",
"500",
",",
"502",
",",
"503",
",",
"504",
")",
")",
":",
"if",
"response",
".",
"status_code",
">=",
"500",
":",
"raise",
"PythonKCMeetupsMeetupDown",
"(",
"response",
",",
"response",
".",
"content",
")",
"if",
"response",
".",
"status_code",
"==",
"400",
":",
"try",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"content",
")",
"if",
"data",
".",
"get",
"(",
"'code'",
",",
"None",
")",
"==",
"'limit'",
":",
"raise",
"PythonKCMeetupsRateLimitExceeded",
"except",
":",
"# Don't lose original error when JSON is bad",
"pass",
"raise",
"PythonKCMeetupsBadResponse",
"(",
"response",
",",
"response",
".",
"content",
")"
] | Make an HTTP GET request to the specified URL and return the response.
Retries
-------
The constructor of this class takes an argument specifying the number
of times to retry a GET. The statuses which are retried on are: 408,
500, 502, 503, and 504.
Returns
-------
An HTTP response, containing response headers and content.
Exceptions
----------
* PythonKCMeetupsBadResponse
* PythonKCMeetupsMeetupDown
* PythonKCMeetupsRateLimitExceeded | [
"Make",
"an",
"HTTP",
"GET",
"request",
"to",
"the",
"specified",
"URL",
"and",
"return",
"the",
"response",
"."
] | 54b5062b2825011c87c303256f59c6c13d395ee7 | https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/client.py#L287-L325 |
247,931 | exekias/droplet | droplet/common.py | save | def save():
"""
Apply configuration changes on all the modules
"""
from .models import ModuleInfo
logger = logging.getLogger(__name__)
logger.info("Saving changes")
# Save + restart
for module in modules():
if module.enabled:
if module.changed:
module.save()
module.restart()
module.commit()
else:
logger.debug('Not saving unchanged module: %s' %
module.verbose_name)
else:
logger.debug('Not saving disabled module: %s' %
module.verbose_name)
# Commit
ModuleInfo.commit()
logger.info("Changes saved") | python | def save():
"""
Apply configuration changes on all the modules
"""
from .models import ModuleInfo
logger = logging.getLogger(__name__)
logger.info("Saving changes")
# Save + restart
for module in modules():
if module.enabled:
if module.changed:
module.save()
module.restart()
module.commit()
else:
logger.debug('Not saving unchanged module: %s' %
module.verbose_name)
else:
logger.debug('Not saving disabled module: %s' %
module.verbose_name)
# Commit
ModuleInfo.commit()
logger.info("Changes saved") | [
"def",
"save",
"(",
")",
":",
"from",
".",
"models",
"import",
"ModuleInfo",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"info",
"(",
"\"Saving changes\"",
")",
"# Save + restart",
"for",
"module",
"in",
"modules",
"(",
")",
":",
"if",
"module",
".",
"enabled",
":",
"if",
"module",
".",
"changed",
":",
"module",
".",
"save",
"(",
")",
"module",
".",
"restart",
"(",
")",
"module",
".",
"commit",
"(",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Not saving unchanged module: %s'",
"%",
"module",
".",
"verbose_name",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Not saving disabled module: %s'",
"%",
"module",
".",
"verbose_name",
")",
"# Commit",
"ModuleInfo",
".",
"commit",
"(",
")",
"logger",
".",
"info",
"(",
"\"Changes saved\"",
")"
] | Apply configuration changes on all the modules | [
"Apply",
"configuration",
"changes",
"on",
"all",
"the",
"modules"
] | aeac573a2c1c4b774e99d5414a1c79b1bb734941 | https://github.com/exekias/droplet/blob/aeac573a2c1c4b774e99d5414a1c79b1bb734941/droplet/common.py#L40-L66 |
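To make the control flow of the save() row above concrete, here is a hedged stub of the module interface it expects; the attribute and method names are read off the loop body, while the stub class itself is invented for illustration.

# Illustrative stub only -- droplet's real module objects are not shown here.
class FakeModule(object):
    verbose_name = 'fake'
    enabled = True       # disabled modules are skipped entirely
    changed = True       # unchanged modules are skipped with a debug log

    def save(self):      # called first for enabled + changed modules
        print('writing config')

    def restart(self):   # then the service is restarted
        print('restarting service')

    def commit(self):    # finally the module state is committed
        print('committing')

m = FakeModule()
if m.enabled and m.changed:   # the same guard save() applies per module
    m.save(); m.restart(); m.commit()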
247,932 | minhhoit/yacms | yacms/pages/fields.py | MenusField.get_default | def get_default(self):
"""
If the user provided a default in the field definition, returns it,
otherwise determines the default menus based on available choices and
``PAGE_MENU_TEMPLATES_DEFAULT``. Ensures the default is not mutable.
"""
if self._overridden_default:
# Even with user-provided default we'd rather not have it
# forced to text. Compare with Field.get_default().
if callable(self.default):
default = self.default()
else:
default = self.default
else:
# Depending on PAGE_MENU_TEMPLATES_DEFAULT:
# * None or no value: all choosable menus;
# * some sequence: specified menus;
# (* empty sequence: no menus).
default = getattr(settings, "PAGE_MENU_TEMPLATES_DEFAULT", None)
if default is None:
choices = self.get_choices(include_blank=False)
default = (c[0] for c in choices)
# Default can't be mutable, as references to it are shared among
# model instances; all sane values should be castable to a tuple.
return tuple(default) | python | def get_default(self):
"""
If the user provided a default in the field definition, returns it,
otherwise determines the default menus based on available choices and
``PAGE_MENU_TEMPLATES_DEFAULT``. Ensures the default is not mutable.
"""
if self._overridden_default:
# Even with user-provided default we'd rather not have it
# forced to text. Compare with Field.get_default().
if callable(self.default):
default = self.default()
else:
default = self.default
else:
# Depending on PAGE_MENU_TEMPLATES_DEFAULT:
# * None or no value: all choosable menus;
# * some sequence: specified menus;
# (* empty sequence: no menus).
default = getattr(settings, "PAGE_MENU_TEMPLATES_DEFAULT", None)
if default is None:
choices = self.get_choices(include_blank=False)
default = (c[0] for c in choices)
# Default can't be mutable, as references to it are shared among
# model instances; all sane values should be castable to a tuple.
return tuple(default) | [
"def",
"get_default",
"(",
"self",
")",
":",
"if",
"self",
".",
"_overridden_default",
":",
"# Even with user-provided default we'd rather not have it",
"# forced to text. Compare with Field.get_default().",
"if",
"callable",
"(",
"self",
".",
"default",
")",
":",
"default",
"=",
"self",
".",
"default",
"(",
")",
"else",
":",
"default",
"=",
"self",
".",
"default",
"else",
":",
"# Depending on PAGE_MENU_TEMPLATES_DEFAULT:",
"# * None or no value: all choosable menus;",
"# * some sequence: specified menus;",
"# (* empty sequence: no menus).",
"default",
"=",
"getattr",
"(",
"settings",
",",
"\"PAGE_MENU_TEMPLATES_DEFAULT\"",
",",
"None",
")",
"if",
"default",
"is",
"None",
":",
"choices",
"=",
"self",
".",
"get_choices",
"(",
"include_blank",
"=",
"False",
")",
"default",
"=",
"(",
"c",
"[",
"0",
"]",
"for",
"c",
"in",
"choices",
")",
"# Default can't be mutable, as references to it are shared among",
"# model instances; all sane values should be castable to a tuple.",
"return",
"tuple",
"(",
"default",
")"
] | If the user provided a default in the field definition, returns it,
otherwise determines the default menus based on available choices and
``PAGE_MENU_TEMPLATES_DEFAULT``. Ensures the default is not mutable. | [
"If",
"the",
"user",
"provided",
"a",
"default",
"in",
"the",
"field",
"definition",
"returns",
"it",
"otherwise",
"determines",
"the",
"default",
"menus",
"based",
"on",
"available",
"choices",
"and",
"PAGE_MENU_TEMPLATES_DEFAULT",
".",
"Ensures",
"the",
"default",
"is",
"not",
"mutable",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/pages/fields.py#L26-L50 |
247,933 | minhhoit/yacms | yacms/pages/fields.py | MenusField._get_choices | def _get_choices(self):
"""
Returns menus specified in ``PAGE_MENU_TEMPLATES`` unless you provide
some custom choices in the field definition.
"""
if self._overridden_choices:
# Note: choices is a property on Field bound to _get_choices().
return self._choices
else:
menus = getattr(settings, "PAGE_MENU_TEMPLATES", [])
return (m[:2] for m in menus) | python | def _get_choices(self):
"""
Returns menus specified in ``PAGE_MENU_TEMPLATES`` unless you provide
some custom choices in the field definition.
"""
if self._overridden_choices:
# Note: choices is a property on Field bound to _get_choices().
return self._choices
else:
menus = getattr(settings, "PAGE_MENU_TEMPLATES", [])
return (m[:2] for m in menus) | [
"def",
"_get_choices",
"(",
"self",
")",
":",
"if",
"self",
".",
"_overridden_choices",
":",
"# Note: choices is a property on Field bound to _get_choices().",
"return",
"self",
".",
"_choices",
"else",
":",
"menus",
"=",
"getattr",
"(",
"settings",
",",
"\"PAGE_MENU_TEMPLATES\"",
",",
"[",
"]",
")",
"return",
"(",
"m",
"[",
":",
"2",
"]",
"for",
"m",
"in",
"menus",
")"
] | Returns menus specified in ``PAGE_MENU_TEMPLATES`` unless you provide
some custom choices in the field definition. | [
"Returns",
"menus",
"specified",
"in",
"PAGE_MENU_TEMPLATES",
"unless",
"you",
"provide",
"some",
"custom",
"choices",
"in",
"the",
"field",
"definition",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/pages/fields.py#L52-L62 |
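Both MenusField rows above key off Django settings. A hedged sketch of the settings they read follows; only the setting names come from the code, the values and the third tuple element (a template path) are assumed placeholders.

# settings.py -- illustrative values only
PAGE_MENU_TEMPLATES = (
    (1, 'Top navigation bar', 'pages/menus/dropdown.html'),   # third element assumed
    (2, 'Footer', 'pages/menus/footer.html'),
)
# Leave this unset (or None) and get_default() falls back to every choice
# above, i.e. (1, 2); set it to a sequence to restrict the default.
PAGE_MENU_TEMPLATES_DEFAULT = (1,)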
247,934 | brutasse/rache | rache/__init__.py | job_details | def job_details(job_id, connection=None):
"""Returns the job data with its scheduled timestamp.
:param job_id: the ID of the job to retrieve."""
if connection is None:
connection = r
data = connection.hgetall(job_key(job_id))
job_data = {'id': job_id, 'schedule_at': int(connection.zscore(REDIS_KEY,
job_id))}
for key, value in data.items():
try:
decoded = value.decode('utf-8')
except UnicodeDecodeError:
decoded = value
if decoded.isdigit():
decoded = int(decoded)
job_data[key.decode('utf-8')] = decoded
return job_data | python | def job_details(job_id, connection=None):
"""Returns the job data with its scheduled timestamp.
:param job_id: the ID of the job to retrieve."""
if connection is None:
connection = r
data = connection.hgetall(job_key(job_id))
job_data = {'id': job_id, 'schedule_at': int(connection.zscore(REDIS_KEY,
job_id))}
for key, value in data.items():
try:
decoded = value.decode('utf-8')
except UnicodeDecodeError:
decoded = value
if decoded.isdigit():
decoded = int(decoded)
job_data[key.decode('utf-8')] = decoded
return job_data | [
"def",
"job_details",
"(",
"job_id",
",",
"connection",
"=",
"None",
")",
":",
"if",
"connection",
"is",
"None",
":",
"connection",
"=",
"r",
"data",
"=",
"connection",
".",
"hgetall",
"(",
"job_key",
"(",
"job_id",
")",
")",
"job_data",
"=",
"{",
"'id'",
":",
"job_id",
",",
"'schedule_at'",
":",
"int",
"(",
"connection",
".",
"zscore",
"(",
"REDIS_KEY",
",",
"job_id",
")",
")",
"}",
"for",
"key",
",",
"value",
"in",
"data",
".",
"items",
"(",
")",
":",
"try",
":",
"decoded",
"=",
"value",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
"UnicodeDecodeError",
":",
"decoded",
"=",
"value",
"if",
"decoded",
".",
"isdigit",
"(",
")",
":",
"decoded",
"=",
"int",
"(",
"decoded",
")",
"job_data",
"[",
"key",
".",
"decode",
"(",
"'utf-8'",
")",
"]",
"=",
"decoded",
"return",
"job_data"
] | Returns the job data with its scheduled timestamp.
:param job_id: the ID of the job to retrieve. | [
"Returns",
"the",
"job",
"data",
"with",
"its",
"scheduled",
"timestamp",
"."
] | fa9cf073376a8c731a13924b84fb8422a771a4ab | https://github.com/brutasse/rache/blob/fa9cf073376a8c731a13924b84fb8422a771a4ab/rache/__init__.py#L27-L45 |
247,935 | brutasse/rache | rache/__init__.py | schedule_job | def schedule_job(job_id, schedule_in, connection=None, **kwargs):
"""Schedules a job.
:param job_id: unique identifier for this job
:param schedule_in: number of seconds from now in which to schedule the
job or timedelta object.
:param **kwargs: parameters to attach to the job, key-value structure.
>>> schedule_job('http://example.com/test', schedule_in=10, num_retries=10)
"""
if not isinstance(schedule_in, int): # assumed to be a timedelta
schedule_in = schedule_in.days * 3600 * 24 + schedule_in.seconds
schedule_at = int(time.time()) + schedule_in
if connection is None:
connection = r
if 'id' in kwargs:
raise RuntimeError("'id' is a reserved key for the job ID")
with connection.pipeline() as pipe:
if schedule_at is not None:
args = (schedule_at, job_id)
if isinstance(connection, redis.Redis):
# StrictRedis or Redis don't have the same argument order
args = (job_id, schedule_at)
pipe.zadd(REDIS_KEY, *args)
delete = []
hmset = {}
for key, value in kwargs.items():
if value is None:
delete.append(key)
else:
hmset[key] = value
if hmset:
pipe.hmset(job_key(job_id), hmset)
if len(delete) > 0:
pipe.hdel(job_key(job_id), *delete)
pipe.execute() | python | def schedule_job(job_id, schedule_in, connection=None, **kwargs):
"""Schedules a job.
:param job_id: unique identifier for this job
:param schedule_in: number of seconds from now in which to schedule the
job or timedelta object.
:param **kwargs: parameters to attach to the job, key-value structure.
>>> schedule_job('http://example.com/test', schedule_in=10, num_retries=10)
"""
if not isinstance(schedule_in, int): # assumed to be a timedelta
schedule_in = schedule_in.days * 3600 * 24 + schedule_in.seconds
schedule_at = int(time.time()) + schedule_in
if connection is None:
connection = r
if 'id' in kwargs:
raise RuntimeError("'id' is a reserved key for the job ID")
with connection.pipeline() as pipe:
if schedule_at is not None:
args = (schedule_at, job_id)
if isinstance(connection, redis.Redis):
# StrictRedis or Redis don't have the same argument order
args = (job_id, schedule_at)
pipe.zadd(REDIS_KEY, *args)
delete = []
hmset = {}
for key, value in kwargs.items():
if value is None:
delete.append(key)
else:
hmset[key] = value
if hmset:
pipe.hmset(job_key(job_id), hmset)
if len(delete) > 0:
pipe.hdel(job_key(job_id), *delete)
pipe.execute() | [
"def",
"schedule_job",
"(",
"job_id",
",",
"schedule_in",
",",
"connection",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"isinstance",
"(",
"schedule_in",
",",
"int",
")",
":",
"# assumed to be a timedelta",
"schedule_in",
"=",
"schedule_in",
".",
"days",
"*",
"3600",
"*",
"24",
"+",
"schedule_in",
".",
"seconds",
"schedule_at",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
"+",
"schedule_in",
"if",
"connection",
"is",
"None",
":",
"connection",
"=",
"r",
"if",
"'id'",
"in",
"kwargs",
":",
"raise",
"RuntimeError",
"(",
"\"'id' is a reserved key for the job ID\"",
")",
"with",
"connection",
".",
"pipeline",
"(",
")",
"as",
"pipe",
":",
"if",
"schedule_at",
"is",
"not",
"None",
":",
"args",
"=",
"(",
"schedule_at",
",",
"job_id",
")",
"if",
"isinstance",
"(",
"connection",
",",
"redis",
".",
"Redis",
")",
":",
"# StrictRedis or Redis don't have the same argument order",
"args",
"=",
"(",
"job_id",
",",
"schedule_at",
")",
"pipe",
".",
"zadd",
"(",
"REDIS_KEY",
",",
"*",
"args",
")",
"delete",
"=",
"[",
"]",
"hmset",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"value",
"is",
"None",
":",
"delete",
".",
"append",
"(",
"key",
")",
"else",
":",
"hmset",
"[",
"key",
"]",
"=",
"value",
"if",
"hmset",
":",
"pipe",
".",
"hmset",
"(",
"job_key",
"(",
"job_id",
")",
",",
"hmset",
")",
"if",
"len",
"(",
"delete",
")",
">",
"0",
":",
"pipe",
".",
"hdel",
"(",
"job_key",
"(",
"job_id",
")",
",",
"*",
"delete",
")",
"pipe",
".",
"execute",
"(",
")"
] | Schedules a job.
:param job_id: unique identifier for this job
:param schedule_in: number of seconds from now in which to schedule the
job or timedelta object.
:param **kwargs: parameters to attach to the job, key-value structure.
>>> schedule_job('http://example.com/test', schedule_in=10, num_retries=10) | [
"Schedules",
"a",
"job",
"."
] | fa9cf073376a8c731a13924b84fb8422a771a4ab | https://github.com/brutasse/rache/blob/fa9cf073376a8c731a13924b84fb8422a771a4ab/rache/__init__.py#L48-L87 |
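The row's own doctest shows the basic call; a slightly fuller sketch combining it with job_details from the previous row (assumes a Redis server reachable with rache's default connection):

from rache import schedule_job, job_details

# Same call as the row's doctest: run 10 seconds from now, with an
# arbitrary key/value pair attached to the job.
schedule_job('http://example.com/test', schedule_in=10, num_retries=10)

details = job_details('http://example.com/test')
# details is roughly {'id': 'http://example.com/test',
#                     'schedule_at': <unix timestamp>, 'num_retries': 10}
print(details['schedule_at'], details['num_retries'])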
247,936 | brutasse/rache | rache/__init__.py | pending_jobs | def pending_jobs(reschedule_in=None, limit=None, connection=None):
"""Gets the job needing execution.
:param reschedule_in: number of seconds in which returned jobs should be
auto-rescheduled. If set to None (default), jobs are not auto-rescheduled.
:param limit: max number of jobs to retrieve. If set to None (default),
retrieves all pending jobs with no limit.
"""
if connection is None:
connection = r
start = None if limit is None else 0
job_ids = connection.zrangebyscore(REDIS_KEY, 0, int(time.time()),
start=start, num=limit)
with connection.pipeline() as pipe:
if reschedule_in is None:
for job_id in job_ids:
pipe.zrem(REDIS_KEY, job_id)
else:
schedule_at = int(time.time()) + reschedule_in
for job_id in job_ids:
args = (schedule_at, job_id)
if isinstance(connection, redis.Redis):
# StrictRedis or Redis don't have the same argument order
args = (job_id, schedule_at)
pipe.zadd(REDIS_KEY, *args)
pipe.execute()
with connection.pipeline() as pipe:
for job_id in job_ids:
pipe.hgetall(job_key(job_id.decode('utf-8')))
jobs = pipe.execute()
for job_id, data in izip(job_ids, jobs):
job_data = {'id': job_id.decode('utf-8')}
for key, value in data.items():
try:
decoded = value.decode('utf-8')
except UnicodeDecodeError:
decoded = value
if decoded.isdigit():
decoded = int(decoded)
job_data[key.decode('utf-8')] = decoded
yield job_data | python | def pending_jobs(reschedule_in=None, limit=None, connection=None):
"""Gets the job needing execution.
:param reschedule_in: number of seconds in which returned jobs should be
auto-rescheduled. If set to None (default), jobs are not auto-rescheduled.
:param limit: max number of jobs to retrieve. If set to None (default),
retrieves all pending jobs with no limit.
"""
if connection is None:
connection = r
start = None if limit is None else 0
job_ids = connection.zrangebyscore(REDIS_KEY, 0, int(time.time()),
start=start, num=limit)
with connection.pipeline() as pipe:
if reschedule_in is None:
for job_id in job_ids:
pipe.zrem(REDIS_KEY, job_id)
else:
schedule_at = int(time.time()) + reschedule_in
for job_id in job_ids:
args = (schedule_at, job_id)
if isinstance(connection, redis.Redis):
# StrictRedis or Redis don't have the same argument order
args = (job_id, schedule_at)
pipe.zadd(REDIS_KEY, *args)
pipe.execute()
with connection.pipeline() as pipe:
for job_id in job_ids:
pipe.hgetall(job_key(job_id.decode('utf-8')))
jobs = pipe.execute()
for job_id, data in izip(job_ids, jobs):
job_data = {'id': job_id.decode('utf-8')}
for key, value in data.items():
try:
decoded = value.decode('utf-8')
except UnicodeDecodeError:
decoded = value
if decoded.isdigit():
decoded = int(decoded)
job_data[key.decode('utf-8')] = decoded
yield job_data | [
"def",
"pending_jobs",
"(",
"reschedule_in",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"connection",
"=",
"None",
")",
":",
"if",
"connection",
"is",
"None",
":",
"connection",
"=",
"r",
"start",
"=",
"None",
"if",
"limit",
"is",
"None",
"else",
"0",
"job_ids",
"=",
"connection",
".",
"zrangebyscore",
"(",
"REDIS_KEY",
",",
"0",
",",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
",",
"start",
"=",
"start",
",",
"num",
"=",
"limit",
")",
"with",
"connection",
".",
"pipeline",
"(",
")",
"as",
"pipe",
":",
"if",
"reschedule_in",
"is",
"None",
":",
"for",
"job_id",
"in",
"job_ids",
":",
"pipe",
".",
"zrem",
"(",
"REDIS_KEY",
",",
"job_id",
")",
"else",
":",
"schedule_at",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
"+",
"reschedule_in",
"for",
"job_id",
"in",
"job_ids",
":",
"args",
"=",
"(",
"schedule_at",
",",
"job_id",
")",
"if",
"isinstance",
"(",
"connection",
",",
"redis",
".",
"Redis",
")",
":",
"# StrictRedis or Redis don't have the same argument order",
"args",
"=",
"(",
"job_id",
",",
"schedule_at",
")",
"pipe",
".",
"zadd",
"(",
"REDIS_KEY",
",",
"*",
"args",
")",
"pipe",
".",
"execute",
"(",
")",
"with",
"connection",
".",
"pipeline",
"(",
")",
"as",
"pipe",
":",
"for",
"job_id",
"in",
"job_ids",
":",
"pipe",
".",
"hgetall",
"(",
"job_key",
"(",
"job_id",
".",
"decode",
"(",
"'utf-8'",
")",
")",
")",
"jobs",
"=",
"pipe",
".",
"execute",
"(",
")",
"for",
"job_id",
",",
"data",
"in",
"izip",
"(",
"job_ids",
",",
"jobs",
")",
":",
"job_data",
"=",
"{",
"'id'",
":",
"job_id",
".",
"decode",
"(",
"'utf-8'",
")",
"}",
"for",
"key",
",",
"value",
"in",
"data",
".",
"items",
"(",
")",
":",
"try",
":",
"decoded",
"=",
"value",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
"UnicodeDecodeError",
":",
"decoded",
"=",
"value",
"if",
"decoded",
".",
"isdigit",
"(",
")",
":",
"decoded",
"=",
"int",
"(",
"decoded",
")",
"job_data",
"[",
"key",
".",
"decode",
"(",
"'utf-8'",
")",
"]",
"=",
"decoded",
"yield",
"job_data"
] | Gets the job needing execution.
:param reschedule_in: number of seconds in which returned jobs should be
auto-rescheduled. If set to None (default), jobs are not auto-rescheduled.
:param limit: max number of jobs to retrieve. If set to None (default),
retrieves all pending jobs with no limit. | [
"Gets",
"the",
"job",
"needing",
"execution",
"."
] | fa9cf073376a8c731a13924b84fb8422a771a4ab | https://github.com/brutasse/rache/blob/fa9cf073376a8c731a13924b84fb8422a771a4ab/rache/__init__.py#L105-L147 |
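A worker-loop sketch for the pending_jobs generator above, again assuming a default local Redis; run() is a hypothetical handler standing in for real work, and the explicit reschedule at the end is one possible pattern, not something the library mandates.

from rache import pending_jobs, schedule_job

def run(job):                    # hypothetical handler
    print('processing', job['id'])

# Claim up to 100 due jobs and push them 60 seconds into the future so a
# second worker does not pick up the same jobs while this one is busy.
for job in pending_jobs(reschedule_in=60, limit=100):
    run(job)
    schedule_job(job['id'], schedule_in=3600)   # queue the next run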
247,937 | brutasse/rache | rache/__init__.py | scheduled_jobs | def scheduled_jobs(with_times=False, connection=None):
"""Gets all jobs in the scheduler.
:param with_times: whether to return tuples with (job_id, timestamp) or
just job_id as a list of strings.
"""
if connection is None:
connection = r
jobs = connection.zrangebyscore(REDIS_KEY, 0, sys.maxsize,
withscores=with_times)
for job in jobs:
if with_times:
yield job[0].decode('utf-8'), job[1]
else:
yield job.decode('utf-8') | python | def scheduled_jobs(with_times=False, connection=None):
"""Gets all jobs in the scheduler.
:param with_times: whether to return tuples with (job_id, timestamp) or
just job_id as a list of strings.
"""
if connection is None:
connection = r
jobs = connection.zrangebyscore(REDIS_KEY, 0, sys.maxsize,
withscores=with_times)
for job in jobs:
if with_times:
yield job[0].decode('utf-8'), job[1]
else:
yield job.decode('utf-8') | [
"def",
"scheduled_jobs",
"(",
"with_times",
"=",
"False",
",",
"connection",
"=",
"None",
")",
":",
"if",
"connection",
"is",
"None",
":",
"connection",
"=",
"r",
"jobs",
"=",
"connection",
".",
"zrangebyscore",
"(",
"REDIS_KEY",
",",
"0",
",",
"sys",
".",
"maxsize",
",",
"withscores",
"=",
"with_times",
")",
"for",
"job",
"in",
"jobs",
":",
"if",
"with_times",
":",
"yield",
"job",
"[",
"0",
"]",
".",
"decode",
"(",
"'utf-8'",
")",
",",
"job",
"[",
"1",
"]",
"else",
":",
"yield",
"job",
".",
"decode",
"(",
"'utf-8'",
")"
] | Gets all jobs in the scheduler.
:param with_times: whether to return tuples with (job_id, timestamp) or
just job_id as a list of strings. | [
"Gets",
"all",
"jobs",
"in",
"the",
"scheduler",
"."
] | fa9cf073376a8c731a13924b84fb8422a771a4ab | https://github.com/brutasse/rache/blob/fa9cf073376a8c731a13924b84fb8422a771a4ab/rache/__init__.py#L150-L164 |
247,938 | SmartDeveloperHub/agora-service-provider | agora/provider/server/base.py | AgoraApp.run | def run(self, host=None, port=None, debug=None, **options):
"""
Start the AgoraApp expecting the provided config to have at least REDIS and PORT fields.
"""
tasks = options.get('tasks', [])
for task in tasks:
if task is not None and hasattr(task, '__call__'):
_batch_tasks.append(task)
thread = Thread(target=self.batch_work)
thread.start()
try:
super(AgoraApp, self).run(host='0.0.0.0', port=self.config['PORT'], debug=True, use_reloader=False)
except Exception, e:
print e.message
self._stop_event.set()
if thread.isAlive():
thread.join() | python | def run(self, host=None, port=None, debug=None, **options):
"""
Start the AgoraApp expecting the provided config to have at least REDIS and PORT fields.
"""
tasks = options.get('tasks', [])
for task in tasks:
if task is not None and hasattr(task, '__call__'):
_batch_tasks.append(task)
thread = Thread(target=self.batch_work)
thread.start()
try:
super(AgoraApp, self).run(host='0.0.0.0', port=self.config['PORT'], debug=True, use_reloader=False)
except Exception, e:
print e.message
self._stop_event.set()
if thread.isAlive():
thread.join() | [
"def",
"run",
"(",
"self",
",",
"host",
"=",
"None",
",",
"port",
"=",
"None",
",",
"debug",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"tasks",
"=",
"options",
".",
"get",
"(",
"'tasks'",
",",
"[",
"]",
")",
"for",
"task",
"in",
"tasks",
":",
"if",
"task",
"is",
"not",
"None",
"and",
"hasattr",
"(",
"task",
",",
"'__call__'",
")",
":",
"_batch_tasks",
".",
"append",
"(",
"task",
")",
"thread",
"=",
"Thread",
"(",
"target",
"=",
"self",
".",
"batch_work",
")",
"thread",
".",
"start",
"(",
")",
"try",
":",
"super",
"(",
"AgoraApp",
",",
"self",
")",
".",
"run",
"(",
"host",
"=",
"'0.0.0.0'",
",",
"port",
"=",
"self",
".",
"config",
"[",
"'PORT'",
"]",
",",
"debug",
"=",
"True",
",",
"use_reloader",
"=",
"False",
")",
"except",
"Exception",
",",
"e",
":",
"print",
"e",
".",
"message",
"self",
".",
"_stop_event",
".",
"set",
"(",
")",
"if",
"thread",
".",
"isAlive",
"(",
")",
":",
"thread",
".",
"join",
"(",
")"
] | Start the AgoraApp expecting the provided config to have at least REDIS and PORT fields. | [
"Start",
"the",
"AgoraApp",
"expecting",
"the",
"provided",
"config",
"to",
"have",
"at",
"least",
"REDIS",
"and",
"PORT",
"fields",
"."
] | 3962207e5701c659c74c8cfffcbc4b0a63eac4b4 | https://github.com/SmartDeveloperHub/agora-service-provider/blob/3962207e5701c659c74c8cfffcbc4b0a63eac4b4/agora/provider/server/base.py#L123-L142 |
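The run() row above ignores its own host/port/debug arguments and reads PORT from the app config, so a caller looks roughly like the sketch below. AgoraApp's constructor is not shown in this excerpt and is assumed to be Flask-style.

# Sketch only: AgoraApp construction is assumed to mirror flask.Flask.
from agora.provider.server.base import AgoraApp

def heartbeat():                        # hypothetical periodic batch task
    print('still alive')

app = AgoraApp(__name__)                # assumed Flask-like constructor
app.config['PORT'] = 5000               # required by run()
app.config['REDIS'] = 'localhost:6379'  # required per the docstring
app.run(tasks=[heartbeat])              # host/port/debug args are ignored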
247,939 | thomasvandoren/bugzscout-py | doc/example/src/celery_wsgi.py | _handle_exc | def _handle_exc(exception):
"""Record exception with stack trace to FogBugz via BugzScout,
asynchronously. Returns an empty string.
Note that this will not be reported to FogBugz until a celery worker
processes this task.
:param exception: uncaught exception thrown in app
"""
# Set the description to a familiar string with the exception
# message. Add the stack trace to extra.
bugzscout.ext.celery_app.submit_error.delay(
'http://fogbugz/scoutSubmit.asp',
'error-user',
'MyAppProject',
'Errors',
'An error occurred in MyApp: {0}'.format(exception.message),
extra=traceback.extract_tb(*sys.exc_info()))
# Return an empty body.
return [''] | python | def _handle_exc(exception):
"""Record exception with stack trace to FogBugz via BugzScout,
asynchronously. Returns an empty string.
Note that this will not be reported to FogBugz until a celery worker
processes this task.
:param exception: uncaught exception thrown in app
"""
# Set the description to a familiar string with the exception
# message. Add the stack trace to extra.
bugzscout.ext.celery_app.submit_error.delay(
'http://fogbugz/scoutSubmit.asp',
'error-user',
'MyAppProject',
'Errors',
'An error occurred in MyApp: {0}'.format(exception.message),
extra=traceback.extract_tb(*sys.exc_info()))
# Return an empty body.
return [''] | [
"def",
"_handle_exc",
"(",
"exception",
")",
":",
"# Set the description to a familiar string with the exception",
"# message. Add the stack trace to extra.",
"bugzscout",
".",
"ext",
".",
"celery_app",
".",
"submit_error",
".",
"delay",
"(",
"'http://fogbugz/scoutSubmit.asp'",
",",
"'error-user'",
",",
"'MyAppProject'",
",",
"'Errors'",
",",
"'An error occurred in MyApp: {0}'",
".",
"format",
"(",
"exception",
".",
"message",
")",
",",
"extra",
"=",
"traceback",
".",
"extract_tb",
"(",
"*",
"sys",
".",
"exc_info",
"(",
")",
")",
")",
"# Return an empty body.",
"return",
"[",
"''",
"]"
] | Record exception with stack trace to FogBugz via BugzScout,
asynchronously. Returns an empty string.
Note that this will not be reported to FogBugz until a celery worker
processes this task.
:param exception: uncaught exception thrown in app | [
"Record",
"exception",
"with",
"stack",
"trace",
"to",
"FogBugz",
"via",
"BugzScout",
"asynchronously",
".",
"Returns",
"an",
"empty",
"string",
"."
] | 514528e958a97e0e7b36870037c5c69661511824 | https://github.com/thomasvandoren/bugzscout-py/blob/514528e958a97e0e7b36870037c5c69661511824/doc/example/src/celery_wsgi.py#L19-L39 |
247,940 | thomasvandoren/bugzscout-py | doc/example/src/celery_wsgi.py | app | def app(environ, start_response):
"""Simple WSGI application. Returns 200 OK response with 'Hellow world!' in
the body for GET requests. Returns 405 Method Not Allowed for all other
methods.
Returns 500 Internal Server Error if an exception is thrown. The response
body will not include the error or any information about it. The error and
its stack trace will be reported to FogBugz via BugzScout, though.
:param environ: WSGI environ
:param start_response: function that accepts status string and headers
"""
try:
if environ['REQUEST_METHOD'] == 'GET':
start_response('200 OK', [('content-type', 'text/html')])
return ['Hellow world!']
else:
start_response(
'405 Method Not Allowed', [('content-type', 'text/html')])
return ['']
except Exception as ex:
# Call start_response with exception info.
start_response(
'500 Internal Server Error',
[('content-type', 'text/html')],
sys.exc_info())
# Record the error to FogBugz and this will return the body for the
# error response.
return _handle_exc(ex) | python | def app(environ, start_response):
"""Simple WSGI application. Returns 200 OK response with 'Hellow world!' in
the body for GET requests. Returns 405 Method Not Allowed for all other
methods.
Returns 500 Internal Server Error if an exception is thrown. The response
body will not include the error or any information about it. The error and
its stack trace will be reported to FogBugz via BugzScout, though.
:param environ: WSGI environ
:param start_response: function that accepts status string and headers
"""
try:
if environ['REQUEST_METHOD'] == 'GET':
start_response('200 OK', [('content-type', 'text/html')])
return ['Hellow world!']
else:
start_response(
'405 Method Not Allowed', [('content-type', 'text/html')])
return ['']
except Exception as ex:
# Call start_response with exception info.
start_response(
'500 Internal Server Error',
[('content-type', 'text/html')],
sys.exc_info())
# Record the error to FogBugz and this will return the body for the
# error response.
return _handle_exc(ex) | [
"def",
"app",
"(",
"environ",
",",
"start_response",
")",
":",
"try",
":",
"if",
"environ",
"[",
"'REQUEST_METHOD'",
"]",
"==",
"'GET'",
":",
"start_response",
"(",
"'200 OK'",
",",
"[",
"(",
"'content-type'",
",",
"'text/html'",
")",
"]",
")",
"return",
"[",
"'Hellow world!'",
"]",
"else",
":",
"start_response",
"(",
"'405 Method Not Allowed'",
",",
"[",
"(",
"'content-type'",
",",
"'text/html'",
")",
"]",
")",
"return",
"[",
"''",
"]",
"except",
"Exception",
"as",
"ex",
":",
"# Call start_response with exception info.",
"start_response",
"(",
"'500 Internal Server Error'",
",",
"[",
"(",
"'content-type'",
",",
"'text/html'",
")",
"]",
",",
"sys",
".",
"exc_info",
"(",
")",
")",
"# Record the error to FogBugz and this will return the body for the",
"# error response.",
"return",
"_handle_exc",
"(",
"ex",
")"
] | Simple WSGI application. Returns 200 OK response with 'Hellow world!' in
the body for GET requests. Returns 405 Method Not Allowed for all other
methods.
Returns 500 Internal Server Error if an exception is thrown. The response
body will not include the error or any information about it. The error and
its stack trace will be reported to FogBugz via BugzScout, though.
:param environ: WSGI environ
:param start_response: function that accepts status string and headers | [
"Simple",
"WSGI",
"application",
".",
"Returns",
"200",
"OK",
"response",
"with",
"Hellow",
"world!",
"in",
"the",
"body",
"for",
"GET",
"requests",
".",
"Returns",
"405",
"Method",
"Not",
"Allowed",
"for",
"all",
"other",
"methods",
"."
] | 514528e958a97e0e7b36870037c5c69661511824 | https://github.com/thomasvandoren/bugzscout-py/blob/514528e958a97e0e7b36870037c5c69661511824/doc/example/src/celery_wsgi.py#L42-L71 |
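The row above is itself a complete WSGI callable; to exercise it locally it can be served with the standard library, for example:

# Development-only server for the example app above.
from wsgiref.simple_server import make_server
from celery_wsgi import app   # assumes doc/example/src is on sys.path

httpd = make_server('', 8000, app)
httpd.serve_forever()         # GET / -> 200 "Hellow world!", other methods -> 405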
247,941 | gmr/remy | remy/cli.py | add_cookbook_mgmt_options | def add_cookbook_mgmt_options(parser):
"""Add the cookbook management command and arguments.
:rtype: argparse.ArgumentParser
"""
cookbook = parser.add_parser('cookbook', help='Invoke in a Jenkins job to '
'update a cookbook in '
'chef-repo')
cookbook.add_argument('repo', action='store',
help='Git URL for chef-repo')
cookbook.set_defaults(func='process_cookbook') | python | def add_cookbook_mgmt_options(parser):
"""Add the cookbook management command and arguments.
:rtype: argparse.ArgumentParser
"""
cookbook = parser.add_parser('cookbook', help='Invoke in a Jenkins job to '
'update a cookbook in '
'chef-repo')
cookbook.add_argument('repo', action='store',
help='Git URL for chef-repo')
cookbook.set_defaults(func='process_cookbook') | [
"def",
"add_cookbook_mgmt_options",
"(",
"parser",
")",
":",
"cookbook",
"=",
"parser",
".",
"add_parser",
"(",
"'cookbook'",
",",
"help",
"=",
"'Invoke in a Jenkins job to '",
"'update a cookbook in '",
"'chef-repo'",
")",
"cookbook",
".",
"add_argument",
"(",
"'repo'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'Git URL for chef-repo'",
")",
"cookbook",
".",
"set_defaults",
"(",
"func",
"=",
"'process_cookbook'",
")"
] | Add the cookbook management command and arguments.
:rtype: argparse.ArgumentParser | [
"Add",
"the",
"cookbook",
"management",
"command",
"and",
"arguments",
"."
] | 74368ae74e3f2b59376d6f8e457aefbe9c7debdf | https://github.com/gmr/remy/blob/74368ae74e3f2b59376d6f8e457aefbe9c7debdf/remy/cli.py#L19-L30 |
247,942 | gmr/remy | remy/cli.py | add_github_hook_options | def add_github_hook_options(parser):
"""Add the github jenkins hook command and arguments.
:rtype: argparse.ArgumentParser
"""
cookbook = parser.add_parser('github', help='Install the Jenkins callback '
'hook in a GitHub repository')
cookbook.add_argument('owner', action='store',
help='The owner of the GitHub repo')
cookbook.add_argument('repo', action='store',
help='The GitHub repository name')
domain = socket.gethostname()
example = 'jenkins.%s' % domain
cookbook.add_argument('jenkins_hook_url', action='store',
help='The jenkins hook URL. For example %s' % example)
cookbook.add_argument('-g', '--github-host',
action='store',
dest='github',
default=github.GITHUB_HOST,
help='Override github.com for a '
'GitHub::Enterprise host')
cookbook.add_argument('-u', '--username',
action='store',
dest='username',
help='Specify a different username than the repo '
'owner')
cookbook.set_defaults(func='github_hooks') | python | def add_github_hook_options(parser):
"""Add the github jenkins hook command and arguments.
:rtype: argparse.ArgumentParser
"""
cookbook = parser.add_parser('github', help='Install the Jenkins callback '
'hook in a GitHub repository')
cookbook.add_argument('owner', action='store',
help='The owner of the GitHub repo')
cookbook.add_argument('repo', action='store',
help='The GitHub repository name')
domain = socket.gethostname()
example = 'jenkins.%s' % domain
cookbook.add_argument('jenkins_hook_url', action='store',
help='The jenkins hook URL. For example %s' % example)
cookbook.add_argument('-g', '--github-host',
action='store',
dest='github',
default=github.GITHUB_HOST,
help='Override github.com for a '
'GitHub::Enterprise host')
cookbook.add_argument('-u', '--username',
action='store',
dest='username',
help='Specify a different username than the repo '
'owner')
cookbook.set_defaults(func='github_hooks') | [
"def",
"add_github_hook_options",
"(",
"parser",
")",
":",
"cookbook",
"=",
"parser",
".",
"add_parser",
"(",
"'github'",
",",
"help",
"=",
"'Install the Jenkins callback '",
"'hook in a GitHub repository'",
")",
"cookbook",
".",
"add_argument",
"(",
"'owner'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'The owner of the GitHub repo'",
")",
"cookbook",
".",
"add_argument",
"(",
"'repo'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'The GitHub repository name'",
")",
"domain",
"=",
"socket",
".",
"gethostname",
"(",
")",
"example",
"=",
"'jenkins.%s'",
"%",
"domain",
"cookbook",
".",
"add_argument",
"(",
"'jenkins_hook_url'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'The jenkins hook URL. For example %s'",
"%",
"example",
")",
"cookbook",
".",
"add_argument",
"(",
"'-g'",
",",
"'--github-host'",
",",
"action",
"=",
"'store'",
",",
"dest",
"=",
"'github'",
",",
"default",
"=",
"github",
".",
"GITHUB_HOST",
",",
"help",
"=",
"'Override github.com for a '",
"'GitHub::Enterprise host'",
")",
"cookbook",
".",
"add_argument",
"(",
"'-u'",
",",
"'--username'",
",",
"action",
"=",
"'store'",
",",
"dest",
"=",
"'username'",
",",
"help",
"=",
"'Specify a different username than the repo '",
"'owner'",
")",
"cookbook",
".",
"set_defaults",
"(",
"func",
"=",
"'github_hooks'",
")"
] | Add the github jenkins hook command and arguments.
:rtype: argparse.ArgumentParser | [
"Add",
"the",
"github",
"jenkins",
"hook",
"command",
"and",
"arguments",
"."
] | 74368ae74e3f2b59376d6f8e457aefbe9c7debdf | https://github.com/gmr/remy/blob/74368ae74e3f2b59376d6f8e457aefbe9c7debdf/remy/cli.py#L32-L61 |
247,943 | gmr/remy | remy/cli.py | add_jenkins_job_options | def add_jenkins_job_options(parser):
"""Add a new job to Jenkins for updating chef-repo
:rtype: argparse.ArgumentParser
"""
cookbook = parser.add_parser('jenkins', help='Add a new cookbook job to '
'Jenkins')
cookbook.add_argument('jenkins', action='store',
help='The jenkins server hostname')
cookbook.add_argument('name', action='store',
help='The cookbook name')
cookbook.add_argument('cookbook', action='store',
help='The cookbook git repository URL')
cookbook.add_argument('chef_repo', action='store',
help='The chef-repo git repository URL')
cookbook.add_argument('-u', '--username',
action='store',
dest='username',
default=pwd.getpwuid(os.getuid())[0],
help='Specify a different username than the repo '
'owner')
cookbook.add_argument('-n', '--hipchat-notification',
action='store',
dest='hipchat',
help='Hipchat room for notifications')
cookbook.set_defaults(func='new_job') | python | def add_jenkins_job_options(parser):
"""Add a new job to Jenkins for updating chef-repo
:rtype: argparse.ArgumentParser
"""
cookbook = parser.add_parser('jenkins', help='Add a new cookbook job to '
'Jenkins')
cookbook.add_argument('jenkins', action='store',
help='The jenkins server hostname')
cookbook.add_argument('name', action='store',
help='The cookbook name')
cookbook.add_argument('cookbook', action='store',
help='The cookbook git repository URL')
cookbook.add_argument('chef_repo', action='store',
help='The chef-repo git repository URL')
cookbook.add_argument('-u', '--username',
action='store',
dest='username',
default=pwd.getpwuid(os.getuid())[0],
help='Specify a different username than the repo '
'owner')
cookbook.add_argument('-n', '--hipchat-notification',
action='store',
dest='hipchat',
help='Hipchat room for notifications')
cookbook.set_defaults(func='new_job') | [
"def",
"add_jenkins_job_options",
"(",
"parser",
")",
":",
"cookbook",
"=",
"parser",
".",
"add_parser",
"(",
"'jenkins'",
",",
"help",
"=",
"'Add a new cookbook job to '",
"'Jenkins'",
")",
"cookbook",
".",
"add_argument",
"(",
"'jenkins'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'The jenkins server hostname'",
")",
"cookbook",
".",
"add_argument",
"(",
"'name'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'The cookbook name'",
")",
"cookbook",
".",
"add_argument",
"(",
"'cookbook'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'The cookbook git repository URL'",
")",
"cookbook",
".",
"add_argument",
"(",
"'chef_repo'",
",",
"action",
"=",
"'store'",
",",
"help",
"=",
"'The chef-repo git repository URL'",
")",
"cookbook",
".",
"add_argument",
"(",
"'-u'",
",",
"'--username'",
",",
"action",
"=",
"'store'",
",",
"dest",
"=",
"'username'",
",",
"default",
"=",
"pwd",
".",
"getpwuid",
"(",
"os",
".",
"getuid",
"(",
")",
")",
"[",
"0",
"]",
",",
"help",
"=",
"'Specify a different username than the repo '",
"'owner'",
")",
"cookbook",
".",
"add_argument",
"(",
"'-n'",
",",
"'--hipchat-notification'",
",",
"action",
"=",
"'store'",
",",
"dest",
"=",
"'hipchat'",
",",
"help",
"=",
"'Hipchat room for notifications'",
")",
"cookbook",
".",
"set_defaults",
"(",
"func",
"=",
"'new_job'",
")"
] | Add a new job to Jenkins for updating chef-repo
:rtype: argparse.ArgumentParser | [
"Add",
"a",
"new",
"job",
"to",
"Jenkins",
"for",
"updating",
"chef",
"-",
"repo"
] | 74368ae74e3f2b59376d6f8e457aefbe9c7debdf | https://github.com/gmr/remy/blob/74368ae74e3f2b59376d6f8e457aefbe9c7debdf/remy/cli.py#L64-L90 |
247,944 | gmr/remy | remy/cli.py | argparser | def argparser():
"""Build the argument parser
:rtype: argparse.ArgumentParser
"""
parser = argparse.ArgumentParser(description=__description__)
sparser = parser.add_subparsers()
add_cookbook_mgmt_options(sparser)
add_role_options(sparser)
add_github_hook_options(sparser)
add_jenkins_job_options(sparser)
return parser | python | def argparser():
"""Build the argument parser
:rtype: argparse.ArgumentParser
"""
parser = argparse.ArgumentParser(description=__description__)
sparser = parser.add_subparsers()
add_cookbook_mgmt_options(sparser)
add_role_options(sparser)
add_github_hook_options(sparser)
add_jenkins_job_options(sparser)
return parser | [
"def",
"argparser",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"__description__",
")",
"sparser",
"=",
"parser",
".",
"add_subparsers",
"(",
")",
"add_cookbook_mgmt_options",
"(",
"sparser",
")",
"add_role_options",
"(",
"sparser",
")",
"add_github_hook_options",
"(",
"sparser",
")",
"add_jenkins_job_options",
"(",
"sparser",
")",
"return",
"parser"
] | Build the argument parser
:rtype: argparse.ArgumentParser | [
"Build",
"the",
"argument",
"parser"
] | 74368ae74e3f2b59376d6f8e457aefbe9c7debdf | https://github.com/gmr/remy/blob/74368ae74e3f2b59376d6f8e457aefbe9c7debdf/remy/cli.py#L105-L117 |
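
The two remy records above assemble a CLI from argparse sub-parsers, with each sub-command attaching its handler via set_defaults(func=...). A minimal, self-contained sketch of that pattern (command and argument names here are illustrative, and the handler is attached as a callable rather than remy's string name):

import argparse

def new_job(args):
    print("would create a job for", args.name)

def build_parser():
    parser = argparse.ArgumentParser(description="demo CLI")
    sub = parser.add_subparsers(dest="command")
    jenkins = sub.add_parser("jenkins", help="Add a new cookbook job")
    jenkins.add_argument("name", help="The cookbook name")
    jenkins.add_argument("-n", "--hipchat-notification", dest="hipchat")
    jenkins.set_defaults(func=new_job)  # handler stored on the parsed namespace
    return parser

if __name__ == "__main__":
    args = build_parser().parse_args(["jenkins", "mycookbook"])
    args.func(args)  # dispatch to the sub-command's handler
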
247,945 | nickw444/wtforms-webwidgets | wtforms_webwidgets/bootstrap/util.py | render_field_description | def render_field_description(field):
"""
Render a field description as HTML.
"""
if hasattr(field, 'description') and field.description != '':
html = """<p class="help-block">{field.description}</p>"""
html = html.format(
field=field
)
return HTMLString(html)
return '' | python | def render_field_description(field):
"""
Render a field description as HTML.
"""
if hasattr(field, 'description') and field.description != '':
html = """<p class="help-block">{field.description}</p>"""
html = html.format(
field=field
)
return HTMLString(html)
return '' | [
"def",
"render_field_description",
"(",
"field",
")",
":",
"if",
"hasattr",
"(",
"field",
",",
"'description'",
")",
"and",
"field",
".",
"description",
"!=",
"''",
":",
"html",
"=",
"\"\"\"<p class=\"help-block\">{field.description}</p>\"\"\"",
"html",
"=",
"html",
".",
"format",
"(",
"field",
"=",
"field",
")",
"return",
"HTMLString",
"(",
"html",
")",
"return",
"''"
] | Render a field description as HTML. | [
"Render",
"a",
"field",
"description",
"as",
"HTML",
"."
] | 88f224b68c0b0f4f5c97de39fe1428b96e12f8db | https://github.com/nickw444/wtforms-webwidgets/blob/88f224b68c0b0f4f5c97de39fe1428b96e12f8db/wtforms_webwidgets/bootstrap/util.py#L19-L30 |
247,946 | tBaxter/django-fretboard | fretboard/helpers.py | update_post_relations | def update_post_relations(user, topic, deleting=False):
"""
helper function to update user post count and parent topic post_count.
"""
if deleting:
user.post_count = user.post_count - 1
else:
user.post_count += 1
user.save(update_fields=['post_count'])
topic.modified = datetime.now()
topic.modified_int = time.time()
topic.save(update_fields=['modified', 'modified_int']) | python | def update_post_relations(user, topic, deleting=False):
"""
helper function to update user post count and parent topic post_count.
"""
if deleting:
user.post_count = user.post_count - 1
else:
user.post_count += 1
user.save(update_fields=['post_count'])
topic.modified = datetime.now()
topic.modified_int = time.time()
topic.save(update_fields=['modified', 'modified_int']) | [
"def",
"update_post_relations",
"(",
"user",
",",
"topic",
",",
"deleting",
"=",
"False",
")",
":",
"if",
"deleting",
":",
"user",
".",
"post_count",
"=",
"user",
".",
"post_count",
"-",
"1",
"else",
":",
"user",
".",
"post_count",
"+=",
"1",
"user",
".",
"save",
"(",
"update_fields",
"=",
"[",
"'post_count'",
"]",
")",
"topic",
".",
"modified",
"=",
"datetime",
".",
"now",
"(",
")",
"topic",
".",
"modified_int",
"=",
"time",
".",
"time",
"(",
")",
"topic",
".",
"save",
"(",
"update_fields",
"=",
"[",
"'modified'",
",",
"'modified_int'",
"]",
")"
] | helper function to update user post count and parent topic post_count. | [
"helper",
"function",
"to",
"update",
"user",
"post",
"count",
"and",
"parent",
"topic",
"post_count",
"."
] | 3c3f9557089821283f315a07f3e5a57a2725ab3b | https://github.com/tBaxter/django-fretboard/blob/3c3f9557089821283f315a07f3e5a57a2725ab3b/fretboard/helpers.py#L5-L17 |
247,947 | jmgilman/Neolib | neolib/pyamf/adapters/_django_db_models_base.py | DjangoReferenceCollection.addClassKey | def addClassKey(self, klass, key, obj):
"""
Adds an object to the collection, based on klass and key.
@param klass: The class of the object.
@param key: The datastore key of the object.
@param obj: The loaded instance from the datastore.
"""
d = self._getClass(klass)
d[key] = obj | python | def addClassKey(self, klass, key, obj):
"""
Adds an object to the collection, based on klass and key.
@param klass: The class of the object.
@param key: The datastore key of the object.
@param obj: The loaded instance from the datastore.
"""
d = self._getClass(klass)
d[key] = obj | [
"def",
"addClassKey",
"(",
"self",
",",
"klass",
",",
"key",
",",
"obj",
")",
":",
"d",
"=",
"self",
".",
"_getClass",
"(",
"klass",
")",
"d",
"[",
"key",
"]",
"=",
"obj"
] | Adds an object to the collection, based on klass and key.
@param klass: The class of the object.
@param key: The datastore key of the object.
@param obj: The loaded instance from the datastore. | [
"Adds",
"an",
"object",
"to",
"the",
"collection",
"based",
"on",
"klass",
"and",
"key",
"."
] | 228fafeaed0f3195676137732384a14820ae285c | https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/adapters/_django_db_models_base.py#L49-L59 |
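
addClassKey above caches loaded objects in a two-level mapping keyed first by class and then by datastore key. A standalone sketch of that reference-collection pattern (the classes and keys used below are invented for illustration):

class ReferenceCollection:
    def __init__(self):
        self._by_class = {}

    def _get_class(self, klass):
        # Lazily create the per-class bucket, like _getClass in the record.
        return self._by_class.setdefault(klass, {})

    def add_class_key(self, klass, key, obj):
        self._get_class(klass)[key] = obj

    def get_class_key(self, klass, key):
        return self._get_class(klass).get(key)

refs = ReferenceCollection()
refs.add_class_key(dict, 1, {"pk": 1})
print(refs.get_class_key(dict, 1))  # {'pk': 1}
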
247,948 | OldhamMade/PySO8601 | PySO8601/durations.py | parse_duration | def parse_duration(duration, start=None, end=None):
"""
Attepmt to parse an ISO8601 formatted duration.
Accepts a ``duration`` and optionally a start or end ``datetime``.
``duration`` must be an ISO8601 formatted string.
Returns a ``datetime.timedelta`` object.
"""
if not start and not end:
return parse_simple_duration(duration)
if start:
return parse_duration_with_start(start, duration)
if end:
return parse_duration_with_end(duration, end) | python | def parse_duration(duration, start=None, end=None):
"""
Attepmt to parse an ISO8601 formatted duration.
Accepts a ``duration`` and optionally a start or end ``datetime``.
``duration`` must be an ISO8601 formatted string.
Returns a ``datetime.timedelta`` object.
"""
if not start and not end:
return parse_simple_duration(duration)
if start:
return parse_duration_with_start(start, duration)
if end:
return parse_duration_with_end(duration, end) | [
"def",
"parse_duration",
"(",
"duration",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
")",
":",
"if",
"not",
"start",
"and",
"not",
"end",
":",
"return",
"parse_simple_duration",
"(",
"duration",
")",
"if",
"start",
":",
"return",
"parse_duration_with_start",
"(",
"start",
",",
"duration",
")",
"if",
"end",
":",
"return",
"parse_duration_with_end",
"(",
"duration",
",",
"end",
")"
] | Attepmt to parse an ISO8601 formatted duration.
Accepts a ``duration`` and optionally a start or end ``datetime``.
``duration`` must be an ISO8601 formatted string.
Returns a ``datetime.timedelta`` object. | [
"Attepmt",
"to",
"parse",
"an",
"ISO8601",
"formatted",
"duration",
"."
] | b7d3b3cb3ed3e12eb2a21caa26a3abeab3c96fe4 | https://github.com/OldhamMade/PySO8601/blob/b7d3b3cb3ed3e12eb2a21caa26a3abeab3c96fe4/PySO8601/durations.py#L12-L28 |
247,949 | OldhamMade/PySO8601 | PySO8601/durations.py | parse_simple_duration | def parse_simple_duration(duration):
"""
Attepmt to parse an ISO8601 formatted duration, using a naive calculation.
Accepts a ``duration`` which must be an ISO8601 formatted string, and
assumes 365 days in a year and 30 days in a month for the calculation.
Returns a ``datetime.timedelta`` object.
"""
elements = _parse_duration_string(_clean(duration))
if not elements:
raise ParseError()
return _timedelta_from_elements(elements) | python | def parse_simple_duration(duration):
"""
Attepmt to parse an ISO8601 formatted duration, using a naive calculation.
Accepts a ``duration`` which must be an ISO8601 formatted string, and
assumes 365 days in a year and 30 days in a month for the calculation.
Returns a ``datetime.timedelta`` object.
"""
elements = _parse_duration_string(_clean(duration))
if not elements:
raise ParseError()
return _timedelta_from_elements(elements) | [
"def",
"parse_simple_duration",
"(",
"duration",
")",
":",
"elements",
"=",
"_parse_duration_string",
"(",
"_clean",
"(",
"duration",
")",
")",
"if",
"not",
"elements",
":",
"raise",
"ParseError",
"(",
")",
"return",
"_timedelta_from_elements",
"(",
"elements",
")"
] | Attepmt to parse an ISO8601 formatted duration, using a naive calculation.
Accepts a ``duration`` which must be an ISO8601 formatted string, and
assumes 365 days in a year and 30 days in a month for the calculation.
Returns a ``datetime.timedelta`` object. | [
"Attepmt",
"to",
"parse",
"an",
"ISO8601",
"formatted",
"duration",
"using",
"a",
"naive",
"calculation",
"."
] | b7d3b3cb3ed3e12eb2a21caa26a3abeab3c96fe4 | https://github.com/OldhamMade/PySO8601/blob/b7d3b3cb3ed3e12eb2a21caa26a3abeab3c96fe4/PySO8601/durations.py#L31-L45 |
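
parse_simple_duration above reduces an ISO 8601 duration to a timedelta using the naive 365-day-year / 30-day-month rule, but its helpers (_parse_duration_string, _timedelta_from_elements) are not shown in the record. The sketch below reimplements the same naive conversion independently; it is not PySO8601's internal code and ignores week-style durations:

import re
from datetime import timedelta

_DURATION_RE = re.compile(
    r"^P(?:(?P<years>\d+)Y)?(?:(?P<months>\d+)M)?(?:(?P<days>\d+)D)?"
    r"(?:T(?:(?P<hours>\d+)H)?(?:(?P<minutes>\d+)M)?(?:(?P<seconds>\d+)S)?)?$"
)

def naive_duration(text):
    match = _DURATION_RE.match(text)
    if not match:
        raise ValueError("not an ISO 8601 duration: %r" % text)
    parts = {k: int(v or 0) for k, v in match.groupdict().items()}
    days = parts["years"] * 365 + parts["months"] * 30 + parts["days"]
    return timedelta(days=days, hours=parts["hours"],
                     minutes=parts["minutes"], seconds=parts["seconds"])

print(naive_duration("P1Y2M3DT4H5M6S"))  # 428 days, 4:05:06
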
247,950 | OldhamMade/PySO8601 | PySO8601/durations.py | parse_duration_with_start | def parse_duration_with_start(start, duration):
"""
Attepmt to parse an ISO8601 formatted duration based on a start datetime.
Accepts a ``duration`` and a start ``datetime``. ``duration`` must be
an ISO8601 formatted string.
Returns a ``datetime.timedelta`` object.
"""
elements = _parse_duration_string(_clean(duration))
year, month = _year_month_delta_from_elements(elements)
end = start.replace(
year=start.year + year,
month=start.month + month
)
del elements['years']
del elements['months']
end += _timedelta_from_elements(elements)
return start, end - start | python | def parse_duration_with_start(start, duration):
"""
Attepmt to parse an ISO8601 formatted duration based on a start datetime.
Accepts a ``duration`` and a start ``datetime``. ``duration`` must be
an ISO8601 formatted string.
Returns a ``datetime.timedelta`` object.
"""
elements = _parse_duration_string(_clean(duration))
year, month = _year_month_delta_from_elements(elements)
end = start.replace(
year=start.year + year,
month=start.month + month
)
del elements['years']
del elements['months']
end += _timedelta_from_elements(elements)
return start, end - start | [
"def",
"parse_duration_with_start",
"(",
"start",
",",
"duration",
")",
":",
"elements",
"=",
"_parse_duration_string",
"(",
"_clean",
"(",
"duration",
")",
")",
"year",
",",
"month",
"=",
"_year_month_delta_from_elements",
"(",
"elements",
")",
"end",
"=",
"start",
".",
"replace",
"(",
"year",
"=",
"start",
".",
"year",
"+",
"year",
",",
"month",
"=",
"start",
".",
"month",
"+",
"month",
")",
"del",
"elements",
"[",
"'years'",
"]",
"del",
"elements",
"[",
"'months'",
"]",
"end",
"+=",
"_timedelta_from_elements",
"(",
"elements",
")",
"return",
"start",
",",
"end",
"-",
"start"
] | Attepmt to parse an ISO8601 formatted duration based on a start datetime.
Accepts a ``duration`` and a start ``datetime``. ``duration`` must be
an ISO8601 formatted string.
Returns a ``datetime.timedelta`` object. | [
"Attepmt",
"to",
"parse",
"an",
"ISO8601",
"formatted",
"duration",
"based",
"on",
"a",
"start",
"datetime",
"."
] | b7d3b3cb3ed3e12eb2a21caa26a3abeab3c96fe4 | https://github.com/OldhamMade/PySO8601/blob/b7d3b3cb3ed3e12eb2a21caa26a3abeab3c96fe4/PySO8601/durations.py#L63-L85 |
247,951 | wiggzz/siggy | siggy/siggy.py | int_to_var_bytes | def int_to_var_bytes(x):
"""Converts an integer to a bitcoin variable length integer as a bytearray
:param x: the integer to convert
"""
if x < 253:
return intbytes.to_bytes(x, 1)
elif x < 65536:
return bytearray([253]) + intbytes.to_bytes(x, 2)[::-1]
elif x < 4294967296:
return bytearray([254]) + intbytes.to_bytes(x, 4)[::-1]
else:
return bytearray([255]) + intbytes.to_bytes(x, 8)[::-1] | python | def int_to_var_bytes(x):
"""Converts an integer to a bitcoin variable length integer as a bytearray
:param x: the integer to convert
"""
if x < 253:
return intbytes.to_bytes(x, 1)
elif x < 65536:
return bytearray([253]) + intbytes.to_bytes(x, 2)[::-1]
elif x < 4294967296:
return bytearray([254]) + intbytes.to_bytes(x, 4)[::-1]
else:
return bytearray([255]) + intbytes.to_bytes(x, 8)[::-1] | [
"def",
"int_to_var_bytes",
"(",
"x",
")",
":",
"if",
"x",
"<",
"253",
":",
"return",
"intbytes",
".",
"to_bytes",
"(",
"x",
",",
"1",
")",
"elif",
"x",
"<",
"65536",
":",
"return",
"bytearray",
"(",
"[",
"253",
"]",
")",
"+",
"intbytes",
".",
"to_bytes",
"(",
"x",
",",
"2",
")",
"[",
":",
":",
"-",
"1",
"]",
"elif",
"x",
"<",
"4294967296",
":",
"return",
"bytearray",
"(",
"[",
"254",
"]",
")",
"+",
"intbytes",
".",
"to_bytes",
"(",
"x",
",",
"4",
")",
"[",
":",
":",
"-",
"1",
"]",
"else",
":",
"return",
"bytearray",
"(",
"[",
"255",
"]",
")",
"+",
"intbytes",
".",
"to_bytes",
"(",
"x",
",",
"8",
")",
"[",
":",
":",
"-",
"1",
"]"
] | Converts an integer to a bitcoin variable length integer as a bytearray
:param x: the integer to convert | [
"Converts",
"an",
"integer",
"to",
"a",
"bitcoin",
"variable",
"length",
"integer",
"as",
"a",
"bytearray"
] | bfb67cbc9da3e4545b3e882bf7384fa265e6d6c1 | https://github.com/wiggzz/siggy/blob/bfb67cbc9da3e4545b3e882bf7384fa265e6d6c1/siggy/siggy.py#L36-L48 |
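
int_to_var_bytes above is Bitcoin's variable-length integer encoding: a single raw byte below 253, otherwise a 0xFD/0xFE/0xFF marker followed by a 2-, 4- or 8-byte little-endian value. The same encoding using the standard struct module instead of pycoin's intbytes helper:

import struct

def encode_varint(n):
    # Marker byte plus little-endian payload, matching the record above.
    if n < 0xFD:
        return struct.pack("<B", n)
    elif n <= 0xFFFF:
        return b"\xfd" + struct.pack("<H", n)
    elif n <= 0xFFFFFFFF:
        return b"\xfe" + struct.pack("<I", n)
    else:
        return b"\xff" + struct.pack("<Q", n)

print(encode_varint(100).hex())    # 64
print(encode_varint(70015).hex())  # fe7f110100
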
247,952 | wiggzz/siggy | siggy/siggy.py | bitcoin_sig_hash | def bitcoin_sig_hash(message):
"""Bitcoin has a special format for hashing messages for signing.
:param message: the encoded message to hash in preparation for verifying
"""
padded = b'\x18Bitcoin Signed Message:\n' +\
int_to_var_bytes(len(message)) +\
message
return double_sha256(padded) | python | def bitcoin_sig_hash(message):
"""Bitcoin has a special format for hashing messages for signing.
:param message: the encoded message to hash in preparation for verifying
"""
padded = b'\x18Bitcoin Signed Message:\n' +\
int_to_var_bytes(len(message)) +\
message
return double_sha256(padded) | [
"def",
"bitcoin_sig_hash",
"(",
"message",
")",
":",
"padded",
"=",
"b'\\x18Bitcoin Signed Message:\\n'",
"+",
"int_to_var_bytes",
"(",
"len",
"(",
"message",
")",
")",
"+",
"message",
"return",
"double_sha256",
"(",
"padded",
")"
] | Bitcoin has a special format for hashing messages for signing.
:param message: the encoded message to hash in preparation for verifying | [
"Bitcoin",
"has",
"a",
"special",
"format",
"for",
"hashing",
"messages",
"for",
"signing",
"."
] | bfb67cbc9da3e4545b3e882bf7384fa265e6d6c1 | https://github.com/wiggzz/siggy/blob/bfb67cbc9da3e4545b3e882bf7384fa265e6d6c1/siggy/siggy.py#L51-L59 |
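
bitcoin_sig_hash above prepends the '\x18Bitcoin Signed Message:\n' magic and the varint-encoded message length before applying a double SHA-256. The record does not show double_sha256 itself; a hashlib-only sketch for messages shorter than 253 bytes (where the length varint is a single byte) looks like this:

import hashlib

def double_sha256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def message_sig_hash(message):
    assert len(message) < 253  # longer messages need the full varint encoding
    padded = (b"\x18Bitcoin Signed Message:\n"
              + bytes([len(message)])
              + message)
    return double_sha256(padded)

print(message_sig_hash(b"hello").hex())  # 32-byte digest as hex
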
247,953 | wiggzz/siggy | siggy/siggy.py | verify_signature | def verify_signature(message, signature, address):
"""This function verifies a bitcoin signed message.
:param message: the plain text of the message to verify
:param signature: the signature in base64 format
:param address: the signing address
"""
if (len(signature) != SIGNATURE_LENGTH):
return False
try:
binsig = base64.b64decode(signature)
except:
return False
r = intbytes.from_bytes(binsig[1:33])
s = intbytes.from_bytes(binsig[33:65])
val = intbytes.from_bytes(bitcoin_sig_hash(message.encode()))
pubpairs = possible_public_pairs_for_signature(
generator_secp256k1,
val,
(r, s))
addr_hash160 = bitcoin_address_to_hash160_sec(address)
for pair in pubpairs:
if (public_pair_to_hash160_sec(pair, True) == addr_hash160):
return True
if (public_pair_to_hash160_sec(pair, False) == addr_hash160):
return True
return False | python | def verify_signature(message, signature, address):
"""This function verifies a bitcoin signed message.
:param message: the plain text of the message to verify
:param signature: the signature in base64 format
:param address: the signing address
"""
if (len(signature) != SIGNATURE_LENGTH):
return False
try:
binsig = base64.b64decode(signature)
except:
return False
r = intbytes.from_bytes(binsig[1:33])
s = intbytes.from_bytes(binsig[33:65])
val = intbytes.from_bytes(bitcoin_sig_hash(message.encode()))
pubpairs = possible_public_pairs_for_signature(
generator_secp256k1,
val,
(r, s))
addr_hash160 = bitcoin_address_to_hash160_sec(address)
for pair in pubpairs:
if (public_pair_to_hash160_sec(pair, True) == addr_hash160):
return True
if (public_pair_to_hash160_sec(pair, False) == addr_hash160):
return True
return False | [
"def",
"verify_signature",
"(",
"message",
",",
"signature",
",",
"address",
")",
":",
"if",
"(",
"len",
"(",
"signature",
")",
"!=",
"SIGNATURE_LENGTH",
")",
":",
"return",
"False",
"try",
":",
"binsig",
"=",
"base64",
".",
"b64decode",
"(",
"signature",
")",
"except",
":",
"return",
"False",
"r",
"=",
"intbytes",
".",
"from_bytes",
"(",
"binsig",
"[",
"1",
":",
"33",
"]",
")",
"s",
"=",
"intbytes",
".",
"from_bytes",
"(",
"binsig",
"[",
"33",
":",
"65",
"]",
")",
"val",
"=",
"intbytes",
".",
"from_bytes",
"(",
"bitcoin_sig_hash",
"(",
"message",
".",
"encode",
"(",
")",
")",
")",
"pubpairs",
"=",
"possible_public_pairs_for_signature",
"(",
"generator_secp256k1",
",",
"val",
",",
"(",
"r",
",",
"s",
")",
")",
"addr_hash160",
"=",
"bitcoin_address_to_hash160_sec",
"(",
"address",
")",
"for",
"pair",
"in",
"pubpairs",
":",
"if",
"(",
"public_pair_to_hash160_sec",
"(",
"pair",
",",
"True",
")",
"==",
"addr_hash160",
")",
":",
"return",
"True",
"if",
"(",
"public_pair_to_hash160_sec",
"(",
"pair",
",",
"False",
")",
"==",
"addr_hash160",
")",
":",
"return",
"True",
"return",
"False"
] | This function verifies a bitcoin signed message.
:param message: the plain text of the message to verify
:param signature: the signature in base64 format
:param address: the signing address | [
"This",
"function",
"verifies",
"a",
"bitcoin",
"signed",
"message",
"."
] | bfb67cbc9da3e4545b3e882bf7384fa265e6d6c1 | https://github.com/wiggzz/siggy/blob/bfb67cbc9da3e4545b3e882bf7384fa265e6d6c1/siggy/siggy.py#L62-L96 |
247,954 | boilerroomtv/datastore.cloudfiles | datastore/cloudfiles/__init__.py | CloudFilesDatastore.query | def query(self, query):
"""Returns an iterable of objects matching criteria expressed in `query`.
Implementations of query will be the largest differentiating factor
amongst datastores. All datastores **must** implement query, even using
query's worst case scenario, see :ref:class:`Query` for details.
:param query: Query object describing the objects to return.
:return: iterable cursor with all objects matching criteria
"""
return query((self._deserialised_value(x) for x in self.container.get_objects(prefix=query.key))) | python | def query(self, query):
"""Returns an iterable of objects matching criteria expressed in `query`.
Implementations of query will be the largest differentiating factor
amongst datastores. All datastores **must** implement query, even using
query's worst case scenario, see :ref:class:`Query` for details.
:param query: Query object describing the objects to return.
:return: iterable cursor with all objects matching criteria
"""
return query((self._deserialised_value(x) for x in self.container.get_objects(prefix=query.key))) | [
"def",
"query",
"(",
"self",
",",
"query",
")",
":",
"return",
"query",
"(",
"(",
"self",
".",
"_deserialised_value",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"container",
".",
"get_objects",
"(",
"prefix",
"=",
"query",
".",
"key",
")",
")",
")"
] | Returns an iterable of objects matching criteria expressed in `query`.
Implementations of query will be the largest differentiating factor
amongst datastores. All datastores **must** implement query, even using
query's worst case scenario, see :ref:class:`Query` for details.
:param query: Query object describing the objects to return.
:return: iterable cursor with all objects matching criteria | [
"Returns",
"an",
"iterable",
"of",
"objects",
"matching",
"criteria",
"expressed",
"in",
"query",
"."
] | 95e430c72078cfeaa4c640cae38315fb551128fa | https://github.com/boilerroomtv/datastore.cloudfiles/blob/95e430c72078cfeaa4c640cae38315fb551128fa/datastore/cloudfiles/__init__.py#L80-L90 |
247,955 | dstufft/crust | crust/resources.py | Resource.save | def save(self, force_insert=False, force_update=False):
"""
Saves the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be a POST or PUT respectively. Normally, they
should not be set.
"""
if force_insert and force_update:
raise ValueError("Cannot force both insert and updating in resource saving.")
data = {}
for name, field in self._meta.fields.items():
if field.serialize:
data[name] = field.dehydrate(getattr(self, name, None))
insert = True if force_insert or self.resource_uri is None else False
if insert:
resp = self._meta.api.http_resource("POST", self._meta.resource_name, data=self._meta.api.resource_serialize(data))
else:
resp = self._meta.api.http_resource("PUT", self.resource_uri, data=self._meta.api.resource_serialize(data))
if "Location" in resp.headers:
resp = self._meta.api.http_resource("GET", resp.headers["Location"])
elif resp.status_code == 204:
resp = self._meta.api.http_resource("GET", self.resource_uri)
else:
return
data = self._meta.api.resource_deserialize(resp.text)
# Update local values from the API Response
self.__init__(**data) | python | def save(self, force_insert=False, force_update=False):
"""
Saves the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be a POST or PUT respectively. Normally, they
should not be set.
"""
if force_insert and force_update:
raise ValueError("Cannot force both insert and updating in resource saving.")
data = {}
for name, field in self._meta.fields.items():
if field.serialize:
data[name] = field.dehydrate(getattr(self, name, None))
insert = True if force_insert or self.resource_uri is None else False
if insert:
resp = self._meta.api.http_resource("POST", self._meta.resource_name, data=self._meta.api.resource_serialize(data))
else:
resp = self._meta.api.http_resource("PUT", self.resource_uri, data=self._meta.api.resource_serialize(data))
if "Location" in resp.headers:
resp = self._meta.api.http_resource("GET", resp.headers["Location"])
elif resp.status_code == 204:
resp = self._meta.api.http_resource("GET", self.resource_uri)
else:
return
data = self._meta.api.resource_deserialize(resp.text)
# Update local values from the API Response
self.__init__(**data) | [
"def",
"save",
"(",
"self",
",",
"force_insert",
"=",
"False",
",",
"force_update",
"=",
"False",
")",
":",
"if",
"force_insert",
"and",
"force_update",
":",
"raise",
"ValueError",
"(",
"\"Cannot force both insert and updating in resource saving.\"",
")",
"data",
"=",
"{",
"}",
"for",
"name",
",",
"field",
"in",
"self",
".",
"_meta",
".",
"fields",
".",
"items",
"(",
")",
":",
"if",
"field",
".",
"serialize",
":",
"data",
"[",
"name",
"]",
"=",
"field",
".",
"dehydrate",
"(",
"getattr",
"(",
"self",
",",
"name",
",",
"None",
")",
")",
"insert",
"=",
"True",
"if",
"force_insert",
"or",
"self",
".",
"resource_uri",
"is",
"None",
"else",
"False",
"if",
"insert",
":",
"resp",
"=",
"self",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"POST\"",
",",
"self",
".",
"_meta",
".",
"resource_name",
",",
"data",
"=",
"self",
".",
"_meta",
".",
"api",
".",
"resource_serialize",
"(",
"data",
")",
")",
"else",
":",
"resp",
"=",
"self",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"PUT\"",
",",
"self",
".",
"resource_uri",
",",
"data",
"=",
"self",
".",
"_meta",
".",
"api",
".",
"resource_serialize",
"(",
"data",
")",
")",
"if",
"\"Location\"",
"in",
"resp",
".",
"headers",
":",
"resp",
"=",
"self",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"GET\"",
",",
"resp",
".",
"headers",
"[",
"\"Location\"",
"]",
")",
"elif",
"resp",
".",
"status_code",
"==",
"204",
":",
"resp",
"=",
"self",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"GET\"",
",",
"self",
".",
"resource_uri",
")",
"else",
":",
"return",
"data",
"=",
"self",
".",
"_meta",
".",
"api",
".",
"resource_deserialize",
"(",
"resp",
".",
"text",
")",
"# Update local values from the API Response",
"self",
".",
"__init__",
"(",
"*",
"*",
"data",
")"
] | Saves the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be a POST or PUT respectively. Normally, they
should not be set. | [
"Saves",
"the",
"current",
"instance",
".",
"Override",
"this",
"in",
"a",
"subclass",
"if",
"you",
"want",
"to",
"control",
"the",
"saving",
"process",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/resources.py#L129-L164 |
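
Resource.save above picks POST for unsaved objects (no resource_uri) and PUT for existing ones, then refreshes local state from either the Location header or a follow-up GET. A rough requests-based sketch of that decision flow; the endpoint layout and payload are made up, and crust's real serialisation and headers go through its Api object rather than bare requests:

import requests

def save_resource(base_url, data, resource_uri=None):
    if resource_uri is None:
        resp = requests.post(base_url + "/api/v1/things/", json=data)  # insert
    else:
        resp = requests.put(base_url + resource_uri, json=data)        # update
    if "Location" in resp.headers:
        resp = requests.get(resp.headers["Location"])  # server points at the new object
    elif resp.status_code == 204 and resource_uri:
        resp = requests.get(base_url + resource_uri)   # empty response, re-fetch
    else:
        return None
    return resp.json()
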
247,956 | dstufft/crust | crust/resources.py | Resource.delete | def delete(self):
"""
Deletes the current instance. Override this in a subclass if you want to
control the deleting process.
"""
if self.resource_uri is None:
raise ValueError("{0} object cannot be deleted because resource_uri attribute cannot be None".format(self._meta.resource_name))
self._meta.api.http_resource("DELETE", self.resource_uri) | python | def delete(self):
"""
Deletes the current instance. Override this in a subclass if you want to
control the deleting process.
"""
if self.resource_uri is None:
raise ValueError("{0} object cannot be deleted because resource_uri attribute cannot be None".format(self._meta.resource_name))
self._meta.api.http_resource("DELETE", self.resource_uri) | [
"def",
"delete",
"(",
"self",
")",
":",
"if",
"self",
".",
"resource_uri",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"{0} object cannot be deleted because resource_uri attribute cannot be None\"",
".",
"format",
"(",
"self",
".",
"_meta",
".",
"resource_name",
")",
")",
"self",
".",
"_meta",
".",
"api",
".",
"http_resource",
"(",
"\"DELETE\"",
",",
"self",
".",
"resource_uri",
")"
] | Deletes the current instance. Override this in a subclass if you want to
control the deleting process. | [
"Deletes",
"the",
"current",
"instance",
".",
"Override",
"this",
"in",
"a",
"subclass",
"if",
"you",
"want",
"to",
"control",
"the",
"deleting",
"process",
"."
] | 5d4011ecace12fd3f68a03a17dbefb78390a9fc0 | https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/resources.py#L166-L174 |
247,957 | eallik/spinoff | spinoff/actor/cell.py | _BaseCell.spawn_actor | def spawn_actor(self, factory, name=None):
"""Spawns an actor using the given `factory` with the specified `name`.
Returns an immediately usable `Ref` to the newly created actor, regardless of the location of the new actor, or
when the actual spawning will take place.
"""
if name and '/' in name: # pragma: no cover
raise TypeError("Actor names cannot contain slashes")
if not self._children:
self._children = {}
uri = self.uri / name if name else None
if name:
if name.startswith('$'):
raise ValueError("Unable to spawn actor at path %s; name cannot start with '$', it is reserved for auto-generated names" % (uri.path,))
if name in self._children:
raise NameConflict("Unable to spawn actor at path %s; actor %r already sits there" % (uri.path, self._children[name]))
if not uri:
name = self._generate_name(factory)
uri = self.uri / name
assert name not in self._children # XXX: ordering??
child = self._children[name] = Cell.spawn(parent_actor=self.ref, factory=factory, uri=uri, node=self.node).ref
return child | python | def spawn_actor(self, factory, name=None):
"""Spawns an actor using the given `factory` with the specified `name`.
Returns an immediately usable `Ref` to the newly created actor, regardless of the location of the new actor, or
when the actual spawning will take place.
"""
if name and '/' in name: # pragma: no cover
raise TypeError("Actor names cannot contain slashes")
if not self._children:
self._children = {}
uri = self.uri / name if name else None
if name:
if name.startswith('$'):
raise ValueError("Unable to spawn actor at path %s; name cannot start with '$', it is reserved for auto-generated names" % (uri.path,))
if name in self._children:
raise NameConflict("Unable to spawn actor at path %s; actor %r already sits there" % (uri.path, self._children[name]))
if not uri:
name = self._generate_name(factory)
uri = self.uri / name
assert name not in self._children # XXX: ordering??
child = self._children[name] = Cell.spawn(parent_actor=self.ref, factory=factory, uri=uri, node=self.node).ref
return child | [
"def",
"spawn_actor",
"(",
"self",
",",
"factory",
",",
"name",
"=",
"None",
")",
":",
"if",
"name",
"and",
"'/'",
"in",
"name",
":",
"# pragma: no cover",
"raise",
"TypeError",
"(",
"\"Actor names cannot contain slashes\"",
")",
"if",
"not",
"self",
".",
"_children",
":",
"self",
".",
"_children",
"=",
"{",
"}",
"uri",
"=",
"self",
".",
"uri",
"/",
"name",
"if",
"name",
"else",
"None",
"if",
"name",
":",
"if",
"name",
".",
"startswith",
"(",
"'$'",
")",
":",
"raise",
"ValueError",
"(",
"\"Unable to spawn actor at path %s; name cannot start with '$', it is reserved for auto-generated names\"",
"%",
"(",
"uri",
".",
"path",
",",
")",
")",
"if",
"name",
"in",
"self",
".",
"_children",
":",
"raise",
"NameConflict",
"(",
"\"Unable to spawn actor at path %s; actor %r already sits there\"",
"%",
"(",
"uri",
".",
"path",
",",
"self",
".",
"_children",
"[",
"name",
"]",
")",
")",
"if",
"not",
"uri",
":",
"name",
"=",
"self",
".",
"_generate_name",
"(",
"factory",
")",
"uri",
"=",
"self",
".",
"uri",
"/",
"name",
"assert",
"name",
"not",
"in",
"self",
".",
"_children",
"# XXX: ordering??",
"child",
"=",
"self",
".",
"_children",
"[",
"name",
"]",
"=",
"Cell",
".",
"spawn",
"(",
"parent_actor",
"=",
"self",
".",
"ref",
",",
"factory",
"=",
"factory",
",",
"uri",
"=",
"uri",
",",
"node",
"=",
"self",
".",
"node",
")",
".",
"ref",
"return",
"child"
] | Spawns an actor using the given `factory` with the specified `name`.
Returns an immediately usable `Ref` to the newly created actor, regardless of the location of the new actor, or
when the actual spawning will take place. | [
"Spawns",
"an",
"actor",
"using",
"the",
"given",
"factory",
"with",
"the",
"specified",
"name",
"."
] | 06b00d6b86c7422c9cb8f9a4b2915906e92b7d52 | https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/spinoff/actor/cell.py#L48-L69 |
247,958 | eallik/spinoff | spinoff/actor/cell.py | _BaseCell.lookup_cell | def lookup_cell(self, uri):
"""Looks up a local actor by its location relative to this actor."""
steps = uri.steps
if steps[0] == '':
found = self.root
steps.popleft()
else:
found = self
for step in steps:
assert step != ''
found = found.get_child(step)
if not found:
break
found = found._cell
return found | python | def lookup_cell(self, uri):
"""Looks up a local actor by its location relative to this actor."""
steps = uri.steps
if steps[0] == '':
found = self.root
steps.popleft()
else:
found = self
for step in steps:
assert step != ''
found = found.get_child(step)
if not found:
break
found = found._cell
return found | [
"def",
"lookup_cell",
"(",
"self",
",",
"uri",
")",
":",
"steps",
"=",
"uri",
".",
"steps",
"if",
"steps",
"[",
"0",
"]",
"==",
"''",
":",
"found",
"=",
"self",
".",
"root",
"steps",
".",
"popleft",
"(",
")",
"else",
":",
"found",
"=",
"self",
"for",
"step",
"in",
"steps",
":",
"assert",
"step",
"!=",
"''",
"found",
"=",
"found",
".",
"get_child",
"(",
"step",
")",
"if",
"not",
"found",
":",
"break",
"found",
"=",
"found",
".",
"_cell",
"return",
"found"
] | Looks up a local actor by its location relative to this actor. | [
"Looks",
"up",
"a",
"local",
"actor",
"by",
"its",
"location",
"relative",
"to",
"this",
"actor",
"."
] | 06b00d6b86c7422c9cb8f9a4b2915906e92b7d52 | https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/spinoff/actor/cell.py#L94-L108 |
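
lookup_cell above resolves an actor URI by walking its path segments, restarting from the root cell when the path begins with '/'. The same traversal can be illustrated over a plain nested-dict tree (the tree contents are invented):

def lookup(current, root, path):
    steps = path.split("/")
    node = current
    if steps and steps[0] == "":  # absolute path: start from the root
        node = root
        steps = steps[1:]
    for step in steps:
        if node is None:
            break
        node = node.get(step)
    return node

root = {"services": {"db": {"replica": {}}, "web": {}}}
print(lookup(root["services"], root, "/services/db/replica"))  # {} found via the root
print(lookup(root["services"], root, "db/replica"))            # {} found relatively
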
247,959 | amadev/doan | doan/dataset.py | r_num | def r_num(obj):
"""Read list of numbers."""
if isinstance(obj, (list, tuple)):
it = iter
else:
it = LinesIterator
dataset = Dataset([Dataset.FLOAT])
return dataset.load(it(obj)) | python | def r_num(obj):
"""Read list of numbers."""
if isinstance(obj, (list, tuple)):
it = iter
else:
it = LinesIterator
dataset = Dataset([Dataset.FLOAT])
return dataset.load(it(obj)) | [
"def",
"r_num",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"it",
"=",
"iter",
"else",
":",
"it",
"=",
"LinesIterator",
"dataset",
"=",
"Dataset",
"(",
"[",
"Dataset",
".",
"FLOAT",
"]",
")",
"return",
"dataset",
".",
"load",
"(",
"it",
"(",
"obj",
")",
")"
] | Read list of numbers. | [
"Read",
"list",
"of",
"numbers",
"."
] | 5adfa983ac547007a688fe7517291a432919aa3e | https://github.com/amadev/doan/blob/5adfa983ac547007a688fe7517291a432919aa3e/doan/dataset.py#L150-L157 |
247,960 | amadev/doan | doan/dataset.py | r_date_num | def r_date_num(obj, multiple=False):
"""Read date-value table."""
if isinstance(obj, (list, tuple)):
it = iter
else:
it = LinesIterator
if multiple:
datasets = {}
for line in it(obj):
label = line[2]
if label not in datasets:
datasets[label] = Dataset([Dataset.DATE, Dataset.FLOAT])
datasets[label].name = label
datasets[label].parse_elements(line[0:2])
return datasets.values()
dataset = Dataset([Dataset.DATE, Dataset.FLOAT])
return dataset.load(it(obj)) | python | def r_date_num(obj, multiple=False):
"""Read date-value table."""
if isinstance(obj, (list, tuple)):
it = iter
else:
it = LinesIterator
if multiple:
datasets = {}
for line in it(obj):
label = line[2]
if label not in datasets:
datasets[label] = Dataset([Dataset.DATE, Dataset.FLOAT])
datasets[label].name = label
datasets[label].parse_elements(line[0:2])
return datasets.values()
dataset = Dataset([Dataset.DATE, Dataset.FLOAT])
return dataset.load(it(obj)) | [
"def",
"r_date_num",
"(",
"obj",
",",
"multiple",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"it",
"=",
"iter",
"else",
":",
"it",
"=",
"LinesIterator",
"if",
"multiple",
":",
"datasets",
"=",
"{",
"}",
"for",
"line",
"in",
"it",
"(",
"obj",
")",
":",
"label",
"=",
"line",
"[",
"2",
"]",
"if",
"label",
"not",
"in",
"datasets",
":",
"datasets",
"[",
"label",
"]",
"=",
"Dataset",
"(",
"[",
"Dataset",
".",
"DATE",
",",
"Dataset",
".",
"FLOAT",
"]",
")",
"datasets",
"[",
"label",
"]",
".",
"name",
"=",
"label",
"datasets",
"[",
"label",
"]",
".",
"parse_elements",
"(",
"line",
"[",
"0",
":",
"2",
"]",
")",
"return",
"datasets",
".",
"values",
"(",
")",
"dataset",
"=",
"Dataset",
"(",
"[",
"Dataset",
".",
"DATE",
",",
"Dataset",
".",
"FLOAT",
"]",
")",
"return",
"dataset",
".",
"load",
"(",
"it",
"(",
"obj",
")",
")"
] | Read date-value table. | [
"Read",
"date",
"-",
"value",
"table",
"."
] | 5adfa983ac547007a688fe7517291a432919aa3e | https://github.com/amadev/doan/blob/5adfa983ac547007a688fe7517291a432919aa3e/doan/dataset.py#L160-L176 |
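
r_date_num above reads (date, value) rows and, in multiple mode, splits them into one dataset per label found in the third column. A standalone sketch of that grouping step; the rows, the date format and the container are invented, and doan's Dataset class handles the real type parsing:

from collections import defaultdict
from datetime import datetime

rows = [
    ("2024-01-01", "3.5", "cpu"),
    ("2024-01-01", "120", "mem"),
    ("2024-01-02", "4.1", "cpu"),
]

datasets = defaultdict(list)
for date_str, value, label in rows:
    datasets[label].append((datetime.strptime(date_str, "%Y-%m-%d"), float(value)))

print(sorted(datasets))       # ['cpu', 'mem']
print(datasets["cpu"][0][1])  # 3.5
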
247,961 | sysr-q/rcmd.py | rcmd/parser.py | Regex.command | def command(self, rule, **options):
"""\
direct=False, override=True, inject=False, flags=0
"""
options.setdefault("direct", False)
options.setdefault("override", True)
options.setdefault("inject", False)
options.setdefault("flags", 0)
if not options["direct"]:
rule = self.regexy(rule)
regex = re.compile(rule, flags=options["flags"])
self.handlers.setdefault(regex, [])
def handler(f):
if f == noop:
f.options = {}
else:
f.options = options
if options["override"]:
self.handlers[regex] = [f]
else:
self.handlers[regex].append(f)
f.no_args = self.no_args(f)
return f
return handler | python | def command(self, rule, **options):
"""\
direct=False, override=True, inject=False, flags=0
"""
options.setdefault("direct", False)
options.setdefault("override", True)
options.setdefault("inject", False)
options.setdefault("flags", 0)
if not options["direct"]:
rule = self.regexy(rule)
regex = re.compile(rule, flags=options["flags"])
self.handlers.setdefault(regex, [])
def handler(f):
if f == noop:
f.options = {}
else:
f.options = options
if options["override"]:
self.handlers[regex] = [f]
else:
self.handlers[regex].append(f)
f.no_args = self.no_args(f)
return f
return handler | [
"def",
"command",
"(",
"self",
",",
"rule",
",",
"*",
"*",
"options",
")",
":",
"options",
".",
"setdefault",
"(",
"\"direct\"",
",",
"False",
")",
"options",
".",
"setdefault",
"(",
"\"override\"",
",",
"True",
")",
"options",
".",
"setdefault",
"(",
"\"inject\"",
",",
"False",
")",
"options",
".",
"setdefault",
"(",
"\"flags\"",
",",
"0",
")",
"if",
"not",
"options",
"[",
"\"direct\"",
"]",
":",
"rule",
"=",
"self",
".",
"regexy",
"(",
"rule",
")",
"regex",
"=",
"re",
".",
"compile",
"(",
"rule",
",",
"flags",
"=",
"options",
"[",
"\"flags\"",
"]",
")",
"self",
".",
"handlers",
".",
"setdefault",
"(",
"regex",
",",
"[",
"]",
")",
"def",
"handler",
"(",
"f",
")",
":",
"if",
"f",
"==",
"noop",
":",
"f",
".",
"options",
"=",
"{",
"}",
"else",
":",
"f",
".",
"options",
"=",
"options",
"if",
"options",
"[",
"\"override\"",
"]",
":",
"self",
".",
"handlers",
"[",
"regex",
"]",
"=",
"[",
"f",
"]",
"else",
":",
"self",
".",
"handlers",
"[",
"regex",
"]",
".",
"append",
"(",
"f",
")",
"f",
".",
"no_args",
"=",
"self",
".",
"no_args",
"(",
"f",
")",
"return",
"f",
"return",
"handler"
] | \
direct=False, override=True, inject=False, flags=0 | [
"\\",
"direct",
"=",
"False",
"override",
"=",
"True",
"inject",
"=",
"False",
"flags",
"=",
"0"
] | 0ecce1970164805cedb33d02a9dcf9eb6cd14e0c | https://github.com/sysr-q/rcmd.py/blob/0ecce1970164805cedb33d02a9dcf9eb6cd14e0c/rcmd/parser.py#L49-L72 |
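
Regex.command above is a decorator factory: it compiles the rule, registers the decorated function as a handler for that regex, and either overrides or appends to the existing handler list. A cut-down standalone sketch of the registry pattern, without rcmd's regexy/no_args/inject machinery:

import re

class CommandRegistry:
    def __init__(self):
        self.handlers = {}  # compiled regex -> list of handler functions

    def command(self, rule, override=True, flags=0):
        regex = re.compile(rule, flags=flags)
        self.handlers.setdefault(regex, [])
        def register(func):
            if override:
                self.handlers[regex] = [func]
            else:
                self.handlers[regex].append(func)
            return func
        return register

    def dispatch(self, line):
        for regex, funcs in self.handlers.items():
            match = regex.match(line)
            if match:
                for func in funcs:
                    func(match)

registry = CommandRegistry()

@registry.command(r"^hello (\w+)$")
def hello(match):
    print("hi, %s" % match.group(1))

registry.dispatch("hello world")  # hi, world
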
247,962 | apiwatcher/apiwatcher-pyclient | apiwatcher_pyclient/client.py | Client.authorize_client_credentials | def authorize_client_credentials(
self, client_id, client_secret=None, scope="private_agent"
):
"""Authorize to platform with client credentials
This should be used if you posses client_id/client_secret pair
generated by platform.
"""
self.auth_data = {
"grant_type": "client_credentials",
"scope": [ scope ],
"client_id": client_id,
"client_secret": client_secret
}
self._do_authorize() | python | def authorize_client_credentials(
self, client_id, client_secret=None, scope="private_agent"
):
"""Authorize to platform with client credentials
This should be used if you posses client_id/client_secret pair
generated by platform.
"""
self.auth_data = {
"grant_type": "client_credentials",
"scope": [ scope ],
"client_id": client_id,
"client_secret": client_secret
}
self._do_authorize() | [
"def",
"authorize_client_credentials",
"(",
"self",
",",
"client_id",
",",
"client_secret",
"=",
"None",
",",
"scope",
"=",
"\"private_agent\"",
")",
":",
"self",
".",
"auth_data",
"=",
"{",
"\"grant_type\"",
":",
"\"client_credentials\"",
",",
"\"scope\"",
":",
"[",
"scope",
"]",
",",
"\"client_id\"",
":",
"client_id",
",",
"\"client_secret\"",
":",
"client_secret",
"}",
"self",
".",
"_do_authorize",
"(",
")"
] | Authorize to platform with client credentials
This should be used if you posses client_id/client_secret pair
generated by platform. | [
"Authorize",
"to",
"platform",
"with",
"client",
"credentials"
] | 54b5e659be11447d7bb4a05d182ce772ce74b2dc | https://github.com/apiwatcher/apiwatcher-pyclient/blob/54b5e659be11447d7bb4a05d182ce772ce74b2dc/apiwatcher_pyclient/client.py#L37-L52 |
247,963 | apiwatcher/apiwatcher-pyclient | apiwatcher_pyclient/client.py | Client.authorize_password | def authorize_password(self, client_id, username, password):
"""Authorize to platform as regular user
You must provide a valid client_id (same as web application),
your password and your username. Username and password is not stored in
client but refresh token is stored. The only valid scope for this
authorization is "regular_user".
:param client_id: Valid client_id
:type client_id: String
:param username: User email
:type username: String
:param password: User password
:type password: String
"""
self.auth_data = {
"grant_type": "password",
"username": username,
"password": password,
"client_id": client_id,
"scope": ["regular_user"]
}
self._do_authorize() | python | def authorize_password(self, client_id, username, password):
"""Authorize to platform as regular user
You must provide a valid client_id (same as web application),
your password and your username. Username and password is not stored in
client but refresh token is stored. The only valid scope for this
authorization is "regular_user".
:param client_id: Valid client_id
:type client_id: String
:param username: User email
:type username: String
:param password: User password
:type password: String
"""
self.auth_data = {
"grant_type": "password",
"username": username,
"password": password,
"client_id": client_id,
"scope": ["regular_user"]
}
self._do_authorize() | [
"def",
"authorize_password",
"(",
"self",
",",
"client_id",
",",
"username",
",",
"password",
")",
":",
"self",
".",
"auth_data",
"=",
"{",
"\"grant_type\"",
":",
"\"password\"",
",",
"\"username\"",
":",
"username",
",",
"\"password\"",
":",
"password",
",",
"\"client_id\"",
":",
"client_id",
",",
"\"scope\"",
":",
"[",
"\"regular_user\"",
"]",
"}",
"self",
".",
"_do_authorize",
"(",
")"
] | Authorize to platform as regular user
You must provide a valid client_id (same as web application),
your password and your username. Username and password is not stored in
client but refresh token is stored. The only valid scope for this
authorization is "regular_user".
:param client_id: Valid client_id
:type client_id: String
:param username: User email
:type username: String
:param password: User password
:type password: String | [
"Authorize",
"to",
"platform",
"as",
"regular",
"user"
] | 54b5e659be11447d7bb4a05d182ce772ce74b2dc | https://github.com/apiwatcher/apiwatcher-pyclient/blob/54b5e659be11447d7bb4a05d182ce772ce74b2dc/apiwatcher_pyclient/client.py#L54-L78 |
247,964 | apiwatcher/apiwatcher-pyclient | apiwatcher_pyclient/client.py | Client._do_authorize | def _do_authorize(self):
""" Perform the authorization
"""
if self.auth_data is None:
raise ApiwatcherClientException("You must provide authorization data.")
r = requests.post(
"{0}/api/token".format(self.base_url), json=self.auth_data,
verify=self.verify_certificate, timeout=self.timeout
)
if r.status_code == 401:
raise ApiwatcherClientException("Wrong credentials supplied: {0}".format(
r.json()["message"]
))
elif r.status_code != 201:
try:
reason = r.json()["message"]
except:
reason = r.text
raise ApiwatcherClientException(
"Authorization failed. Reason {0} {1}".format(
r.status_code, reason)
)
else:
data = r.json()["data"]
self.token = data["access_token"]
if "refresh_token" in data:
self.auth_data = {
"grant_type": "refresh_token",
"refresh_token": data["refresh_token"],
"client_id": self.auth_data["client_id"]
} | python | def _do_authorize(self):
""" Perform the authorization
"""
if self.auth_data is None:
raise ApiwatcherClientException("You must provide authorization data.")
r = requests.post(
"{0}/api/token".format(self.base_url), json=self.auth_data,
verify=self.verify_certificate, timeout=self.timeout
)
if r.status_code == 401:
raise ApiwatcherClientException("Wrong credentials supplied: {0}".format(
r.json()["message"]
))
elif r.status_code != 201:
try:
reason = r.json()["message"]
except:
reason = r.text
raise ApiwatcherClientException(
"Authorization failed. Reason {0} {1}".format(
r.status_code, reason)
)
else:
data = r.json()["data"]
self.token = data["access_token"]
if "refresh_token" in data:
self.auth_data = {
"grant_type": "refresh_token",
"refresh_token": data["refresh_token"],
"client_id": self.auth_data["client_id"]
} | [
"def",
"_do_authorize",
"(",
"self",
")",
":",
"if",
"self",
".",
"auth_data",
"is",
"None",
":",
"raise",
"ApiwatcherClientException",
"(",
"\"You must provide authorization data.\"",
")",
"r",
"=",
"requests",
".",
"post",
"(",
"\"{0}/api/token\"",
".",
"format",
"(",
"self",
".",
"base_url",
")",
",",
"json",
"=",
"self",
".",
"auth_data",
",",
"verify",
"=",
"self",
".",
"verify_certificate",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"if",
"r",
".",
"status_code",
"==",
"401",
":",
"raise",
"ApiwatcherClientException",
"(",
"\"Wrong credentials supplied: {0}\"",
".",
"format",
"(",
"r",
".",
"json",
"(",
")",
"[",
"\"message\"",
"]",
")",
")",
"elif",
"r",
".",
"status_code",
"!=",
"201",
":",
"try",
":",
"reason",
"=",
"r",
".",
"json",
"(",
")",
"[",
"\"message\"",
"]",
"except",
":",
"reason",
"=",
"r",
".",
"text",
"raise",
"ApiwatcherClientException",
"(",
"\"Authorization failed. Reason {0} {1}\"",
".",
"format",
"(",
"r",
".",
"status_code",
",",
"reason",
")",
")",
"else",
":",
"data",
"=",
"r",
".",
"json",
"(",
")",
"[",
"\"data\"",
"]",
"self",
".",
"token",
"=",
"data",
"[",
"\"access_token\"",
"]",
"if",
"\"refresh_token\"",
"in",
"data",
":",
"self",
".",
"auth_data",
"=",
"{",
"\"grant_type\"",
":",
"\"refresh_token\"",
",",
"\"refresh_token\"",
":",
"data",
"[",
"\"refresh_token\"",
"]",
",",
"\"client_id\"",
":",
"self",
".",
"auth_data",
"[",
"\"client_id\"",
"]",
"}"
] | Perform the authorization | [
"Perform",
"the",
"authorization"
] | 54b5e659be11447d7bb4a05d182ce772ce74b2dc | https://github.com/apiwatcher/apiwatcher-pyclient/blob/54b5e659be11447d7bb4a05d182ce772ce74b2dc/apiwatcher_pyclient/client.py#L80-L113 |
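
_do_authorize above POSTs the stored grant payload to {base_url}/api/token, expects a 201 with the token under "data", and swaps its stored credentials for a refresh_token grant when one is returned. A bare-requests sketch of that exchange against a hypothetical server; the payload fields follow the record, while the URL and scope values are only examples:

import requests

def fetch_token(base_url, client_id, client_secret):
    payload = {
        "grant_type": "client_credentials",
        "scope": ["private_agent"],
        "client_id": client_id,
        "client_secret": client_secret,
    }
    resp = requests.post(base_url + "/api/token", json=payload, timeout=10)
    if resp.status_code != 201:
        raise RuntimeError("authorization failed: %s" % resp.text)
    data = resp.json()["data"]
    next_grant = None
    if "refresh_token" in data:  # prefer the refresh token for the next round
        next_grant = {
            "grant_type": "refresh_token",
            "refresh_token": data["refresh_token"],
            "client_id": client_id,
        }
    return data["access_token"], next_grant
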
247,965 | apiwatcher/apiwatcher-pyclient | apiwatcher_pyclient/client.py | Client._do_request | def _do_request(self, method, endpoint, data=None):
"""Perform one request, possibly solving unauthorized return code
"""
# No token - authorize
if self.token is None:
self._do_authorize()
r = requests.request(
method,
"{0}{1}".format(self.base_url, endpoint),
headers={
"Authorization": "Bearer {0}".format(self.token),
"Content-Type": "application/json"
},
json=data,
verify=self.verify_certificate,
timeout=self.timeout
)
if r.status_code == 401:
self._do_authorize()
r = requests.request(
method,
"{0}{1}".format(self.base_url, endpoint),
headers={
"Authorization": "Bearer {0}".format(self.token),
"Content-Type": "application/json"
},
json=data,
verify=self.verify_certificate,
timeout=self.timeout
)
return r | python | def _do_request(self, method, endpoint, data=None):
"""Perform one request, possibly solving unauthorized return code
"""
# No token - authorize
if self.token is None:
self._do_authorize()
r = requests.request(
method,
"{0}{1}".format(self.base_url, endpoint),
headers={
"Authorization": "Bearer {0}".format(self.token),
"Content-Type": "application/json"
},
json=data,
verify=self.verify_certificate,
timeout=self.timeout
)
if r.status_code == 401:
self._do_authorize()
r = requests.request(
method,
"{0}{1}".format(self.base_url, endpoint),
headers={
"Authorization": "Bearer {0}".format(self.token),
"Content-Type": "application/json"
},
json=data,
verify=self.verify_certificate,
timeout=self.timeout
)
return r | [
"def",
"_do_request",
"(",
"self",
",",
"method",
",",
"endpoint",
",",
"data",
"=",
"None",
")",
":",
"# No token - authorize",
"if",
"self",
".",
"token",
"is",
"None",
":",
"self",
".",
"_do_authorize",
"(",
")",
"r",
"=",
"requests",
".",
"request",
"(",
"method",
",",
"\"{0}{1}\"",
".",
"format",
"(",
"self",
".",
"base_url",
",",
"endpoint",
")",
",",
"headers",
"=",
"{",
"\"Authorization\"",
":",
"\"Bearer {0}\"",
".",
"format",
"(",
"self",
".",
"token",
")",
",",
"\"Content-Type\"",
":",
"\"application/json\"",
"}",
",",
"json",
"=",
"data",
",",
"verify",
"=",
"self",
".",
"verify_certificate",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"if",
"r",
".",
"status_code",
"==",
"401",
":",
"self",
".",
"_do_authorize",
"(",
")",
"r",
"=",
"requests",
".",
"request",
"(",
"method",
",",
"\"{0}{1}\"",
".",
"format",
"(",
"self",
".",
"base_url",
",",
"endpoint",
")",
",",
"headers",
"=",
"{",
"\"Authorization\"",
":",
"\"Bearer {0}\"",
".",
"format",
"(",
"self",
".",
"token",
")",
",",
"\"Content-Type\"",
":",
"\"application/json\"",
"}",
",",
"json",
"=",
"data",
",",
"verify",
"=",
"self",
".",
"verify_certificate",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"return",
"r"
] | Perform one request, possibly solving unauthorized return code | [
"Perform",
"one",
"request",
"possibly",
"solving",
"unauthorized",
"return",
"code"
] | 54b5e659be11447d7bb4a05d182ce772ce74b2dc | https://github.com/apiwatcher/apiwatcher-pyclient/blob/54b5e659be11447d7bb4a05d182ce772ce74b2dc/apiwatcher_pyclient/client.py#L115-L148 |
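
_do_request above sends every call with a Bearer token and, on a 401, re-authorizes once and retries. That retry-once-after-refresh shape is easy to factor into a small wrapper; the sketch below is generic and is not apiwatcher's actual API:

import requests

class TokenSession:
    def __init__(self, base_url, authorize):
        self.base_url = base_url
        self.authorize = authorize  # callable returning a fresh access token
        self.token = None

    def request(self, method, endpoint, json=None):
        if self.token is None:
            self.token = self.authorize()
        resp = self._send(method, endpoint, json)
        if resp.status_code == 401:  # token expired: refresh and retry once
            self.token = self.authorize()
            resp = self._send(method, endpoint, json)
        return resp

    def _send(self, method, endpoint, json):
        return requests.request(
            method,
            self.base_url + endpoint,
            headers={"Authorization": "Bearer %s" % self.token},
            json=json,
            timeout=10,
        )
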
247,966 | shrubberysoft/homophony | src/homophony/__init__.py | DocFileSuite | def DocFileSuite(*paths, **kwargs):
"""Extension of the standard DocFileSuite that sets up test browser for
use in doctests."""
kwargs.setdefault('setUp', setUpBrowser)
kwargs.setdefault('tearDown', tearDownBrowser)
kwargs.setdefault('globs', {}).update(Browser=Browser)
kwargs.setdefault('optionflags', doctest.NORMALIZE_WHITESPACE |
doctest.REPORT_ONLY_FIRST_FAILURE |
doctest.ELLIPSIS)
if 'package' not in kwargs:
# Resolve relative names based on the caller's module
kwargs['package'] = doctest._normalize_module(None)
kwargs['module_relative'] = True
return doctest.DocFileSuite(*paths, **kwargs) | python | def DocFileSuite(*paths, **kwargs):
"""Extension of the standard DocFileSuite that sets up test browser for
use in doctests."""
kwargs.setdefault('setUp', setUpBrowser)
kwargs.setdefault('tearDown', tearDownBrowser)
kwargs.setdefault('globs', {}).update(Browser=Browser)
kwargs.setdefault('optionflags', doctest.NORMALIZE_WHITESPACE |
doctest.REPORT_ONLY_FIRST_FAILURE |
doctest.ELLIPSIS)
if 'package' not in kwargs:
# Resolve relative names based on the caller's module
kwargs['package'] = doctest._normalize_module(None)
kwargs['module_relative'] = True
return doctest.DocFileSuite(*paths, **kwargs) | [
"def",
"DocFileSuite",
"(",
"*",
"paths",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"setdefault",
"(",
"'setUp'",
",",
"setUpBrowser",
")",
"kwargs",
".",
"setdefault",
"(",
"'tearDown'",
",",
"tearDownBrowser",
")",
"kwargs",
".",
"setdefault",
"(",
"'globs'",
",",
"{",
"}",
")",
".",
"update",
"(",
"Browser",
"=",
"Browser",
")",
"kwargs",
".",
"setdefault",
"(",
"'optionflags'",
",",
"doctest",
".",
"NORMALIZE_WHITESPACE",
"|",
"doctest",
".",
"REPORT_ONLY_FIRST_FAILURE",
"|",
"doctest",
".",
"ELLIPSIS",
")",
"if",
"'package'",
"not",
"in",
"kwargs",
":",
"# Resolve relative names based on the caller's module",
"kwargs",
"[",
"'package'",
"]",
"=",
"doctest",
".",
"_normalize_module",
"(",
"None",
")",
"kwargs",
"[",
"'module_relative'",
"]",
"=",
"True",
"return",
"doctest",
".",
"DocFileSuite",
"(",
"*",
"paths",
",",
"*",
"*",
"kwargs",
")"
] | Extension of the standard DocFileSuite that sets up test browser for
use in doctests. | [
"Extension",
"of",
"the",
"standard",
"DocFileSuite",
"that",
"sets",
"up",
"test",
"browser",
"for",
"use",
"in",
"doctests",
"."
] | 5549371fd6c6fd73b69b3e9b5002d61cf42997f3 | https://github.com/shrubberysoft/homophony/blob/5549371fd6c6fd73b69b3e9b5002d61cf42997f3/src/homophony/__init__.py#L117-L130 |
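
homophony's DocFileSuite above only pre-fills setUp/tearDown, globs and optionflags before delegating to the standard library. The plain stdlib call it wraps looks like this; the doctest file name and the injected glob are placeholders that would have to exist next to the calling module:

import doctest
import unittest

def make_suite():
    return doctest.DocFileSuite(
        "browser_tests.txt",       # placeholder doctest file
        globs={"answer": 42},      # names made available inside the doctest
        optionflags=(doctest.NORMALIZE_WHITESPACE
                     | doctest.REPORT_ONLY_FIRST_FAILURE
                     | doctest.ELLIPSIS),
    )

if __name__ == "__main__":
    unittest.TextTestRunner().run(make_suite())
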
247,967 | shrubberysoft/homophony | src/homophony/__init__.py | Browser.queryHTML | def queryHTML(self, path):
"""Run an XPath query on the HTML document and print matches."""
if etree is None:
raise Exception("lxml not available")
document = etree.HTML(self.contents)
for node in document.xpath(path):
if isinstance(node, basestring):
print node
else:
print etree.tostring(node, pretty_print=True).strip() | python | def queryHTML(self, path):
"""Run an XPath query on the HTML document and print matches."""
if etree is None:
raise Exception("lxml not available")
document = etree.HTML(self.contents)
for node in document.xpath(path):
if isinstance(node, basestring):
print node
else:
print etree.tostring(node, pretty_print=True).strip() | [
"def",
"queryHTML",
"(",
"self",
",",
"path",
")",
":",
"if",
"etree",
"is",
"None",
":",
"raise",
"Exception",
"(",
"\"lxml not available\"",
")",
"document",
"=",
"etree",
".",
"HTML",
"(",
"self",
".",
"contents",
")",
"for",
"node",
"in",
"document",
".",
"xpath",
"(",
"path",
")",
":",
"if",
"isinstance",
"(",
"node",
",",
"basestring",
")",
":",
"print",
"node",
"else",
":",
"print",
"etree",
".",
"tostring",
"(",
"node",
",",
"pretty_print",
"=",
"True",
")",
".",
"strip",
"(",
")"
] | Run an XPath query on the HTML document and print matches. | [
"Run",
"an",
"XPath",
"query",
"on",
"the",
"HTML",
"document",
"and",
"print",
"matches",
"."
] | 5549371fd6c6fd73b69b3e9b5002d61cf42997f3 | https://github.com/shrubberysoft/homophony/blob/5549371fd6c6fd73b69b3e9b5002d61cf42997f3/src/homophony/__init__.py#L80-L89 |
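
queryHTML above parses the page with lxml's etree.HTML and prints each XPath match, either as plain text or as pretty-printed markup. A standalone lxml sketch of the same idea (the HTML snippet is invented):

from lxml import etree

html = "<html><body><ul><li>one</li><li>two</li></ul></body></html>"
document = etree.HTML(html)

for node in document.xpath("//li"):  # element matches come back as nodes
    print(etree.tostring(node, pretty_print=True).decode().strip())
for text in document.xpath("//li/text()"):  # text() matches are plain strings
    print(text)
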
247,968 | redhog/pieshell | pieshell/pipeline.py | pipeline_repr | def pipeline_repr(obj):
"""Returns a string representation of an object, including pieshell
pipelines."""
if not hasattr(repr_state, 'in_repr'):
repr_state.in_repr = 0
repr_state.in_repr += 1
try:
return standard_repr(obj)
finally:
repr_state.in_repr -= 1 | python | def pipeline_repr(obj):
"""Returns a string representation of an object, including pieshell
pipelines."""
if not hasattr(repr_state, 'in_repr'):
repr_state.in_repr = 0
repr_state.in_repr += 1
try:
return standard_repr(obj)
finally:
repr_state.in_repr -= 1 | [
"def",
"pipeline_repr",
"(",
"obj",
")",
":",
"if",
"not",
"hasattr",
"(",
"repr_state",
",",
"'in_repr'",
")",
":",
"repr_state",
".",
"in_repr",
"=",
"0",
"repr_state",
".",
"in_repr",
"+=",
"1",
"try",
":",
"return",
"standard_repr",
"(",
"obj",
")",
"finally",
":",
"repr_state",
".",
"in_repr",
"-=",
"1"
] | Returns a string representation of an object, including pieshell
pipelines. | [
"Returns",
"a",
"string",
"representation",
"of",
"an",
"object",
"including",
"pieshell",
"pipelines",
"."
] | 11cff3b93785ee4446f99b9134be20380edeb767 | https://github.com/redhog/pieshell/blob/11cff3b93785ee4446f99b9134be20380edeb767/pieshell/pipeline.py#L28-L38 |
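
pipeline_repr above keeps a per-thread nesting counter in a threading.local so that pipeline objects can tell whether they are being rendered inside another repr. A minimal standalone sketch of that guard (the wrapped function here is just the built-in repr):

import threading

_state = threading.local()

def tracked_repr(obj):
    depth = getattr(_state, "depth", 0)
    _state.depth = depth + 1
    try:
        # Objects being rendered can inspect _state.depth to behave
        # differently when nested inside another repr.
        return repr(obj)
    finally:
        _state.depth -= 1

print(tracked_repr([1, 2, 3]))  # depth is 1 while the list renders
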
247,969 | redhog/pieshell | pieshell/pipeline.py | Pipeline.run | def run(self, redirects = []):
"""Runs the pipelines with the specified redirects and returns
a RunningPipeline instance."""
if not isinstance(redirects, redir.Redirects):
redirects = redir.Redirects(self._env._redirects, *redirects)
with copy.copy_session() as sess:
self = copy.deepcopy(self)
processes = self._run(redirects, sess)
pipeline = RunningPipeline(processes, self)
self._env.last_pipeline = pipeline
return pipeline | python | def run(self, redirects = []):
"""Runs the pipelines with the specified redirects and returns
a RunningPipeline instance."""
if not isinstance(redirects, redir.Redirects):
redirects = redir.Redirects(self._env._redirects, *redirects)
with copy.copy_session() as sess:
self = copy.deepcopy(self)
processes = self._run(redirects, sess)
pipeline = RunningPipeline(processes, self)
self._env.last_pipeline = pipeline
return pipeline | [
"def",
"run",
"(",
"self",
",",
"redirects",
"=",
"[",
"]",
")",
":",
"if",
"not",
"isinstance",
"(",
"redirects",
",",
"redir",
".",
"Redirects",
")",
":",
"redirects",
"=",
"redir",
".",
"Redirects",
"(",
"self",
".",
"_env",
".",
"_redirects",
",",
"*",
"redirects",
")",
"with",
"copy",
".",
"copy_session",
"(",
")",
"as",
"sess",
":",
"self",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"processes",
"=",
"self",
".",
"_run",
"(",
"redirects",
",",
"sess",
")",
"pipeline",
"=",
"RunningPipeline",
"(",
"processes",
",",
"self",
")",
"self",
".",
"_env",
".",
"last_pipeline",
"=",
"pipeline",
"return",
"pipeline"
] | Runs the pipelines with the specified redirects and returns
a RunningPipeline instance. | [
"Runs",
"the",
"pipelines",
"with",
"the",
"specified",
"redirects",
"and",
"returns",
"a",
"RunningPipeline",
"instance",
"."
] | 11cff3b93785ee4446f99b9134be20380edeb767 | https://github.com/redhog/pieshell/blob/11cff3b93785ee4446f99b9134be20380edeb767/pieshell/pipeline.py#L222-L232 |
247,970 | eallik/spinoff | spinoff/util/lockfile.py | lock_file | def lock_file(path, maxdelay=.1, lock_cls=LockFile, timeout=10.0):
"""Cooperative file lock. Uses `lockfile.LockFile` polling under the hood.
`maxdelay` defines the interval between individual polls.
"""
lock = lock_cls(path)
max_t = time.time() + timeout
while True:
if time.time() >= max_t:
raise LockTimeout("Timeout waiting to acquire lock for %s" % (path,)) # same exception messages as in lockfile
try:
lock.acquire(timeout=0)
except AlreadyLocked:
sleep(maxdelay)
else:
try:
yield lock
break
finally:
lock.release() | python | def lock_file(path, maxdelay=.1, lock_cls=LockFile, timeout=10.0):
"""Cooperative file lock. Uses `lockfile.LockFile` polling under the hood.
`maxdelay` defines the interval between individual polls.
"""
lock = lock_cls(path)
max_t = time.time() + timeout
while True:
if time.time() >= max_t:
raise LockTimeout("Timeout waiting to acquire lock for %s" % (path,)) # same exception messages as in lockfile
try:
lock.acquire(timeout=0)
except AlreadyLocked:
sleep(maxdelay)
else:
try:
yield lock
break
finally:
lock.release() | [
"def",
"lock_file",
"(",
"path",
",",
"maxdelay",
"=",
".1",
",",
"lock_cls",
"=",
"LockFile",
",",
"timeout",
"=",
"10.0",
")",
":",
"lock",
"=",
"lock_cls",
"(",
"path",
")",
"max_t",
"=",
"time",
".",
"time",
"(",
")",
"+",
"timeout",
"while",
"True",
":",
"if",
"time",
".",
"time",
"(",
")",
">=",
"max_t",
":",
"raise",
"LockTimeout",
"(",
"\"Timeout waiting to acquire lock for %s\"",
"%",
"(",
"path",
",",
")",
")",
"# same exception messages as in lockfile",
"try",
":",
"lock",
".",
"acquire",
"(",
"timeout",
"=",
"0",
")",
"except",
"AlreadyLocked",
":",
"sleep",
"(",
"maxdelay",
")",
"else",
":",
"try",
":",
"yield",
"lock",
"break",
"finally",
":",
"lock",
".",
"release",
"(",
")"
] | Cooperative file lock. Uses `lockfile.LockFile` polling under the hood.
`maxdelay` defines the interval between individual polls. | [
"Cooperative",
"file",
"lock",
".",
"Uses",
"lockfile",
".",
"LockFile",
"polling",
"under",
"the",
"hood",
"."
] | 06b00d6b86c7422c9cb8f9a4b2915906e92b7d52 | https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/spinoff/util/lockfile.py#L11-L31 |
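
Illustrative usage sketch for the cooperative lock above (editorial addition, not part of the record). It assumes lock_file is exposed as a context manager in the actual module -- the bare `yield lock` suggests a contextlib.contextmanager decorator sitting outside the captured snippet -- and the file paths are made up.

from lockfile import LockTimeout
from spinoff.util.lockfile import lock_file   # import path taken from the record's `path` field

def bump_counter(counter_path='/tmp/demo.counter', lock_path='/tmp/demo.counter.lock'):
    """Increment a counter file while holding the cooperative lock."""
    try:
        with lock_file(lock_path, maxdelay=0.05, timeout=2.0) as lock:
            # Critical section: only one cooperating process gets past acquire().
            try:
                with open(counter_path) as f:
                    value = int(f.read() or 0)
            except IOError:
                value = 0
            with open(counter_path, 'w') as f:
                f.write(str(value + 1))
    except LockTimeout:
        print("could not acquire %s within 2 seconds" % lock_path)
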
247,971 | sveetch/py-css-styleguide | py_css_styleguide/parser.py | TinycssSourceParser.digest_content | def digest_content(self, rule):
"""
Walk on rule content tokens to return a dict of properties.
This is pretty naive and will choke/fail on everything that is more
evolved than simple ``ident(string):value(string)``
Arguments:
rule (tinycss2.ast.QualifiedRule): Qualified rule object as
returned by tinycss2.
Returns:
            dict: Dictionary of retrieved variables and properties.
"""
data = OrderedDict()
current_key = None
for token in rule.content:
# Assume first identity token is the property name
if token.type == 'ident':
# Ignore starting '-' from css variables
name = token.value
if name.startswith('-'):
name = name[1:]
current_key = name
data[current_key] = None
# Assume first following string token is the property value.
if token.type == 'string':
data[current_key] = token.value
return data | python | def digest_content(self, rule):
"""
Walk on rule content tokens to return a dict of properties.
This is pretty naive and will choke/fail on everything that is more
evolved than simple ``ident(string):value(string)``
Arguments:
rule (tinycss2.ast.QualifiedRule): Qualified rule object as
returned by tinycss2.
Returns:
            dict: Dictionary of retrieved variables and properties.
"""
data = OrderedDict()
current_key = None
for token in rule.content:
# Assume first identity token is the property name
if token.type == 'ident':
# Ignore starting '-' from css variables
name = token.value
if name.startswith('-'):
name = name[1:]
current_key = name
data[current_key] = None
# Assume first following string token is the property value.
if token.type == 'string':
data[current_key] = token.value
return data | [
"def",
"digest_content",
"(",
"self",
",",
"rule",
")",
":",
"data",
"=",
"OrderedDict",
"(",
")",
"current_key",
"=",
"None",
"for",
"token",
"in",
"rule",
".",
"content",
":",
"# Assume first identity token is the property name",
"if",
"token",
".",
"type",
"==",
"'ident'",
":",
"# Ignore starting '-' from css variables",
"name",
"=",
"token",
".",
"value",
"if",
"name",
".",
"startswith",
"(",
"'-'",
")",
":",
"name",
"=",
"name",
"[",
"1",
":",
"]",
"current_key",
"=",
"name",
"data",
"[",
"current_key",
"]",
"=",
"None",
"# Assume first following string token is the property value.",
"if",
"token",
".",
"type",
"==",
"'string'",
":",
"data",
"[",
"current_key",
"]",
"=",
"token",
".",
"value",
"return",
"data"
] | Walk on rule content tokens to return a dict of properties.
This is pretty naive and will choke/fail on everything that is more
evolved than simple ``ident(string):value(string)``
Arguments:
rule (tinycss2.ast.QualifiedRule): Qualified rule object as
returned by tinycss2.
Returns:
            dict: Dictionary of retrieved variables and properties. | [
"Walk",
"on",
"rule",
"content",
"tokens",
"to",
"return",
"a",
"dict",
"of",
"properties",
"."
] | 5acc693f71b2fa7d944d7fed561ae0a7699ccd0f | https://github.com/sveetch/py-css-styleguide/blob/5acc693f71b2fa7d944d7fed561ae0a7699ccd0f/py_css_styleguide/parser.py#L54-L87 |
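
The ident/string pairing above is easier to follow on a concrete rule. Below is a minimal standalone sketch (editorial addition; the stylesheet snippet is made up) reproducing the same token walk directly on tinycss2 output instead of going through TinycssSourceParser.

from collections import OrderedDict
import tinycss2

css = '.styleguide-metas-names { -names: "palette shadows"; }'
rules = tinycss2.parse_stylesheet(css, skip_comments=True, skip_whitespace=True)
rule = [r for r in rules if r.type == 'qualified-rule'][0]

data = OrderedDict()
current_key = None
for token in rule.content:
    if token.type == 'ident':        # property name, minus its leading '-'
        name = token.value[1:] if token.value.startswith('-') else token.value
        current_key = name
        data[current_key] = None
    if token.type == 'string':       # first string token becomes the value
        data[current_key] = token.value

print(data)   # OrderedDict([('names', 'palette shadows')])
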
247,972 | sveetch/py-css-styleguide | py_css_styleguide/parser.py | TinycssSourceParser.consume | def consume(self, source):
"""
Parse source and consume tokens from tinycss2.
Arguments:
source (string): Source content to parse.
Returns:
dict: Retrieved rules.
"""
manifest = OrderedDict()
rules = parse_stylesheet(
source,
skip_comments=True,
skip_whitespace=True,
)
for rule in rules:
# Gather rule selector+properties
name = self.digest_prelude(rule)
# Ignore everything out of styleguide namespace
if not name.startswith(RULE_BASE_PREFIX):
continue
properties = self.digest_content(rule)
manifest[name] = properties
return manifest | python | def consume(self, source):
"""
Parse source and consume tokens from tinycss2.
Arguments:
source (string): Source content to parse.
Returns:
dict: Retrieved rules.
"""
manifest = OrderedDict()
rules = parse_stylesheet(
source,
skip_comments=True,
skip_whitespace=True,
)
for rule in rules:
# Gather rule selector+properties
name = self.digest_prelude(rule)
# Ignore everything out of styleguide namespace
if not name.startswith(RULE_BASE_PREFIX):
continue
properties = self.digest_content(rule)
manifest[name] = properties
return manifest | [
"def",
"consume",
"(",
"self",
",",
"source",
")",
":",
"manifest",
"=",
"OrderedDict",
"(",
")",
"rules",
"=",
"parse_stylesheet",
"(",
"source",
",",
"skip_comments",
"=",
"True",
",",
"skip_whitespace",
"=",
"True",
",",
")",
"for",
"rule",
"in",
"rules",
":",
"# Gather rule selector+properties",
"name",
"=",
"self",
".",
"digest_prelude",
"(",
"rule",
")",
"# Ignore everything out of styleguide namespace",
"if",
"not",
"name",
".",
"startswith",
"(",
"RULE_BASE_PREFIX",
")",
":",
"continue",
"properties",
"=",
"self",
".",
"digest_content",
"(",
"rule",
")",
"manifest",
"[",
"name",
"]",
"=",
"properties",
"return",
"manifest"
] | Parse source and consume tokens from tinycss2.
Arguments:
source (string): Source content to parse.
Returns:
dict: Retrieved rules. | [
"Parse",
"source",
"and",
"consume",
"tokens",
"from",
"tinycss2",
"."
] | 5acc693f71b2fa7d944d7fed561ae0a7699ccd0f | https://github.com/sveetch/py-css-styleguide/blob/5acc693f71b2fa7d944d7fed561ae0a7699ccd0f/py_css_styleguide/parser.py#L89-L118 |
247,973 | tBaxter/tango-comments | build/lib/tango_comments/views/comments.py | post_comment | def post_comment(request, next=None, using=None):
"""
Post a comment.
HTTP POST is required.
"""
# Fill out some initial data fields from an authenticated user, if present
data = request.POST.copy()
if request.user.is_authenticated:
data["user"] = request.user
else:
return CommentPostBadRequest("You must be logged in to comment")
# Look up the object we're trying to comment about
ctype = data.get("content_type")
object_pk = data.get("object_pk")
if ctype is None or object_pk is None:
return CommentPostBadRequest("Missing content_type or object_pk field.")
try:
model = models.get_model(*ctype.split(".", 1))
target = model._default_manager.using(using).get(pk=object_pk)
except TypeError:
return CommentPostBadRequest(
"Invalid content_type value: %r" % escape(ctype))
except AttributeError:
return CommentPostBadRequest(
"The given content-type %r does not resolve to a valid model." % \
escape(ctype))
except ObjectDoesNotExist:
return CommentPostBadRequest(
"No object matching content-type %r and object PK %r exists." % \
(escape(ctype), escape(object_pk)))
except (ValueError, ValidationError) as e:
        return CommentPostBadRequest(
            "Attempting to get content-type %r and object PK %r raised %s" % \
(escape(ctype), escape(object_pk), e.__class__.__name__))
# Construct the comment form
form = comments.get_form()(target, data=data)
# Check security information
if form.security_errors():
return CommentPostBadRequest(
"The comment form failed security verification: %s" % \
escape(str(form.security_errors())))
# Check for next
if not next:
next = data.get("next")
# If there are errors show the comment
if form.errors:
template_list = [
"comments/%s/%s/form.html" % (model._meta.app_label, model._meta.module_name),
"comments/%s/form.html" % model._meta.app_label,
"comments/form.html",
]
return render_to_response(
template_list, {
"comment": form.data.get("comment", ""),
"form": form,
"next": data.get("next", next),
},
RequestContext(request, {})
)
# Otherwise create the comment
comment = form.get_comment_object()
comment.ip_address = request.META.get("REMOTE_ADDR", None)
if request.user.is_authenticated:
comment.user = request.user
# Signal that the comment is about to be saved
responses = signals.comment_will_be_posted.send(
sender=comment.__class__,
comment=comment,
request=request
)
for (receiver, response) in responses:
if response == False:
return CommentPostBadRequest(
"comment_will_be_posted receiver %r killed the comment" % receiver.__name__)
# Save the comment and signal that it was saved
comment.save()
signals.comment_was_posted.send(
sender=comment.__class__,
comment=comment,
request=request
)
messages.success(request, 'Your comment was saved.')
return redirect(next) | python | def post_comment(request, next=None, using=None):
"""
Post a comment.
HTTP POST is required.
"""
# Fill out some initial data fields from an authenticated user, if present
data = request.POST.copy()
if request.user.is_authenticated:
data["user"] = request.user
else:
return CommentPostBadRequest("You must be logged in to comment")
# Look up the object we're trying to comment about
ctype = data.get("content_type")
object_pk = data.get("object_pk")
if ctype is None or object_pk is None:
return CommentPostBadRequest("Missing content_type or object_pk field.")
try:
model = models.get_model(*ctype.split(".", 1))
target = model._default_manager.using(using).get(pk=object_pk)
except TypeError:
return CommentPostBadRequest(
"Invalid content_type value: %r" % escape(ctype))
except AttributeError:
return CommentPostBadRequest(
"The given content-type %r does not resolve to a valid model." % \
escape(ctype))
except ObjectDoesNotExist:
return CommentPostBadRequest(
"No object matching content-type %r and object PK %r exists." % \
(escape(ctype), escape(object_pk)))
except (ValueError, ValidationError) as e:
        return CommentPostBadRequest(
            "Attempting to get content-type %r and object PK %r raised %s" % \
(escape(ctype), escape(object_pk), e.__class__.__name__))
# Construct the comment form
form = comments.get_form()(target, data=data)
# Check security information
if form.security_errors():
return CommentPostBadRequest(
"The comment form failed security verification: %s" % \
escape(str(form.security_errors())))
# Check for next
if not next:
next = data.get("next")
# If there are errors show the comment
if form.errors:
template_list = [
"comments/%s/%s/form.html" % (model._meta.app_label, model._meta.module_name),
"comments/%s/form.html" % model._meta.app_label,
"comments/form.html",
]
return render_to_response(
template_list, {
"comment": form.data.get("comment", ""),
"form": form,
"next": data.get("next", next),
},
RequestContext(request, {})
)
# Otherwise create the comment
comment = form.get_comment_object()
comment.ip_address = request.META.get("REMOTE_ADDR", None)
if request.user.is_authenticated:
comment.user = request.user
# Signal that the comment is about to be saved
responses = signals.comment_will_be_posted.send(
sender=comment.__class__,
comment=comment,
request=request
)
for (receiver, response) in responses:
if response == False:
return CommentPostBadRequest(
"comment_will_be_posted receiver %r killed the comment" % receiver.__name__)
# Save the comment and signal that it was saved
comment.save()
signals.comment_was_posted.send(
sender=comment.__class__,
comment=comment,
request=request
)
messages.success(request, 'Your comment was saved.')
return redirect(next) | [
"def",
"post_comment",
"(",
"request",
",",
"next",
"=",
"None",
",",
"using",
"=",
"None",
")",
":",
"# Fill out some initial data fields from an authenticated user, if present",
"data",
"=",
"request",
".",
"POST",
".",
"copy",
"(",
")",
"if",
"request",
".",
"user",
".",
"is_authenticated",
":",
"data",
"[",
"\"user\"",
"]",
"=",
"request",
".",
"user",
"else",
":",
"return",
"CommentPostBadRequest",
"(",
"\"You must be logged in to comment\"",
")",
"# Look up the object we're trying to comment about",
"ctype",
"=",
"data",
".",
"get",
"(",
"\"content_type\"",
")",
"object_pk",
"=",
"data",
".",
"get",
"(",
"\"object_pk\"",
")",
"if",
"ctype",
"is",
"None",
"or",
"object_pk",
"is",
"None",
":",
"return",
"CommentPostBadRequest",
"(",
"\"Missing content_type or object_pk field.\"",
")",
"try",
":",
"model",
"=",
"models",
".",
"get_model",
"(",
"*",
"ctype",
".",
"split",
"(",
"\".\"",
",",
"1",
")",
")",
"target",
"=",
"model",
".",
"_default_manager",
".",
"using",
"(",
"using",
")",
".",
"get",
"(",
"pk",
"=",
"object_pk",
")",
"except",
"TypeError",
":",
"return",
"CommentPostBadRequest",
"(",
"\"Invalid content_type value: %r\"",
"%",
"escape",
"(",
"ctype",
")",
")",
"except",
"AttributeError",
":",
"return",
"CommentPostBadRequest",
"(",
"\"The given content-type %r does not resolve to a valid model.\"",
"%",
"escape",
"(",
"ctype",
")",
")",
"except",
"ObjectDoesNotExist",
":",
"return",
"CommentPostBadRequest",
"(",
"\"No object matching content-type %r and object PK %r exists.\"",
"%",
"(",
"escape",
"(",
"ctype",
")",
",",
"escape",
"(",
"object_pk",
")",
")",
")",
"except",
"(",
"ValueError",
",",
"ValidationError",
")",
"as",
"e",
":",
"return",
"CommentPostBadRequest",
"(",
"\"Attempting go get content-type %r and object PK %r exists raised %s\"",
"%",
"(",
"escape",
"(",
"ctype",
")",
",",
"escape",
"(",
"object_pk",
")",
",",
"e",
".",
"__class__",
".",
"__name__",
")",
")",
"# Construct the comment form",
"form",
"=",
"comments",
".",
"get_form",
"(",
")",
"(",
"target",
",",
"data",
"=",
"data",
")",
"# Check security information",
"if",
"form",
".",
"security_errors",
"(",
")",
":",
"return",
"CommentPostBadRequest",
"(",
"\"The comment form failed security verification: %s\"",
"%",
"escape",
"(",
"str",
"(",
"form",
".",
"security_errors",
"(",
")",
")",
")",
")",
"# Check for next",
"if",
"not",
"next",
":",
"next",
"=",
"data",
".",
"get",
"(",
"\"next\"",
")",
"# If there are errors show the comment",
"if",
"form",
".",
"errors",
":",
"template_list",
"=",
"[",
"\"comments/%s/%s/form.html\"",
"%",
"(",
"model",
".",
"_meta",
".",
"app_label",
",",
"model",
".",
"_meta",
".",
"module_name",
")",
",",
"\"comments/%s/form.html\"",
"%",
"model",
".",
"_meta",
".",
"app_label",
",",
"\"comments/form.html\"",
",",
"]",
"return",
"render_to_response",
"(",
"template_list",
",",
"{",
"\"comment\"",
":",
"form",
".",
"data",
".",
"get",
"(",
"\"comment\"",
",",
"\"\"",
")",
",",
"\"form\"",
":",
"form",
",",
"\"next\"",
":",
"data",
".",
"get",
"(",
"\"next\"",
",",
"next",
")",
",",
"}",
",",
"RequestContext",
"(",
"request",
",",
"{",
"}",
")",
")",
"# Otherwise create the comment",
"comment",
"=",
"form",
".",
"get_comment_object",
"(",
")",
"comment",
".",
"ip_address",
"=",
"request",
".",
"META",
".",
"get",
"(",
"\"REMOTE_ADDR\"",
",",
"None",
")",
"if",
"request",
".",
"user",
".",
"is_authenticated",
":",
"comment",
".",
"user",
"=",
"request",
".",
"user",
"# Signal that the comment is about to be saved",
"responses",
"=",
"signals",
".",
"comment_will_be_posted",
".",
"send",
"(",
"sender",
"=",
"comment",
".",
"__class__",
",",
"comment",
"=",
"comment",
",",
"request",
"=",
"request",
")",
"for",
"(",
"receiver",
",",
"response",
")",
"in",
"responses",
":",
"if",
"response",
"==",
"False",
":",
"return",
"CommentPostBadRequest",
"(",
"\"comment_will_be_posted receiver %r killed the comment\"",
"%",
"receiver",
".",
"__name__",
")",
"# Save the comment and signal that it was saved",
"comment",
".",
"save",
"(",
")",
"signals",
".",
"comment_was_posted",
".",
"send",
"(",
"sender",
"=",
"comment",
".",
"__class__",
",",
"comment",
"=",
"comment",
",",
"request",
"=",
"request",
")",
"messages",
".",
"success",
"(",
"request",
",",
"'Your comment was saved.'",
")",
"return",
"redirect",
"(",
"next",
")"
] | Post a comment.
HTTP POST is required. | [
"Post",
"a",
"comment",
"."
] | 1fd335c6fc9e81bba158e42e1483f1a149622ab4 | https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/views/comments.py#L35-L127 |
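
Hypothetical request sketch for the view above (editorial addition). The import path, the get_form() accessor and the /comments/post/ mount point are assumptions carried over from the django.contrib.comments framework this app forks; adjust them to the project's actual urlconf and form class.

from django.test import Client
import tango_comments as comments   # assumed: the package exposes get_form() like the stock framework

def post_test_comment(target, username, password, text="Nice article!"):
    """Sketch: submit a comment about ``target`` through post_comment()."""
    client = Client()
    client.login(username=username, password=password)   # the view rejects anonymous users
    form = comments.get_form()(target)
    data = dict(form.initial)   # content_type, object_pk, timestamp, security_hash
    data.update({"comment": text, "next": "/"})
    # The form class may also require detail fields (name/email in the stock framework).
    return client.post("/comments/post/", data)   # assumed mount point for the comments urls
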
247,974 | dossier/dossier.web | dossier/web/search_engines.py | streaming_sample | def streaming_sample(seq, k, limit=None):
'''Streaming sample.
Iterate over seq (once!) keeping k random elements with uniform
distribution.
As a special case, if ``k`` is ``None``, then ``list(seq)`` is
returned.
:param seq: iterable of things to sample from
:param k: size of desired sample
:param limit: stop reading ``seq`` after considering this many
:return: list of elements from seq, length k (or less if seq is
short)
'''
if k is None:
return list(seq)
seq = iter(seq)
if limit is not None:
k = min(limit, k)
limit -= k
result = list(islice(seq, k))
for count, x in enumerate(islice(seq, limit), len(result)):
if rand.random() < (1.0 / count):
result[rand.randint(0, k-1)] = x
return result | python | def streaming_sample(seq, k, limit=None):
'''Streaming sample.
Iterate over seq (once!) keeping k random elements with uniform
distribution.
As a special case, if ``k`` is ``None``, then ``list(seq)`` is
returned.
:param seq: iterable of things to sample from
:param k: size of desired sample
:param limit: stop reading ``seq`` after considering this many
:return: list of elements from seq, length k (or less if seq is
short)
'''
if k is None:
return list(seq)
seq = iter(seq)
if limit is not None:
k = min(limit, k)
limit -= k
result = list(islice(seq, k))
for count, x in enumerate(islice(seq, limit), len(result)):
if rand.random() < (1.0 / count):
result[rand.randint(0, k-1)] = x
return result | [
"def",
"streaming_sample",
"(",
"seq",
",",
"k",
",",
"limit",
"=",
"None",
")",
":",
"if",
"k",
"is",
"None",
":",
"return",
"list",
"(",
"seq",
")",
"seq",
"=",
"iter",
"(",
"seq",
")",
"if",
"limit",
"is",
"not",
"None",
":",
"k",
"=",
"min",
"(",
"limit",
",",
"k",
")",
"limit",
"-=",
"k",
"result",
"=",
"list",
"(",
"islice",
"(",
"seq",
",",
"k",
")",
")",
"for",
"count",
",",
"x",
"in",
"enumerate",
"(",
"islice",
"(",
"seq",
",",
"limit",
")",
",",
"len",
"(",
"result",
")",
")",
":",
"if",
"rand",
".",
"random",
"(",
")",
"<",
"(",
"1.0",
"/",
"count",
")",
":",
"result",
"[",
"rand",
".",
"randint",
"(",
"0",
",",
"k",
"-",
"1",
")",
"]",
"=",
"x",
"return",
"result"
] | Streaming sample.
Iterate over seq (once!) keeping k random elements with uniform
distribution.
As a special case, if ``k`` is ``None``, then ``list(seq)`` is
returned.
:param seq: iterable of things to sample from
:param k: size of desired sample
:param limit: stop reading ``seq`` after considering this many
:return: list of elements from seq, length k (or less if seq is
short) | [
"Streaming",
"sample",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/search_engines.py#L102-L128 |
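
Self-contained usage sketch (editorial addition): draw a fixed-size sample from a lazy stream without materialising it. The numbers are arbitrary, and the output varies between runs because the module-level random generator is used.

from dossier.web.search_engines import streaming_sample   # import path per the record

def demo():
    evens = (n for n in range(1000000) if n % 2 == 0)   # lazily generated stream
    sample = streaming_sample(evens, 5, limit=10000)    # reads at most 10000 items
    assert len(sample) == 5
    print(sample)                                       # e.g. [0, 2454, 5028, 7776, 9314]

demo()
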
247,975 | minhhoit/yacms | yacms/accounts/__init__.py | get_profile_model | def get_profile_model():
"""
Returns the yacms profile model, defined in
``settings.ACCOUNTS_PROFILE_MODEL``, or ``None`` if no profile
model is configured.
"""
if not getattr(settings, "ACCOUNTS_PROFILE_MODEL", None):
raise ProfileNotConfigured
try:
return apps.get_model(settings.ACCOUNTS_PROFILE_MODEL)
except ValueError:
raise ImproperlyConfigured("ACCOUNTS_PROFILE_MODEL must be of "
"the form 'app_label.model_name'")
except LookupError:
raise ImproperlyConfigured("ACCOUNTS_PROFILE_MODEL refers to "
"model '%s' that has not been installed"
% settings.ACCOUNTS_PROFILE_MODEL) | python | def get_profile_model():
"""
Returns the yacms profile model, defined in
``settings.ACCOUNTS_PROFILE_MODEL``, or ``None`` if no profile
model is configured.
"""
if not getattr(settings, "ACCOUNTS_PROFILE_MODEL", None):
raise ProfileNotConfigured
try:
return apps.get_model(settings.ACCOUNTS_PROFILE_MODEL)
except ValueError:
raise ImproperlyConfigured("ACCOUNTS_PROFILE_MODEL must be of "
"the form 'app_label.model_name'")
except LookupError:
raise ImproperlyConfigured("ACCOUNTS_PROFILE_MODEL refers to "
"model '%s' that has not been installed"
% settings.ACCOUNTS_PROFILE_MODEL) | [
"def",
"get_profile_model",
"(",
")",
":",
"if",
"not",
"getattr",
"(",
"settings",
",",
"\"ACCOUNTS_PROFILE_MODEL\"",
",",
"None",
")",
":",
"raise",
"ProfileNotConfigured",
"try",
":",
"return",
"apps",
".",
"get_model",
"(",
"settings",
".",
"ACCOUNTS_PROFILE_MODEL",
")",
"except",
"ValueError",
":",
"raise",
"ImproperlyConfigured",
"(",
"\"ACCOUNTS_PROFILE_MODEL must be of \"",
"\"the form 'app_label.model_name'\"",
")",
"except",
"LookupError",
":",
"raise",
"ImproperlyConfigured",
"(",
"\"ACCOUNTS_PROFILE_MODEL refers to \"",
"\"model '%s' that has not been installed\"",
"%",
"settings",
".",
"ACCOUNTS_PROFILE_MODEL",
")"
] | Returns the yacms profile model, defined in
``settings.ACCOUNTS_PROFILE_MODEL``, or ``None`` if no profile
model is configured. | [
"Returns",
"the",
"yacms",
"profile",
"model",
"defined",
"in",
"settings",
".",
"ACCOUNTS_PROFILE_MODEL",
"or",
"None",
"if",
"no",
"profile",
"model",
"is",
"configured",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/accounts/__init__.py#L22-L40 |
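
Minimal configuration sketch (editorial addition; the app and model names are hypothetical) showing the setting this helper reads and a profile model it would resolve to inside a configured Django project.

# settings.py
ACCOUNTS_PROFILE_MODEL = "profiles.UserProfile"   # "app_label.ModelName"

# profiles/models.py
from django.conf import settings
from django.db import models

class UserProfile(models.Model):
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    bio = models.TextField(blank=True)

# Elsewhere, get_profile_model() now returns the UserProfile class, and raises
# ProfileNotConfigured / ImproperlyConfigured when the setting is absent or wrong.
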
247,976 | minhhoit/yacms | yacms/accounts/__init__.py | get_profile_for_user | def get_profile_for_user(user):
"""
Returns site-specific profile for this user. Raises
``ProfileNotConfigured`` if ``settings.ACCOUNTS_PROFILE_MODEL`` is not
set, and ``ImproperlyConfigured`` if the corresponding model can't
be found.
"""
if not hasattr(user, '_yacms_profile'):
# Raises ProfileNotConfigured if not bool(ACCOUNTS_PROFILE_MODEL)
profile_model = get_profile_model()
profile_manager = profile_model._default_manager.using(user._state.db)
user_field = get_profile_user_fieldname(profile_model, user.__class__)
profile, created = profile_manager.get_or_create(**{user_field: user})
profile.user = user
user._yacms_profile = profile
return user._yacms_profile | python | def get_profile_for_user(user):
"""
Returns site-specific profile for this user. Raises
``ProfileNotConfigured`` if ``settings.ACCOUNTS_PROFILE_MODEL`` is not
set, and ``ImproperlyConfigured`` if the corresponding model can't
be found.
"""
if not hasattr(user, '_yacms_profile'):
# Raises ProfileNotConfigured if not bool(ACCOUNTS_PROFILE_MODEL)
profile_model = get_profile_model()
profile_manager = profile_model._default_manager.using(user._state.db)
user_field = get_profile_user_fieldname(profile_model, user.__class__)
profile, created = profile_manager.get_or_create(**{user_field: user})
profile.user = user
user._yacms_profile = profile
return user._yacms_profile | [
"def",
"get_profile_for_user",
"(",
"user",
")",
":",
"if",
"not",
"hasattr",
"(",
"user",
",",
"'_yacms_profile'",
")",
":",
"# Raises ProfileNotConfigured if not bool(ACCOUNTS_PROFILE_MODEL)",
"profile_model",
"=",
"get_profile_model",
"(",
")",
"profile_manager",
"=",
"profile_model",
".",
"_default_manager",
".",
"using",
"(",
"user",
".",
"_state",
".",
"db",
")",
"user_field",
"=",
"get_profile_user_fieldname",
"(",
"profile_model",
",",
"user",
".",
"__class__",
")",
"profile",
",",
"created",
"=",
"profile_manager",
".",
"get_or_create",
"(",
"*",
"*",
"{",
"user_field",
":",
"user",
"}",
")",
"profile",
".",
"user",
"=",
"user",
"user",
".",
"_yacms_profile",
"=",
"profile",
"return",
"user",
".",
"_yacms_profile"
] | Returns site-specific profile for this user. Raises
``ProfileNotConfigured`` if ``settings.ACCOUNTS_PROFILE_MODEL`` is not
set, and ``ImproperlyConfigured`` if the corresponding model can't
be found. | [
"Returns",
"site",
"-",
"specific",
"profile",
"for",
"this",
"user",
".",
"Raises",
"ProfileNotConfigured",
"if",
"settings",
".",
"ACCOUNTS_PROFILE_MODEL",
"is",
"not",
"set",
"and",
"ImproperlyConfigured",
"if",
"the",
"corresponding",
"model",
"can",
"t",
"be",
"found",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/accounts/__init__.py#L43-L61 |
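
Short usage sketch (editorial addition; the view and template names are hypothetical, and a profile model is assumed to be configured as in the previous record).

from django.shortcuts import render
from yacms.accounts import get_profile_for_user   # import path per the record

def dashboard(request):
    profile = get_profile_for_user(request.user)   # created on first access, then cached on the user
    return render(request, "dashboard.html", {"profile": profile})
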
247,977 | minhhoit/yacms | yacms/accounts/__init__.py | get_profile_form | def get_profile_form():
"""
Returns the profile form defined by
``settings.ACCOUNTS_PROFILE_FORM_CLASS``.
"""
from yacms.conf import settings
try:
return import_dotted_path(settings.ACCOUNTS_PROFILE_FORM_CLASS)
except ImportError:
raise ImproperlyConfigured("Value for ACCOUNTS_PROFILE_FORM_CLASS "
"could not be imported: %s" %
settings.ACCOUNTS_PROFILE_FORM_CLASS) | python | def get_profile_form():
"""
Returns the profile form defined by
``settings.ACCOUNTS_PROFILE_FORM_CLASS``.
"""
from yacms.conf import settings
try:
return import_dotted_path(settings.ACCOUNTS_PROFILE_FORM_CLASS)
except ImportError:
raise ImproperlyConfigured("Value for ACCOUNTS_PROFILE_FORM_CLASS "
"could not be imported: %s" %
settings.ACCOUNTS_PROFILE_FORM_CLASS) | [
"def",
"get_profile_form",
"(",
")",
":",
"from",
"yacms",
".",
"conf",
"import",
"settings",
"try",
":",
"return",
"import_dotted_path",
"(",
"settings",
".",
"ACCOUNTS_PROFILE_FORM_CLASS",
")",
"except",
"ImportError",
":",
"raise",
"ImproperlyConfigured",
"(",
"\"Value for ACCOUNTS_PROFILE_FORM_CLASS \"",
"\"could not be imported: %s\"",
"%",
"settings",
".",
"ACCOUNTS_PROFILE_FORM_CLASS",
")"
] | Returns the profile form defined by
``settings.ACCOUNTS_PROFILE_FORM_CLASS``. | [
"Returns",
"the",
"profile",
"form",
"defined",
"by",
"settings",
".",
"ACCOUNTS_PROFILE_FORM_CLASS",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/accounts/__init__.py#L64-L75 |
247,978 | minhhoit/yacms | yacms/accounts/__init__.py | get_profile_user_fieldname | def get_profile_user_fieldname(profile_model=None, user_model=None):
"""
Returns the name of the first field on the profile model that
points to the ``auth.User`` model.
"""
Profile = profile_model or get_profile_model()
User = user_model or get_user_model()
for field in Profile._meta.fields:
if field.rel and field.rel.to == User:
return field.name
raise ImproperlyConfigured("Value for ACCOUNTS_PROFILE_MODEL does not "
"contain a ForeignKey field for auth.User: %s"
% Profile.__name__) | python | def get_profile_user_fieldname(profile_model=None, user_model=None):
"""
Returns the name of the first field on the profile model that
points to the ``auth.User`` model.
"""
Profile = profile_model or get_profile_model()
User = user_model or get_user_model()
for field in Profile._meta.fields:
if field.rel and field.rel.to == User:
return field.name
raise ImproperlyConfigured("Value for ACCOUNTS_PROFILE_MODEL does not "
"contain a ForeignKey field for auth.User: %s"
% Profile.__name__) | [
"def",
"get_profile_user_fieldname",
"(",
"profile_model",
"=",
"None",
",",
"user_model",
"=",
"None",
")",
":",
"Profile",
"=",
"profile_model",
"or",
"get_profile_model",
"(",
")",
"User",
"=",
"user_model",
"or",
"get_user_model",
"(",
")",
"for",
"field",
"in",
"Profile",
".",
"_meta",
".",
"fields",
":",
"if",
"field",
".",
"rel",
"and",
"field",
".",
"rel",
".",
"to",
"==",
"User",
":",
"return",
"field",
".",
"name",
"raise",
"ImproperlyConfigured",
"(",
"\"Value for ACCOUNTS_PROFILE_MODEL does not \"",
"\"contain a ForeignKey field for auth.User: %s\"",
"%",
"Profile",
".",
"__name__",
")"
] | Returns the name of the first field on the profile model that
points to the ``auth.User`` model. | [
"Returns",
"the",
"name",
"of",
"the",
"first",
"field",
"on",
"the",
"profile",
"model",
"that",
"points",
"to",
"the",
"auth",
".",
"User",
"model",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/accounts/__init__.py#L78-L90 |
247,979 | ttinies/sc2common | sc2common/containers.py | RestrictedType.gameValue | def gameValue(self):
        """identify the corresponding internal SC2 game value for self.type's value"""
allowed = type(self).ALLOWED_TYPES
try:
if isinstance(allowed, dict): # if ALLOWED_TYPES is not a dict, there is no-internal game value mapping defined
return allowed.get(self.type.name)
except: pass # None .type values are okay -- such result in a None gameValue() result
        return None | python | def gameValue(self):
        """identify the corresponding internal SC2 game value for self.type's value"""
allowed = type(self).ALLOWED_TYPES
try:
if isinstance(allowed, dict): # if ALLOWED_TYPES is not a dict, there is no-internal game value mapping defined
return allowed.get(self.type.name)
except: pass # None .type values are okay -- such result in a None gameValue() result
return None | [
"def",
"gameValue",
"(",
"self",
")",
":",
"allowed",
"=",
"type",
"(",
"self",
")",
".",
"ALLOWED_TYPES",
"try",
":",
"if",
"isinstance",
"(",
"allowed",
",",
"dict",
")",
":",
"# if ALLOWED_TYPES is not a dict, there is no-internal game value mapping defined",
"return",
"allowed",
".",
"get",
"(",
"self",
".",
"type",
".",
"name",
")",
"except",
":",
"pass",
"# None .type values are okay -- such result in a None gameValue() result",
"return",
"None"
] | identify the corresponding internal SC2 game value for self.type's value | [
"identify",
"the",
"correpsonding",
"internal",
"SC2",
"game",
"value",
"for",
"self",
".",
"type",
"s",
"value"
] | 469623c319c7ab7af799551055839ea3b3f87d54 | https://github.com/ttinies/sc2common/blob/469623c319c7ab7af799551055839ea3b3f87d54/sc2common/containers.py#L86-L93 |
247,980 | ttinies/sc2common | sc2common/containers.py | MapPoint.direct2dDistance | def direct2dDistance(self, point):
"""consider the distance between two mapPoints, ignoring all terrain, pathing issues"""
if not isinstance(point, MapPoint): return 0.0
return ((self.x-point.x)**2 + (self.y-point.y)**2)**(0.5) # simple distance formula | python | def direct2dDistance(self, point):
"""consider the distance between two mapPoints, ignoring all terrain, pathing issues"""
if not isinstance(point, MapPoint): return 0.0
return ((self.x-point.x)**2 + (self.y-point.y)**2)**(0.5) # simple distance formula | [
"def",
"direct2dDistance",
"(",
"self",
",",
"point",
")",
":",
"if",
"not",
"isinstance",
"(",
"point",
",",
"MapPoint",
")",
":",
"return",
"0.0",
"return",
"(",
"(",
"self",
".",
"x",
"-",
"point",
".",
"x",
")",
"**",
"2",
"+",
"(",
"self",
".",
"y",
"-",
"point",
".",
"y",
")",
"**",
"2",
")",
"**",
"(",
"0.5",
")",
"# simple distance formula"
] | consider the distance between two mapPoints, ignoring all terrain, pathing issues | [
"consider",
"the",
"distance",
"between",
"two",
"mapPoints",
"ignoring",
"all",
"terrain",
"pathing",
"issues"
] | 469623c319c7ab7af799551055839ea3b3f87d54 | https://github.com/ttinies/sc2common/blob/469623c319c7ab7af799551055839ea3b3f87d54/sc2common/containers.py#L241-L244 |
247,981 | ttinies/sc2common | sc2common/containers.py | MapPoint.midPoint | def midPoint(self, point):
"""identify the midpoint between two mapPoints"""
x = (self.x + point.x)/2.0
y = (self.y + point.y)/2.0
z = (self.z + point.z)/2.0
return MapPoint(x,y,z) | python | def midPoint(self, point):
"""identify the midpoint between two mapPoints"""
x = (self.x + point.x)/2.0
y = (self.y + point.y)/2.0
z = (self.z + point.z)/2.0
return MapPoint(x,y,z) | [
"def",
"midPoint",
"(",
"self",
",",
"point",
")",
":",
"x",
"=",
"(",
"self",
".",
"x",
"+",
"point",
".",
"x",
")",
"/",
"2.0",
"y",
"=",
"(",
"self",
".",
"y",
"+",
"point",
".",
"y",
")",
"/",
"2.0",
"z",
"=",
"(",
"self",
".",
"z",
"+",
"point",
".",
"z",
")",
"/",
"2.0",
"return",
"MapPoint",
"(",
"x",
",",
"y",
",",
"z",
")"
] | identify the midpoint between two mapPoints | [
"identify",
"the",
"midpoint",
"between",
"two",
"mapPoints"
] | 469623c319c7ab7af799551055839ea3b3f87d54 | https://github.com/ttinies/sc2common/blob/469623c319c7ab7af799551055839ea3b3f87d54/sc2common/containers.py#L246-L251 |
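
Usage sketch for the two geometry helpers above (editorial addition). The MapPoint(x, y, z) constructor signature is inferred from midPoint()'s return statement, so treat it as an assumption.

from sc2common.containers import MapPoint

a = MapPoint(0, 0, 0)
b = MapPoint(3, 4, 0)
print(a.direct2dDistance(b))   # 5.0 -- plain 2D Euclidean distance, terrain ignored
m = a.midPoint(b)
print(m.x, m.y, m.z)           # 1.5 2.0 0.0
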
247,982 | fotonauts/fwissr-python | fwissr/conf.py | merge_conf | def merge_conf(to_hash, other_hash, path=[]):
"merges other_hash into to_hash"
for key in other_hash:
if (key in to_hash and isinstance(to_hash[key], dict)
and isinstance(other_hash[key], dict)):
merge_conf(to_hash[key], other_hash[key], path + [str(key)])
else:
to_hash[key] = other_hash[key]
return to_hash | python | def merge_conf(to_hash, other_hash, path=[]):
"merges other_hash into to_hash"
for key in other_hash:
if (key in to_hash and isinstance(to_hash[key], dict)
and isinstance(other_hash[key], dict)):
merge_conf(to_hash[key], other_hash[key], path + [str(key)])
else:
to_hash[key] = other_hash[key]
return to_hash | [
"def",
"merge_conf",
"(",
"to_hash",
",",
"other_hash",
",",
"path",
"=",
"[",
"]",
")",
":",
"for",
"key",
"in",
"other_hash",
":",
"if",
"(",
"key",
"in",
"to_hash",
"and",
"isinstance",
"(",
"to_hash",
"[",
"key",
"]",
",",
"dict",
")",
"and",
"isinstance",
"(",
"other_hash",
"[",
"key",
"]",
",",
"dict",
")",
")",
":",
"merge_conf",
"(",
"to_hash",
"[",
"key",
"]",
",",
"other_hash",
"[",
"key",
"]",
",",
"path",
"+",
"[",
"str",
"(",
"key",
")",
"]",
")",
"else",
":",
"to_hash",
"[",
"key",
"]",
"=",
"other_hash",
"[",
"key",
"]",
"return",
"to_hash"
] | merges other_hash into to_hash | [
"merges",
"other_hash",
"into",
"to_hash"
] | 4314aa53ca45b4534cd312f6343a88596b4416d4 | https://github.com/fotonauts/fwissr-python/blob/4314aa53ca45b4534cd312f6343a88596b4416d4/fwissr/conf.py#L7-L15 |
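
Self-contained usage sketch (editorial addition): nested dicts are merged recursively, scalar values from other_hash win, and to_hash is modified in place as well as returned.

from fwissr.conf import merge_conf

base = {"db": {"host": "localhost", "port": 5432}, "debug": False}
override = {"db": {"port": 6432}, "debug": True}

merged = merge_conf(base, override)
print(merged)           # {'db': {'host': 'localhost', 'port': 6432}, 'debug': True}
print(merged is base)   # True -- the first argument is mutated
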
247,983 | hmartiniano/faz | faz/task.py | Task.check_inputs | def check_inputs(self):
""" Check for the existence of input files """
self.inputs = self.expand_filenames(self.inputs)
result = False
if len(self.inputs) == 0 or self.files_exist(self.inputs):
result = True
else:
print("Not executing task. Input file(s) do not exist.")
return result | python | def check_inputs(self):
""" Check for the existence of input files """
self.inputs = self.expand_filenames(self.inputs)
result = False
if len(self.inputs) == 0 or self.files_exist(self.inputs):
result = True
else:
print("Not executing task. Input file(s) do not exist.")
return result | [
"def",
"check_inputs",
"(",
"self",
")",
":",
"self",
".",
"inputs",
"=",
"self",
".",
"expand_filenames",
"(",
"self",
".",
"inputs",
")",
"result",
"=",
"False",
"if",
"len",
"(",
"self",
".",
"inputs",
")",
"==",
"0",
"or",
"self",
".",
"files_exist",
"(",
"self",
".",
"inputs",
")",
":",
"result",
"=",
"True",
"else",
":",
"print",
"(",
"\"Not executing task. Input file(s) do not exist.\"",
")",
"return",
"result"
] | Check for the existence of input files | [
"Check",
"for",
"the",
"existence",
"of",
"input",
"files"
] | 36a58c45e8c0718d38cb3c533542c8743e7e7a65 | https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/task.py#L51-L59 |
247,984 | hmartiniano/faz | faz/task.py | Task.check_outputs | def check_outputs(self):
""" Check for the existence of output files """
self.outputs = self.expand_filenames(self.outputs)
result = False
if self.files_exist(self.outputs):
if self.dependencies_are_newer(self.outputs, self.inputs):
result = True
print("Dependencies are newer than outputs.")
print("Running task.")
elif self.force:
print("Dependencies are older than inputs, but 'force' option present.")
print("Running task.")
result = True
else:
print("Dependencies are older than inputs.")
else:
print("No ouput file(s).")
print("Running task.")
result = True
return result | python | def check_outputs(self):
""" Check for the existence of output files """
self.outputs = self.expand_filenames(self.outputs)
result = False
if self.files_exist(self.outputs):
if self.dependencies_are_newer(self.outputs, self.inputs):
result = True
print("Dependencies are newer than outputs.")
print("Running task.")
elif self.force:
print("Dependencies are older than inputs, but 'force' option present.")
print("Running task.")
result = True
else:
print("Dependencies are older than inputs.")
else:
print("No ouput file(s).")
print("Running task.")
result = True
return result | [
"def",
"check_outputs",
"(",
"self",
")",
":",
"self",
".",
"outputs",
"=",
"self",
".",
"expand_filenames",
"(",
"self",
".",
"outputs",
")",
"result",
"=",
"False",
"if",
"self",
".",
"files_exist",
"(",
"self",
".",
"outputs",
")",
":",
"if",
"self",
".",
"dependencies_are_newer",
"(",
"self",
".",
"outputs",
",",
"self",
".",
"inputs",
")",
":",
"result",
"=",
"True",
"print",
"(",
"\"Dependencies are newer than outputs.\"",
")",
"print",
"(",
"\"Running task.\"",
")",
"elif",
"self",
".",
"force",
":",
"print",
"(",
"\"Dependencies are older than inputs, but 'force' option present.\"",
")",
"print",
"(",
"\"Running task.\"",
")",
"result",
"=",
"True",
"else",
":",
"print",
"(",
"\"Dependencies are older than inputs.\"",
")",
"else",
":",
"print",
"(",
"\"No ouput file(s).\"",
")",
"print",
"(",
"\"Running task.\"",
")",
"result",
"=",
"True",
"return",
"result"
] | Check for the existence of output files | [
"Check",
"for",
"the",
"existence",
"of",
"output",
"files"
] | 36a58c45e8c0718d38cb3c533542c8743e7e7a65 | https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/task.py#L61-L80 |
247,985 | hmartiniano/faz | faz/task.py | Task.expand_filenames | def expand_filenames(self, filenames):
"""
Expand a list of filenames using environment variables,
followed by expansion of shell-style wildcards.
"""
results = []
for filename in filenames:
result = filename
if "$" in filename:
template = Template(filename)
result = template.substitute(**self.environment)
logging.debug(
"Expanding {} to {}.".format(filename, result))
if any([pattern in result for pattern in "*[]?"]):
expanded = glob.glob(result)
if len(expanded) > 0:
result = expanded
else:
result = "NONEXISTENT"
if isinstance(result, list):
results.extend(result)
else:
results.append(result)
return sorted(list(set(results))) | python | def expand_filenames(self, filenames):
"""
Expand a list of filenames using environment variables,
followed by expansion of shell-style wildcards.
"""
results = []
for filename in filenames:
result = filename
if "$" in filename:
template = Template(filename)
result = template.substitute(**self.environment)
logging.debug(
"Expanding {} to {}.".format(filename, result))
if any([pattern in result for pattern in "*[]?"]):
expanded = glob.glob(result)
if len(expanded) > 0:
result = expanded
else:
result = "NONEXISTENT"
if isinstance(result, list):
results.extend(result)
else:
results.append(result)
return sorted(list(set(results))) | [
"def",
"expand_filenames",
"(",
"self",
",",
"filenames",
")",
":",
"results",
"=",
"[",
"]",
"for",
"filename",
"in",
"filenames",
":",
"result",
"=",
"filename",
"if",
"\"$\"",
"in",
"filename",
":",
"template",
"=",
"Template",
"(",
"filename",
")",
"result",
"=",
"template",
".",
"substitute",
"(",
"*",
"*",
"self",
".",
"environment",
")",
"logging",
".",
"debug",
"(",
"\"Expanding {} to {}.\"",
".",
"format",
"(",
"filename",
",",
"result",
")",
")",
"if",
"any",
"(",
"[",
"pattern",
"in",
"result",
"for",
"pattern",
"in",
"\"*[]?\"",
"]",
")",
":",
"expanded",
"=",
"glob",
".",
"glob",
"(",
"result",
")",
"if",
"len",
"(",
"expanded",
")",
">",
"0",
":",
"result",
"=",
"expanded",
"else",
":",
"result",
"=",
"\"NONEXISTENT\"",
"if",
"isinstance",
"(",
"result",
",",
"list",
")",
":",
"results",
".",
"extend",
"(",
"result",
")",
"else",
":",
"results",
".",
"append",
"(",
"result",
")",
"return",
"sorted",
"(",
"list",
"(",
"set",
"(",
"results",
")",
")",
")"
] | Expand a list of filenames using environment variables,
followed by expansion of shell-style wildcards. | [
"Expand",
"a",
"list",
"of",
"filenames",
"using",
"environment",
"variables",
"followed",
"by",
"expansion",
"of",
"shell",
"-",
"style",
"wildcards",
"."
] | 36a58c45e8c0718d38cb3c533542c8743e7e7a65 | https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/task.py#L103-L126 |
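
Standalone sketch of the same two-step expansion -- string.Template substitution against the task environment, then shell-style globbing -- without constructing a Task (editorial addition; the environment and patterns are made up).

import glob
from string import Template

environment = {"outdir": "results"}
patterns = ["$outdir/*.csv", "README.md"]

expanded = []
for pattern in patterns:
    if "$" in pattern:
        pattern = Template(pattern).substitute(**environment)    # "$outdir/*.csv" -> "results/*.csv"
    if any(ch in pattern for ch in "*[]?"):
        matches = glob.glob(pattern)
        expanded.extend(matches if matches else ["NONEXISTENT"])  # same sentinel the method uses
    else:
        expanded.append(pattern)

print(sorted(set(expanded)))
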
247,986 | hmartiniano/faz | faz/task.py | Task.files_exist | def files_exist(self, filenames):
""" Check if all files in a given list exist. """
return all([os.path.exists(os.path.abspath(filename)) and os.path.isfile(os.path.abspath(filename))
for filename in filenames]) | python | def files_exist(self, filenames):
""" Check if all files in a given list exist. """
return all([os.path.exists(os.path.abspath(filename)) and os.path.isfile(os.path.abspath(filename))
for filename in filenames]) | [
"def",
"files_exist",
"(",
"self",
",",
"filenames",
")",
":",
"return",
"all",
"(",
"[",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
")",
"for",
"filename",
"in",
"filenames",
"]",
")"
] | Check if all files in a given list exist. | [
"Check",
"if",
"all",
"files",
"in",
"a",
"given",
"list",
"exist",
"."
] | 36a58c45e8c0718d38cb3c533542c8743e7e7a65 | https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/task.py#L128-L131 |
247,987 | hmartiniano/faz | faz/task.py | Task.dependencies_are_newer | def dependencies_are_newer(self, files, dependencies):
"""
For two lists of files, check if any file in the
second list is newer than any file of the first.
"""
dependency_mtimes = [
os.path.getmtime(filename) for filename in dependencies]
file_mtimes = [os.path.getmtime(filename) for filename in files]
result = False
for file_mtime in file_mtimes:
for dependency_mtime in dependency_mtimes:
if dependency_mtime > file_mtime:
result = True
return result | python | def dependencies_are_newer(self, files, dependencies):
"""
For two lists of files, check if any file in the
second list is newer than any file of the first.
"""
dependency_mtimes = [
os.path.getmtime(filename) for filename in dependencies]
file_mtimes = [os.path.getmtime(filename) for filename in files]
result = False
for file_mtime in file_mtimes:
for dependency_mtime in dependency_mtimes:
if dependency_mtime > file_mtime:
result = True
return result | [
"def",
"dependencies_are_newer",
"(",
"self",
",",
"files",
",",
"dependencies",
")",
":",
"dependency_mtimes",
"=",
"[",
"os",
".",
"path",
".",
"getmtime",
"(",
"filename",
")",
"for",
"filename",
"in",
"dependencies",
"]",
"file_mtimes",
"=",
"[",
"os",
".",
"path",
".",
"getmtime",
"(",
"filename",
")",
"for",
"filename",
"in",
"files",
"]",
"result",
"=",
"False",
"for",
"file_mtime",
"in",
"file_mtimes",
":",
"for",
"dependency_mtime",
"in",
"dependency_mtimes",
":",
"if",
"dependency_mtime",
">",
"file_mtime",
":",
"result",
"=",
"True",
"return",
"result"
] | For two lists of files, check if any file in the
second list is newer than any file of the first. | [
"For",
"two",
"lists",
"of",
"files",
"check",
"if",
"any",
"file",
"in",
"the",
"second",
"list",
"is",
"newer",
"than",
"any",
"file",
"of",
"the",
"first",
"."
] | 36a58c45e8c0718d38cb3c533542c8743e7e7a65 | https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/task.py#L133-L146 |
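
Standalone sketch of the mtime comparison the method performs (editorial addition): True means at least one dependency was modified after at least one output, i.e. the task should run again.

import os
import tempfile
import time

def newer(files, dependencies):
    file_mtimes = [os.path.getmtime(f) for f in files]
    dep_mtimes = [os.path.getmtime(d) for d in dependencies]
    return any(d > f for f in file_mtimes for d in dep_mtimes)

with tempfile.TemporaryDirectory() as tmp:
    target = os.path.join(tmp, "out.txt")
    dep = os.path.join(tmp, "in.txt")
    open(target, "w").close()
    open(dep, "w").close()
    now = time.time()
    os.utime(target, (now - 10, now - 10))   # output written 10 seconds ago
    os.utime(dep, (now, now))                # dependency touched just now
    print(newer([target], [dep]))            # True -> rebuild
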
247,988 | hmartiniano/faz | faz/task.py | Task.mktemp_file | def mktemp_file(self):
""" Create a temporary file in the '.faz' directory for
the code to feed to the interpreter. """
if not(os.path.exists(self.__dirname)):
logging.debug("Creating directory {}".format(self.__dirname))
os.mkdir(self.__dirname)
elif not(os.path.isdir(self.__dirname)):
raise TempDirIsFileException(
"There is a file called %s in this directory!!!" %
self.__dirname)
#self.fdesc, self.fname = tempfile.mkstemp(dir=self.__dirname, text=True)
self.f = tempfile.NamedTemporaryFile(dir=self.__dirname, delete=False, mode="wt")
logging.debug("Creating file {}".format(self.f.name)) | python | def mktemp_file(self):
""" Create a temporary file in the '.faz' directory for
the code to feed to the interpreter. """
if not(os.path.exists(self.__dirname)):
logging.debug("Creating directory {}".format(self.__dirname))
os.mkdir(self.__dirname)
elif not(os.path.isdir(self.__dirname)):
raise TempDirIsFileException(
"There is a file called %s in this directory!!!" %
self.__dirname)
#self.fdesc, self.fname = tempfile.mkstemp(dir=self.__dirname, text=True)
self.f = tempfile.NamedTemporaryFile(dir=self.__dirname, delete=False, mode="wt")
logging.debug("Creating file {}".format(self.f.name)) | [
"def",
"mktemp_file",
"(",
"self",
")",
":",
"if",
"not",
"(",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"__dirname",
")",
")",
":",
"logging",
".",
"debug",
"(",
"\"Creating directory {}\"",
".",
"format",
"(",
"self",
".",
"__dirname",
")",
")",
"os",
".",
"mkdir",
"(",
"self",
".",
"__dirname",
")",
"elif",
"not",
"(",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"__dirname",
")",
")",
":",
"raise",
"TempDirIsFileException",
"(",
"\"There is a file called %s in this directory!!!\"",
"%",
"self",
".",
"__dirname",
")",
"#self.fdesc, self.fname = tempfile.mkstemp(dir=self.__dirname, text=True)",
"self",
".",
"f",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"dir",
"=",
"self",
".",
"__dirname",
",",
"delete",
"=",
"False",
",",
"mode",
"=",
"\"wt\"",
")",
"logging",
".",
"debug",
"(",
"\"Creating file {}\"",
".",
"format",
"(",
"self",
".",
"f",
".",
"name",
")",
")"
] | Create a temporary file in the '.faz' directory for
the code to feed to the interpreter. | [
"Create",
"a",
"temporary",
"file",
"in",
"the",
".",
"faz",
"directory",
"for",
"the",
"code",
"to",
"feed",
"to",
"the",
"interpreter",
"."
] | 36a58c45e8c0718d38cb3c533542c8743e7e7a65 | https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/task.py#L182-L194 |
247,989 | edeposit/edeposit.amqp.harvester | src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py | _get_max_page | def _get_max_page(dom):
"""
    Try to guess how many pages are in book listing.
Args:
dom (obj): HTMLElement container of the page with book list.
Returns:
int: Number of pages for given category.
"""
div = dom.find("div", {"class": "razeniKnihListovani"})
if not div:
return 1
# isolate only page numbers from links
links = div[0].find("a")
max_page = filter(
lambda x: "href" in x.params and "pageindex=" in x.params["href"],
links
)
max_page = map(
lambda x: x.params["href"].split("pageindex=")[-1],
max_page
)
max_page = filter(lambda x: x.isdigit(), max_page)
max_page = map(lambda x: int(x), max_page)
if not max_page:
return 1
return max(max_page) | python | def _get_max_page(dom):
"""
    Try to guess how many pages are in book listing.
Args:
dom (obj): HTMLElement container of the page with book list.
Returns:
int: Number of pages for given category.
"""
div = dom.find("div", {"class": "razeniKnihListovani"})
if not div:
return 1
# isolate only page numbers from links
links = div[0].find("a")
max_page = filter(
lambda x: "href" in x.params and "pageindex=" in x.params["href"],
links
)
max_page = map(
lambda x: x.params["href"].split("pageindex=")[-1],
max_page
)
max_page = filter(lambda x: x.isdigit(), max_page)
max_page = map(lambda x: int(x), max_page)
if not max_page:
return 1
return max(max_page) | [
"def",
"_get_max_page",
"(",
"dom",
")",
":",
"div",
"=",
"dom",
".",
"find",
"(",
"\"div\"",
",",
"{",
"\"class\"",
":",
"\"razeniKnihListovani\"",
"}",
")",
"if",
"not",
"div",
":",
"return",
"1",
"# isolate only page numbers from links",
"links",
"=",
"div",
"[",
"0",
"]",
".",
"find",
"(",
"\"a\"",
")",
"max_page",
"=",
"filter",
"(",
"lambda",
"x",
":",
"\"href\"",
"in",
"x",
".",
"params",
"and",
"\"pageindex=\"",
"in",
"x",
".",
"params",
"[",
"\"href\"",
"]",
",",
"links",
")",
"max_page",
"=",
"map",
"(",
"lambda",
"x",
":",
"x",
".",
"params",
"[",
"\"href\"",
"]",
".",
"split",
"(",
"\"pageindex=\"",
")",
"[",
"-",
"1",
"]",
",",
"max_page",
")",
"max_page",
"=",
"filter",
"(",
"lambda",
"x",
":",
"x",
".",
"isdigit",
"(",
")",
",",
"max_page",
")",
"max_page",
"=",
"map",
"(",
"lambda",
"x",
":",
"int",
"(",
"x",
")",
",",
"max_page",
")",
"if",
"not",
"max_page",
":",
"return",
"1",
"return",
"max",
"(",
"max_page",
")"
] | Try to guess how many pages are in book listing.
Args:
dom (obj): HTMLElement container of the page with book list.
Returns:
int: Number of pages for given category. | [
"Try",
"to",
"guess",
"how",
"much",
"pages",
"are",
"in",
"book",
"listing",
"."
] | 38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e | https://github.com/edeposit/edeposit.amqp.harvester/blob/38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e/src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py#L34-L65 |
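
Standalone sketch of the page-number extraction on hypothetical markup (editorial addition): the pagination block is reduced to three links so the href filtering above is easy to trace.

import dhtmlparser

html = '''
<div class="razeniKnihListovani">
  <a href="?pageindex=2">2</a>
  <a href="?pageindex=3">3</a>
  <a href="?sort=name">sort</a>
</div>
'''
dom = dhtmlparser.parseString(html)
links = dom.find("div", {"class": "razeniKnihListovani"})[0].find("a")
pages = [a.params["href"].split("pageindex=")[-1]
         for a in links
         if "href" in a.params and "pageindex=" in a.params["href"]]
pages = [int(p) for p in pages if p.isdigit()]
print(max(pages) if pages else 1)   # 3
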
247,990 | edeposit/edeposit.amqp.harvester | src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py | _parse_book_links | def _parse_book_links(dom):
"""
Parse links to the details about publications from page with book list.
Args:
dom (obj): HTMLElement container of the page with book list.
Returns:
list: List of strings / absolute links to book details.
"""
links = []
picker = lambda x: x.params.get("class", "").startswith("boxProKnihy")
for el in dom.find(None, fn=picker):
book_ref = el.find("a")
if not book_ref or "href" not in book_ref[0].params:
continue
links.append(book_ref[0].params["href"])
return links | python | def _parse_book_links(dom):
"""
Parse links to the details about publications from page with book list.
Args:
dom (obj): HTMLElement container of the page with book list.
Returns:
list: List of strings / absolute links to book details.
"""
links = []
picker = lambda x: x.params.get("class", "").startswith("boxProKnihy")
for el in dom.find(None, fn=picker):
book_ref = el.find("a")
if not book_ref or "href" not in book_ref[0].params:
continue
links.append(book_ref[0].params["href"])
return links | [
"def",
"_parse_book_links",
"(",
"dom",
")",
":",
"links",
"=",
"[",
"]",
"picker",
"=",
"lambda",
"x",
":",
"x",
".",
"params",
".",
"get",
"(",
"\"class\"",
",",
"\"\"",
")",
".",
"startswith",
"(",
"\"boxProKnihy\"",
")",
"for",
"el",
"in",
"dom",
".",
"find",
"(",
"None",
",",
"fn",
"=",
"picker",
")",
":",
"book_ref",
"=",
"el",
".",
"find",
"(",
"\"a\"",
")",
"if",
"not",
"book_ref",
"or",
"\"href\"",
"not",
"in",
"book_ref",
"[",
"0",
"]",
".",
"params",
":",
"continue",
"links",
".",
"append",
"(",
"book_ref",
"[",
"0",
"]",
".",
"params",
"[",
"\"href\"",
"]",
")",
"return",
"links"
] | Parse links to the details about publications from page with book list.
Args:
dom (obj): HTMLElement container of the page with book list.
Returns:
list: List of strings / absolute links to book details. | [
"Parse",
"links",
"to",
"the",
"details",
"about",
"publications",
"from",
"page",
"with",
"book",
"list",
"."
] | 38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e | https://github.com/edeposit/edeposit.amqp.harvester/blob/38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e/src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py#L68-L89 |
247,991 | edeposit/edeposit.amqp.harvester | src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py | get_book_links | def get_book_links(links):
"""
Go thru `links` to categories and return list to all publications in all
given categories.
Args:
links (list): List of strings (absolute links to categories).
Returns:
list: List of strings / absolute links to book details.
"""
book_links = []
for link in links:
data = DOWNER.download(link + "1")
dom = dhtmlparser.parseString(data)
book_links.extend(_parse_book_links(dom))
max_page = _get_max_page(dom)
if max_page == 1:
continue
for i in range(max_page - 1):
data = DOWNER.download(link + str(i + 2))
book_links.extend(
_parse_book_links(
dhtmlparser.parseString(data)
)
)
return book_links | python | def get_book_links(links):
"""
Go thru `links` to categories and return list to all publications in all
given categories.
Args:
links (list): List of strings (absolute links to categories).
Returns:
list: List of strings / absolute links to book details.
"""
book_links = []
for link in links:
data = DOWNER.download(link + "1")
dom = dhtmlparser.parseString(data)
book_links.extend(_parse_book_links(dom))
max_page = _get_max_page(dom)
if max_page == 1:
continue
for i in range(max_page - 1):
data = DOWNER.download(link + str(i + 2))
book_links.extend(
_parse_book_links(
dhtmlparser.parseString(data)
)
)
return book_links | [
"def",
"get_book_links",
"(",
"links",
")",
":",
"book_links",
"=",
"[",
"]",
"for",
"link",
"in",
"links",
":",
"data",
"=",
"DOWNER",
".",
"download",
"(",
"link",
"+",
"\"1\"",
")",
"dom",
"=",
"dhtmlparser",
".",
"parseString",
"(",
"data",
")",
"book_links",
".",
"extend",
"(",
"_parse_book_links",
"(",
"dom",
")",
")",
"max_page",
"=",
"_get_max_page",
"(",
"dom",
")",
"if",
"max_page",
"==",
"1",
":",
"continue",
"for",
"i",
"in",
"range",
"(",
"max_page",
"-",
"1",
")",
":",
"data",
"=",
"DOWNER",
".",
"download",
"(",
"link",
"+",
"str",
"(",
"i",
"+",
"2",
")",
")",
"book_links",
".",
"extend",
"(",
"_parse_book_links",
"(",
"dhtmlparser",
".",
"parseString",
"(",
"data",
")",
")",
")",
"return",
"book_links"
] | Go thru `links` to categories and return list to all publications in all
given categories.
Args:
links (list): List of strings (absolute links to categories).
Returns:
list: List of strings / absolute links to book details. | [
"Go",
"thru",
"links",
"to",
"categories",
"and",
"return",
"list",
"to",
"all",
"publications",
"in",
"all",
"given",
"categories",
"."
] | 38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e | https://github.com/edeposit/edeposit.amqp.harvester/blob/38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e/src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py#L92-L124 |
247,992 | edeposit/edeposit.amqp.harvester | src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py | _parse_authors | def _parse_authors(authors):
"""
Parse informations about authors of the book.
Args:
dom (obj): HTMLElement containing slice of the page with details.
Returns:
list: List of :class:`.Author` objects. Blank if no author \
found.
"""
link = authors.find("a")
link = link[0].params.get("href") if link else None
author_list = _strip_content(authors)
if "(" in author_list:
author_list = author_list.split("(")[0]
if not author_list.strip():
return []
return map(
lambda author: Author(author.strip(), link),
author_list.strip().split(",")
) | python | def _parse_authors(authors):
"""
Parse informations about authors of the book.
Args:
dom (obj): HTMLElement containing slice of the page with details.
Returns:
list: List of :class:`.Author` objects. Blank if no author \
found.
"""
link = authors.find("a")
link = link[0].params.get("href") if link else None
author_list = _strip_content(authors)
if "(" in author_list:
author_list = author_list.split("(")[0]
if not author_list.strip():
return []
return map(
lambda author: Author(author.strip(), link),
author_list.strip().split(",")
) | [
"def",
"_parse_authors",
"(",
"authors",
")",
":",
"link",
"=",
"authors",
".",
"find",
"(",
"\"a\"",
")",
"link",
"=",
"link",
"[",
"0",
"]",
".",
"params",
".",
"get",
"(",
"\"href\"",
")",
"if",
"link",
"else",
"None",
"author_list",
"=",
"_strip_content",
"(",
"authors",
")",
"if",
"\"(\"",
"in",
"author_list",
":",
"author_list",
"=",
"author_list",
".",
"split",
"(",
"\"(\"",
")",
"[",
"0",
"]",
"if",
"not",
"author_list",
".",
"strip",
"(",
")",
":",
"return",
"[",
"]",
"return",
"map",
"(",
"lambda",
"author",
":",
"Author",
"(",
"author",
".",
"strip",
"(",
")",
",",
"link",
")",
",",
"author_list",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\",\"",
")",
")"
] | Parse informations about authors of the book.
Args:
dom (obj): HTMLElement containing slice of the page with details.
Returns:
list: List of :class:`.Author` objects. Blank if no author \
found. | [
"Parse",
"informations",
"about",
"authors",
"of",
"the",
"book",
"."
] | 38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e | https://github.com/edeposit/edeposit.amqp.harvester/blob/38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e/src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py#L146-L171 |
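
Standalone sketch of just the string handling above (editorial addition; the author cell text is made up): the cell is split on commas and anything in parentheses, usually a role note, is dropped. In the real function each stripped name is then wrapped in an Author record together with the detail link.

raw = "Jan Novak, Petra Svobodova (preklad)"
author_list = raw.split("(")[0] if "(" in raw else raw
names = [name.strip() for name in author_list.split(",") if name.strip()]
print(names)   # ['Jan Novak', 'Petra Svobodova']
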
247,993 | edeposit/edeposit.amqp.harvester | src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py | _process_book | def _process_book(link):
"""
Download and parse available informations about book from the publishers
webpages.
Args:
link (str): URL of the book at the publishers webpages.
Returns:
obj: :class:`.Publication` instance with book details.
"""
# download and parse book info
data = DOWNER.download(link)
dom = dhtmlparser.parseString(
utils.handle_encodnig(data)
)
dhtmlparser.makeDoubleLinked(dom)
# some books are without price in expected elements, this will try to get
# it from elsewhere
price = None
try:
price = _strip_content(zapi.get_price(dom))
except UserWarning:
price = dom.find("p", {"class": "vaseCena"})
if price:
price = price[0].getContent().replace(" ", " ")
price = filter(lambda x: x.isdigit(), price.strip())
if price:
price = price[0] + "kč"
else:
price = "-1"
else:
price = "-1"
# required informations
pub = Publication(
title=_strip_content(zapi.get_title(dom)),
authors=_parse_authors(zapi.get_author(dom)),
price=price,
publisher=_strip_content(zapi.get_publisher(dom))
)
# optional informations
pub.optionals.URL = link
pub.optionals.pages = _strip_content(zapi.get_pages(dom))
pub.optionals.pub_date = _strip_content(zapi.get_pub_date(dom))
pub.optionals.ISBN = _strip_content(zapi.get_ISBN(dom))
pub.optionals.binding = _strip_content(zapi.get_binding(dom))
# post checks
if pub.title.startswith("E-kniha:"):
pub.title = pub.title.replace("E-kniha:", "", 1).strip()
pub.optionals.is_ebook = True
if pub.optionals.ISBN:
if " " in pub.optionals.ISBN:
pub.optionals.ISBN = pub.optionals.ISBN.split(" ")[0]
if "(" in pub.optionals.ISBN:
pub.optionals.ISBN = pub.optionals.ISBN.split("(")[0]
return pub | python | def _process_book(link):
"""
Download and parse available informations about book from the publishers
webpages.
Args:
link (str): URL of the book at the publishers webpages.
Returns:
obj: :class:`.Publication` instance with book details.
"""
# download and parse book info
data = DOWNER.download(link)
dom = dhtmlparser.parseString(
utils.handle_encodnig(data)
)
dhtmlparser.makeDoubleLinked(dom)
# some books are without price in expected elements, this will try to get
# it from elsewhere
price = None
try:
price = _strip_content(zapi.get_price(dom))
except UserWarning:
price = dom.find("p", {"class": "vaseCena"})
if price:
price = price[0].getContent().replace(" ", " ")
price = filter(lambda x: x.isdigit(), price.strip())
if price:
price = price[0] + "kč"
else:
price = "-1"
else:
price = "-1"
# required informations
pub = Publication(
title=_strip_content(zapi.get_title(dom)),
authors=_parse_authors(zapi.get_author(dom)),
price=price,
publisher=_strip_content(zapi.get_publisher(dom))
)
# optional informations
pub.optionals.URL = link
pub.optionals.pages = _strip_content(zapi.get_pages(dom))
pub.optionals.pub_date = _strip_content(zapi.get_pub_date(dom))
pub.optionals.ISBN = _strip_content(zapi.get_ISBN(dom))
pub.optionals.binding = _strip_content(zapi.get_binding(dom))
# post checks
if pub.title.startswith("E-kniha:"):
pub.title = pub.title.replace("E-kniha:", "", 1).strip()
pub.optionals.is_ebook = True
if pub.optionals.ISBN:
if " " in pub.optionals.ISBN:
pub.optionals.ISBN = pub.optionals.ISBN.split(" ")[0]
if "(" in pub.optionals.ISBN:
pub.optionals.ISBN = pub.optionals.ISBN.split("(")[0]
return pub | [
"def",
"_process_book",
"(",
"link",
")",
":",
"# download and parse book info",
"data",
"=",
"DOWNER",
".",
"download",
"(",
"link",
")",
"dom",
"=",
"dhtmlparser",
".",
"parseString",
"(",
"utils",
".",
"handle_encodnig",
"(",
"data",
")",
")",
"dhtmlparser",
".",
"makeDoubleLinked",
"(",
"dom",
")",
"# some books are without price in expected elements, this will try to get",
"# it from elsewhere",
"price",
"=",
"None",
"try",
":",
"price",
"=",
"_strip_content",
"(",
"zapi",
".",
"get_price",
"(",
"dom",
")",
")",
"except",
"UserWarning",
":",
"price",
"=",
"dom",
".",
"find",
"(",
"\"p\"",
",",
"{",
"\"class\"",
":",
"\"vaseCena\"",
"}",
")",
"if",
"price",
":",
"price",
"=",
"price",
"[",
"0",
"]",
".",
"getContent",
"(",
")",
".",
"replace",
"(",
"\" \"",
",",
"\" \"",
")",
"price",
"=",
"filter",
"(",
"lambda",
"x",
":",
"x",
".",
"isdigit",
"(",
")",
",",
"price",
".",
"strip",
"(",
")",
")",
"if",
"price",
":",
"price",
"=",
"price",
"[",
"0",
"]",
"+",
"\"kč\"",
"else",
":",
"price",
"=",
"\"-1\"",
"else",
":",
"price",
"=",
"\"-1\"",
"# required informations",
"pub",
"=",
"Publication",
"(",
"title",
"=",
"_strip_content",
"(",
"zapi",
".",
"get_title",
"(",
"dom",
")",
")",
",",
"authors",
"=",
"_parse_authors",
"(",
"zapi",
".",
"get_author",
"(",
"dom",
")",
")",
",",
"price",
"=",
"price",
",",
"publisher",
"=",
"_strip_content",
"(",
"zapi",
".",
"get_publisher",
"(",
"dom",
")",
")",
")",
"# optional informations",
"pub",
".",
"optionals",
".",
"URL",
"=",
"link",
"pub",
".",
"optionals",
".",
"pages",
"=",
"_strip_content",
"(",
"zapi",
".",
"get_pages",
"(",
"dom",
")",
")",
"pub",
".",
"optionals",
".",
"pub_date",
"=",
"_strip_content",
"(",
"zapi",
".",
"get_pub_date",
"(",
"dom",
")",
")",
"pub",
".",
"optionals",
".",
"ISBN",
"=",
"_strip_content",
"(",
"zapi",
".",
"get_ISBN",
"(",
"dom",
")",
")",
"pub",
".",
"optionals",
".",
"binding",
"=",
"_strip_content",
"(",
"zapi",
".",
"get_binding",
"(",
"dom",
")",
")",
"# post checks",
"if",
"pub",
".",
"title",
".",
"startswith",
"(",
"\"E-kniha:\"",
")",
":",
"pub",
".",
"title",
"=",
"pub",
".",
"title",
".",
"replace",
"(",
"\"E-kniha:\"",
",",
"\"\"",
",",
"1",
")",
".",
"strip",
"(",
")",
"pub",
".",
"optionals",
".",
"is_ebook",
"=",
"True",
"if",
"pub",
".",
"optionals",
".",
"ISBN",
":",
"if",
"\" \"",
"in",
"pub",
".",
"optionals",
".",
"ISBN",
":",
"pub",
".",
"optionals",
".",
"ISBN",
"=",
"pub",
".",
"optionals",
".",
"ISBN",
".",
"split",
"(",
"\" \"",
")",
"[",
"0",
"]",
"if",
"\"(\"",
"in",
"pub",
".",
"optionals",
".",
"ISBN",
":",
"pub",
".",
"optionals",
".",
"ISBN",
"=",
"pub",
".",
"optionals",
".",
"ISBN",
".",
"split",
"(",
"\"(\"",
")",
"[",
"0",
"]",
"return",
"pub"
] | Download and parse available informations about book from the publishers
webpages.
Args:
link (str): URL of the book at the publishers webpages.
Returns:
obj: :class:`.Publication` instance with book details. | [
"Download",
"and",
"parse",
"available",
"informations",
"about",
"book",
"from",
"the",
"publishers",
"webpages",
"."
] | 38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e | https://github.com/edeposit/edeposit.amqp.harvester/blob/38cb87ccdf6bf2f550a98460d0a329c4b9dc8e2e/src/edeposit/amqp/harvester/scrappers/zonerpress_cz/__init__.py#L174-L238 |
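A sketch of how the `_process_book` entry above might be driven. The URL is an assumption; in the scraper, detail-page links come from the publisher's listing pages, and the code is Python 2.

    # Hypothetical driver loop for the entry above.
    for link in ["http://www.zonerpress.cz/nejaka-kniha"]:      # URL is an assumption
        pub = _process_book(link)       # downloads and parses one detail page
        print pub.title, pub.price, pub.optionals.ISBN          # Python 2 print statement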
247,994 | rorr73/LifeSOSpy | lifesospy/response.py | Response.parse | def parse(text) -> Optional['Response']:
"""Parse response into an instance of the appropriate child class."""
# Trim the start and end markers, and ensure only lowercase is used
if text.startswith(MARKER_START) and text.endswith(MARKER_END):
text = text[1:len(text)-1].lower()
# No-op; can just ignore these
if not text:
return None
if text.startswith(CMD_DATETIME):
return DateTimeResponse(text)
elif text.startswith(CMD_OPMODE):
return OpModeResponse(text)
elif text.startswith(CMD_DEVBYIDX_PREFIX):
if RESPONSE_ERROR == text[2:] or text[2:4] == '00':
return DeviceNotFoundResponse(text)
return DeviceInfoResponse(text)
elif text.startswith(CMD_DEVICE_PREFIX):
action = next((a for a in [ACTION_ADD, ACTION_DEL, ACTION_SET]
if a == text[2:3]), ACTION_NONE)
args = text[2+len(action):]
if RESPONSE_ERROR == args:
return DeviceNotFoundResponse(text)
elif action == ACTION_ADD:
if not args:
return DeviceAddingResponse(text)
return DeviceAddedResponse(text)
elif action == ACTION_SET:
return DeviceChangedResponse(text)
elif action == ACTION_DEL:
return DeviceDeletedResponse(text)
else:
return DeviceInfoResponse(text)
elif text.startswith(CMD_CLEAR_STATUS):
return ClearedStatusResponse(text)
elif text.startswith(CMD_ROMVER):
return ROMVersionResponse(text)
elif text.startswith(CMD_EXIT_DELAY):
return ExitDelayResponse(text)
elif text.startswith(CMD_ENTRY_DELAY):
return EntryDelayResponse(text)
elif text.startswith(CMD_SWITCH_PREFIX) and is_ascii_hex(text[1:2]):
return SwitchResponse(text)
elif text.startswith(CMD_EVENT_LOG):
if RESPONSE_ERROR == text[2:]:
return EventLogNotFoundResponse(text)
return EventLogResponse(text)
elif text.startswith(CMD_SENSOR_LOG):
if RESPONSE_ERROR == text[2:]:
return SensorLogNotFoundResponse(text)
return SensorLogResponse(text)
else:
raise ValueError("Response not recognised: " + text) | python | def parse(text) -> Optional['Response']:
"""Parse response into an instance of the appropriate child class."""
# Trim the start and end markers, and ensure only lowercase is used
if text.startswith(MARKER_START) and text.endswith(MARKER_END):
text = text[1:len(text)-1].lower()
# No-op; can just ignore these
if not text:
return None
if text.startswith(CMD_DATETIME):
return DateTimeResponse(text)
elif text.startswith(CMD_OPMODE):
return OpModeResponse(text)
elif text.startswith(CMD_DEVBYIDX_PREFIX):
if RESPONSE_ERROR == text[2:] or text[2:4] == '00':
return DeviceNotFoundResponse(text)
return DeviceInfoResponse(text)
elif text.startswith(CMD_DEVICE_PREFIX):
action = next((a for a in [ACTION_ADD, ACTION_DEL, ACTION_SET]
if a == text[2:3]), ACTION_NONE)
args = text[2+len(action):]
if RESPONSE_ERROR == args:
return DeviceNotFoundResponse(text)
elif action == ACTION_ADD:
if not args:
return DeviceAddingResponse(text)
return DeviceAddedResponse(text)
elif action == ACTION_SET:
return DeviceChangedResponse(text)
elif action == ACTION_DEL:
return DeviceDeletedResponse(text)
else:
return DeviceInfoResponse(text)
elif text.startswith(CMD_CLEAR_STATUS):
return ClearedStatusResponse(text)
elif text.startswith(CMD_ROMVER):
return ROMVersionResponse(text)
elif text.startswith(CMD_EXIT_DELAY):
return ExitDelayResponse(text)
elif text.startswith(CMD_ENTRY_DELAY):
return EntryDelayResponse(text)
elif text.startswith(CMD_SWITCH_PREFIX) and is_ascii_hex(text[1:2]):
return SwitchResponse(text)
elif text.startswith(CMD_EVENT_LOG):
if RESPONSE_ERROR == text[2:]:
return EventLogNotFoundResponse(text)
return EventLogResponse(text)
elif text.startswith(CMD_SENSOR_LOG):
if RESPONSE_ERROR == text[2:]:
return SensorLogNotFoundResponse(text)
return SensorLogResponse(text)
else:
raise ValueError("Response not recognised: " + text) | [
"def",
"parse",
"(",
"text",
")",
"->",
"Optional",
"[",
"'Response'",
"]",
":",
"# Trim the start and end markers, and ensure only lowercase is used",
"if",
"text",
".",
"startswith",
"(",
"MARKER_START",
")",
"and",
"text",
".",
"endswith",
"(",
"MARKER_END",
")",
":",
"text",
"=",
"text",
"[",
"1",
":",
"len",
"(",
"text",
")",
"-",
"1",
"]",
".",
"lower",
"(",
")",
"# No-op; can just ignore these",
"if",
"not",
"text",
":",
"return",
"None",
"if",
"text",
".",
"startswith",
"(",
"CMD_DATETIME",
")",
":",
"return",
"DateTimeResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_OPMODE",
")",
":",
"return",
"OpModeResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_DEVBYIDX_PREFIX",
")",
":",
"if",
"RESPONSE_ERROR",
"==",
"text",
"[",
"2",
":",
"]",
"or",
"text",
"[",
"2",
":",
"4",
"]",
"==",
"'00'",
":",
"return",
"DeviceNotFoundResponse",
"(",
"text",
")",
"return",
"DeviceInfoResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_DEVICE_PREFIX",
")",
":",
"action",
"=",
"next",
"(",
"(",
"a",
"for",
"a",
"in",
"[",
"ACTION_ADD",
",",
"ACTION_DEL",
",",
"ACTION_SET",
"]",
"if",
"a",
"==",
"text",
"[",
"2",
":",
"3",
"]",
")",
",",
"ACTION_NONE",
")",
"args",
"=",
"text",
"[",
"2",
"+",
"len",
"(",
"action",
")",
":",
"]",
"if",
"RESPONSE_ERROR",
"==",
"args",
":",
"return",
"DeviceNotFoundResponse",
"(",
"text",
")",
"elif",
"action",
"==",
"ACTION_ADD",
":",
"if",
"not",
"args",
":",
"return",
"DeviceAddingResponse",
"(",
"text",
")",
"return",
"DeviceAddedResponse",
"(",
"text",
")",
"elif",
"action",
"==",
"ACTION_SET",
":",
"return",
"DeviceChangedResponse",
"(",
"text",
")",
"elif",
"action",
"==",
"ACTION_DEL",
":",
"return",
"DeviceDeletedResponse",
"(",
"text",
")",
"else",
":",
"return",
"DeviceInfoResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_CLEAR_STATUS",
")",
":",
"return",
"ClearedStatusResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_ROMVER",
")",
":",
"return",
"ROMVersionResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_EXIT_DELAY",
")",
":",
"return",
"ExitDelayResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_ENTRY_DELAY",
")",
":",
"return",
"EntryDelayResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_SWITCH_PREFIX",
")",
"and",
"is_ascii_hex",
"(",
"text",
"[",
"1",
":",
"2",
"]",
")",
":",
"return",
"SwitchResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_EVENT_LOG",
")",
":",
"if",
"RESPONSE_ERROR",
"==",
"text",
"[",
"2",
":",
"]",
":",
"return",
"EventLogNotFoundResponse",
"(",
"text",
")",
"return",
"EventLogResponse",
"(",
"text",
")",
"elif",
"text",
".",
"startswith",
"(",
"CMD_SENSOR_LOG",
")",
":",
"if",
"RESPONSE_ERROR",
"==",
"text",
"[",
"2",
":",
"]",
":",
"return",
"SensorLogNotFoundResponse",
"(",
"text",
")",
"return",
"SensorLogResponse",
"(",
"text",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Response not recognised: \"",
"+",
"text",
")"
] | Parse response into an instance of the appropriate child class. | [
"Parse",
"response",
"into",
"an",
"instance",
"of",
"the",
"appropriate",
"child",
"class",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/response.py#L41-L106 |
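A dispatch sketch for the `Response.parse` entry above. The raw payload is an assumption built from the module's own marker and command constants (assumed to be in scope); whether a given payload constructs cleanly depends on the concrete response class's parser.

    # Hypothetical dispatch of a raw reply from the LifeSOS base unit.
    raw = MARKER_START + CMD_OPMODE + "2" + MARKER_END   # payload value is an assumption
    try:
        response = Response.parse(raw)
    except ValueError:
        response = None                                   # unrecognised reply
    if response is not None:
        print(type(response).__name__)                    # e.g. "OpModeResponse"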
247,995 | rorr73/LifeSOSpy | lifesospy/response.py | DeviceInfoResponse.is_closed | def is_closed(self) -> Optional[bool]:
"""For Magnet Sensor; True if Closed, False if Open."""
if self._device_type is not None and self._device_type == DeviceType.DoorMagnet:
return bool(self._current_status & 0x01)
return None | python | def is_closed(self) -> Optional[bool]:
"""For Magnet Sensor; True if Closed, False if Open."""
if self._device_type is not None and self._device_type == DeviceType.DoorMagnet:
return bool(self._current_status & 0x01)
return None | [
"def",
"is_closed",
"(",
"self",
")",
"->",
"Optional",
"[",
"bool",
"]",
":",
"if",
"self",
".",
"_device_type",
"is",
"not",
"None",
"and",
"self",
".",
"_device_type",
"==",
"DeviceType",
".",
"DoorMagnet",
":",
"return",
"bool",
"(",
"self",
".",
"_current_status",
"&",
"0x01",
")",
"return",
"None"
] | For Magnet Sensor; True if Closed, False if Open. | [
"For",
"Magnet",
"Sensor",
";",
"True",
"if",
"Closed",
"False",
"if",
"Open",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/response.py#L333-L337 |
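A short sketch for the `is_closed` property above; `resp` is assumed to be a `DeviceInfoResponse`, and the property is only meaningful for door-magnet devices.

    # Hypothetical check; `resp` is an assumption.
    if resp.is_closed is None:
        print("Not a door magnet")
    elif not resp.is_closed:
        print("Door is open")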
247,996 | rorr73/LifeSOSpy | lifesospy/response.py | DeviceInfoResponse.rssi_bars | def rssi_bars(self) -> int:
"""Received Signal Strength Indication, from 0 to 4 bars."""
rssi_db = self.rssi_db
if rssi_db < 45:
return 0
elif rssi_db < 60:
return 1
elif rssi_db < 75:
return 2
elif rssi_db < 90:
return 3
return 4 | python | def rssi_bars(self) -> int:
"""Received Signal Strength Indication, from 0 to 4 bars."""
rssi_db = self.rssi_db
if rssi_db < 45:
return 0
elif rssi_db < 60:
return 1
elif rssi_db < 75:
return 2
elif rssi_db < 90:
return 3
return 4 | [
"def",
"rssi_bars",
"(",
"self",
")",
"->",
"int",
":",
"rssi_db",
"=",
"self",
".",
"rssi_db",
"if",
"rssi_db",
"<",
"45",
":",
"return",
"0",
"elif",
"rssi_db",
"<",
"60",
":",
"return",
"1",
"elif",
"rssi_db",
"<",
"75",
":",
"return",
"2",
"elif",
"rssi_db",
"<",
"90",
":",
"return",
"3",
"return",
"4"
] | Received Signal Strength Indication, from 0 to 4 bars. | [
"Received",
"Signal",
"Strength",
"Indication",
"from",
"0",
"to",
"4",
"bars",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/response.py#L350-L361 |
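The thresholds in the `rssi_bars` entry above map received signal strength to bars as follows: an `rssi_db` of 0-44 gives 0 bars, 45-59 gives 1, 60-74 gives 2, 75-89 gives 3, and 90 or above gives 4.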
247,997 | rorr73/LifeSOSpy | lifesospy/response.py | EventLogResponse.zone | def zone(self) -> Optional[str]:
"""Zone the device is assigned to."""
if self._device_category == DC_BASEUNIT:
return None
return '{:02x}-{:02x}'.format(self._group_number, self._unit_number) | python | def zone(self) -> Optional[str]:
"""Zone the device is assigned to."""
if self._device_category == DC_BASEUNIT:
return None
return '{:02x}-{:02x}'.format(self._group_number, self._unit_number) | [
"def",
"zone",
"(",
"self",
")",
"->",
"Optional",
"[",
"str",
"]",
":",
"if",
"self",
".",
"_device_category",
"==",
"DC_BASEUNIT",
":",
"return",
"None",
"return",
"'{:02x}-{:02x}'",
".",
"format",
"(",
"self",
".",
"_group_number",
",",
"self",
".",
"_unit_number",
")"
] | Zone the device is assigned to. | [
"Zone",
"the",
"device",
"is",
"assigned",
"to",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/response.py#L926-L930 |
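A worked example for the `zone` property above: for a non-base-unit device with group number 0x01 and unit number 0x0a, the format string produces the zone "01-0a"; for a base-unit entry the property returns None.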
247,998 | ploneintranet/ploneintranet.workspace | src/ploneintranet/workspace/subscribers.py | workspace_state_changed | def workspace_state_changed(ob, event):
"""
when a workspace is made 'open', we need to
give all intranet users the 'Guest' role
equally, when the workspace is not open, we need
to remove the role again
"""
workspace = event.object
roles = ['Guest', ]
if event.new_state.id == 'open':
api.group.grant_roles(
groupname=INTRANET_USERS_GROUP_ID,
obj=workspace,
roles=roles,
)
workspace.reindexObjectSecurity()
elif event.old_state.id == 'open':
api.group.revoke_roles(
groupname=INTRANET_USERS_GROUP_ID,
obj=workspace,
roles=roles,
)
workspace.reindexObjectSecurity() | python | def workspace_state_changed(ob, event):
"""
when a workspace is made 'open', we need to
give all intranet users the 'Guest' role
equally, when the workspace is not open, we need
to remove the role again
"""
workspace = event.object
roles = ['Guest', ]
if event.new_state.id == 'open':
api.group.grant_roles(
groupname=INTRANET_USERS_GROUP_ID,
obj=workspace,
roles=roles,
)
workspace.reindexObjectSecurity()
elif event.old_state.id == 'open':
api.group.revoke_roles(
groupname=INTRANET_USERS_GROUP_ID,
obj=workspace,
roles=roles,
)
workspace.reindexObjectSecurity() | [
"def",
"workspace_state_changed",
"(",
"ob",
",",
"event",
")",
":",
"workspace",
"=",
"event",
".",
"object",
"roles",
"=",
"[",
"'Guest'",
",",
"]",
"if",
"event",
".",
"new_state",
".",
"id",
"==",
"'open'",
":",
"api",
".",
"group",
".",
"grant_roles",
"(",
"groupname",
"=",
"INTRANET_USERS_GROUP_ID",
",",
"obj",
"=",
"workspace",
",",
"roles",
"=",
"roles",
",",
")",
"workspace",
".",
"reindexObjectSecurity",
"(",
")",
"elif",
"event",
".",
"old_state",
".",
"id",
"==",
"'open'",
":",
"api",
".",
"group",
".",
"revoke_roles",
"(",
"groupname",
"=",
"INTRANET_USERS_GROUP_ID",
",",
"obj",
"=",
"workspace",
",",
"roles",
"=",
"roles",
",",
")",
"workspace",
".",
"reindexObjectSecurity",
"(",
")"
] | when a workspace is made 'open', we need to
give all intranet users the 'Guest' role
equally, when the workspace is not open, we need
to remove the role again | [
"when",
"a",
"workspace",
"is",
"made",
"open",
"we",
"need",
"to",
"give",
"all",
"intranet",
"users",
"the",
"Guest",
"role"
] | a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba | https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/subscribers.py#L13-L36 |
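A sketch of how the `workspace_state_changed` subscriber above is typically triggered. The transition name is an assumption; the handler keys off the resulting state id 'open', and the workspace object is assumed to exist.

    # Hypothetical trigger for the entry above.
    from plone import api
    api.content.transition(obj=workspace, transition='open')   # transition name is an assumption
    # the after-transition event fires and the handler grants the 'Guest' role
    # on the workspace to the intranet users group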
247,999 | ploneintranet/ploneintranet.workspace | src/ploneintranet/workspace/subscribers.py | workspace_added | def workspace_added(ob, event):
"""
when a workspace is created, we add the creator to
the admin group. We then setup our placeful workflow
"""
# Whoever creates the workspace should be added as an Admin
creator = ob.Creator()
IWorkspace(ob).add_to_team(
user=creator,
groups=set(['Admins']),
)
# Configure our placeful workflow
cmfpw = 'CMFPlacefulWorkflow'
ob.manage_addProduct[cmfpw].manage_addWorkflowPolicyConfig()
# Set the policy for the config
pc = getattr(ob, WorkflowPolicyConfig_id)
pc.setPolicyIn('')
pc.setPolicyBelow('ploneintranet_policy') | python | def workspace_added(ob, event):
"""
when a workspace is created, we add the creator to
the admin group. We then setup our placeful workflow
"""
# Whoever creates the workspace should be added as an Admin
creator = ob.Creator()
IWorkspace(ob).add_to_team(
user=creator,
groups=set(['Admins']),
)
# Configure our placeful workflow
cmfpw = 'CMFPlacefulWorkflow'
ob.manage_addProduct[cmfpw].manage_addWorkflowPolicyConfig()
# Set the policy for the config
pc = getattr(ob, WorkflowPolicyConfig_id)
pc.setPolicyIn('')
pc.setPolicyBelow('ploneintranet_policy') | [
"def",
"workspace_added",
"(",
"ob",
",",
"event",
")",
":",
"# Whoever creates the workspace should be added as an Admin",
"creator",
"=",
"ob",
".",
"Creator",
"(",
")",
"IWorkspace",
"(",
"ob",
")",
".",
"add_to_team",
"(",
"user",
"=",
"creator",
",",
"groups",
"=",
"set",
"(",
"[",
"'Admins'",
"]",
")",
",",
")",
"# Configure our placeful workflow",
"cmfpw",
"=",
"'CMFPlacefulWorkflow'",
"ob",
".",
"manage_addProduct",
"[",
"cmfpw",
"]",
".",
"manage_addWorkflowPolicyConfig",
"(",
")",
"# Set the policy for the config",
"pc",
"=",
"getattr",
"(",
"ob",
",",
"WorkflowPolicyConfig_id",
")",
"pc",
".",
"setPolicyIn",
"(",
"''",
")",
"pc",
".",
"setPolicyBelow",
"(",
"'ploneintranet_policy'",
")"
] | when a workspace is created, we add the creator to
the admin group. We then setup our placeful workflow | [
"when",
"a",
"workspace",
"is",
"created",
"we",
"add",
"the",
"creator",
"to",
"the",
"admin",
"group",
".",
"We",
"then",
"setup",
"our",
"placeful",
"workflow"
] | a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba | https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/subscribers.py#L39-L59 |
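A creation sketch for the `workspace_added` subscriber above. The container and the portal-type name are assumptions; the handler runs when the workspace object is added to its container.

    # Hypothetical creation that fires the added-event handler above.
    from plone import api
    portal = api.portal.get()
    workspace = api.content.create(
        container=portal['workspaces'],                    # container is an assumption
        type='ploneintranet.workspace.workspacefolder',    # portal type name is an assumption
        title='My Workspace',
    )
    # the creator now sits in the workspace's 'Admins' group and the
    # 'ploneintranet_policy' placeful workflow applies below the workspace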