repo (string, 7-54 chars) | path (string, 4-192 chars) | url (string, 87-284 chars) | code (string, 78-104k chars) | code_tokens (list) | docstring (string, 1-46.9k chars) | docstring_tokens (list) | language (string, 1 distinct value) | partition (string, 3 distinct values) |
---|---|---|---|---|---|---|---|---|
wandb/client
|
wandb/wandb_torch.py
|
https://github.com/wandb/client/blob/7d08954ed5674fee223cd85ed0d8518fe47266b2/wandb/wandb_torch.py#L78-L108
|
def add_log_hooks_to_pytorch_module(self, module, name=None, prefix='', log_parameters=True, log_gradients=True, log_freq=0):
""" This instuments hooks into the pytorch module
log_parameters - log parameters after a forward pass
log_gradients - log gradients after a backward pass
log_freq - log gradients/parameters every N batches
"""
if name is not None:
prefix = prefix + name
if log_parameters:
def parameter_log_hook(module, input_, output, log_track):
if not log_track_update(log_track):
return
for name, parameter in module.named_parameters():
# for pytorch 0.3 Variables
if isinstance(parameter, torch.autograd.Variable):
data = parameter.data
else:
data = parameter
self.log_tensor_stats(
data.cpu(), 'parameters/' + prefix + name)
log_track_params = log_track_init(log_freq)
module.register_forward_hook(
lambda mod, inp, outp: parameter_log_hook(mod, inp, outp, log_track_params))
if log_gradients:
for name, parameter in module.named_parameters():
if parameter.requires_grad:
log_track_grad = log_track_init(log_freq)
self._hook_variable_gradient_stats(
parameter, 'gradients/' + prefix + name, log_track_grad)
|
[
"def",
"add_log_hooks_to_pytorch_module",
"(",
"self",
",",
"module",
",",
"name",
"=",
"None",
",",
"prefix",
"=",
"''",
",",
"log_parameters",
"=",
"True",
",",
"log_gradients",
"=",
"True",
",",
"log_freq",
"=",
"0",
")",
":",
"if",
"name",
"is",
"not",
"None",
":",
"prefix",
"=",
"prefix",
"+",
"name",
"if",
"log_parameters",
":",
"def",
"parameter_log_hook",
"(",
"module",
",",
"input_",
",",
"output",
",",
"log_track",
")",
":",
"if",
"not",
"log_track_update",
"(",
"log_track",
")",
":",
"return",
"for",
"name",
",",
"parameter",
"in",
"module",
".",
"named_parameters",
"(",
")",
":",
"# for pytorch 0.3 Variables",
"if",
"isinstance",
"(",
"parameter",
",",
"torch",
".",
"autograd",
".",
"Variable",
")",
":",
"data",
"=",
"parameter",
".",
"data",
"else",
":",
"data",
"=",
"parameter",
"self",
".",
"log_tensor_stats",
"(",
"data",
".",
"cpu",
"(",
")",
",",
"'parameters/'",
"+",
"prefix",
"+",
"name",
")",
"log_track_params",
"=",
"log_track_init",
"(",
"log_freq",
")",
"module",
".",
"register_forward_hook",
"(",
"lambda",
"mod",
",",
"inp",
",",
"outp",
":",
"parameter_log_hook",
"(",
"mod",
",",
"inp",
",",
"outp",
",",
"log_track_params",
")",
")",
"if",
"log_gradients",
":",
"for",
"name",
",",
"parameter",
"in",
"module",
".",
"named_parameters",
"(",
")",
":",
"if",
"parameter",
".",
"requires_grad",
":",
"log_track_grad",
"=",
"log_track_init",
"(",
"log_freq",
")",
"self",
".",
"_hook_variable_gradient_stats",
"(",
"parameter",
",",
"'gradients/'",
"+",
"prefix",
"+",
"name",
",",
"log_track_grad",
")"
] |
This instruments hooks into the pytorch module
log_parameters - log parameters after a forward pass
log_gradients - log gradients after a backward pass
log_freq - log gradients/parameters every N batches
|
[
"This",
"instuments",
"hooks",
"into",
"the",
"pytorch",
"module",
"log_parameters",
"-",
"log",
"parameters",
"after",
"a",
"forward",
"pass",
"log_gradients",
"-",
"log",
"gradients",
"after",
"a",
"backward",
"pass",
"log_freq",
"-",
"log",
"gradients",
"/",
"parameters",
"every",
"N",
"batches"
] |
python
|
train
|
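A hedged usage sketch for the wandb row above: in application code these parameter/gradient hooks are normally installed through wandb.watch rather than by calling add_log_hooks_to_pytorch_module directly. The project name and model below are illustrative placeholders.
import torch.nn as nn
import wandb
run = wandb.init(project="hooks-demo")  # hypothetical project name
model = nn.Linear(10, 2)
# Installs parameter/gradient logging hooks like the ones defined above,
# logging every 10 batches.
wandb.watch(model, log="all", log_freq=10)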
AndrewAnnex/SpiceyPy
|
spiceypy/spiceypy.py
|
https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L12130-L12150
|
def spkopn(filename, ifname, ncomch):
"""
Create a new SPK file, returning the handle of the opened file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkopn_c.html
:param filename: The name of the new SPK file to be created.
:type filename: str
:param ifname: The internal filename for the SPK file.
:type ifname: str
:param ncomch: The number of characters to reserve for comments.
:type ncomch: int
:return: The handle of the opened SPK file.
:rtype: int
"""
filename = stypes.stringToCharP(filename)
ifname = stypes.stringToCharP(ifname)
ncomch = ctypes.c_int(ncomch)
handle = ctypes.c_int()
libspice.spkopn_c(filename, ifname, ncomch, ctypes.byref(handle))
return handle.value
|
[
"def",
"spkopn",
"(",
"filename",
",",
"ifname",
",",
"ncomch",
")",
":",
"filename",
"=",
"stypes",
".",
"stringToCharP",
"(",
"filename",
")",
"ifname",
"=",
"stypes",
".",
"stringToCharP",
"(",
"ifname",
")",
"ncomch",
"=",
"ctypes",
".",
"c_int",
"(",
"ncomch",
")",
"handle",
"=",
"ctypes",
".",
"c_int",
"(",
")",
"libspice",
".",
"spkopn_c",
"(",
"filename",
",",
"ifname",
",",
"ncomch",
",",
"ctypes",
".",
"byref",
"(",
"handle",
")",
")",
"return",
"handle",
".",
"value"
] |
Create a new SPK file, returning the handle of the opened file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkopn_c.html
:param filename: The name of the new SPK file to be created.
:type filename: str
:param ifname: The internal filename for the SPK file.
:type ifname: str
:param ncomch: The number of characters to reserve for comments.
:type ncomch: int
:return: The handle of the opened SPK file.
:rtype: int
|
[
"Create",
"a",
"new",
"SPK",
"file",
"returning",
"the",
"handle",
"of",
"the",
"opened",
"file",
"."
] |
python
|
train
|
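A minimal usage sketch for spkopn above, assuming SpiceyPy is installed; the file name, internal name, and comment-area size are placeholders.
import spiceypy
# Create a new SPK kernel with room for 5000 comment characters.
handle = spiceypy.spkopn("new_kernel.bsp", "illustrative internal name", 5000)
# ... SPK segments would be written here with the spkw* family of routines ...
# Close the kernel so the file is finalized.
spiceypy.spkcls(handle)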
eddiejessup/spatious
|
spatious/vector.py
|
https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L185-L216
|
def sphere_pick_polar(d, n=1, rng=None):
"""Return vectors uniformly picked on the unit sphere.
Vectors are in a polar representation.
Parameters
----------
d: float
The number of dimensions of the space in which the sphere lives.
n: integer
Number of samples to pick.
Returns
-------
r: array, shape (n, d)
Sample vectors.
"""
if rng is None:
rng = np.random
a = np.empty([n, d])
if d == 1:
a[:, 0] = rng.randint(2, size=n) * 2 - 1
elif d == 2:
a[:, 0] = 1.0
a[:, 1] = rng.uniform(-np.pi, +np.pi, n)
elif d == 3:
u, v = rng.uniform(0.0, 1.0, (2, n))
a[:, 0] = 1.0
a[:, 1] = np.arccos(2.0 * v - 1.0)
a[:, 2] = 2.0 * np.pi * u
else:
raise Exception('Invalid vector for polar representation')
return a
|
[
"def",
"sphere_pick_polar",
"(",
"d",
",",
"n",
"=",
"1",
",",
"rng",
"=",
"None",
")",
":",
"if",
"rng",
"is",
"None",
":",
"rng",
"=",
"np",
".",
"random",
"a",
"=",
"np",
".",
"empty",
"(",
"[",
"n",
",",
"d",
"]",
")",
"if",
"d",
"==",
"1",
":",
"a",
"[",
":",
",",
"0",
"]",
"=",
"rng",
".",
"randint",
"(",
"2",
",",
"size",
"=",
"n",
")",
"*",
"2",
"-",
"1",
"elif",
"d",
"==",
"2",
":",
"a",
"[",
":",
",",
"0",
"]",
"=",
"1.0",
"a",
"[",
":",
",",
"1",
"]",
"=",
"rng",
".",
"uniform",
"(",
"-",
"np",
".",
"pi",
",",
"+",
"np",
".",
"pi",
",",
"n",
")",
"elif",
"d",
"==",
"3",
":",
"u",
",",
"v",
"=",
"rng",
".",
"uniform",
"(",
"0.0",
",",
"1.0",
",",
"(",
"2",
",",
"n",
")",
")",
"a",
"[",
":",
",",
"0",
"]",
"=",
"1.0",
"a",
"[",
":",
",",
"1",
"]",
"=",
"np",
".",
"arccos",
"(",
"2.0",
"*",
"v",
"-",
"1.0",
")",
"a",
"[",
":",
",",
"2",
"]",
"=",
"2.0",
"*",
"np",
".",
"pi",
"*",
"u",
"else",
":",
"raise",
"Exception",
"(",
"'Invalid vector for polar representation'",
")",
"return",
"a"
] |
Return vectors uniformly picked on the unit sphere.
Vectors are in a polar representation.
Parameters
----------
d: float
The number of dimensions of the space in which the sphere lives.
n: integer
Number of samples to pick.
Returns
-------
r: array, shape (n, d)
Sample vectors.
|
[
"Return",
"vectors",
"uniformly",
"picked",
"on",
"the",
"unit",
"sphere",
".",
"Vectors",
"are",
"in",
"a",
"polar",
"representation",
"."
] |
python
|
train
|
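A short usage sketch for sphere_pick_polar above, assuming the import path implied by the path column (spatious/vector.py).
import numpy as np
from spatious.vector import sphere_pick_polar
# Draw five polar-coordinate samples on the unit sphere in three dimensions.
rng = np.random.RandomState(0)
points = sphere_pick_polar(d=3, n=5, rng=rng)
print(points.shape)  # (5, 3): radius, polar angle, azimuthal angle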
GoogleCloudPlatform/datastore-ndb-python
|
ndb/tasklets.py
|
https://github.com/GoogleCloudPlatform/datastore-ndb-python/blob/cf4cab3f1f69cd04e1a9229871be466b53729f3f/ndb/tasklets.py#L214-L227
|
def _init_flow_exceptions():
"""Internal helper to initialize _flow_exceptions.
This automatically adds webob.exc.HTTPException, if it can be imported.
"""
global _flow_exceptions
_flow_exceptions = ()
add_flow_exception(datastore_errors.Rollback)
try:
from webob import exc
except ImportError:
pass
else:
add_flow_exception(exc.HTTPException)
|
[
"def",
"_init_flow_exceptions",
"(",
")",
":",
"global",
"_flow_exceptions",
"_flow_exceptions",
"=",
"(",
")",
"add_flow_exception",
"(",
"datastore_errors",
".",
"Rollback",
")",
"try",
":",
"from",
"webob",
"import",
"exc",
"except",
"ImportError",
":",
"pass",
"else",
":",
"add_flow_exception",
"(",
"exc",
".",
"HTTPException",
")"
] |
Internal helper to initialize _flow_exceptions.
This automatically adds webob.exc.HTTPException, if it can be imported.
|
[
"Internal",
"helper",
"to",
"initialize",
"_flow_exceptions",
"."
] |
python
|
train
|
klahnakoski/pyLibrary
|
jx_elasticsearch/es52/setop.py
|
https://github.com/klahnakoski/pyLibrary/blob/fa2dcbc48fda8d26999baef400e9a98149e0b982/jx_elasticsearch/es52/setop.py#L409-L453
|
def es_query_proto(path, selects, wheres, schema):
"""
RETURN TEMPLATE AND PATH-TO-FILTER AS A 2-TUPLE
:param path: THE NESTED PATH (NOT INCLUDING TABLE NAME)
:param wheres: MAP FROM path TO LIST OF WHERE CONDITIONS
:return: (es_query, filters_map) TUPLE
"""
output = None
last_where = MATCH_ALL
for p in reversed(sorted( wheres.keys() | set(selects.keys()))):
where = wheres.get(p)
select = selects.get(p)
if where:
where = AndOp(where).partial_eval().to_esfilter(schema)
if output:
where = es_or([es_and([output, where]), where])
else:
if output:
if last_where is MATCH_ALL:
where = es_or([output, MATCH_ALL])
else:
where = output
else:
where = MATCH_ALL
if p == ".":
output = set_default(
{
"from": 0,
"size": 0,
"sort": [],
"query": where
},
select.to_es()
)
else:
output = {"nested": {
"path": p,
"inner_hits": set_default({"size": 100000}, select.to_es()) if select else None,
"query": where
}}
last_where = where
return output
|
[
"def",
"es_query_proto",
"(",
"path",
",",
"selects",
",",
"wheres",
",",
"schema",
")",
":",
"output",
"=",
"None",
"last_where",
"=",
"MATCH_ALL",
"for",
"p",
"in",
"reversed",
"(",
"sorted",
"(",
"wheres",
".",
"keys",
"(",
")",
"|",
"set",
"(",
"selects",
".",
"keys",
"(",
")",
")",
")",
")",
":",
"where",
"=",
"wheres",
".",
"get",
"(",
"p",
")",
"select",
"=",
"selects",
".",
"get",
"(",
"p",
")",
"if",
"where",
":",
"where",
"=",
"AndOp",
"(",
"where",
")",
".",
"partial_eval",
"(",
")",
".",
"to_esfilter",
"(",
"schema",
")",
"if",
"output",
":",
"where",
"=",
"es_or",
"(",
"[",
"es_and",
"(",
"[",
"output",
",",
"where",
"]",
")",
",",
"where",
"]",
")",
"else",
":",
"if",
"output",
":",
"if",
"last_where",
"is",
"MATCH_ALL",
":",
"where",
"=",
"es_or",
"(",
"[",
"output",
",",
"MATCH_ALL",
"]",
")",
"else",
":",
"where",
"=",
"output",
"else",
":",
"where",
"=",
"MATCH_ALL",
"if",
"p",
"==",
"\".\"",
":",
"output",
"=",
"set_default",
"(",
"{",
"\"from\"",
":",
"0",
",",
"\"size\"",
":",
"0",
",",
"\"sort\"",
":",
"[",
"]",
",",
"\"query\"",
":",
"where",
"}",
",",
"select",
".",
"to_es",
"(",
")",
")",
"else",
":",
"output",
"=",
"{",
"\"nested\"",
":",
"{",
"\"path\"",
":",
"p",
",",
"\"inner_hits\"",
":",
"set_default",
"(",
"{",
"\"size\"",
":",
"100000",
"}",
",",
"select",
".",
"to_es",
"(",
")",
")",
"if",
"select",
"else",
"None",
",",
"\"query\"",
":",
"where",
"}",
"}",
"last_where",
"=",
"where",
"return",
"output"
] |
RETURN TEMPLATE AND PATH-TO-FILTER AS A 2-TUPLE
:param path: THE NESTED PATH (NOT INCLUDING TABLE NAME)
:param wheres: MAP FROM path TO LIST OF WHERE CONDITIONS
:return: (es_query, filters_map) TUPLE
|
[
"RETURN",
"TEMPLATE",
"AND",
"PATH",
"-",
"TO",
"-",
"FILTER",
"AS",
"A",
"2",
"-",
"TUPLE",
":",
"param",
"path",
":",
"THE",
"NESTED",
"PATH",
"(",
"NOT",
"INCLUDING",
"TABLE",
"NAME",
")",
":",
"param",
"wheres",
":",
"MAP",
"FROM",
"path",
"TO",
"LIST",
"OF",
"WHERE",
"CONDITIONS",
":",
"return",
":",
"(",
"es_query",
"filters_map",
")",
"TUPLE"
] |
python
|
train
|
eventbrite/eventbrite-sdk-python
|
eventbrite/access_methods.py
|
https://github.com/eventbrite/eventbrite-sdk-python/blob/f2e5dc5aa1aa3e45766de13f16fd65722163d91a/eventbrite/access_methods.py#L792-L800
|
def get_user_owned_event_attendees(self, id, **data):
"""
GET /users/:id/owned_event_attendees/
Returns a :ref:`paginated <pagination>` response of :format:`attendees <attendee>`,
under the key ``attendees``, of attendees visiting any of the events the user owns
(events that would be returned from ``/users/:id/owned_events/``)
"""
return self.get("/users/{0}/owned_event_attendees/".format(id), data=data)
|
[
"def",
"get_user_owned_event_attendees",
"(",
"self",
",",
"id",
",",
"*",
"*",
"data",
")",
":",
"return",
"self",
".",
"get",
"(",
"\"/users/{0}/owned_event_attendees/\"",
".",
"format",
"(",
"id",
")",
",",
"data",
"=",
"data",
")"
] |
GET /users/:id/owned_event_attendees/
Returns a :ref:`paginated <pagination>` response of :format:`attendees <attendee>`,
under the key ``attendees``, of attendees visiting any of the events the user owns
(events that would be returned from ``/users/:id/owned_events/``)
|
[
"GET",
"/",
"users",
"/",
":",
"id",
"/",
"owned_event_attendees",
"/",
"Returns",
"a",
":",
"ref",
":",
"paginated",
"<pagination",
">",
"response",
"of",
":",
"format",
":",
"attendees",
"<attendee",
">",
"under",
"the",
"key",
"attendees",
"of",
"attendees",
"visiting",
"any",
"of",
"the",
"events",
"the",
"user",
"owns",
"(",
"events",
"that",
"would",
"be",
"returned",
"from",
"/",
"users",
"/",
":",
"id",
"/",
"owned_events",
"/",
")"
] |
python
|
train
|
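A hedged usage sketch for get_user_owned_event_attendees above; the OAuth token and user id are placeholders, and the Eventbrite client class is assumed from the SDK's top-level package.
from eventbrite import Eventbrite
eventbrite = Eventbrite("OAUTH_TOKEN")  # hypothetical token
# Paginated attendees of every event the user owns, returned under the "attendees" key.
response = eventbrite.get_user_owned_event_attendees("12345")  # hypothetical user id
print(response)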
chaosim/dao
|
dao/builtins/terminal.py
|
https://github.com/chaosim/dao/blob/d7ba65c98ee063aefd1ff4eabb192d1536fdbaaa/dao/builtins/terminal.py#L59-L78
|
def char_on_predicate(compiler, cont, test):
'''return current char and step if @test succeeds, where
@test: a python function with one argument, which tests on one char and return True or False
@test must be registered with register_function'''
test = test.interlang()
text = compiler.new_var(il.ConstLocalVar('text'))
pos = compiler.new_var(il.ConstLocalVar('pos'))
if not isinstance(test, il.PyFunction):
raise DaoCompileTypeError(test)
return il.Begin((
il.AssignFromList(text, pos, il.parse_state),
il.If(il.Ge(pos,il.Len(text)),
il.failcont(il.FALSE),
il.If(il.Call(test, il.GetItem(text, pos)),
il.begin(
il.SetParseState(il.Tuple(text, il.add(pos, il.Integer(1)))),
il.append_failcont(compiler,
il.SetParseState(il.Tuple(text, pos))),
cont(il.GetItem(text, pos))),
il.failcont(il.FALSE)))))
|
[
"def",
"char_on_predicate",
"(",
"compiler",
",",
"cont",
",",
"test",
")",
":",
"test",
"=",
"test",
".",
"interlang",
"(",
")",
"text",
"=",
"compiler",
".",
"new_var",
"(",
"il",
".",
"ConstLocalVar",
"(",
"'text'",
")",
")",
"pos",
"=",
"compiler",
".",
"new_var",
"(",
"il",
".",
"ConstLocalVar",
"(",
"'pos'",
")",
")",
"if",
"not",
"isinstance",
"(",
"test",
",",
"il",
".",
"PyFunction",
")",
":",
"raise",
"DaoCompileTypeError",
"(",
"test",
")",
"return",
"il",
".",
"Begin",
"(",
"(",
"il",
".",
"AssignFromList",
"(",
"text",
",",
"pos",
",",
"il",
".",
"parse_state",
")",
",",
"il",
".",
"If",
"(",
"il",
".",
"Ge",
"(",
"pos",
",",
"il",
".",
"Len",
"(",
"text",
")",
")",
",",
"il",
".",
"failcont",
"(",
"il",
".",
"FALSE",
")",
",",
"il",
".",
"If",
"(",
"il",
".",
"Call",
"(",
"test",
",",
"il",
".",
"GetItem",
"(",
"text",
",",
"pos",
")",
")",
",",
"il",
".",
"begin",
"(",
"il",
".",
"SetParseState",
"(",
"il",
".",
"Tuple",
"(",
"text",
",",
"il",
".",
"add",
"(",
"pos",
",",
"il",
".",
"Integer",
"(",
"1",
")",
")",
")",
")",
",",
"il",
".",
"append_failcont",
"(",
"compiler",
",",
"il",
".",
"SetParseState",
"(",
"il",
".",
"Tuple",
"(",
"text",
",",
"pos",
")",
")",
")",
",",
"cont",
"(",
"il",
".",
"GetItem",
"(",
"text",
",",
"pos",
")",
")",
")",
",",
"il",
".",
"failcont",
"(",
"il",
".",
"FALSE",
")",
")",
")",
")",
")"
] |
return current char and step if @test succeeds, where
@test: a python function with one argument, which tests on one char and return True or False
@test must be registered with register_function
|
[
"return",
"current",
"char",
"and",
"step",
"if"
] |
python
|
train
|
josiah-wolf-oberholtzer/uqbar
|
uqbar/sphinx/book.py
|
https://github.com/josiah-wolf-oberholtzer/uqbar/blob/eca7fefebbbee1e2ae13bf5d6baa838be66b1db6/uqbar/sphinx/book.py#L116-L128
|
def on_build_finished(app, exception):
"""
Hooks into Sphinx's ``build-finished`` event.
"""
if not app.config["uqbar_book_use_cache"]:
return
logger.info("")
for row in app.connection.execute("SELECT path, hits FROM cache ORDER BY path"):
path, hits = row
if not hits:
continue
logger.info(bold("[uqbar-book]"), nonl=True)
logger.info(" Cache hits for {}: {}".format(path, hits))
|
[
"def",
"on_build_finished",
"(",
"app",
",",
"exception",
")",
":",
"if",
"not",
"app",
".",
"config",
"[",
"\"uqbar_book_use_cache\"",
"]",
":",
"return",
"logger",
".",
"info",
"(",
"\"\"",
")",
"for",
"row",
"in",
"app",
".",
"connection",
".",
"execute",
"(",
"\"SELECT path, hits FROM cache ORDER BY path\"",
")",
":",
"path",
",",
"hits",
"=",
"row",
"if",
"not",
"hits",
":",
"continue",
"logger",
".",
"info",
"(",
"bold",
"(",
"\"[uqbar-book]\"",
")",
",",
"nonl",
"=",
"True",
")",
"logger",
".",
"info",
"(",
"\" Cache hits for {}: {}\"",
".",
"format",
"(",
"path",
",",
"hits",
")",
")"
] |
Hooks into Sphinx's ``build-finished`` event.
|
[
"Hooks",
"into",
"Sphinx",
"s",
"build",
"-",
"finished",
"event",
"."
] |
python
|
train
|
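A small sketch of how a build-finished handler like on_build_finished above is registered in a Sphinx extension's setup(); the config value name comes from the row, the rest uses standard Sphinx APIs.
def setup(app):
    # Declare the cache toggle the handler checks, then hook the handler
    # onto Sphinx's build-finished event.
    app.add_config_value("uqbar_book_use_cache", True, "env")
    app.connect("build-finished", on_build_finished)
    return {"version": "0.1", "parallel_read_safe": True}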
manahl/arctic
|
arctic/arctic.py
|
https://github.com/manahl/arctic/blob/57e110b6e182dbab00e7e214dc26f7d9ec47c120/arctic/arctic.py#L328-L361
|
def get_library(self, library):
"""
Return the library instance. Can generally use slicing to return the library:
arctic_store[library]
Parameters
----------
library : `str`
The name of the library. e.g. 'library' or 'user.library'
"""
if library in self._library_cache:
return self._library_cache[library]
try:
error = None
lib = ArcticLibraryBinding(self, library)
lib_type = lib.get_library_type()
except (OperationFailure, AutoReconnect) as e:
error = e
if error:
raise LibraryNotFoundException("Library %s was not correctly initialized in %s.\nReason: %r)" %
(library, self, error))
elif not lib_type:
raise LibraryNotFoundException("Library %s was not correctly initialized in %s." %
(library, self))
elif lib_type not in LIBRARY_TYPES:
raise LibraryNotFoundException("Couldn't load LibraryType '%s' for '%s' (has the class been registered?)" %
(lib_type, library))
instance = LIBRARY_TYPES[lib_type](lib)
self._library_cache[library] = instance
# The library official name may be different from 'library': e.g. 'library' vs 'user.library'
self._library_cache[lib.get_name()] = instance
return self._library_cache[library]
|
[
"def",
"get_library",
"(",
"self",
",",
"library",
")",
":",
"if",
"library",
"in",
"self",
".",
"_library_cache",
":",
"return",
"self",
".",
"_library_cache",
"[",
"library",
"]",
"try",
":",
"error",
"=",
"None",
"lib",
"=",
"ArcticLibraryBinding",
"(",
"self",
",",
"library",
")",
"lib_type",
"=",
"lib",
".",
"get_library_type",
"(",
")",
"except",
"(",
"OperationFailure",
",",
"AutoReconnect",
")",
"as",
"e",
":",
"error",
"=",
"e",
"if",
"error",
":",
"raise",
"LibraryNotFoundException",
"(",
"\"Library %s was not correctly initialized in %s.\\nReason: %r)\"",
"%",
"(",
"library",
",",
"self",
",",
"error",
")",
")",
"elif",
"not",
"lib_type",
":",
"raise",
"LibraryNotFoundException",
"(",
"\"Library %s was not correctly initialized in %s.\"",
"%",
"(",
"library",
",",
"self",
")",
")",
"elif",
"lib_type",
"not",
"in",
"LIBRARY_TYPES",
":",
"raise",
"LibraryNotFoundException",
"(",
"\"Couldn't load LibraryType '%s' for '%s' (has the class been registered?)\"",
"%",
"(",
"lib_type",
",",
"library",
")",
")",
"instance",
"=",
"LIBRARY_TYPES",
"[",
"lib_type",
"]",
"(",
"lib",
")",
"self",
".",
"_library_cache",
"[",
"library",
"]",
"=",
"instance",
"# The library official name may be different from 'library': e.g. 'library' vs 'user.library'",
"self",
".",
"_library_cache",
"[",
"lib",
".",
"get_name",
"(",
")",
"]",
"=",
"instance",
"return",
"self",
".",
"_library_cache",
"[",
"library",
"]"
] |
Return the library instance. Can generally use slicing to return the library:
arctic_store[library]
Parameters
----------
library : `str`
The name of the library. e.g. 'library' or 'user.library'
|
[
"Return",
"the",
"library",
"instance",
".",
"Can",
"generally",
"use",
"slicing",
"to",
"return",
"the",
"library",
":",
"arctic_store",
"[",
"library",
"]"
] |
python
|
train
|
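A minimal sketch of how get_library above is reached in practice, assuming a MongoDB instance on localhost and an illustrative library name.
from arctic import Arctic
store = Arctic("localhost")                # connect to MongoDB
store.initialize_library("NASDAQ.quotes")  # create the library if it does not exist
library = store["NASDAQ.quotes"]           # slicing dispatches to get_library()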
shoebot/shoebot
|
shoebot/data/bezier.py
|
https://github.com/shoebot/shoebot/blob/d554c1765c1899fa25727c9fc6805d221585562b/shoebot/data/bezier.py#L340-L370
|
def _locate(self, t, segments=None):
""" Locates t on a specific segment in the path.
Returns (index, t, PathElement)
A path is a combination of lines and curves (segments).
The returned index indicates the start of the segment that contains point t.
The returned t is the absolute time on that segment,
in contrast to the relative t on the whole of the path.
The returned point is the last MOVETO, any subsequent CLOSETO after i closes to that point.
When you supply the list of segment lengths yourself, as returned from length(path, segmented=True),
point() works about thirty times faster in a for-loop since it doesn't need to recalculate
the length during each iteration.
"""
# Originally from nodebox-gl
if segments is None:
segments = self._segment_lengths(relative=True)
if len(segments) == 0:
raise PathError("The given path is empty")
for i, el in enumerate(self._get_elements()):
if i == 0 or el.cmd == MOVETO:
closeto = Point(el.x, el.y)
if t <= segments[i] or i == len(segments) - 1:
break
else:
t -= segments[i]
try:
t /= segments[i]
except ZeroDivisionError:
pass
if i == len(segments) - 1 and segments[i] == 0:
i -= 1
return (i, t, closeto)
|
[
"def",
"_locate",
"(",
"self",
",",
"t",
",",
"segments",
"=",
"None",
")",
":",
"# Originally from nodebox-gl",
"if",
"segments",
"is",
"None",
":",
"segments",
"=",
"self",
".",
"_segment_lengths",
"(",
"relative",
"=",
"True",
")",
"if",
"len",
"(",
"segments",
")",
"==",
"0",
":",
"raise",
"PathError",
",",
"\"The given path is empty\"",
"for",
"i",
",",
"el",
"in",
"enumerate",
"(",
"self",
".",
"_get_elements",
"(",
")",
")",
":",
"if",
"i",
"==",
"0",
"or",
"el",
".",
"cmd",
"==",
"MOVETO",
":",
"closeto",
"=",
"Point",
"(",
"el",
".",
"x",
",",
"el",
".",
"y",
")",
"if",
"t",
"<=",
"segments",
"[",
"i",
"]",
"or",
"i",
"==",
"len",
"(",
"segments",
")",
"-",
"1",
":",
"break",
"else",
":",
"t",
"-=",
"segments",
"[",
"i",
"]",
"try",
":",
"t",
"/=",
"segments",
"[",
"i",
"]",
"except",
"ZeroDivisionError",
":",
"pass",
"if",
"i",
"==",
"len",
"(",
"segments",
")",
"-",
"1",
"and",
"segments",
"[",
"i",
"]",
"==",
"0",
":",
"i",
"-=",
"1",
"return",
"(",
"i",
",",
"t",
",",
"closeto",
")"
] |
Locates t on a specific segment in the path.
Returns (index, t, PathElement)
A path is a combination of lines and curves (segments).
The returned index indicates the start of the segment that contains point t.
The returned t is the absolute time on that segment,
in contrast to the relative t on the whole of the path.
The returned point is the last MOVETO, any subsequent CLOSETO after i closes to that point.
When you supply the list of segment lengths yourself, as returned from length(path, segmented=True),
point() works about thirty times faster in a for-loop since it doesn't need to recalculate
the length during each iteration.
|
[
"Locates",
"t",
"on",
"a",
"specific",
"segment",
"in",
"the",
"path",
".",
"Returns",
"(",
"index",
"t",
"PathElement",
")",
"A",
"path",
"is",
"a",
"combination",
"of",
"lines",
"and",
"curves",
"(",
"segments",
")",
".",
"The",
"returned",
"index",
"indicates",
"the",
"start",
"of",
"the",
"segment",
"that",
"contains",
"point",
"t",
".",
"The",
"returned",
"t",
"is",
"the",
"absolute",
"time",
"on",
"that",
"segment",
"in",
"contrast",
"to",
"the",
"relative",
"t",
"on",
"the",
"whole",
"of",
"the",
"path",
".",
"The",
"returned",
"point",
"is",
"the",
"last",
"MOVETO",
"any",
"subsequent",
"CLOSETO",
"after",
"i",
"closes",
"to",
"that",
"point",
".",
"When",
"you",
"supply",
"the",
"list",
"of",
"segment",
"lengths",
"yourself",
"as",
"returned",
"from",
"length",
"(",
"path",
"segmented",
"=",
"True",
")",
"point",
"()",
"works",
"about",
"thirty",
"times",
"faster",
"in",
"a",
"for",
"-",
"loop",
"since",
"it",
"doesn",
"t",
"need",
"to",
"recalculate",
"the",
"length",
"during",
"each",
"iteration",
"."
] |
python
|
valid
|
ardexa/ardexaplugin
|
ardexaplugin.py
|
https://github.com/ardexa/ardexaplugin/blob/5068532f601ae3042bd87af1063057e8f274f670/ardexaplugin.py#L88-L108
|
def check_pidfile(pidfile, debug):
"""Check that a process is not running more than once, using PIDFILE"""
# Check PID exists and see if the PID is running
if os.path.isfile(pidfile):
pidfile_handle = open(pidfile, 'r')
# try and read the PID file. If no luck, remove it
try:
pid = int(pidfile_handle.read())
pidfile_handle.close()
if check_pid(pid, debug):
return True
except:
pass
# PID is not active, remove the PID file
os.unlink(pidfile)
# Create a PID file, to ensure this script is only run once (at a time)
pid = str(os.getpid())
open(pidfile, 'w').write(pid)
return False
|
[
"def",
"check_pidfile",
"(",
"pidfile",
",",
"debug",
")",
":",
"# Check PID exists and see if the PID is running",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"pidfile",
")",
":",
"pidfile_handle",
"=",
"open",
"(",
"pidfile",
",",
"'r'",
")",
"# try and read the PID file. If no luck, remove it",
"try",
":",
"pid",
"=",
"int",
"(",
"pidfile_handle",
".",
"read",
"(",
")",
")",
"pidfile_handle",
".",
"close",
"(",
")",
"if",
"check_pid",
"(",
"pid",
",",
"debug",
")",
":",
"return",
"True",
"except",
":",
"pass",
"# PID is not active, remove the PID file",
"os",
".",
"unlink",
"(",
"pidfile",
")",
"# Create a PID file, to ensure this is script is only run once (at a time)",
"pid",
"=",
"str",
"(",
"os",
".",
"getpid",
"(",
")",
")",
"open",
"(",
"pidfile",
",",
"'w'",
")",
".",
"write",
"(",
"pid",
")",
"return",
"False"
] |
Check that a process is not running more than once, using PIDFILE
|
[
"Check",
"that",
"a",
"process",
"is",
"not",
"running",
"more",
"than",
"once",
"using",
"PIDFILE"
] |
python
|
valid
|
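A short usage sketch for check_pidfile above, with a hypothetical PID-file path; the import follows the path column (ardexaplugin.py at the repository root).
import sys
from ardexaplugin import check_pidfile
# Bail out if another instance of this script already holds the PID file.
if check_pidfile("/tmp/my_collector.pid", debug=0):
    print("Already running, exiting")
    sys.exit(1)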
Jammy2211/PyAutoLens
|
autolens/data/array/grids.py
|
https://github.com/Jammy2211/PyAutoLens/blob/91e50369c7a9c048c83d217625578b72423cd5a7/autolens/data/array/grids.py#L123-L138
|
def grid_stack_from_mask_sub_grid_size_and_psf_shape(cls, mask, sub_grid_size, psf_shape):
"""Setup a grid-stack of grid_stack from a mask, sub-grid size and psf-shape.
Parameters
-----------
mask : Mask
The mask whose unmasked pixels (*False*) are used to generate the grid-stack's grid_stack.
sub_grid_size : int
The size of a sub-pixel's sub-grid (sub_grid_size x sub_grid_size).
psf_shape : (int, int)
the shape of the PSF used in the analysis, which defines the mask's blurring-region.
"""
regular_grid = RegularGrid.from_mask(mask)
sub_grid = SubGrid.from_mask_and_sub_grid_size(mask, sub_grid_size)
blurring_grid = RegularGrid.blurring_grid_from_mask_and_psf_shape(mask, psf_shape)
return GridStack(regular_grid, sub_grid, blurring_grid)
|
[
"def",
"grid_stack_from_mask_sub_grid_size_and_psf_shape",
"(",
"cls",
",",
"mask",
",",
"sub_grid_size",
",",
"psf_shape",
")",
":",
"regular_grid",
"=",
"RegularGrid",
".",
"from_mask",
"(",
"mask",
")",
"sub_grid",
"=",
"SubGrid",
".",
"from_mask_and_sub_grid_size",
"(",
"mask",
",",
"sub_grid_size",
")",
"blurring_grid",
"=",
"RegularGrid",
".",
"blurring_grid_from_mask_and_psf_shape",
"(",
"mask",
",",
"psf_shape",
")",
"return",
"GridStack",
"(",
"regular_grid",
",",
"sub_grid",
",",
"blurring_grid",
")"
] |
Setup a grid-stack of grid_stack from a mask, sub-grid size and psf-shape.
Parameters
-----------
mask : Mask
The mask whose unmasked pixels (*False*) are used to generate the grid-stack's grid_stack.
sub_grid_size : int
The size of a sub-pixel's sub-grid (sub_grid_size x sub_grid_size).
psf_shape : (int, int)
the shape of the PSF used in the analysis, which defines the mask's blurring-region.
|
[
"Setup",
"a",
"grid",
"-",
"stack",
"of",
"grid_stack",
"from",
"a",
"mask",
"sub",
"-",
"grid",
"size",
"and",
"psf",
"-",
"shape",
"."
] |
python
|
valid
|
great-expectations/great_expectations
|
great_expectations/dataset/dataset.py
|
https://github.com/great-expectations/great_expectations/blob/08385c40529d4f14a1c46916788aecc47f33ee9d/great_expectations/dataset/dataset.py#L658-L719
|
def expect_column_values_to_be_between(self,
column,
min_value=None,
max_value=None,
allow_cross_type_comparisons=None,
parse_strings_as_datetimes=None,
output_strftime_format=None,
mostly=None,
result_format=None, include_config=False, catch_exceptions=None, meta=None
):
"""Expect column entries to be between a minimum value and a maximum value (inclusive).
expect_column_values_to_be_between is a :func:`column_map_expectation <great_expectations.data_asset.dataset.Dataset.column_map_expectation>`.
Args:
column (str): \
The column name.
min_value (comparable type or None): The minimum value for a column entry.
max_value (comparable type or None): The maximum value for a column entry.
Keyword Args:
allow_cross_type_comparisons (boolean or None) : If True, allow comparisons between types (e.g. integer and\
string). Otherwise, attempting such comparisons will raise an exception.
parse_strings_as_datetimes (boolean or None) : If True, parse min_value, max_value, and all non-null column\
values to datetimes before making comparisons.
output_strftime_format (str or None): \
A valid strftime format for datetime output. Only used if parse_strings_as_datetimes=True.
mostly (None or a float between 0 and 1): \
Return `"success": True` if at least mostly percent of values match the expectation. \
For more detail, see :ref:`mostly`.
Other Parameters:
result_format (str or None): \
Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`.
For more detail, see :ref:`result_format <result_format>`.
include_config (boolean): \
If True, then include the expectation config as part of the result object. \
For more detail, see :ref:`include_config`.
catch_exceptions (boolean or None): \
If True, then catch exceptions and include them as part of the result object. \
For more detail, see :ref:`catch_exceptions`.
meta (dict or None): \
A JSON-serializable dictionary (nesting allowed) that will be included in the output without modification. \
For more detail, see :ref:`meta`.
Returns:
A JSON-serializable expectation result object.
Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and
:ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.
Notes:
* min_value and max_value are both inclusive.
* If min_value is None, then max_value is treated as an upper bound, and the number of acceptable rows has no minimum.
* If max_value is None, then min_value is treated as a lower bound, and the number of acceptable rows has no maximum.
See Also:
expect_column_value_lengths_to_be_between
"""
raise NotImplementedError
|
[
"def",
"expect_column_values_to_be_between",
"(",
"self",
",",
"column",
",",
"min_value",
"=",
"None",
",",
"max_value",
"=",
"None",
",",
"allow_cross_type_comparisons",
"=",
"None",
",",
"parse_strings_as_datetimes",
"=",
"None",
",",
"output_strftime_format",
"=",
"None",
",",
"mostly",
"=",
"None",
",",
"result_format",
"=",
"None",
",",
"include_config",
"=",
"False",
",",
"catch_exceptions",
"=",
"None",
",",
"meta",
"=",
"None",
")",
":",
"raise",
"NotImplementedError"
] |
Expect column entries to be between a minimum value and a maximum value (inclusive).
expect_column_values_to_be_between is a :func:`column_map_expectation <great_expectations.data_asset.dataset.Dataset.column_map_expectation>`.
Args:
column (str): \
The column name.
min_value (comparable type or None): The minimum value for a column entry.
max_value (comparable type or None): The maximum value for a column entry.
Keyword Args:
allow_cross_type_comparisons (boolean or None) : If True, allow comparisons between types (e.g. integer and\
string). Otherwise, attempting such comparisons will raise an exception.
parse_strings_as_datetimes (boolean or None) : If True, parse min_value, max_value, and all non-null column\
values to datetimes before making comparisons.
output_strftime_format (str or None): \
A valid strftime format for datetime output. Only used if parse_strings_as_datetimes=True.
mostly (None or a float between 0 and 1): \
Return `"success": True` if at least mostly percent of values match the expectation. \
For more detail, see :ref:`mostly`.
Other Parameters:
result_format (str or None): \
Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`.
For more detail, see :ref:`result_format <result_format>`.
include_config (boolean): \
If True, then include the expectation config as part of the result object. \
For more detail, see :ref:`include_config`.
catch_exceptions (boolean or None): \
If True, then catch exceptions and include them as part of the result object. \
For more detail, see :ref:`catch_exceptions`.
meta (dict or None): \
A JSON-serializable dictionary (nesting allowed) that will be included in the output without modification. \
For more detail, see :ref:`meta`.
Returns:
A JSON-serializable expectation result object.
Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and
:ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.
Notes:
* min_value and max_value are both inclusive.
* If min_value is None, then max_value is treated as an upper bound, and the number of acceptable rows has no minimum.
* If max_value is None, then min_value is treated as a lower bound, and the number of acceptable rows has no maximum.
See Also:
expect_column_value_lengths_to_be_between
|
[
"Expect",
"column",
"entries",
"to",
"be",
"between",
"a",
"minimum",
"value",
"and",
"a",
"maximum",
"value",
"(",
"inclusive",
")",
"."
] |
python
|
train
|
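The method above is abstract (it only raises NotImplementedError); concrete Dataset backends implement the same signature. A hedged sketch against the pandas backend, with illustrative data and bounds.
from great_expectations.dataset import PandasDataset
# Wrap a small frame in the pandas-backed Dataset implementation.
df = PandasDataset({"passenger_age": [3, 25, 41, 67, 150]})
# One of five values is out of bounds, so mostly=0.8 still passes.
result = df.expect_column_values_to_be_between("passenger_age", min_value=0, max_value=120, mostly=0.8)
print(result)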
IdentityPython/pysaml2
|
src/saml2/server.py
|
https://github.com/IdentityPython/pysaml2/blob/d3aa78eeb7d37c12688f783cb4db1c7263a14ad6/src/saml2/server.py#L221-L231
|
def parse_authn_request(self, enc_request, binding=BINDING_HTTP_REDIRECT):
"""Parse a Authentication Request
:param enc_request: The request in its transport format
:param binding: Which binding that was used to transport the message
to this entity.
:return: A request instance
"""
return self._parse_request(enc_request, AuthnRequest,
"single_sign_on_service", binding)
|
[
"def",
"parse_authn_request",
"(",
"self",
",",
"enc_request",
",",
"binding",
"=",
"BINDING_HTTP_REDIRECT",
")",
":",
"return",
"self",
".",
"_parse_request",
"(",
"enc_request",
",",
"AuthnRequest",
",",
"\"single_sign_on_service\"",
",",
"binding",
")"
] |
Parse an Authentication Request
:param enc_request: The request in its transport format
:param binding: Which binding that was used to transport the message
to this entity.
:return: A request instance
|
[
"Parse",
"a",
"Authentication",
"Request"
] |
python
|
train
|
pmelchior/proxmin
|
examples/unmixing.py
|
https://github.com/pmelchior/proxmin/blob/60e49d90c67c46329cc1d3b5c484951dc8bd2c3f/examples/unmixing.py#L28-L30
|
def add_noise(Y, sigma):
"""Adds noise to Y"""
return Y + np.random.normal(0, sigma, Y.shape)
|
[
"def",
"add_noise",
"(",
"Y",
",",
"sigma",
")",
":",
"return",
"Y",
"+",
"np",
".",
"random",
".",
"normal",
"(",
"0",
",",
"sigma",
",",
"Y",
".",
"shape",
")"
] |
Adds noise to Y
|
[
"Adds",
"noise",
"to",
"Y"
] |
python
|
train
|
saltstack/salt
|
salt/modules/state.py
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/state.py#L2355-L2422
|
def event(tagmatch='*',
count=-1,
quiet=False,
sock_dir=None,
pretty=False,
node='minion'):
r'''
Watch Salt's event bus and block until the given tag is matched
.. versionadded:: 2016.3.0
.. versionchanged:: 2019.2.0
``tagmatch`` can now be either a glob or regular expression.
This is useful for utilizing Salt's event bus from shell scripts or for
taking simple actions directly from the CLI.
Enable debug logging to see ignored events.
:param tagmatch: the event is written to stdout for each tag that matches
this glob or regular expression.
:param count: this number is decremented for each event that matches the
``tagmatch`` parameter; pass ``-1`` to listen forever.
:param quiet: do not print to stdout; just block
:param sock_dir: path to the Salt master's event socket file.
:param pretty: Output the JSON all on a single line if ``False`` (useful
for shell tools); pretty-print the JSON output if ``True``.
:param node: Watch the minion-side or master-side event bus.
CLI Example:
.. code-block:: bash
salt-call --local state.event pretty=True
'''
sevent = salt.utils.event.get_event(
node,
sock_dir or __opts__['sock_dir'],
__opts__['transport'],
opts=__opts__,
listen=True)
while True:
ret = sevent.get_event(full=True, auto_reconnect=True)
if ret is None:
continue
if salt.utils.stringutils.expr_match(ret['tag'], tagmatch):
if not quiet:
salt.utils.stringutils.print_cli(
str('{0}\t{1}').format( # future lint: blacklisted-function
salt.utils.stringutils.to_str(ret['tag']),
salt.utils.json.dumps(
ret['data'],
sort_keys=pretty,
indent=None if not pretty else 4)
)
)
sys.stdout.flush()
if count > 0:
count -= 1
log.debug('Remaining event matches: %s', count)
if count == 0:
break
else:
log.debug('Skipping event tag: %s', ret['tag'])
continue
|
[
"def",
"event",
"(",
"tagmatch",
"=",
"'*'",
",",
"count",
"=",
"-",
"1",
",",
"quiet",
"=",
"False",
",",
"sock_dir",
"=",
"None",
",",
"pretty",
"=",
"False",
",",
"node",
"=",
"'minion'",
")",
":",
"sevent",
"=",
"salt",
".",
"utils",
".",
"event",
".",
"get_event",
"(",
"node",
",",
"sock_dir",
"or",
"__opts__",
"[",
"'sock_dir'",
"]",
",",
"__opts__",
"[",
"'transport'",
"]",
",",
"opts",
"=",
"__opts__",
",",
"listen",
"=",
"True",
")",
"while",
"True",
":",
"ret",
"=",
"sevent",
".",
"get_event",
"(",
"full",
"=",
"True",
",",
"auto_reconnect",
"=",
"True",
")",
"if",
"ret",
"is",
"None",
":",
"continue",
"if",
"salt",
".",
"utils",
".",
"stringutils",
".",
"expr_match",
"(",
"ret",
"[",
"'tag'",
"]",
",",
"tagmatch",
")",
":",
"if",
"not",
"quiet",
":",
"salt",
".",
"utils",
".",
"stringutils",
".",
"print_cli",
"(",
"str",
"(",
"'{0}\\t{1}'",
")",
".",
"format",
"(",
"# future lint: blacklisted-function",
"salt",
".",
"utils",
".",
"stringutils",
".",
"to_str",
"(",
"ret",
"[",
"'tag'",
"]",
")",
",",
"salt",
".",
"utils",
".",
"json",
".",
"dumps",
"(",
"ret",
"[",
"'data'",
"]",
",",
"sort_keys",
"=",
"pretty",
",",
"indent",
"=",
"None",
"if",
"not",
"pretty",
"else",
"4",
")",
")",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"if",
"count",
">",
"0",
":",
"count",
"-=",
"1",
"log",
".",
"debug",
"(",
"'Remaining event matches: %s'",
",",
"count",
")",
"if",
"count",
"==",
"0",
":",
"break",
"else",
":",
"log",
".",
"debug",
"(",
"'Skipping event tag: %s'",
",",
"ret",
"[",
"'tag'",
"]",
")",
"continue"
] |
r'''
Watch Salt's event bus and block until the given tag is matched
.. versionadded:: 2016.3.0
.. versionchanged:: 2019.2.0
``tagmatch`` can now be either a glob or regular expression.
This is useful for utilizing Salt's event bus from shell scripts or for
taking simple actions directly from the CLI.
Enable debug logging to see ignored events.
:param tagmatch: the event is written to stdout for each tag that matches
this glob or regular expression.
:param count: this number is decremented for each event that matches the
``tagmatch`` parameter; pass ``-1`` to listen forever.
:param quiet: do not print to stdout; just block
:param sock_dir: path to the Salt master's event socket file.
:param pretty: Output the JSON all on a single line if ``False`` (useful
for shell tools); pretty-print the JSON output if ``True``.
:param node: Watch the minion-side or master-side event bus.
CLI Example:
.. code-block:: bash
salt-call --local state.event pretty=True
|
[
"r",
"Watch",
"Salt",
"s",
"event",
"bus",
"and",
"block",
"until",
"the",
"given",
"tag",
"is",
"matched"
] |
python
|
train
|
etingof/pysnmp
|
pysnmp/hlapi/v3arch/asyncore/ntforg.py
|
https://github.com/etingof/pysnmp/blob/cde062dd42f67dfd2d7686286a322d40e9c3a4b7/pysnmp/hlapi/v3arch/asyncore/ntforg.py#L21-L174
|
def sendNotification(snmpEngine, authData, transportTarget, contextData,
notifyType, *varBinds, **options):
"""Send SNMP notification.
Based on passed parameters, prepares SNMP TRAP or INFORM
notification (:RFC:`1905#section-4.2.6`) and schedules its
transmission by I/O framework at a later point of time.
Parameters
----------
snmpEngine : :py:class:`~pysnmp.hlapi.SnmpEngine`
Class instance representing SNMP engine.
authData : :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
Class instance representing SNMP credentials.
transportTarget : :py:class:`~pysnmp.hlapi.asyncore.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.asyncore.Udp6TransportTarget`
Class instance representing transport type along with SNMP peer
address.
contextData : :py:class:`~pysnmp.hlapi.ContextData`
Class instance representing SNMP ContextEngineId and ContextName
values.
notifyType : str
Indicates type of notification to be sent. Recognized literal
values are *trap* or *inform*.
\*varBinds: :class:`tuple` of OID-value pairs or :py:class:`~pysnmp.smi.rfc1902.ObjectType` or :py:class:`~pysnmp.smi.rfc1902.NotificationType`
One or more objects representing MIB variables to place
into SNMP notification. It could be tuples of OID-values
or :py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances
of :py:class:`~pysnmp.smi.rfc1902.NotificationType` objects.
SNMP Notification PDU includes some housekeeping items that
are required for SNMP to function.
Agent information:
* SNMPv2-MIB::sysUpTime.0 = <agent uptime>
* SNMPv2-SMI::snmpTrapOID.0 = {SNMPv2-MIB::coldStart, ...}
Applicable to SNMP v1 TRAP:
* SNMP-COMMUNITY-MIB::snmpTrapAddress.0 = <agent-IP>
* SNMP-COMMUNITY-MIB::snmpTrapCommunity.0 = <snmp-community-name>
* SNMP-COMMUNITY-MIB::snmpTrapEnterprise.0 = <enterprise-OID>
.. note::
Unless user passes some of these variable-bindings, `.sendNotification()`
call will fill in the missing items.
User variable-bindings:
* SNMPv2-SMI::NOTIFICATION-TYPE
* SNMPv2-SMI::OBJECT-TYPE
.. note::
The :py:class:`~pysnmp.smi.rfc1902.NotificationType` object ensures
properly formed SNMP notification (to comply MIB definition). If you
build notification PDU out of :py:class:`~pysnmp.smi.rfc1902.ObjectType`
objects or simple tuples of OID-value objects, it is your responsibility
to provide well-formed notification payload.
Other Parameters
----------------
\*\*options:
* lookupMib: bool
`lookupMib` - load MIB and resolve response MIB variables at
the cost of slightly reduced performance. Default is `True`.
* cbFun: callable
user-supplied callable that is invoked to pass SNMP response
to *INFORM* notification or error to user at a later point of
time. The `cbFun` callable is never invoked for *TRAP* notifications.
* cbCtx: object
user-supplied object passing additional parameters to/from
`cbFun`
Notes
-----
User-supplied `cbFun` callable must have the following call
signature:
* snmpEngine (:py:class:`~pysnmp.hlapi.SnmpEngine`):
Class instance representing SNMP engine.
* sendRequestHandle (int): Unique request identifier. Can be used
for matching multiple ongoing *INFORM* notifications with received
responses.
* errorIndication (str): True value indicates SNMP engine error.
* errorStatus (str): True value indicates SNMP PDU error.
* errorIndex (int): Non-zero value refers to `varBinds[errorIndex-1]`
* varBinds (tuple): A sequence of
:py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances
representing MIB variables returned in SNMP response in exactly
the same order as `varBinds` in request.
* `cbCtx` : Original user-supplied object.
Returns
-------
sendRequestHandle : int
Unique request identifier. Can be used for matching received
responses with ongoing *INFORM* requests. Returns `None` for
*TRAP* notifications.
Raises
------
PySnmpError
Or its derivative indicating that an error occurred while
performing SNMP operation.
Examples
--------
>>> from pysnmp.hlapi.asyncore import *
>>>
>>> snmpEngine = SnmpEngine()
>>> sendNotification(
... snmpEngine,
... CommunityData('public'),
... UdpTransportTarget(('demo.snmplabs.com', 162)),
... ContextData(),
... 'trap',
... NotificationType(ObjectIdentity('SNMPv2-MIB', 'coldStart')),
... )
>>> snmpEngine.transportDispatcher.runDispatcher()
>>>
"""
# noinspection PyShadowingNames
def __cbFun(snmpEngine, sendRequestHandle, errorIndication,
errorStatus, errorIndex, varBinds, cbCtx):
lookupMib, cbFun, cbCtx = cbCtx
varBinds = VB_PROCESSOR.unmakeVarBinds(
snmpEngine.cache, varBinds, lookupMib)
return cbFun and cbFun(
snmpEngine, sendRequestHandle, errorIndication,
errorStatus, errorIndex, varBinds, cbCtx)
notifyName = LCD.configure(snmpEngine, authData, transportTarget,
notifyType, contextData.contextName)
varBinds = VB_PROCESSOR.makeVarBinds(snmpEngine.cache, varBinds)
return ntforg.NotificationOriginator().sendVarBinds(
snmpEngine, notifyName,
contextData.contextEngineId, contextData.contextName,
varBinds, __cbFun, (options.get('lookupMib', True),
options.get('cbFun'), options.get('cbCtx')))
|
[
"def",
"sendNotification",
"(",
"snmpEngine",
",",
"authData",
",",
"transportTarget",
",",
"contextData",
",",
"notifyType",
",",
"*",
"varBinds",
",",
"*",
"*",
"options",
")",
":",
"# noinspection PyShadowingNames",
"def",
"__cbFun",
"(",
"snmpEngine",
",",
"sendRequestHandle",
",",
"errorIndication",
",",
"errorStatus",
",",
"errorIndex",
",",
"varBinds",
",",
"cbCtx",
")",
":",
"lookupMib",
",",
"cbFun",
",",
"cbCtx",
"=",
"cbCtx",
"varBinds",
"=",
"VB_PROCESSOR",
".",
"unmakeVarBinds",
"(",
"snmpEngine",
".",
"cache",
",",
"varBinds",
",",
"lookupMib",
")",
"return",
"cbFun",
"and",
"cbFun",
"(",
"snmpEngine",
",",
"sendRequestHandle",
",",
"errorIndication",
",",
"errorStatus",
",",
"errorIndex",
",",
"varBinds",
",",
"cbCtx",
")",
"notifyName",
"=",
"LCD",
".",
"configure",
"(",
"snmpEngine",
",",
"authData",
",",
"transportTarget",
",",
"notifyType",
",",
"contextData",
".",
"contextName",
")",
"varBinds",
"=",
"VB_PROCESSOR",
".",
"makeVarBinds",
"(",
"snmpEngine",
".",
"cache",
",",
"varBinds",
")",
"return",
"ntforg",
".",
"NotificationOriginator",
"(",
")",
".",
"sendVarBinds",
"(",
"snmpEngine",
",",
"notifyName",
",",
"contextData",
".",
"contextEngineId",
",",
"contextData",
".",
"contextName",
",",
"varBinds",
",",
"__cbFun",
",",
"(",
"options",
".",
"get",
"(",
"'lookupMib'",
",",
"True",
")",
",",
"options",
".",
"get",
"(",
"'cbFun'",
")",
",",
"options",
".",
"get",
"(",
"'cbCtx'",
")",
")",
")"
] |
Send SNMP notification.
Based on passed parameters, prepares SNMP TRAP or INFORM
notification (:RFC:`1905#section-4.2.6`) and schedules its
transmission by I/O framework at a later point of time.
Parameters
----------
snmpEngine : :py:class:`~pysnmp.hlapi.SnmpEngine`
Class instance representing SNMP engine.
authData : :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
Class instance representing SNMP credentials.
transportTarget : :py:class:`~pysnmp.hlapi.asyncore.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.asyncore.Udp6TransportTarget`
Class instance representing transport type along with SNMP peer
address.
contextData : :py:class:`~pysnmp.hlapi.ContextData`
Class instance representing SNMP ContextEngineId and ContextName
values.
notifyType : str
Indicates type of notification to be sent. Recognized literal
values are *trap* or *inform*.
\*varBinds: :class:`tuple` of OID-value pairs or :py:class:`~pysnmp.smi.rfc1902.ObjectType` or :py:class:`~pysnmp.smi.rfc1902.NotificationType`
One or more objects representing MIB variables to place
into SNMP notification. It could be tuples of OID-values
or :py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances
of :py:class:`~pysnmp.smi.rfc1902.NotificationType` objects.
SNMP Notification PDU includes some housekeeping items that
are required for SNMP to function.
Agent information:
* SNMPv2-MIB::sysUpTime.0 = <agent uptime>
* SNMPv2-SMI::snmpTrapOID.0 = {SNMPv2-MIB::coldStart, ...}
Applicable to SNMP v1 TRAP:
* SNMP-COMMUNITY-MIB::snmpTrapAddress.0 = <agent-IP>
* SNMP-COMMUNITY-MIB::snmpTrapCommunity.0 = <snmp-community-name>
* SNMP-COMMUNITY-MIB::snmpTrapEnterprise.0 = <enterprise-OID>
.. note::
Unless user passes some of these variable-bindings, `.sendNotification()`
call will fill in the missing items.
User variable-bindings:
* SNMPv2-SMI::NOTIFICATION-TYPE
* SNMPv2-SMI::OBJECT-TYPE
.. note::
The :py:class:`~pysnmp.smi.rfc1902.NotificationType` object ensures
properly formed SNMP notification (to comply MIB definition). If you
build notification PDU out of :py:class:`~pysnmp.smi.rfc1902.ObjectType`
objects or simple tuples of OID-value objects, it is your responsibility
to provide well-formed notification payload.
Other Parameters
----------------
\*\*options:
* lookupMib: bool
`lookupMib` - load MIB and resolve response MIB variables at
the cost of slightly reduced performance. Default is `True`.
* cbFun: callable
user-supplied callable that is invoked to pass SNMP response
to *INFORM* notification or error to user at a later point of
time. The `cbFun` callable is never invoked for *TRAP* notifications.
* cbCtx: object
user-supplied object passing additional parameters to/from
`cbFun`
Notes
-----
User-supplied `cbFun` callable must have the following call
signature:
* snmpEngine (:py:class:`~pysnmp.hlapi.SnmpEngine`):
Class instance representing SNMP engine.
* sendRequestHandle (int): Unique request identifier. Can be used
for matching multiple ongoing *INFORM* notifications with received
responses.
* errorIndication (str): True value indicates SNMP engine error.
* errorStatus (str): True value indicates SNMP PDU error.
* errorIndex (int): Non-zero value refers to `varBinds[errorIndex-1]`
* varBinds (tuple): A sequence of
:py:class:`~pysnmp.smi.rfc1902.ObjectType` class instances
representing MIB variables returned in SNMP response in exactly
the same order as `varBinds` in request.
* `cbCtx` : Original user-supplied object.
Returns
-------
sendRequestHandle : int
Unique request identifier. Can be used for matching received
responses with ongoing *INFORM* requests. Returns `None` for
*TRAP* notifications.
Raises
------
PySnmpError
Or its derivative indicating that an error occurred while
performing SNMP operation.
Examples
--------
>>> from pysnmp.hlapi.asyncore import *
>>>
>>> snmpEngine = SnmpEngine()
>>> sendNotification(
... snmpEngine,
... CommunityData('public'),
... UdpTransportTarget(('demo.snmplabs.com', 162)),
... ContextData(),
... 'trap',
... NotificationType(ObjectIdentity('SNMPv2-MIB', 'coldStart')),
... )
>>> snmpEngine.transportDispatcher.runDispatcher()
>>>
|
[
"Send",
"SNMP",
"notification",
"."
] |
python
|
train
|
librosa/librosa
|
librosa/feature/utils.py
|
https://github.com/librosa/librosa/blob/180e8e6eb8f958fa6b20b8cba389f7945d508247/librosa/feature/utils.py#L119-L252
|
def stack_memory(data, n_steps=2, delay=1, **kwargs):
"""Short-term history embedding: vertically concatenate a data
vector or matrix with delayed copies of itself.
Each column `data[:, i]` is mapped to::
data[:, i] -> [data[:, i],
data[:, i - delay],
...
data[:, i - (n_steps-1)*delay]]
For columns `i < (n_steps - 1) * delay` , the data will be padded.
By default, the data is padded with zeros, but this behavior can be
overridden by supplying additional keyword arguments which are passed
to `np.pad()`.
Parameters
----------
data : np.ndarray [shape=(t,) or (d, t)]
Input data matrix. If `data` is a vector (`data.ndim == 1`),
it will be interpreted as a row matrix and reshaped to `(1, t)`.
n_steps : int > 0 [scalar]
embedding dimension, the number of steps back in time to stack
delay : int != 0 [scalar]
the number of columns to step.
Positive values embed from the past (previous columns).
Negative values embed from the future (subsequent columns).
kwargs : additional keyword arguments
Additional arguments to pass to `np.pad`.
Returns
-------
data_history : np.ndarray [shape=(m * d, t)]
data augmented with lagged copies of itself,
where `m == n_steps - 1`.
Notes
-----
This function caches at level 40.
Examples
--------
Keep two steps (current and previous)
>>> data = np.arange(-3, 3)
>>> librosa.feature.stack_memory(data)
array([[-3, -2, -1, 0, 1, 2],
[ 0, -3, -2, -1, 0, 1]])
Or three steps
>>> librosa.feature.stack_memory(data, n_steps=3)
array([[-3, -2, -1, 0, 1, 2],
[ 0, -3, -2, -1, 0, 1],
[ 0, 0, -3, -2, -1, 0]])
Use reflection padding instead of zero-padding
>>> librosa.feature.stack_memory(data, n_steps=3, mode='reflect')
array([[-3, -2, -1, 0, 1, 2],
[-2, -3, -2, -1, 0, 1],
[-1, -2, -3, -2, -1, 0]])
Or pad with edge-values, and delay by 2
>>> librosa.feature.stack_memory(data, n_steps=3, delay=2, mode='edge')
array([[-3, -2, -1, 0, 1, 2],
[-3, -3, -3, -2, -1, 0],
[-3, -3, -3, -3, -3, -2]])
Stack time-lagged beat-synchronous chroma edge padding
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> chroma = librosa.feature.chroma_stft(y=y, sr=sr)
>>> tempo, beats = librosa.beat.beat_track(y=y, sr=sr, hop_length=512)
>>> beats = librosa.util.fix_frames(beats, x_min=0, x_max=chroma.shape[1])
>>> chroma_sync = librosa.util.sync(chroma, beats)
>>> chroma_lag = librosa.feature.stack_memory(chroma_sync, n_steps=3,
... mode='edge')
Plot the result
>>> import matplotlib.pyplot as plt
>>> beat_times = librosa.frames_to_time(beats, sr=sr, hop_length=512)
>>> librosa.display.specshow(chroma_lag, y_axis='chroma', x_axis='time',
... x_coords=beat_times)
>>> plt.yticks([0, 12, 24], ['Lag=0', 'Lag=1', 'Lag=2'])
>>> plt.title('Time-lagged chroma')
>>> plt.colorbar()
>>> plt.tight_layout()
"""
if n_steps < 1:
raise ParameterError('n_steps must be a positive integer')
if delay == 0:
raise ParameterError('delay must be a non-zero integer')
data = np.atleast_2d(data)
t = data.shape[1]
kwargs.setdefault('mode', 'constant')
if kwargs['mode'] == 'constant':
kwargs.setdefault('constant_values', [0])
# Pad the end with zeros, which will roll to the front below
if delay > 0:
padding = (int((n_steps - 1) * delay), 0)
else:
padding = (0, int((n_steps - 1) * -delay))
data = np.pad(data, [(0, 0), padding], **kwargs)
history = data
for i in range(1, n_steps):
history = np.vstack([np.roll(data, -i * delay, axis=1), history])
# Trim to original width
if delay > 0:
history = history[:, :t]
else:
history = history[:, -t:]
# Make contiguous
return np.ascontiguousarray(history.T).T
|
[
"def",
"stack_memory",
"(",
"data",
",",
"n_steps",
"=",
"2",
",",
"delay",
"=",
"1",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"n_steps",
"<",
"1",
":",
"raise",
"ParameterError",
"(",
"'n_steps must be a positive integer'",
")",
"if",
"delay",
"==",
"0",
":",
"raise",
"ParameterError",
"(",
"'delay must be a non-zero integer'",
")",
"data",
"=",
"np",
".",
"atleast_2d",
"(",
"data",
")",
"t",
"=",
"data",
".",
"shape",
"[",
"1",
"]",
"kwargs",
".",
"setdefault",
"(",
"'mode'",
",",
"'constant'",
")",
"if",
"kwargs",
"[",
"'mode'",
"]",
"==",
"'constant'",
":",
"kwargs",
".",
"setdefault",
"(",
"'constant_values'",
",",
"[",
"0",
"]",
")",
"# Pad the end with zeros, which will roll to the front below",
"if",
"delay",
">",
"0",
":",
"padding",
"=",
"(",
"int",
"(",
"(",
"n_steps",
"-",
"1",
")",
"*",
"delay",
")",
",",
"0",
")",
"else",
":",
"padding",
"=",
"(",
"0",
",",
"int",
"(",
"(",
"n_steps",
"-",
"1",
")",
"*",
"-",
"delay",
")",
")",
"data",
"=",
"np",
".",
"pad",
"(",
"data",
",",
"[",
"(",
"0",
",",
"0",
")",
",",
"padding",
"]",
",",
"*",
"*",
"kwargs",
")",
"history",
"=",
"data",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"n_steps",
")",
":",
"history",
"=",
"np",
".",
"vstack",
"(",
"[",
"np",
".",
"roll",
"(",
"data",
",",
"-",
"i",
"*",
"delay",
",",
"axis",
"=",
"1",
")",
",",
"history",
"]",
")",
"# Trim to original width",
"if",
"delay",
">",
"0",
":",
"history",
"=",
"history",
"[",
":",
",",
":",
"t",
"]",
"else",
":",
"history",
"=",
"history",
"[",
":",
",",
"-",
"t",
":",
"]",
"# Make contiguous",
"return",
"np",
".",
"ascontiguousarray",
"(",
"history",
".",
"T",
")",
".",
"T"
] |
Short-term history embedding: vertically concatenate a data
vector or matrix with delayed copies of itself.
Each column `data[:, i]` is mapped to::
data[:, i] -> [data[:, i],
data[:, i - delay],
...
data[:, i - (n_steps-1)*delay]]
For columns `i < (n_steps - 1) * delay` , the data will be padded.
By default, the data is padded with zeros, but this behavior can be
overridden by supplying additional keyword arguments which are passed
to `np.pad()`.
Parameters
----------
data : np.ndarray [shape=(t,) or (d, t)]
Input data matrix. If `data` is a vector (`data.ndim == 1`),
it will be interpreted as a row matrix and reshaped to `(1, t)`.
n_steps : int > 0 [scalar]
embedding dimension, the number of steps back in time to stack
delay : int != 0 [scalar]
the number of columns to step.
Positive values embed from the past (previous columns).
Negative values embed from the future (subsequent columns).
kwargs : additional keyword arguments
Additional arguments to pass to `np.pad`.
Returns
-------
data_history : np.ndarray [shape=(m * d, t)]
data augmented with lagged copies of itself,
where `m == n_steps` (one block of `d` rows per step, as in the examples below).
Notes
-----
This function caches at level 40.
Examples
--------
Keep two steps (current and previous)
>>> data = np.arange(-3, 3)
>>> librosa.feature.stack_memory(data)
array([[-3, -2, -1, 0, 1, 2],
[ 0, -3, -2, -1, 0, 1]])
Or three steps
>>> librosa.feature.stack_memory(data, n_steps=3)
array([[-3, -2, -1, 0, 1, 2],
[ 0, -3, -2, -1, 0, 1],
[ 0, 0, -3, -2, -1, 0]])
Use reflection padding instead of zero-padding
>>> librosa.feature.stack_memory(data, n_steps=3, mode='reflect')
array([[-3, -2, -1, 0, 1, 2],
[-2, -3, -2, -1, 0, 1],
[-1, -2, -3, -2, -1, 0]])
Or pad with edge-values, and delay by 2
>>> librosa.feature.stack_memory(data, n_steps=3, delay=2, mode='edge')
array([[-3, -2, -1, 0, 1, 2],
[-3, -3, -3, -2, -1, 0],
[-3, -3, -3, -3, -3, -2]])
Stack time-lagged beat-synchronous chroma, using edge padding
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> chroma = librosa.feature.chroma_stft(y=y, sr=sr)
>>> tempo, beats = librosa.beat.beat_track(y=y, sr=sr, hop_length=512)
>>> beats = librosa.util.fix_frames(beats, x_min=0, x_max=chroma.shape[1])
>>> chroma_sync = librosa.util.sync(chroma, beats)
>>> chroma_lag = librosa.feature.stack_memory(chroma_sync, n_steps=3,
... mode='edge')
Plot the result
>>> import matplotlib.pyplot as plt
>>> beat_times = librosa.frames_to_time(beats, sr=sr, hop_length=512)
>>> librosa.display.specshow(chroma_lag, y_axis='chroma', x_axis='time',
... x_coords=beat_times)
>>> plt.yticks([0, 12, 24], ['Lag=0', 'Lag=1', 'Lag=2'])
>>> plt.title('Time-lagged chroma')
>>> plt.colorbar()
>>> plt.tight_layout()
|
[
"Short",
"-",
"term",
"history",
"embedding",
":",
"vertically",
"concatenate",
"a",
"data",
"vector",
"or",
"matrix",
"with",
"delayed",
"copies",
"of",
"itself",
"."
] |
python
|
test
|
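A quick way to sanity-check the `stack_memory` record above is to verify the documented shape and row blocks with plain NumPy. This is a minimal sketch, assuming librosa is installed; the array sizes are arbitrary.

import numpy as np
import librosa

# Minimal shape check for stack_memory: a (d, t) input comes back as
# (n_steps * d, t), with row block k holding the input delayed by
# k * delay columns (zero-padded at the front by default).
d, t, n_steps, delay = 4, 10, 3, 1
x = np.arange(d * t, dtype=float).reshape(d, t)

stacked = librosa.feature.stack_memory(x, n_steps=n_steps, delay=delay)
assert stacked.shape == (n_steps * d, t)

# Block 0 is the original data; block 1 is shifted right by `delay`.
assert np.allclose(stacked[:d], x)
assert np.allclose(stacked[d:2 * d, delay:], x[:, :-delay])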
jtwhite79/pyemu
|
pyemu/pst/pst_handler.py
|
https://github.com/jtwhite79/pyemu/blob/c504d8e7a4097cec07655a6318d275739bd8148a/pyemu/pst/pst_handler.py#L448-L462
|
def adj_par_names(self):
""" get the adjustable (not fixed or tied) parameter names
Returns
-------
adj_par_names : list
list of adjustable (not fixed or tied) parameter names
"""
adj_names = []
for t,n in zip(self.parameter_data.partrans,
self.parameter_data.parnme):
if t.lower() not in ["tied","fixed"]:
adj_names.append(n)
return adj_names
|
[
"def",
"adj_par_names",
"(",
"self",
")",
":",
"adj_names",
"=",
"[",
"]",
"for",
"t",
",",
"n",
"in",
"zip",
"(",
"self",
".",
"parameter_data",
".",
"partrans",
",",
"self",
".",
"parameter_data",
".",
"parnme",
")",
":",
"if",
"t",
".",
"lower",
"(",
")",
"not",
"in",
"[",
"\"tied\"",
",",
"\"fixed\"",
"]",
":",
"adj_names",
".",
"append",
"(",
"n",
")",
"return",
"adj_names"
] |
get the adjustable (not fixed or tied) parameter names
Returns
-------
adj_par_names : list
list of adjustable (not fixed or tied) parameter names
|
[
"get",
"the",
"adjustable",
"(",
"not",
"fixed",
"or",
"tied",
")",
"parameter",
"names"
] |
python
|
train
|
juju/python-libjuju
|
juju/client/_client5.py
|
https://github.com/juju/python-libjuju/blob/58f0011f4c57cd68830258952fa952eaadca6b38/juju/client/_client5.py#L3012-L3035
|
async def CreateModel(self, cloud_tag, config, credential, name, owner_tag, region):
'''
cloud_tag : str
config : typing.Mapping[str, typing.Any]
credential : str
name : str
owner_tag : str
region : str
Returns -> typing.Union[_ForwardRef('Number'), str, typing.Sequence[~ModelMachineInfo], _ForwardRef('ModelMigrationStatus'), _ForwardRef('ModelSLAInfo'), _ForwardRef('EntityStatus'), typing.Sequence[~ModelUserInfo]]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='CreateModel',
version=5,
params=_params)
_params['cloud-tag'] = cloud_tag
_params['config'] = config
_params['credential'] = credential
_params['name'] = name
_params['owner-tag'] = owner_tag
_params['region'] = region
reply = await self.rpc(msg)
return reply
|
[
"async",
"def",
"CreateModel",
"(",
"self",
",",
"cloud_tag",
",",
"config",
",",
"credential",
",",
"name",
",",
"owner_tag",
",",
"region",
")",
":",
"# map input types to rpc msg",
"_params",
"=",
"dict",
"(",
")",
"msg",
"=",
"dict",
"(",
"type",
"=",
"'ModelManager'",
",",
"request",
"=",
"'CreateModel'",
",",
"version",
"=",
"5",
",",
"params",
"=",
"_params",
")",
"_params",
"[",
"'cloud-tag'",
"]",
"=",
"cloud_tag",
"_params",
"[",
"'config'",
"]",
"=",
"config",
"_params",
"[",
"'credential'",
"]",
"=",
"credential",
"_params",
"[",
"'name'",
"]",
"=",
"name",
"_params",
"[",
"'owner-tag'",
"]",
"=",
"owner_tag",
"_params",
"[",
"'region'",
"]",
"=",
"region",
"reply",
"=",
"await",
"self",
".",
"rpc",
"(",
"msg",
")",
"return",
"reply"
] |
cloud_tag : str
config : typing.Mapping[str, typing.Any]
credential : str
name : str
owner_tag : str
region : str
Returns -> typing.Union[_ForwardRef('Number'), str, typing.Sequence[~ModelMachineInfo], _ForwardRef('ModelMigrationStatus'), _ForwardRef('ModelSLAInfo'), _ForwardRef('EntityStatus'), typing.Sequence[~ModelUserInfo]]
|
[
"cloud_tag",
":",
"str",
"config",
":",
"typing",
".",
"Mapping",
"[",
"str",
"typing",
".",
"Any",
"]",
"credential",
":",
"str",
"name",
":",
"str",
"owner_tag",
":",
"str",
"region",
":",
"str",
"Returns",
"-",
">",
"typing",
".",
"Union",
"[",
"_ForwardRef",
"(",
"Number",
")",
"str",
"typing",
".",
"Sequence",
"[",
"~ModelMachineInfo",
"]",
"_ForwardRef",
"(",
"ModelMigrationStatus",
")",
"_ForwardRef",
"(",
"ModelSLAInfo",
")",
"_ForwardRef",
"(",
"EntityStatus",
")",
"typing",
".",
"Sequence",
"[",
"~ModelUserInfo",
"]]"
] |
python
|
train
|
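The `CreateModel` record above only shows the facade call itself, so the sketch below assumes an already-connected version-5 ModelManager facade; the cloud, credential, owner and region values are placeholders rather than anything taken from the record.

import asyncio

async def create_scratch_model(model_manager):
    # `model_manager` is assumed to be a connected ModelManager facade
    # (version 5); only the CreateModel signature comes from the record.
    reply = await model_manager.CreateModel(
        cloud_tag='cloud-aws',
        config={},
        credential='cloudcred-aws_admin_default',
        name='ci-scratch',
        owner_tag='user-admin',
        region='us-east-1')
    return reply

# asyncio.get_event_loop().run_until_complete(create_scratch_model(facade))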
buildbot/buildbot_travis
|
buildbot_travis/steps/create_steps.py
|
https://github.com/buildbot/buildbot_travis/blob/350c657b7aabaf5bc6a9fdb55febdd9d8eabd60c/buildbot_travis/steps/create_steps.py#L84-L92
|
def setupEnvironment(self, cmd):
""" Turn all build properties into environment variables """
shell.ShellCommand.setupEnvironment(self, cmd)
env = {}
for k, v in self.build.getProperties().properties.items():
env[str(k)] = str(v[0])
if cmd.args['env'] is None:
cmd.args['env'] = {}
cmd.args['env'].update(env)
|
[
"def",
"setupEnvironment",
"(",
"self",
",",
"cmd",
")",
":",
"shell",
".",
"ShellCommand",
".",
"setupEnvironment",
"(",
"self",
",",
"cmd",
")",
"env",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"self",
".",
"build",
".",
"getProperties",
"(",
")",
".",
"properties",
".",
"items",
"(",
")",
":",
"env",
"[",
"str",
"(",
"k",
")",
"]",
"=",
"str",
"(",
"v",
"[",
"0",
"]",
")",
"if",
"cmd",
".",
"args",
"[",
"'env'",
"]",
"is",
"None",
":",
"cmd",
".",
"args",
"[",
"'env'",
"]",
"=",
"{",
"}",
"cmd",
".",
"args",
"[",
"'env'",
"]",
".",
"update",
"(",
"env",
")"
] |
Turn all build properties into environment variables
|
[
"Turn",
"all",
"build",
"properties",
"into",
"environment",
"variables"
] |
python
|
train
|
linkhub-sdk/popbill.py
|
popbill/taxinvoiceService.py
|
https://github.com/linkhub-sdk/popbill.py/blob/68a0dd7f7a937603318e93be321fde73c50b96cc/popbill/taxinvoiceService.py#L514-L533
|
def sendToNTS(self, CorpNum, MgtKeyType, MgtKey, UserID=None):
""" 국세청 즉시전송
args
CorpNum : 회원 사업자 번호
MgtKeyType : 관리번호 유형 one of ['SELL','BUY','TRUSTEE']
MgtKey : 파트너 관리번호
UserID : 팝빌 회원아이디
return
처리결과. consist of code and message
raise
PopbillException
"""
if MgtKeyType not in self.__MgtKeyTypes:
raise PopbillException(-99999999, "관리번호 형태가 올바르지 않습니다.")
if MgtKey == None or MgtKey == "":
raise PopbillException(-99999999, "관리번호가 입력되지 않았습니다.")
postData = ''
return self._httppost('/Taxinvoice/' + MgtKeyType + "/" + MgtKey, postData, CorpNum, UserID, "NTS")
|
[
"def",
"sendToNTS",
"(",
"self",
",",
"CorpNum",
",",
"MgtKeyType",
",",
"MgtKey",
",",
"UserID",
"=",
"None",
")",
":",
"if",
"MgtKeyType",
"not",
"in",
"self",
".",
"__MgtKeyTypes",
":",
"raise",
"PopbillException",
"(",
"-",
"99999999",
",",
"\"관리번호 형태가 올바르지 않습니다.\")",
"",
"if",
"MgtKey",
"==",
"None",
"or",
"MgtKey",
"==",
"\"\"",
":",
"raise",
"PopbillException",
"(",
"-",
"99999999",
",",
"\"관리번호가 입력되지 않았습니다.\")",
"",
"postData",
"=",
"''",
"return",
"self",
".",
"_httppost",
"(",
"'/Taxinvoice/'",
"+",
"MgtKeyType",
"+",
"\"/\"",
"+",
"MgtKey",
",",
"postData",
",",
"CorpNum",
",",
"UserID",
",",
"\"NTS\"",
")"
] |
국세청 즉시전송
args
CorpNum : 회원 사업자 번호
MgtKeyType : 관리번호 유형 one of ['SELL','BUY','TRUSTEE']
MgtKey : 파트너 관리번호
UserID : 팝빌 회원아이디
return
처리결과. consist of code and message
raise
PopbillException
|
[
"국세청",
"즉시전송",
"args",
"CorpNum",
":",
"회원",
"사업자",
"번호",
"MgtKeyType",
":",
"관리번호",
"유형",
"one",
"of",
"[",
"SELL",
"BUY",
"TRUSTEE",
"]",
"MgtKey",
":",
"파트너",
"관리번호",
"UserID",
":",
"팝빌",
"회원아이디",
"return",
"처리결과",
".",
"consist",
"of",
"code",
"and",
"message",
"raise",
"PopbillException"
] |
python
|
train
|
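As a usage sketch for the `sendToNTS` record above: the service setup is not shown by the record, so an already-configured TaxinvoiceService instance is assumed, and the corporate number and management key are placeholders.

from popbill import PopbillException  # import path assumed

def send_invoice_to_nts(taxinvoice_service, corp_num, mgt_key):
    # `taxinvoice_service` is assumed to be a configured popbill
    # TaxinvoiceService; 'SELL' is one of the documented MgtKeyType values.
    try:
        result = taxinvoice_service.sendToNTS(corp_num, 'SELL', mgt_key)
        print(result.code, result.message)
    except PopbillException as err:
        print(err.code, err.message)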
vatlab/SoS
|
src/sos/utils.py
|
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/utils.py#L598-L608
|
def dehtml(text):
    '''Remove HTML tags from the input text and format the text
    accordingly. '''
try:
parser = _DeHTMLParser()
parser.feed(text)
parser.close()
return parser.text()
except Exception as e:
env.logger.warning(f'Failed to dehtml text: {e}')
return text
|
[
"def",
"dehtml",
"(",
"text",
")",
":",
"try",
":",
"parser",
"=",
"_DeHTMLParser",
"(",
")",
"parser",
".",
"feed",
"(",
"text",
")",
"parser",
".",
"close",
"(",
")",
"return",
"parser",
".",
"text",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"env",
".",
"logger",
".",
"warning",
"(",
"f'Failed to dehtml text: {e}'",
")",
"return",
"text"
] |
Remove HTML tags from the input text and format the text
accordingly.
|
[
"Remove",
"HTML",
"tag",
"in",
"input",
"text",
"and",
"format",
"the",
"texts",
"accordingly",
"."
] |
python
|
train
|
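A small usage sketch for the `dehtml` record above; the import path follows the record's file location (src/sos/utils.py) and is an assumption.

from sos.utils import dehtml  # import path assumed from the file location

html = '<p>Hello <b>SoS</b>!</p><ul><li>one</li><li>two</li></ul>'
# On success the helper returns the text with tags stripped; on any
# parsing error it logs a warning and returns the input unchanged.
print(dehtml(html))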
tgalal/yowsup
|
yowsup/config/base/serialize.py
|
https://github.com/tgalal/yowsup/blob/b0739461ba962bf221fc76047d9d60d8ce61bc3e/yowsup/config/base/serialize.py#L6-L15
|
def serialize(self, config):
"""
:param config:
:type config: yowsup.config.base.config.Config
:return:
:rtype: bytes
"""
for transform in self._transforms:
config = transform.transform(config)
return config
|
[
"def",
"serialize",
"(",
"self",
",",
"config",
")",
":",
"for",
"transform",
"in",
"self",
".",
"_transforms",
":",
"config",
"=",
"transform",
".",
"transform",
"(",
"config",
")",
"return",
"config"
] |
:param config:
:type config: yowsup.config.base.config.Config
:return:
:rtype: bytes
|
[
":",
"param",
"config",
":",
":",
"type",
"config",
":",
"yowsup",
".",
"config",
".",
"base",
".",
"config",
".",
"Config",
":",
"return",
":",
":",
"rtype",
":",
"bytes"
] |
python
|
train
|
Esri/ArcREST
|
src/arcrest/common/general.py
|
https://github.com/Esri/ArcREST/blob/ab240fde2b0200f61d4a5f6df033516e53f2f416/src/arcrest/common/general.py#L609-L636
|
def fromJSON(jsonValue):
"""returns a featureset from a JSON string"""
jd = json.loads(jsonValue)
features = []
if 'fields' in jd:
fields = jd['fields']
else:
fields = {'fields':[]}
if 'features' in jd:
for feat in jd['features']:
wkid = None
spatialReference =None
if 'spatialReference' in jd:
spatialReference = jd['spatialReference']
if 'wkid' in jd['spatialReference']:
wkid = jd['spatialReference']['wkid']
elif 'latestWkid' in jd['spatialReference']: # kept for compatibility
wkid = jd['spatialReference']['latestWkid']
features.append(Feature(json_string=feat, wkid=wkid, spatialReference=spatialReference))
return FeatureSet(fields,
features,
hasZ=jd['hasZ'] if 'hasZ' in jd else False,
hasM=jd['hasM'] if 'hasM' in jd else False,
geometryType=jd['geometryType'] if 'geometryType' in jd else None,
objectIdFieldName=jd['objectIdFieldName'] if 'objectIdFieldName' in jd else None,
globalIdFieldName=jd['globalIdFieldName'] if 'globalIdFieldName' in jd else None,
displayFieldName=jd['displayFieldName'] if 'displayFieldName' in jd else None,
spatialReference=jd['spatialReference'] if 'spatialReference' in jd else None)
|
[
"def",
"fromJSON",
"(",
"jsonValue",
")",
":",
"jd",
"=",
"json",
".",
"loads",
"(",
"jsonValue",
")",
"features",
"=",
"[",
"]",
"if",
"'fields'",
"in",
"jd",
":",
"fields",
"=",
"jd",
"[",
"'fields'",
"]",
"else",
":",
"fields",
"=",
"{",
"'fields'",
":",
"[",
"]",
"}",
"if",
"'features'",
"in",
"jd",
":",
"for",
"feat",
"in",
"jd",
"[",
"'features'",
"]",
":",
"wkid",
"=",
"None",
"spatialReference",
"=",
"None",
"if",
"'spatialReference'",
"in",
"jd",
":",
"spatialReference",
"=",
"jd",
"[",
"'spatialReference'",
"]",
"if",
"'wkid'",
"in",
"jd",
"[",
"'spatialReference'",
"]",
":",
"wkid",
"=",
"jd",
"[",
"'spatialReference'",
"]",
"[",
"'wkid'",
"]",
"elif",
"'latestWkid'",
"in",
"jd",
"[",
"'spatialReference'",
"]",
":",
"# kept for compatibility",
"wkid",
"=",
"jd",
"[",
"'spatialReference'",
"]",
"[",
"'latestWkid'",
"]",
"features",
".",
"append",
"(",
"Feature",
"(",
"json_string",
"=",
"feat",
",",
"wkid",
"=",
"wkid",
",",
"spatialReference",
"=",
"spatialReference",
")",
")",
"return",
"FeatureSet",
"(",
"fields",
",",
"features",
",",
"hasZ",
"=",
"jd",
"[",
"'hasZ'",
"]",
"if",
"'hasZ'",
"in",
"jd",
"else",
"False",
",",
"hasM",
"=",
"jd",
"[",
"'hasM'",
"]",
"if",
"'hasM'",
"in",
"jd",
"else",
"False",
",",
"geometryType",
"=",
"jd",
"[",
"'geometryType'",
"]",
"if",
"'geometryType'",
"in",
"jd",
"else",
"None",
",",
"objectIdFieldName",
"=",
"jd",
"[",
"'objectIdFieldName'",
"]",
"if",
"'objectIdFieldName'",
"in",
"jd",
"else",
"None",
",",
"globalIdFieldName",
"=",
"jd",
"[",
"'globalIdFieldName'",
"]",
"if",
"'globalIdFieldName'",
"in",
"jd",
"else",
"None",
",",
"displayFieldName",
"=",
"jd",
"[",
"'displayFieldName'",
"]",
"if",
"'displayFieldName'",
"in",
"jd",
"else",
"None",
",",
"spatialReference",
"=",
"jd",
"[",
"'spatialReference'",
"]",
"if",
"'spatialReference'",
"in",
"jd",
"else",
"None",
")"
] |
returns a featureset from a JSON string
|
[
"returns",
"a",
"featureset",
"from",
"a",
"JSON",
"string"
] |
python
|
train
|
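A hedged round-trip sketch for the `fromJSON` record above, assuming it is exposed as a static method of FeatureSet in arcrest.common.general; the payload contains only the keys the record reads, and the attribute used on the result is an assumption.

import json
from arcrest.common.general import FeatureSet  # import path assumed

# Build a minimal Esri featureset payload and parse it back.
payload = {
    "geometryType": "esriGeometryPoint",
    "spatialReference": {"wkid": 4326},
    "fields": [{"name": "OBJECTID", "type": "esriFieldTypeOID"}],
    "features": [
        {"attributes": {"OBJECTID": 1},
         "geometry": {"x": -122.4, "y": 37.8}},
    ],
}
fs = FeatureSet.fromJSON(json.dumps(payload))
print(len(fs.features))  # `features` attribute assumed on the result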
mitsei/dlkit
|
dlkit/handcar/relationship/managers.py
|
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/handcar/relationship/managers.py#L535-L567
|
def get_relationship_admin_session_for_family(self, family_id=None):
"""Gets the ``OsidSession`` associated with the relationship administration service for the given family.
arg: family_id (osid.id.Id): the ``Id`` of the ``Family``
return: (osid.relationship.RelationshipAdminSession) - a
``RelationshipAdminSession``
raise: NotFound - no family found by the given ``Id``
raise: NullArgument - ``family_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_relationship_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if ``supports_relationship_admin()``
and ``supports_visible_federation()`` are ``true``*
"""
if not family_id:
raise NullArgument
if not self.supports_relationship_admin():
raise Unimplemented()
##
# Need to include check to see if the familyId is found otherwise raise NotFound
##
try:
from . import sessions
except ImportError:
raise OperationFailed()
try:
session = sessions.RelationshipAdminSession(family_id,
proxy=self._proxy,
runtime=self._runtime)
except AttributeError:
raise OperationFailed()
return session
|
[
"def",
"get_relationship_admin_session_for_family",
"(",
"self",
",",
"family_id",
"=",
"None",
")",
":",
"if",
"not",
"family_id",
":",
"raise",
"NullArgument",
"if",
"not",
"self",
".",
"supports_relationship_admin",
"(",
")",
":",
"raise",
"Unimplemented",
"(",
")",
"##",
"# Need to include check to see if the familyId is found otherwise raise NotFound",
"##",
"try",
":",
"from",
".",
"import",
"sessions",
"except",
"ImportError",
":",
"raise",
"OperationFailed",
"(",
")",
"try",
":",
"session",
"=",
"sessions",
".",
"RelationshipAdminSession",
"(",
"family_id",
",",
"proxy",
"=",
"self",
".",
"_proxy",
",",
"runtime",
"=",
"self",
".",
"_runtime",
")",
"except",
"AttributeError",
":",
"raise",
"OperationFailed",
"(",
")",
"return",
"session"
] |
Gets the ``OsidSession`` associated with the relationship administration service for the given family.
arg: family_id (osid.id.Id): the ``Id`` of the ``Family``
return: (osid.relationship.RelationshipAdminSession) - a
``RelationshipAdminSession``
raise: NotFound - no family found by the given ``Id``
raise: NullArgument - ``family_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_relationship_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if ``supports_relationship_admin()``
and ``supports_visible_federation()`` are ``true``*
|
[
"Gets",
"the",
"OsidSession",
"associated",
"with",
"the",
"relationship",
"administration",
"service",
"for",
"the",
"given",
"family",
"."
] |
python
|
train
|
klen/peewee_migrate
|
peewee_migrate/router.py
|
https://github.com/klen/peewee_migrate/blob/b77895ab1c9be3121bc127e0c2dfb047eed8b24c/peewee_migrate/router.py#L134-L168
|
def run_one(self, name, migrator, fake=True, downgrade=False, force=False):
"""Run/emulate a migration with given name."""
try:
migrate, rollback = self.read(name)
if fake:
with mock.patch('peewee.Model.select'):
with mock.patch('peewee.Query._execute'):
migrate(migrator, self.database, fake=fake)
if force:
self.model.create(name=name)
self.logger.info('Done %s', name)
migrator.clean()
return migrator
with self.database.transaction():
if not downgrade:
self.logger.info('Migrate "%s"', name)
migrate(migrator, self.database, fake=fake)
migrator.run()
self.model.create(name=name)
else:
self.logger.info('Rolling back %s', name)
rollback(migrator, self.database, fake=fake)
migrator.run()
self.model.delete().where(self.model.name == name).execute()
self.logger.info('Done %s', name)
except Exception:
self.database.rollback()
operation = 'Migration' if not downgrade else 'Rollback'
self.logger.exception('%s failed: %s', operation, name)
raise
|
[
"def",
"run_one",
"(",
"self",
",",
"name",
",",
"migrator",
",",
"fake",
"=",
"True",
",",
"downgrade",
"=",
"False",
",",
"force",
"=",
"False",
")",
":",
"try",
":",
"migrate",
",",
"rollback",
"=",
"self",
".",
"read",
"(",
"name",
")",
"if",
"fake",
":",
"with",
"mock",
".",
"patch",
"(",
"'peewee.Model.select'",
")",
":",
"with",
"mock",
".",
"patch",
"(",
"'peewee.Query._execute'",
")",
":",
"migrate",
"(",
"migrator",
",",
"self",
".",
"database",
",",
"fake",
"=",
"fake",
")",
"if",
"force",
":",
"self",
".",
"model",
".",
"create",
"(",
"name",
"=",
"name",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'Done %s'",
",",
"name",
")",
"migrator",
".",
"clean",
"(",
")",
"return",
"migrator",
"with",
"self",
".",
"database",
".",
"transaction",
"(",
")",
":",
"if",
"not",
"downgrade",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Migrate \"%s\"'",
",",
"name",
")",
"migrate",
"(",
"migrator",
",",
"self",
".",
"database",
",",
"fake",
"=",
"fake",
")",
"migrator",
".",
"run",
"(",
")",
"self",
".",
"model",
".",
"create",
"(",
"name",
"=",
"name",
")",
"else",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Rolling back %s'",
",",
"name",
")",
"rollback",
"(",
"migrator",
",",
"self",
".",
"database",
",",
"fake",
"=",
"fake",
")",
"migrator",
".",
"run",
"(",
")",
"self",
".",
"model",
".",
"delete",
"(",
")",
".",
"where",
"(",
"self",
".",
"model",
".",
"name",
"==",
"name",
")",
".",
"execute",
"(",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'Done %s'",
",",
"name",
")",
"except",
"Exception",
":",
"self",
".",
"database",
".",
"rollback",
"(",
")",
"operation",
"=",
"'Migration'",
"if",
"not",
"downgrade",
"else",
"'Rollback'",
"self",
".",
"logger",
".",
"exception",
"(",
"'%s failed: %s'",
",",
"operation",
",",
"name",
")",
"raise"
] |
Run/emulate a migration with given name.
|
[
"Run",
"/",
"emulate",
"a",
"migration",
"with",
"given",
"name",
"."
] |
python
|
train
|
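A sketch of driving the `run_one` record above from a Router; the SQLite path and migration name are placeholders, and the `router.migrator` attribute is an assumption about the Router class rather than something shown in the record.

from peewee import SqliteDatabase
from peewee_migrate import Router

db = SqliteDatabase('app.db')          # database path is a placeholder
router = Router(db)

# Apply one migration by name; fake=False actually runs it, while the
# record's default (fake=True) only emulates the schema changes.
router.run_one('001_initial', router.migrator, fake=False)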
DataONEorg/d1_python
|
lib_client/src/d1_client/cnclient.py
|
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_client/src/d1_client/cnclient.py#L100-L111
|
def getFormat(self, formatId, vendorSpecific=None):
"""See Also: getFormatResponse()
Args:
formatId:
vendorSpecific:
Returns:
"""
response = self.getFormatResponse(formatId, vendorSpecific)
return self._read_dataone_type_response(response, 'ObjectFormat')
|
[
"def",
"getFormat",
"(",
"self",
",",
"formatId",
",",
"vendorSpecific",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"getFormatResponse",
"(",
"formatId",
",",
"vendorSpecific",
")",
"return",
"self",
".",
"_read_dataone_type_response",
"(",
"response",
",",
"'ObjectFormat'",
")"
] |
See Also: getFormatResponse()
Args:
formatId:
vendorSpecific:
Returns:
|
[
"See",
"Also",
":",
"getFormatResponse",
"()"
] |
python
|
train
|
gem/oq-engine
|
openquake/hmtk/plotting/seismicity/catalogue_plots.py
|
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L261-L342
|
def plot_magnitude_time_density(
catalogue, mag_int, time_int, completeness=None,
normalisation=False, logscale=True, bootstrap=None, xlim=[], ylim=[],
filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None):
"""
Creates a plot of magnitude-time density
:param catalogue:
Earthquake catalogue as instance of :class:
openquake.hmtk.seismicity.catalogue.Catalogue
:param float mag_int:
Width of the histogram for the magnitude bins
:param float time_int:
Width of the histogram for the time bin (in decimal years)
:param bool normalisation:
Normalise the histogram to give output as PMF (True) or count (False)
:param int bootstrap:
To sample magnitude and depth uncertainties choose number of samples
"""
if ax is None:
fig, ax = plt.subplots(figsize=figure_size)
else:
fig = ax.get_figure()
# Create the magnitude bins
if isinstance(mag_int, (np.ndarray, list)):
mag_bins = mag_int
else:
mag_bins = np.arange(
np.min(catalogue.data['magnitude']),
np.max(catalogue.data['magnitude']) + mag_int / 2.,
mag_int)
# Creates the time bins
if isinstance(time_int, (np.ndarray, list)):
time_bins = time_int
else:
time_bins = np.arange(
float(np.min(catalogue.data['year'])),
float(np.max(catalogue.data['year'])) + 1.,
float(time_int))
# Get magnitude-time distribution
mag_time_dist = catalogue.get_magnitude_time_distribution(
mag_bins,
time_bins,
normalisation,
bootstrap)
# Get smallest non-zero value
vmin_val = np.min(mag_time_dist[mag_time_dist > 0.])
# Create plot
if logscale:
norm_data = LogNorm(vmin=vmin_val, vmax=np.max(mag_time_dist))
else:
if normalisation:
norm_data = Normalize(vmin=vmin_val, vmax=np.max(mag_time_dist))
else:
norm_data = Normalize(vmin=1.0, vmax=np.max(mag_time_dist))
im = ax.pcolor(time_bins[:-1],
mag_bins[:-1],
mag_time_dist.T,
norm=norm_data)
ax.set_xlabel('Time (year)')
ax.set_ylabel('Magnitude')
if len(xlim) == 2:
ax.set_xlim(xlim[0], xlim[1])
else:
ax.set_xlim(time_bins[0], time_bins[-1])
if len(ylim) == 2:
ax.set_ylim(ylim[0], ylim[1])
else:
ax.set_ylim(mag_bins[0], mag_bins[-1] + (mag_bins[-1] - mag_bins[-2]))
# Fix the title
if normalisation:
fig.colorbar(im, label='Event Density', shrink=0.9, ax=ax)
else:
fig.colorbar(im, label='Event Count', shrink=0.9, ax=ax)
ax.grid(True)
# Plot completeness
if completeness is not None:
_plot_completeness(ax, completeness, time_bins[0], time_bins[-1])
_save_image(fig, filename, filetype, dpi)
|
[
"def",
"plot_magnitude_time_density",
"(",
"catalogue",
",",
"mag_int",
",",
"time_int",
",",
"completeness",
"=",
"None",
",",
"normalisation",
"=",
"False",
",",
"logscale",
"=",
"True",
",",
"bootstrap",
"=",
"None",
",",
"xlim",
"=",
"[",
"]",
",",
"ylim",
"=",
"[",
"]",
",",
"filename",
"=",
"None",
",",
"figure_size",
"=",
"(",
"8",
",",
"6",
")",
",",
"filetype",
"=",
"'png'",
",",
"dpi",
"=",
"300",
",",
"ax",
"=",
"None",
")",
":",
"if",
"ax",
"is",
"None",
":",
"fig",
",",
"ax",
"=",
"plt",
".",
"subplots",
"(",
"figsize",
"=",
"figure_size",
")",
"else",
":",
"fig",
"=",
"ax",
".",
"get_figure",
"(",
")",
"# Create the magnitude bins",
"if",
"isinstance",
"(",
"mag_int",
",",
"(",
"np",
".",
"ndarray",
",",
"list",
")",
")",
":",
"mag_bins",
"=",
"mag_int",
"else",
":",
"mag_bins",
"=",
"np",
".",
"arange",
"(",
"np",
".",
"min",
"(",
"catalogue",
".",
"data",
"[",
"'magnitude'",
"]",
")",
",",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'magnitude'",
"]",
")",
"+",
"mag_int",
"/",
"2.",
",",
"mag_int",
")",
"# Creates the time bins",
"if",
"isinstance",
"(",
"time_int",
",",
"(",
"np",
".",
"ndarray",
",",
"list",
")",
")",
":",
"time_bins",
"=",
"time_int",
"else",
":",
"time_bins",
"=",
"np",
".",
"arange",
"(",
"float",
"(",
"np",
".",
"min",
"(",
"catalogue",
".",
"data",
"[",
"'year'",
"]",
")",
")",
",",
"float",
"(",
"np",
".",
"max",
"(",
"catalogue",
".",
"data",
"[",
"'year'",
"]",
")",
")",
"+",
"1.",
",",
"float",
"(",
"time_int",
")",
")",
"# Get magnitude-time distribution",
"mag_time_dist",
"=",
"catalogue",
".",
"get_magnitude_time_distribution",
"(",
"mag_bins",
",",
"time_bins",
",",
"normalisation",
",",
"bootstrap",
")",
"# Get smallest non-zero value",
"vmin_val",
"=",
"np",
".",
"min",
"(",
"mag_time_dist",
"[",
"mag_time_dist",
">",
"0.",
"]",
")",
"# Create plot",
"if",
"logscale",
":",
"norm_data",
"=",
"LogNorm",
"(",
"vmin",
"=",
"vmin_val",
",",
"vmax",
"=",
"np",
".",
"max",
"(",
"mag_time_dist",
")",
")",
"else",
":",
"if",
"normalisation",
":",
"norm_data",
"=",
"Normalize",
"(",
"vmin",
"=",
"vmin_val",
",",
"vmax",
"=",
"np",
".",
"max",
"(",
"mag_time_dist",
")",
")",
"else",
":",
"norm_data",
"=",
"Normalize",
"(",
"vmin",
"=",
"1.0",
",",
"vmax",
"=",
"np",
".",
"max",
"(",
"mag_time_dist",
")",
")",
"im",
"=",
"ax",
".",
"pcolor",
"(",
"time_bins",
"[",
":",
"-",
"1",
"]",
",",
"mag_bins",
"[",
":",
"-",
"1",
"]",
",",
"mag_time_dist",
".",
"T",
",",
"norm",
"=",
"norm_data",
")",
"ax",
".",
"set_xlabel",
"(",
"'Time (year)'",
")",
"ax",
".",
"set_ylabel",
"(",
"'Magnitude'",
")",
"if",
"len",
"(",
"xlim",
")",
"==",
"2",
":",
"ax",
".",
"set_xlim",
"(",
"xlim",
"[",
"0",
"]",
",",
"xlim",
"[",
"1",
"]",
")",
"else",
":",
"ax",
".",
"set_xlim",
"(",
"time_bins",
"[",
"0",
"]",
",",
"time_bins",
"[",
"-",
"1",
"]",
")",
"if",
"len",
"(",
"ylim",
")",
"==",
"2",
":",
"ax",
".",
"set_ylim",
"(",
"ylim",
"[",
"0",
"]",
",",
"ylim",
"[",
"1",
"]",
")",
"else",
":",
"ax",
".",
"set_ylim",
"(",
"mag_bins",
"[",
"0",
"]",
",",
"mag_bins",
"[",
"-",
"1",
"]",
"+",
"(",
"mag_bins",
"[",
"-",
"1",
"]",
"-",
"mag_bins",
"[",
"-",
"2",
"]",
")",
")",
"# Fix the title",
"if",
"normalisation",
":",
"fig",
".",
"colorbar",
"(",
"im",
",",
"label",
"=",
"'Event Density'",
",",
"shrink",
"=",
"0.9",
",",
"ax",
"=",
"ax",
")",
"else",
":",
"fig",
".",
"colorbar",
"(",
"im",
",",
"label",
"=",
"'Event Count'",
",",
"shrink",
"=",
"0.9",
",",
"ax",
"=",
"ax",
")",
"ax",
".",
"grid",
"(",
"True",
")",
"# Plot completeness",
"if",
"completeness",
"is",
"not",
"None",
":",
"_plot_completeness",
"(",
"ax",
",",
"completeness",
",",
"time_bins",
"[",
"0",
"]",
",",
"time_bins",
"[",
"-",
"1",
"]",
")",
"_save_image",
"(",
"fig",
",",
"filename",
",",
"filetype",
",",
"dpi",
")"
] |
Creates a plot of magnitude-time density
:param catalogue:
Earthquake catalogue as instance of :class:
openquake.hmtk.seismicity.catalogue.Catalogue
:param float mag_int:
Width of the histogram for the magnitude bins
:param float time_int:
Width of the histogram for the time bin (in decimal years)
:param bool normalisation:
Normalise the histogram to give output as PMF (True) or count (False)
:param int bootstrap:
To sample magnitude and depth uncertainties choose number of samples
|
[
"Creates",
"a",
"plot",
"of",
"magnitude",
"-",
"time",
"density"
] |
python
|
train
|
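A usage sketch for the `plot_magnitude_time_density` record above, assuming a catalogue loaded from an hmtk-format CSV; the parser import path and the file names are assumptions.

from openquake.hmtk.parsers.catalogue.csv_catalogue_parser import (
    CsvCatalogueParser)  # parser import path assumed
from openquake.hmtk.plotting.seismicity.catalogue_plots import (
    plot_magnitude_time_density)

# Read an hmtk-format catalogue and plot a magnitude-time density with
# 0.1-magnitude and 1-year bins; passing filename saves the figure.
catalogue = CsvCatalogueParser('catalogue.csv').read_file()
plot_magnitude_time_density(catalogue, mag_int=0.1, time_int=1.0,
                            filename='mag_time_density.png')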
ceph/ceph-deploy
|
ceph_deploy/config.py
|
https://github.com/ceph/ceph-deploy/blob/86943fcc454cd4c99a86e3493e9e93a59c661fef/ceph_deploy/config.py#L81-L111
|
def make(parser):
"""
Copy ceph.conf to/from remote host(s)
"""
config_parser = parser.add_subparsers(dest='subcommand')
config_parser.required = True
config_push = config_parser.add_parser(
'push',
help='push Ceph config file to one or more remote hosts'
)
config_push.add_argument(
'client',
metavar='HOST',
nargs='+',
help='host(s) to push the config file to',
)
config_pull = config_parser.add_parser(
'pull',
help='pull Ceph config file from one or more remote hosts'
)
config_pull.add_argument(
'client',
metavar='HOST',
nargs='+',
help='host(s) to pull the config file from',
)
parser.set_defaults(
func=config,
)
|
[
"def",
"make",
"(",
"parser",
")",
":",
"config_parser",
"=",
"parser",
".",
"add_subparsers",
"(",
"dest",
"=",
"'subcommand'",
")",
"config_parser",
".",
"required",
"=",
"True",
"config_push",
"=",
"config_parser",
".",
"add_parser",
"(",
"'push'",
",",
"help",
"=",
"'push Ceph config file to one or more remote hosts'",
")",
"config_push",
".",
"add_argument",
"(",
"'client'",
",",
"metavar",
"=",
"'HOST'",
",",
"nargs",
"=",
"'+'",
",",
"help",
"=",
"'host(s) to push the config file to'",
",",
")",
"config_pull",
"=",
"config_parser",
".",
"add_parser",
"(",
"'pull'",
",",
"help",
"=",
"'pull Ceph config file from one or more remote hosts'",
")",
"config_pull",
".",
"add_argument",
"(",
"'client'",
",",
"metavar",
"=",
"'HOST'",
",",
"nargs",
"=",
"'+'",
",",
"help",
"=",
"'host(s) to pull the config file from'",
",",
")",
"parser",
".",
"set_defaults",
"(",
"func",
"=",
"config",
",",
")"
] |
Copy ceph.conf to/from remote host(s)
|
[
"Copy",
"ceph",
".",
"conf",
"to",
"/",
"from",
"remote",
"host",
"(",
"s",
")"
] |
python
|
train
|
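The `make` record above wires a push/pull sub-command pair onto an existing parser. The standalone argparse sketch below reproduces that pattern for illustration; it is not ceph-deploy's actual entry point.

import argparse

parser = argparse.ArgumentParser(prog='config')
sub = parser.add_subparsers(dest='subcommand')
sub.required = True

push = sub.add_parser('push', help='push a config file to remote hosts')
push.add_argument('client', metavar='HOST', nargs='+')

pull = sub.add_parser('pull', help='pull a config file from remote hosts')
pull.add_argument('client', metavar='HOST', nargs='+')

args = parser.parse_args(['push', 'node1', 'node2'])
print(args.subcommand, args.client)   # -> push ['node1', 'node2']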
rsgalloway/grit
|
grit/server/git_http_backend.py
|
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/server/git_http_backend.py#L162-L212
|
def add(self, path, default_handler = None, **http_methods):
"""
Add a selector mapping.
add(path, default_handler, **named_handlers)
        Adding order is important. First added = first matched.
        If you want a special-case URI handled by one app and a shorter
        version of the same regex string handled by another app,
        .add() the special case first.
Inputs:
path - A regex string. We will compile it.
Highly recommend using grouping of type: "(?P<groupname>.+)"
These will be exposed to WSGI app through environment key
per http://www.wsgi.org/wsgi/Specifications/routing_args
default_handler - (optional) A pointer to the function / iterable
class instance that will handle ALL HTTP methods (verbs)
**named_handlers - (optional) An unlimited list of named args or
an unpacked dict of handlers allocated to handle specific HTTP
methods (HTTP verbs). See "Examples" below.
Matched named method handlers override default handler.
If neither default_handler nor named_handlers point to any methods,
"Method not implemented" is returned for the requests on this URI.
Examples:
selectorInstance.add('^(?P<working_path>.*)$',generic_handler,
POST=post_handler, HEAD=head_handler)
custom_assembled_dict = {'GET':wsgi_app_a,'POST':wsgi_app_b}:
## note the unpacking - "**" - of the dict in this case.
selectorInstance.add('^(?P<working_path>.*)$', **custom_assembled_dict)
If the string contains '\?' (escaped ?, which translates to '?' in
non-regex strings) we understand that as "do regex matching on
QUERY_PATH + '?' + QUERY_STRING"
When lookup matches are met, results are injected into
environ['wsgiorg.routing_args'] per
http://www.wsgi.org/wsgi/Specifications/routing_args
"""
if default_handler:
methods = defaultdict(lambda: default_handler, http_methods.copy())
else:
methods = http_methods.copy()
self.mappings.append((re.compile(path.decode('utf8')), methods, (path.find(r'\?')>-1) ))
|
[
"def",
"add",
"(",
"self",
",",
"path",
",",
"default_handler",
"=",
"None",
",",
"*",
"*",
"http_methods",
")",
":",
"if",
"default_handler",
":",
"methods",
"=",
"defaultdict",
"(",
"lambda",
":",
"default_handler",
",",
"http_methods",
".",
"copy",
"(",
")",
")",
"else",
":",
"methods",
"=",
"http_methods",
".",
"copy",
"(",
")",
"self",
".",
"mappings",
".",
"append",
"(",
"(",
"re",
".",
"compile",
"(",
"path",
".",
"decode",
"(",
"'utf8'",
")",
")",
",",
"methods",
",",
"(",
"path",
".",
"find",
"(",
"r'\\?'",
")",
">",
"-",
"1",
")",
")",
")"
] |
Add a selector mapping.
add(path, default_handler, **named_handlers)
Adding order is important. First added = first matched.
If you want a special-case URI handled by one app and a shorter
version of the same regex string handled by another app,
.add() the special case first.
Inputs:
path - A regex string. We will compile it.
Highly recommend using grouping of type: "(?P<groupname>.+)"
These will be exposed to WSGI app through environment key
per http://www.wsgi.org/wsgi/Specifications/routing_args
default_handler - (optional) A pointer to the function / iterable
class instance that will handle ALL HTTP methods (verbs)
**named_handlers - (optional) An unlimited list of named args or
an unpacked dict of handlers allocated to handle specific HTTP
methods (HTTP verbs). See "Examples" below.
Matched named method handlers override default handler.
If neither default_handler nor named_handlers point to any methods,
"Method not implemented" is returned for the requests on this URI.
Examples:
selectorInstance.add('^(?P<working_path>.*)$',generic_handler,
POST=post_handler, HEAD=head_handler)
custom_assembled_dict = {'GET':wsgi_app_a,'POST':wsgi_app_b}:
## note the unpacking - "**" - of the dict in this case.
selectorInstance.add('^(?P<working_path>.*)$', **custom_assembled_dict)
If the string contains '\?' (escaped ?, which translates to '?' in
non-regex strings) we understand that as "do regex matching on
QUERY_PATH + '?' + QUERY_STRING"
When lookup matches are met, results are injected into
environ['wsgiorg.routing_args'] per
http://www.wsgi.org/wsgi/Specifications/routing_args
|
[
"Add",
"a",
"selector",
"mapping",
".",
"add",
"(",
"path",
"default_handler",
"**",
"named_handlers",
")",
"Adding",
"order",
"is",
"important",
".",
"Firt",
"added",
"=",
"first",
"matched",
".",
"If",
"you",
"want",
"to",
"hand",
"special",
"case",
"URI",
"handled",
"by",
"one",
"app",
"and",
"shorter",
"version",
"of",
"the",
"same",
"regex",
"string",
"by",
"anoter",
"app",
".",
"add",
"()",
"special",
"case",
"first",
".",
"Inputs",
":",
"path",
"-",
"A",
"regex",
"string",
".",
"We",
"will",
"compile",
"it",
".",
"Highly",
"recommend",
"using",
"grouping",
"of",
"type",
":",
"(",
"?P<groupname",
">",
".",
"+",
")",
"These",
"will",
"be",
"exposed",
"to",
"WSGI",
"app",
"through",
"environment",
"key",
"per",
"http",
":",
"//",
"www",
".",
"wsgi",
".",
"org",
"/",
"wsgi",
"/",
"Specifications",
"/",
"routing_args",
"default_handler",
"-",
"(",
"optional",
")",
"A",
"pointer",
"to",
"the",
"function",
"/",
"iterable",
"class",
"instance",
"that",
"will",
"handle",
"ALL",
"HTTP",
"methods",
"(",
"verbs",
")",
"**",
"named_handlers",
"-",
"(",
"optional",
")",
"An",
"unlimited",
"list",
"of",
"named",
"args",
"or",
"an",
"unpacked",
"dict",
"of",
"handlers",
"allocated",
"to",
"handle",
"specific",
"HTTP",
"methods",
"(",
"HTTP",
"verbs",
")",
".",
"See",
"Examples",
"below",
".",
"Matched",
"named",
"method",
"handlers",
"override",
"default",
"handler",
".",
"If",
"neither",
"default_handler",
"nor",
"named_handlers",
"point",
"to",
"any",
"methods",
"Method",
"not",
"implemented",
"is",
"returned",
"for",
"the",
"requests",
"on",
"this",
"URI",
".",
"Examples",
":",
"selectorInstance",
".",
"add",
"(",
"^",
"(",
"?P<working_path",
">",
".",
"*",
")",
"$",
"generic_handler",
"POST",
"=",
"post_handler",
"HEAD",
"=",
"head_handler",
")",
"custom_assembled_dict",
"=",
"{",
"GET",
":",
"wsgi_app_a",
"POST",
":",
"wsgi_app_b",
"}",
":",
"##",
"note",
"the",
"unpacking",
"-",
"**",
"-",
"of",
"the",
"dict",
"in",
"this",
"case",
".",
"selectorInstance",
".",
"add",
"(",
"^",
"(",
"?P<working_path",
">",
".",
"*",
")",
"$",
"**",
"custom_assembled_dict",
")",
"If",
"the",
"string",
"contains",
"\\",
"?",
"(",
"escaped",
"?",
"which",
"translates",
"to",
"?",
"in",
"non",
"-",
"regex",
"strings",
")",
"we",
"understand",
"that",
"as",
"do",
"regex",
"matching",
"on",
"QUERY_PATH",
"+",
"?",
"+",
"QUERY_STRING",
"When",
"lookup",
"matches",
"are",
"met",
"results",
"are",
"injected",
"into",
"environ",
"[",
"wsgiorg",
".",
"routing_args",
"]",
"per",
"http",
":",
"//",
"www",
".",
"wsgi",
".",
"org",
"/",
"wsgi",
"/",
"Specifications",
"/",
"routing_args"
] |
python
|
train
|
numenta/nupic
|
src/nupic/swarming/dummy_model_runner.py
|
https://github.com/numenta/nupic/blob/5922fafffdccc8812e72b3324965ad2f7d4bbdad/src/nupic/swarming/dummy_model_runner.py#L339-L349
|
def _loadDummyModelParameters(self, params):
""" Loads all the parameters for this dummy model. For any paramters
specified as lists, read the appropriate value for this model using the model
index """
for key, value in params.iteritems():
if type(value) == list:
index = self.modelIndex % len(params[key])
self._params[key] = params[key][index]
else:
self._params[key] = params[key]
|
[
"def",
"_loadDummyModelParameters",
"(",
"self",
",",
"params",
")",
":",
"for",
"key",
",",
"value",
"in",
"params",
".",
"iteritems",
"(",
")",
":",
"if",
"type",
"(",
"value",
")",
"==",
"list",
":",
"index",
"=",
"self",
".",
"modelIndex",
"%",
"len",
"(",
"params",
"[",
"key",
"]",
")",
"self",
".",
"_params",
"[",
"key",
"]",
"=",
"params",
"[",
"key",
"]",
"[",
"index",
"]",
"else",
":",
"self",
".",
"_params",
"[",
"key",
"]",
"=",
"params",
"[",
"key",
"]"
] |
Loads all the parameters for this dummy model. For any parameters
specified as lists, read the appropriate value for this model using the model
index
|
[
"Loads",
"all",
"the",
"parameters",
"for",
"this",
"dummy",
"model",
".",
"For",
"any",
"paramters",
"specified",
"as",
"lists",
"read",
"the",
"appropriate",
"value",
"for",
"this",
"model",
"using",
"the",
"model",
"index"
] |
python
|
valid
|
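The `_loadDummyModelParameters` record above cycles list-valued parameters by model index and shares scalar values across models. The standalone sketch below re-implements that selection rule with illustrative names; it is not nupic's API.

def load_dummy_params(params, model_index):
    # List-valued entries are cycled by model index; scalars are shared.
    resolved = {}
    for key, value in params.items():
        if isinstance(value, list):
            resolved[key] = value[model_index % len(value)]
        else:
            resolved[key] = value
    return resolved

params = {'delay': [0, 5, 10], 'errScore': 42}
print(load_dummy_params(params, 0))   # {'delay': 0, 'errScore': 42}
print(load_dummy_params(params, 4))   # {'delay': 5, 'errScore': 42}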
klen/makesite
|
makesite/core.py
|
https://github.com/klen/makesite/blob/f6f77a43a04a256189e8fffbeac1ffd63f35a10c/makesite/core.py#L109-L114
|
def get_base_modules():
" Get list of installed modules. "
return sorted(filter(
lambda x: op.isdir(op.join(MOD_DIR, x)),
listdir(MOD_DIR)))
|
[
"def",
"get_base_modules",
"(",
")",
":",
"return",
"sorted",
"(",
"filter",
"(",
"lambda",
"x",
":",
"op",
".",
"isdir",
"(",
"op",
".",
"join",
"(",
"MOD_DIR",
",",
"x",
")",
")",
",",
"listdir",
"(",
"MOD_DIR",
")",
")",
")"
] |
Get list of installed modules.
|
[
"Get",
"list",
"of",
"installed",
"modules",
"."
] |
python
|
train
|
vicalloy/lbutils
|
lbutils/qs.py
|
https://github.com/vicalloy/lbutils/blob/66ae7e73bc939f073cdc1b91602a95e67caf4ba6/lbutils/qs.py#L52-L72
|
def do_filter(qs, qdata, quick_query_fields=[], int_quick_query_fields=[]):
"""
auto filter queryset by dict.
    qs: queryset to filter.
qdata:
quick_query_fields:
int_quick_query_fields:
"""
try:
qs = qs.filter(
__gen_quick_query_params(
qdata.get('q_quick_search_kw'), quick_query_fields,
int_quick_query_fields)
)
q, kw_query_params = __gen_query_params(qdata)
qs = qs.filter(q, **kw_query_params)
except:
import traceback
traceback.print_exc()
return qs
|
[
"def",
"do_filter",
"(",
"qs",
",",
"qdata",
",",
"quick_query_fields",
"=",
"[",
"]",
",",
"int_quick_query_fields",
"=",
"[",
"]",
")",
":",
"try",
":",
"qs",
"=",
"qs",
".",
"filter",
"(",
"__gen_quick_query_params",
"(",
"qdata",
".",
"get",
"(",
"'q_quick_search_kw'",
")",
",",
"quick_query_fields",
",",
"int_quick_query_fields",
")",
")",
"q",
",",
"kw_query_params",
"=",
"__gen_query_params",
"(",
"qdata",
")",
"qs",
"=",
"qs",
".",
"filter",
"(",
"q",
",",
"*",
"*",
"kw_query_params",
")",
"except",
":",
"import",
"traceback",
"traceback",
".",
"print_exc",
"(",
")",
"return",
"qs"
] |
auto filter queryset by dict.
qs: queryset to filter.
qdata:
quick_query_fields:
int_quick_query_fields:
|
[
"auto",
"filter",
"queryset",
"by",
"dict",
"."
] |
python
|
train
|
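A hedged sketch of calling the `do_filter` record above from a Django view; the model, field names and request object are placeholders, and the import path follows the record's file location.

from lbutils.qs import do_filter  # import path assumed from the file location

def book_list_queryset(request, queryset):
    # `queryset` is any Django queryset and `request.GET` the usual
    # QueryDict; the quick-search field names are placeholders.
    return do_filter(queryset, request.GET,
                     quick_query_fields=['title', 'author__name'],
                     int_quick_query_fields=['id'])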
GeorgeArgyros/symautomata
|
symautomata/pdastring.py
|
https://github.com/GeorgeArgyros/symautomata/blob/f5d66533573b27e155bec3f36b8c00b8e3937cb3/symautomata/pdastring.py#L437-L501
|
def _stage(self, accepted, count=0):
"""This is a repeated state in the state removal algorithm"""
new5 = self._combine_rest_push()
new1 = self._combine_push_pop()
new2 = self._combine_push_rest()
new3 = self._combine_pop_rest()
new4 = self._combine_rest_rest()
new = new1 + new2 + new3 + new4 + new5
del new1
del new2
del new3
del new4
del new5
if len(new) == 0:
# self.printer()
# print 'PDA is empty'
# logging.debug('PDA is empty')
return None
self.statediag = self.statediag + new
del new
# print 'cleaning...'
# It is cheaper to create a new array than to use the old one and
# delete a key
newstates = []
for key in self.statediag:
if len(key.trans) == 0 or key.trans == {}:
# rint 'delete '+`key.id`
# self.statediag.remove(key)
pass
else:
newstates.append(key)
del self.statediag
self.statediag = newstates
self.quickresponse = {}
self.quickresponse_types = {}
self.quickresponse_types[0] = []
self.quickresponse_types[1] = []
self.quickresponse_types[2] = []
self.quickresponse_types[3] = []
self.quickresponse_types[4] = []
for state in self.statediag:
if state.id not in self.quickresponse:
self.quickresponse[state.id] = [state]
else:
self.quickresponse[state.id].append(state)
self.quickresponse_types[state.type].append(state)
# else:
# print `key.id`+' (type: '+`key.type`+' and sym:'+`key.sym`+')'
# print key.trans
# print 'checking...'
exists = self._check(accepted)
if exists == -1:
# DEBUGself.printer()
# raw_input('next step?')
return self._stage(accepted, count + 1)
else:
# DEBUGself.printer()
# print 'Found '
print exists
# return self._stage(accepted, count+1)
return exists
|
[
"def",
"_stage",
"(",
"self",
",",
"accepted",
",",
"count",
"=",
"0",
")",
":",
"new5",
"=",
"self",
".",
"_combine_rest_push",
"(",
")",
"new1",
"=",
"self",
".",
"_combine_push_pop",
"(",
")",
"new2",
"=",
"self",
".",
"_combine_push_rest",
"(",
")",
"new3",
"=",
"self",
".",
"_combine_pop_rest",
"(",
")",
"new4",
"=",
"self",
".",
"_combine_rest_rest",
"(",
")",
"new",
"=",
"new1",
"+",
"new2",
"+",
"new3",
"+",
"new4",
"+",
"new5",
"del",
"new1",
"del",
"new2",
"del",
"new3",
"del",
"new4",
"del",
"new5",
"if",
"len",
"(",
"new",
")",
"==",
"0",
":",
"# self.printer()",
"# print 'PDA is empty'",
"# logging.debug('PDA is empty')",
"return",
"None",
"self",
".",
"statediag",
"=",
"self",
".",
"statediag",
"+",
"new",
"del",
"new",
"# print 'cleaning...'",
"# It is cheaper to create a new array than to use the old one and",
"# delete a key",
"newstates",
"=",
"[",
"]",
"for",
"key",
"in",
"self",
".",
"statediag",
":",
"if",
"len",
"(",
"key",
".",
"trans",
")",
"==",
"0",
"or",
"key",
".",
"trans",
"==",
"{",
"}",
":",
"# rint 'delete '+`key.id`",
"# self.statediag.remove(key)",
"pass",
"else",
":",
"newstates",
".",
"append",
"(",
"key",
")",
"del",
"self",
".",
"statediag",
"self",
".",
"statediag",
"=",
"newstates",
"self",
".",
"quickresponse",
"=",
"{",
"}",
"self",
".",
"quickresponse_types",
"=",
"{",
"}",
"self",
".",
"quickresponse_types",
"[",
"0",
"]",
"=",
"[",
"]",
"self",
".",
"quickresponse_types",
"[",
"1",
"]",
"=",
"[",
"]",
"self",
".",
"quickresponse_types",
"[",
"2",
"]",
"=",
"[",
"]",
"self",
".",
"quickresponse_types",
"[",
"3",
"]",
"=",
"[",
"]",
"self",
".",
"quickresponse_types",
"[",
"4",
"]",
"=",
"[",
"]",
"for",
"state",
"in",
"self",
".",
"statediag",
":",
"if",
"state",
".",
"id",
"not",
"in",
"self",
".",
"quickresponse",
":",
"self",
".",
"quickresponse",
"[",
"state",
".",
"id",
"]",
"=",
"[",
"state",
"]",
"else",
":",
"self",
".",
"quickresponse",
"[",
"state",
".",
"id",
"]",
".",
"append",
"(",
"state",
")",
"self",
".",
"quickresponse_types",
"[",
"state",
".",
"type",
"]",
".",
"append",
"(",
"state",
")",
"# else:",
"# print `key.id`+' (type: '+`key.type`+' and sym:'+`key.sym`+')'",
"# print key.trans",
"# print 'checking...'",
"exists",
"=",
"self",
".",
"_check",
"(",
"accepted",
")",
"if",
"exists",
"==",
"-",
"1",
":",
"# DEBUGself.printer()",
"# raw_input('next step?')",
"return",
"self",
".",
"_stage",
"(",
"accepted",
",",
"count",
"+",
"1",
")",
"else",
":",
"# DEBUGself.printer()",
"# print 'Found '",
"print",
"exists",
"# return self._stage(accepted, count+1)",
"return",
"exists"
] |
This is a repeated state in the state removal algorithm
|
[
"This",
"is",
"a",
"repeated",
"state",
"in",
"the",
"state",
"removal",
"algorithm"
] |
python
|
train
|
childsish/lhc-python
|
lhc/graph/hyper_graph.py
|
https://github.com/childsish/lhc-python/blob/0a669f46a40a39f24d28665e8b5b606dc7e86beb/lhc/graph/hyper_graph.py#L25-L31
|
def add_vertex(self, v):
""" Add a vertex to the graph
:param v: The vertex name.
"""
self.graph.add_vertex(v)
self.vs.add(v)
|
[
"def",
"add_vertex",
"(",
"self",
",",
"v",
")",
":",
"self",
".",
"graph",
".",
"add_vertex",
"(",
"v",
")",
"self",
".",
"vs",
".",
"add",
"(",
"v",
")"
] |
Add a vertex to the graph
:param v: The vertex name.
|
[
"Add",
"a",
"vertex",
"to",
"the",
"graph"
] |
python
|
train
|
SHTOOLS/SHTOOLS
|
pyshtools/shclasses/shcoeffsgrid.py
|
https://github.com/SHTOOLS/SHTOOLS/blob/9a115cf83002df2ddec6b7f41aeb6be688e285de/pyshtools/shclasses/shcoeffsgrid.py#L2876-L2879
|
def _lons(self):
"""Return the longitudes (in degrees) of the gridded data."""
lons = _np.linspace(0.0, 360.0 - 360.0 / self.nlon, num=self.nlon)
return lons
|
[
"def",
"_lons",
"(",
"self",
")",
":",
"lons",
"=",
"_np",
".",
"linspace",
"(",
"0.0",
",",
"360.0",
"-",
"360.0",
"/",
"self",
".",
"nlon",
",",
"num",
"=",
"self",
".",
"nlon",
")",
"return",
"lons"
] |
Return the longitudes (in degrees) of the gridded data.
|
[
"Return",
"the",
"longitudes",
"(",
"in",
"degrees",
")",
"of",
"the",
"gridded",
"data",
"."
] |
python
|
train
|
gmr/rejected
|
rejected/mixins.py
|
https://github.com/gmr/rejected/blob/610a3e1401122ecb98d891b6795cca0255e5b044/rejected/mixins.py#L32-L40
|
def collection_cycle(self, value):
"""Set the number of messages to process before invoking ``gc.collect``
:param int value: Cycle size
"""
if value is not None:
self._collection_cycle = value
self._cycles_left = min(self._cycles_left, self._collection_cycle)
|
[
"def",
"collection_cycle",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"is",
"not",
"None",
":",
"self",
".",
"_collection_cycle",
"=",
"value",
"self",
".",
"_cycles_left",
"=",
"min",
"(",
"self",
".",
"_cycles_left",
",",
"self",
".",
"_collection_cycle",
")"
] |
Set the number of messages to process before invoking ``gc.collect``
:param int value: Cycle size
|
[
"Set",
"the",
"number",
"of",
"messages",
"to",
"process",
"before",
"invoking",
"gc",
".",
"collect"
] |
python
|
train
|
TrafficSenseMSD/SumoTools
|
traci/_vehicle.py
|
https://github.com/TrafficSenseMSD/SumoTools/blob/8607b4f885f1d1798e43240be643efe6dccccdaa/traci/_vehicle.py#L989-L999
|
def setColor(self, vehID, color):
"""setColor(string, (integer, integer, integer, integer))
sets color for vehicle with the given ID.
i.e. (255,0,0,0) for the color red.
The fourth integer (alpha) is only used when drawing vehicles with raster images
"""
self._connection._beginMessage(
tc.CMD_SET_VEHICLE_VARIABLE, tc.VAR_COLOR, vehID, 1 + 1 + 1 + 1 + 1)
self._connection._string += struct.pack("!BBBBB", tc.TYPE_COLOR, int(
color[0]), int(color[1]), int(color[2]), int(color[3]))
self._connection._sendExact()
|
[
"def",
"setColor",
"(",
"self",
",",
"vehID",
",",
"color",
")",
":",
"self",
".",
"_connection",
".",
"_beginMessage",
"(",
"tc",
".",
"CMD_SET_VEHICLE_VARIABLE",
",",
"tc",
".",
"VAR_COLOR",
",",
"vehID",
",",
"1",
"+",
"1",
"+",
"1",
"+",
"1",
"+",
"1",
")",
"self",
".",
"_connection",
".",
"_string",
"+=",
"struct",
".",
"pack",
"(",
"\"!BBBBB\"",
",",
"tc",
".",
"TYPE_COLOR",
",",
"int",
"(",
"color",
"[",
"0",
"]",
")",
",",
"int",
"(",
"color",
"[",
"1",
"]",
")",
",",
"int",
"(",
"color",
"[",
"2",
"]",
")",
",",
"int",
"(",
"color",
"[",
"3",
"]",
")",
")",
"self",
".",
"_connection",
".",
"_sendExact",
"(",
")"
] |
setColor(string, (integer, integer, integer, integer))
sets color for vehicle with the given ID.
i.e. (255,0,0,0) for the color red.
The fourth integer (alpha) is only used when drawing vehicles with raster images
|
[
"setColor",
"(",
"string",
"(",
"integer",
"integer",
"integer",
"integer",
"))",
"sets",
"color",
"for",
"vehicle",
"with",
"the",
"given",
"ID",
".",
"i",
".",
"e",
".",
"(",
"255",
"0",
"0",
"0",
")",
"for",
"the",
"color",
"red",
".",
"The",
"fourth",
"integer",
"(",
"alpha",
")",
"is",
"only",
"used",
"when",
"drawing",
"vehicles",
"with",
"raster",
"images"
] |
python
|
train
|
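A usage sketch for the `setColor` record above using the standard TraCI loop; the sumo invocation, config path and vehicle id are placeholders.

import traci

traci.start(['sumo', '-c', 'scenario.sumocfg'])  # paths are placeholders
for _ in range(100):
    traci.simulationStep()
    if 'veh0' in traci.vehicle.getIDList():
        # (255, 0, 0, 255): opaque red, per the record's docstring.
        traci.vehicle.setColor('veh0', (255, 0, 0, 255))
traci.close()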
PyGithub/PyGithub
|
github/Branch.py
|
https://github.com/PyGithub/PyGithub/blob/f716df86bbe7dc276c6596699fa9712b61ef974c/github/Branch.py#L243-L276
|
def edit_required_pull_request_reviews(self, dismissal_users=github.GithubObject.NotSet, dismissal_teams=github.GithubObject.NotSet, dismiss_stale_reviews=github.GithubObject.NotSet, require_code_owner_reviews=github.GithubObject.NotSet, required_approving_review_count=github.GithubObject.NotSet):
"""
:calls: `PATCH /repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews <https://developer.github.com/v3/repos/branches>`_
:dismissal_users: list of strings
:dismissal_teams: list of strings
:dismiss_stale_reviews: bool
:require_code_owner_reviews: bool
:required_approving_review_count: int
"""
assert dismissal_users is github.GithubObject.NotSet or all(isinstance(element, (str, unicode)) or isinstance(element, (str, unicode)) for element in dismissal_users), dismissal_users
assert dismissal_teams is github.GithubObject.NotSet or all(isinstance(element, (str, unicode)) or isinstance(element, (str, unicode)) for element in dismissal_teams), dismissal_teams
assert dismiss_stale_reviews is github.GithubObject.NotSet or isinstance(dismiss_stale_reviews, bool), dismiss_stale_reviews
assert require_code_owner_reviews is github.GithubObject.NotSet or isinstance(require_code_owner_reviews, bool), require_code_owner_reviews
assert required_approving_review_count is github.GithubObject.NotSet or isinstance(required_approving_review_count, int), required_approving_review_count
post_parameters = {}
if dismissal_users is not github.GithubObject.NotSet:
post_parameters["dismissal_restrictions"] = {"users": dismissal_users}
if dismissal_teams is not github.GithubObject.NotSet:
if "dismissal_restrictions" not in post_parameters:
post_parameters["dismissal_restrictions"] = {}
post_parameters["dismissal_restrictions"]["teams"] = dismissal_teams
if dismiss_stale_reviews is not github.GithubObject.NotSet:
post_parameters["dismiss_stale_reviews"] = dismiss_stale_reviews
if require_code_owner_reviews is not github.GithubObject.NotSet:
post_parameters["require_code_owner_reviews"] = require_code_owner_reviews
if required_approving_review_count is not github.GithubObject.NotSet:
post_parameters["required_approving_review_count"] = required_approving_review_count
headers, data = self._requester.requestJsonAndCheck(
"PATCH",
self.protection_url + "/required_pull_request_reviews",
headers={'Accept': Consts.mediaTypeRequireMultipleApprovingReviews},
input=post_parameters
)
|
[
"def",
"edit_required_pull_request_reviews",
"(",
"self",
",",
"dismissal_users",
"=",
"github",
".",
"GithubObject",
".",
"NotSet",
",",
"dismissal_teams",
"=",
"github",
".",
"GithubObject",
".",
"NotSet",
",",
"dismiss_stale_reviews",
"=",
"github",
".",
"GithubObject",
".",
"NotSet",
",",
"require_code_owner_reviews",
"=",
"github",
".",
"GithubObject",
".",
"NotSet",
",",
"required_approving_review_count",
"=",
"github",
".",
"GithubObject",
".",
"NotSet",
")",
":",
"assert",
"dismissal_users",
"is",
"github",
".",
"GithubObject",
".",
"NotSet",
"or",
"all",
"(",
"isinstance",
"(",
"element",
",",
"(",
"str",
",",
"unicode",
")",
")",
"or",
"isinstance",
"(",
"element",
",",
"(",
"str",
",",
"unicode",
")",
")",
"for",
"element",
"in",
"dismissal_users",
")",
",",
"dismissal_users",
"assert",
"dismissal_teams",
"is",
"github",
".",
"GithubObject",
".",
"NotSet",
"or",
"all",
"(",
"isinstance",
"(",
"element",
",",
"(",
"str",
",",
"unicode",
")",
")",
"or",
"isinstance",
"(",
"element",
",",
"(",
"str",
",",
"unicode",
")",
")",
"for",
"element",
"in",
"dismissal_teams",
")",
",",
"dismissal_teams",
"assert",
"dismiss_stale_reviews",
"is",
"github",
".",
"GithubObject",
".",
"NotSet",
"or",
"isinstance",
"(",
"dismiss_stale_reviews",
",",
"bool",
")",
",",
"dismiss_stale_reviews",
"assert",
"require_code_owner_reviews",
"is",
"github",
".",
"GithubObject",
".",
"NotSet",
"or",
"isinstance",
"(",
"require_code_owner_reviews",
",",
"bool",
")",
",",
"require_code_owner_reviews",
"assert",
"required_approving_review_count",
"is",
"github",
".",
"GithubObject",
".",
"NotSet",
"or",
"isinstance",
"(",
"required_approving_review_count",
",",
"int",
")",
",",
"required_approving_review_count",
"post_parameters",
"=",
"{",
"}",
"if",
"dismissal_users",
"is",
"not",
"github",
".",
"GithubObject",
".",
"NotSet",
":",
"post_parameters",
"[",
"\"dismissal_restrictions\"",
"]",
"=",
"{",
"\"users\"",
":",
"dismissal_users",
"}",
"if",
"dismissal_teams",
"is",
"not",
"github",
".",
"GithubObject",
".",
"NotSet",
":",
"if",
"\"dismissal_restrictions\"",
"not",
"in",
"post_parameters",
":",
"post_parameters",
"[",
"\"dismissal_restrictions\"",
"]",
"=",
"{",
"}",
"post_parameters",
"[",
"\"dismissal_restrictions\"",
"]",
"[",
"\"teams\"",
"]",
"=",
"dismissal_teams",
"if",
"dismiss_stale_reviews",
"is",
"not",
"github",
".",
"GithubObject",
".",
"NotSet",
":",
"post_parameters",
"[",
"\"dismiss_stale_reviews\"",
"]",
"=",
"dismiss_stale_reviews",
"if",
"require_code_owner_reviews",
"is",
"not",
"github",
".",
"GithubObject",
".",
"NotSet",
":",
"post_parameters",
"[",
"\"require_code_owner_reviews\"",
"]",
"=",
"require_code_owner_reviews",
"if",
"required_approving_review_count",
"is",
"not",
"github",
".",
"GithubObject",
".",
"NotSet",
":",
"post_parameters",
"[",
"\"required_approving_review_count\"",
"]",
"=",
"required_approving_review_count",
"headers",
",",
"data",
"=",
"self",
".",
"_requester",
".",
"requestJsonAndCheck",
"(",
"\"PATCH\"",
",",
"self",
".",
"protection_url",
"+",
"\"/required_pull_request_reviews\"",
",",
"headers",
"=",
"{",
"'Accept'",
":",
"Consts",
".",
"mediaTypeRequireMultipleApprovingReviews",
"}",
",",
"input",
"=",
"post_parameters",
")"
] |
:calls: `PATCH /repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews <https://developer.github.com/v3/repos/branches>`_
:dismissal_users: list of strings
:dismissal_teams: list of strings
:dismiss_stale_reviews: bool
:require_code_owner_reviews: bool
:required_approving_review_count: int
|
[
":",
"calls",
":",
"PATCH",
"/",
"repos",
"/",
":",
"owner",
"/",
":",
"repo",
"/",
"branches",
"/",
":",
"branch",
"/",
"protection",
"/",
"required_pull_request_reviews",
"<https",
":",
"//",
"developer",
".",
"github",
".",
"com",
"/",
"v3",
"/",
"repos",
"/",
"branches",
">",
"_",
":",
"dismissal_users",
":",
"list",
"of",
"strings",
":",
"dismissal_teams",
":",
"list",
"of",
"strings",
":",
"dismiss_stale_reviews",
":",
"bool",
":",
"require_code_owner_reviews",
":",
"bool",
":",
"required_approving_review_count",
":",
"int"
] |
python
|
train
|
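A usage sketch for the `edit_required_pull_request_reviews` record above; the token, repository and branch names are placeholders, and the branch is assumed to already have protection enabled.

from github import Github

gh = Github('<personal-access-token>')          # token is a placeholder
branch = gh.get_repo('my-org/my-repo').get_branch('master')

# Tighten the review requirements on the protected branch.
branch.edit_required_pull_request_reviews(
    dismiss_stale_reviews=True,
    require_code_owner_reviews=True,
    required_approving_review_count=2)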
emc-openstack/storops
|
storops/unity/resource/replication_session.py
|
https://github.com/emc-openstack/storops/blob/24b4b13bf065c0ef0538dd0b5ebb8f25d24176bd/storops/unity/resource/replication_session.py#L159-L236
|
def create_with_dst_resource_provisioning(
cls, cli, src_resource_id, dst_resource_config,
max_time_out_of_sync, name=None, remote_system=None,
src_spa_interface=None, src_spb_interface=None,
dst_spa_interface=None, dst_spb_interface=None,
dst_resource_element_configs=None, auto_initiate=None,
hourly_snap_replication_policy=None,
daily_snap_replication_policy=None, replicate_existing_snaps=None):
"""
Create a replication session along with destination resource
provisioning.
:param cli: the rest cli.
:param src_resource_id: id of the replication source, could be
lun/fs/cg.
:param dst_resource_config: `UnityResourceConfig` object. The user
chosen config for destination resource provisioning. `pool_id` and
`size` are required for creation.
:param max_time_out_of_sync: maximum time to wait before syncing the
source and destination. Value `-1` means the automatic sync is not
performed. `0` means it is a sync replication.
:param name: name of the replication.
:param remote_system: `UnityRemoteSystem` object. The remote system to
which the replication is being configured. When not specified, it
defaults to local system.
:param src_spa_interface: `UnityRemoteInterface` object. The
replication interface for source SPA.
:param src_spb_interface: `UnityRemoteInterface` object. The
replication interface for source SPB.
:param dst_spa_interface: `UnityRemoteInterface` object. The
replication interface for destination SPA.
:param dst_spb_interface: `UnityRemoteInterface` object. The
replication interface for destination SPB.
:param dst_resource_element_configs: List of `UnityResourceConfig`
objects. The user chose config for each of the member element of
the destination resource.
:param auto_initiate: indicates whether to perform the first
replication sync automatically.
True - perform the first replication sync automatically.
False - perform the first replication sync manually.
:param hourly_snap_replication_policy: `UnitySnapReplicationPolicy`
object. The policy for replicating hourly scheduled snaps of the
source resource.
:param daily_snap_replication_policy: `UnitySnapReplicationPolicy`
object. The policy for replicating daily scheduled snaps of the
source resource.
:param replicate_existing_snaps: indicates whether or not to replicate
snapshots already existing on the resource.
:return: the newly created replication session.
"""
req_body = cli.make_body(
srcResourceId=src_resource_id,
dstResourceConfig=dst_resource_config,
maxTimeOutOfSync=max_time_out_of_sync,
name=name, remoteSystem=remote_system,
srcSPAInterface=src_spa_interface,
srcSPBInterface=src_spb_interface,
dstSPAInterface=dst_spa_interface,
dstSPBInterface=dst_spb_interface,
dstResourceElementConfigs=dst_resource_element_configs,
autoInitiate=auto_initiate,
hourlySnapReplicationPolicy=hourly_snap_replication_policy,
dailySnapReplicationPolicy=daily_snap_replication_policy,
replicateExistingSnaps=replicate_existing_snaps)
resp = cli.type_action(
cls().resource_class,
'createReplicationSessionWDestResProvisioning',
**req_body)
resp.raise_if_err()
# response is like:
# "content": {
# "id": {
# "id": "42949676351_FNM00150600267_xxxx"
# }
session_resp = resp.first_content['id']
return cls.get(cli, _id=session_resp['id'])
|
[
"def",
"create_with_dst_resource_provisioning",
"(",
"cls",
",",
"cli",
",",
"src_resource_id",
",",
"dst_resource_config",
",",
"max_time_out_of_sync",
",",
"name",
"=",
"None",
",",
"remote_system",
"=",
"None",
",",
"src_spa_interface",
"=",
"None",
",",
"src_spb_interface",
"=",
"None",
",",
"dst_spa_interface",
"=",
"None",
",",
"dst_spb_interface",
"=",
"None",
",",
"dst_resource_element_configs",
"=",
"None",
",",
"auto_initiate",
"=",
"None",
",",
"hourly_snap_replication_policy",
"=",
"None",
",",
"daily_snap_replication_policy",
"=",
"None",
",",
"replicate_existing_snaps",
"=",
"None",
")",
":",
"req_body",
"=",
"cli",
".",
"make_body",
"(",
"srcResourceId",
"=",
"src_resource_id",
",",
"dstResourceConfig",
"=",
"dst_resource_config",
",",
"maxTimeOutOfSync",
"=",
"max_time_out_of_sync",
",",
"name",
"=",
"name",
",",
"remoteSystem",
"=",
"remote_system",
",",
"srcSPAInterface",
"=",
"src_spa_interface",
",",
"srcSPBInterface",
"=",
"src_spb_interface",
",",
"dstSPAInterface",
"=",
"dst_spa_interface",
",",
"dstSPBInterface",
"=",
"dst_spb_interface",
",",
"dstResourceElementConfigs",
"=",
"dst_resource_element_configs",
",",
"autoInitiate",
"=",
"auto_initiate",
",",
"hourlySnapReplicationPolicy",
"=",
"hourly_snap_replication_policy",
",",
"dailySnapReplicationPolicy",
"=",
"daily_snap_replication_policy",
",",
"replicateExistingSnaps",
"=",
"replicate_existing_snaps",
")",
"resp",
"=",
"cli",
".",
"type_action",
"(",
"cls",
"(",
")",
".",
"resource_class",
",",
"'createReplicationSessionWDestResProvisioning'",
",",
"*",
"*",
"req_body",
")",
"resp",
".",
"raise_if_err",
"(",
")",
"# response is like:",
"# \"content\": {",
"# \"id\": {",
"# \"id\": \"42949676351_FNM00150600267_xxxx\"",
"# }",
"session_resp",
"=",
"resp",
".",
"first_content",
"[",
"'id'",
"]",
"return",
"cls",
".",
"get",
"(",
"cli",
",",
"_id",
"=",
"session_resp",
"[",
"'id'",
"]",
")"
] |
Create a replication session along with destination resource
provisioning.
:param cli: the rest cli.
:param src_resource_id: id of the replication source, could be
lun/fs/cg.
:param dst_resource_config: `UnityResourceConfig` object. The user
chosen config for destination resource provisioning. `pool_id` and
`size` are required for creation.
:param max_time_out_of_sync: maximum time to wait before syncing the
source and destination. Value `-1` means the automatic sync is not
performed. `0` means it is a sync replication.
:param name: name of the replication.
:param remote_system: `UnityRemoteSystem` object. The remote system to
which the replication is being configured. When not specified, it
defaults to local system.
:param src_spa_interface: `UnityRemoteInterface` object. The
replication interface for source SPA.
:param src_spb_interface: `UnityRemoteInterface` object. The
replication interface for source SPB.
:param dst_spa_interface: `UnityRemoteInterface` object. The
replication interface for destination SPA.
:param dst_spb_interface: `UnityRemoteInterface` object. The
replication interface for destination SPB.
:param dst_resource_element_configs: List of `UnityResourceConfig`
objects. The user chose config for each of the member element of
the destination resource.
:param auto_initiate: indicates whether to perform the first
replication sync automatically.
True - perform the first replication sync automatically.
False - perform the first replication sync manually.
:param hourly_snap_replication_policy: `UnitySnapReplicationPolicy`
object. The policy for replicating hourly scheduled snaps of the
source resource.
:param daily_snap_replication_policy: `UnitySnapReplicationPolicy`
object. The policy for replicating daily scheduled snaps of the
source resource.
:param replicate_existing_snaps: indicates whether or not to replicate
snapshots already existing on the resource.
:return: the newly created replication session.
|
[
"Create",
"a",
"replication",
"session",
"along",
"with",
"destination",
"resource",
"provisioning",
"."
] |
python
|
train
|
jaredLunde/vital-tools
|
vital/tools/__init__.py
|
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/tools/__init__.py#L20-L32
|
def getattr_in(obj, name):
""" Finds an in @obj via a period-delimited string @name.
@obj: (#object)
@name: (#str) |.|-separated keys to search @obj in
..
obj.deep.attr = 'deep value'
getattr_in(obj, 'obj.deep.attr')
..
|'deep value'|
"""
for part in name.split('.'):
obj = getattr(obj, part)
return obj
|
[
"def",
"getattr_in",
"(",
"obj",
",",
"name",
")",
":",
"for",
"part",
"in",
"name",
".",
"split",
"(",
"'.'",
")",
":",
"obj",
"=",
"getattr",
"(",
"obj",
",",
"part",
")",
"return",
"obj"
] |
Finds an in @obj via a period-delimited string @name.
@obj: (#object)
@name: (#str) |.|-separated keys to search @obj in
..
obj.deep.attr = 'deep value'
getattr_in(obj, 'obj.deep.attr')
..
|'deep value'|
|
[
"Finds",
"an",
"in"
] |
python
|
train
|
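A self-contained sketch of the dotted-attribute traversal that getattr_in performs above; the SimpleNamespace object is only for illustration:

    from types import SimpleNamespace

    def getattr_in(obj, name):
        # Walk a period-delimited attribute path, e.g. "deep.attr".
        for part in name.split('.'):
            obj = getattr(obj, part)
        return obj

    ns = SimpleNamespace(deep=SimpleNamespace(attr='deep value'))
    print(getattr_in(ns, 'deep.attr'))  # deep value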
klen/graphite-beacon
|
graphite_beacon/alerts.py
|
https://github.com/klen/graphite-beacon/blob/c1f071e9f557693bc90f6acbc314994985dc3b77/graphite_beacon/alerts.py#L259-L285
|
def load(self):
"""Load data from Graphite."""
LOGGER.debug('%s: start checking: %s', self.name, self.query)
if self.waiting:
self.notify('warning', 'Process takes too much time', target='waiting', ntype='common')
else:
self.waiting = True
try:
response = yield self.client.fetch(self.url, auth_username=self.auth_username,
auth_password=self.auth_password,
request_timeout=self.request_timeout,
connect_timeout=self.connect_timeout,
validate_cert=self.validate_cert)
records = (
GraphiteRecord(line, self.default_nan_value, self.ignore_nan)
for line in response.buffer)
data = [
(None if record.empty else getattr(record, self.method), record.target)
for record in records]
if len(data) == 0:
raise ValueError('No data')
self.check(data)
self.notify('normal', 'Metrics are loaded', target='loading', ntype='common')
except Exception as e:
self.notify(
self.loading_error, 'Loading error: %s' % e, target='loading', ntype='common')
self.waiting = False
|
[
"def",
"load",
"(",
"self",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'%s: start checking: %s'",
",",
"self",
".",
"name",
",",
"self",
".",
"query",
")",
"if",
"self",
".",
"waiting",
":",
"self",
".",
"notify",
"(",
"'warning'",
",",
"'Process takes too much time'",
",",
"target",
"=",
"'waiting'",
",",
"ntype",
"=",
"'common'",
")",
"else",
":",
"self",
".",
"waiting",
"=",
"True",
"try",
":",
"response",
"=",
"yield",
"self",
".",
"client",
".",
"fetch",
"(",
"self",
".",
"url",
",",
"auth_username",
"=",
"self",
".",
"auth_username",
",",
"auth_password",
"=",
"self",
".",
"auth_password",
",",
"request_timeout",
"=",
"self",
".",
"request_timeout",
",",
"connect_timeout",
"=",
"self",
".",
"connect_timeout",
",",
"validate_cert",
"=",
"self",
".",
"validate_cert",
")",
"records",
"=",
"(",
"GraphiteRecord",
"(",
"line",
",",
"self",
".",
"default_nan_value",
",",
"self",
".",
"ignore_nan",
")",
"for",
"line",
"in",
"response",
".",
"buffer",
")",
"data",
"=",
"[",
"(",
"None",
"if",
"record",
".",
"empty",
"else",
"getattr",
"(",
"record",
",",
"self",
".",
"method",
")",
",",
"record",
".",
"target",
")",
"for",
"record",
"in",
"records",
"]",
"if",
"len",
"(",
"data",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'No data'",
")",
"self",
".",
"check",
"(",
"data",
")",
"self",
".",
"notify",
"(",
"'normal'",
",",
"'Metrics are loaded'",
",",
"target",
"=",
"'loading'",
",",
"ntype",
"=",
"'common'",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"notify",
"(",
"self",
".",
"loading_error",
",",
"'Loading error: %s'",
"%",
"e",
",",
"target",
"=",
"'loading'",
",",
"ntype",
"=",
"'common'",
")",
"self",
".",
"waiting",
"=",
"False"
] |
Load data from Graphite.
|
[
"Load",
"data",
"from",
"Graphite",
"."
] |
python
|
train
|
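The loader above picks an aggregate per record with getattr(record, self.method) and maps empty records to None. A hedged, standalone sketch of that dynamic lookup, with a tiny stand-in replacing GraphiteRecord:

    # FakeRecord is an illustrative stand-in exposing aggregates as properties.
    class FakeRecord:
        def __init__(self, target, values):
            self.target = target
            self.values = values
        @property
        def empty(self):
            return not self.values
        @property
        def average(self):
            return sum(self.values) / len(self.values)

    method = 'average'  # chosen by alert configuration in the real code
    records = [FakeRecord('cpu', [1.0, 2.0, 3.0]), FakeRecord('mem', [])]
    data = [(None if r.empty else getattr(r, method), r.target) for r in records]
    print(data)  # [(2.0, 'cpu'), (None, 'mem')]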
numenta/nupic
|
src/nupic/regions/sp_region.py
|
https://github.com/numenta/nupic/blob/5922fafffdccc8812e72b3324965ad2f7d4bbdad/src/nupic/regions/sp_region.py#L750-L762
|
def getSpec(cls):
"""
Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getSpec`.
The parameters collection is constructed based on the parameters specified
by the various components (spatialSpec, temporalSpec and otherSpec)
"""
spec = cls.getBaseSpec()
s, o = _getAdditionalSpecs(spatialImp=getDefaultSPImp())
spec['parameters'].update(s)
spec['parameters'].update(o)
return spec
|
[
"def",
"getSpec",
"(",
"cls",
")",
":",
"spec",
"=",
"cls",
".",
"getBaseSpec",
"(",
")",
"s",
",",
"o",
"=",
"_getAdditionalSpecs",
"(",
"spatialImp",
"=",
"getDefaultSPImp",
"(",
")",
")",
"spec",
"[",
"'parameters'",
"]",
".",
"update",
"(",
"s",
")",
"spec",
"[",
"'parameters'",
"]",
".",
"update",
"(",
"o",
")",
"return",
"spec"
] |
Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getSpec`.
The parameters collection is constructed based on the parameters specified
by the various components (spatialSpec, temporalSpec and otherSpec)
|
[
"Overrides",
":",
"meth",
":",
"~nupic",
".",
"bindings",
".",
"regions",
".",
"PyRegion",
".",
"PyRegion",
".",
"getSpec",
"."
] |
python
|
valid
|
ejeschke/ginga
|
ginga/rv/plugins/PlotTable.py
|
https://github.com/ejeschke/ginga/blob/a78c893ec6f37a837de851947e9bb4625c597915/ginga/rv/plugins/PlotTable.py#L231-L235
|
def clear_plot(self):
"""Clear plot display."""
self.tab_plot.clear()
self.tab_plot.draw()
self.save_plot.set_enabled(False)
|
[
"def",
"clear_plot",
"(",
"self",
")",
":",
"self",
".",
"tab_plot",
".",
"clear",
"(",
")",
"self",
".",
"tab_plot",
".",
"draw",
"(",
")",
"self",
".",
"save_plot",
".",
"set_enabled",
"(",
"False",
")"
] |
Clear plot display.
|
[
"Clear",
"plot",
"display",
"."
] |
python
|
train
|
chimera0/accel-brain-code
|
Reinforcement-Learning/demo/demo_maze_deep_q_network.py
|
https://github.com/chimera0/accel-brain-code/blob/03661f6f544bed656269fcd4b3c23c9061629daa/Reinforcement-Learning/demo/demo_maze_deep_q_network.py#L166-L193
|
def observe_reward_value(self, state_arr, action_arr):
'''
Compute the reward value.
Args:
state_arr: `np.ndarray` of state.
action_arr: `np.ndarray` of action.
Returns:
Reward value.
'''
if self.__check_goal_flag(action_arr) is True:
return 1.0
else:
x, y = np.where(action_arr[-1] == 1)
x, y = x[0], y[0]
goal_x, goal_y = self.__goal_pos
if x == goal_x and y == goal_y:
distance = 0.0
else:
distance = np.sqrt(((x - goal_x) ** 2) + (y - goal_y) ** 2)
if (x, y) in self.__route_long_memory_list:
repeating_penalty = self.__repeating_penalty
else:
repeating_penalty = 0.0
return 1.0 - distance - repeating_penalty
|
[
"def",
"observe_reward_value",
"(",
"self",
",",
"state_arr",
",",
"action_arr",
")",
":",
"if",
"self",
".",
"__check_goal_flag",
"(",
"action_arr",
")",
"is",
"True",
":",
"return",
"1.0",
"else",
":",
"x",
",",
"y",
"=",
"np",
".",
"where",
"(",
"action_arr",
"[",
"-",
"1",
"]",
"==",
"1",
")",
"x",
",",
"y",
"=",
"x",
"[",
"0",
"]",
",",
"y",
"[",
"0",
"]",
"goal_x",
",",
"goal_y",
"=",
"self",
".",
"__goal_pos",
"if",
"x",
"==",
"goal_x",
"and",
"y",
"==",
"goal_y",
":",
"distance",
"=",
"0.0",
"else",
":",
"distance",
"=",
"np",
".",
"sqrt",
"(",
"(",
"(",
"x",
"-",
"goal_x",
")",
"**",
"2",
")",
"+",
"(",
"y",
"-",
"goal_y",
")",
"**",
"2",
")",
"if",
"(",
"x",
",",
"y",
")",
"in",
"self",
".",
"__route_long_memory_list",
":",
"repeating_penalty",
"=",
"self",
".",
"__repeating_penalty",
"else",
":",
"repeating_penalty",
"=",
"0.0",
"return",
"1.0",
"-",
"distance",
"-",
"repeating_penalty"
] |
Compute the reward value.
Args:
state_arr: `np.ndarray` of state.
action_arr: `np.ndarray` of action.
Returns:
Reward value.
|
[
"Compute",
"the",
"reward",
"value",
".",
"Args",
":",
"state_arr",
":",
"np",
".",
"ndarray",
"of",
"state",
".",
"action_arr",
":",
"np",
".",
"ndarray",
"of",
"action",
".",
"Returns",
":",
"Reward",
"value",
"."
] |
python
|
train
|
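The reward above is 1.0 minus the Euclidean distance to the goal minus a revisit penalty. A small runnable sketch of that shaping with made-up coordinates and penalty value:

    import numpy as np

    def shaped_reward(pos, goal, visited, repeating_penalty=0.5):
        # Reward shrinks with distance to the goal and is penalized for revisits.
        distance = np.sqrt((pos[0] - goal[0]) ** 2 + (pos[1] - goal[1]) ** 2)
        penalty = repeating_penalty if pos in visited else 0.0
        return 1.0 - distance - penalty

    print(shaped_reward((2, 3), (2, 3), visited=set()))     # 1.0 at the goal
    print(shaped_reward((0, 0), (3, 4), visited={(0, 0)}))  # 1.0 - 5.0 - 0.5 = -4.5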
google/grumpy
|
third_party/pypy/_sre.py
|
https://github.com/google/grumpy/blob/3ec87959189cfcdeae82eb68a47648ac25ceb10b/third_party/pypy/_sre.py#L155-L181
|
def split(self, string, maxsplit=0):
"""Split string by the occurrences of pattern."""
splitlist = []
state = _State(string, 0, sys.maxint, self.flags)
n = 0
last = state.start
while not maxsplit or n < maxsplit:
state.reset()
state.string_position = state.start
if not state.search(self._code):
break
if state.start == state.string_position: # zero-width match
if last == state.end: # or end of string
break
state.start += 1
continue
splitlist.append(string[last:state.start])
# add groups (if any)
if self.groups:
match = SRE_Match(self, state)
# TODO: Use .extend once it is implemented.
# splitlist.extend(list(match.groups(None)))
splitlist += (list(match.groups(None)))
n += 1
last = state.start = state.string_position
splitlist.append(string[last:state.end])
return splitlist
|
[
"def",
"split",
"(",
"self",
",",
"string",
",",
"maxsplit",
"=",
"0",
")",
":",
"splitlist",
"=",
"[",
"]",
"state",
"=",
"_State",
"(",
"string",
",",
"0",
",",
"sys",
".",
"maxint",
",",
"self",
".",
"flags",
")",
"n",
"=",
"0",
"last",
"=",
"state",
".",
"start",
"while",
"not",
"maxsplit",
"or",
"n",
"<",
"maxsplit",
":",
"state",
".",
"reset",
"(",
")",
"state",
".",
"string_position",
"=",
"state",
".",
"start",
"if",
"not",
"state",
".",
"search",
"(",
"self",
".",
"_code",
")",
":",
"break",
"if",
"state",
".",
"start",
"==",
"state",
".",
"string_position",
":",
"# zero-width match",
"if",
"last",
"==",
"state",
".",
"end",
":",
"# or end of string",
"break",
"state",
".",
"start",
"+=",
"1",
"continue",
"splitlist",
".",
"append",
"(",
"string",
"[",
"last",
":",
"state",
".",
"start",
"]",
")",
"# add groups (if any)",
"if",
"self",
".",
"groups",
":",
"match",
"=",
"SRE_Match",
"(",
"self",
",",
"state",
")",
"# TODO: Use .extend once it is implemented.",
"# splitlist.extend(list(match.groups(None)))",
"splitlist",
"+=",
"(",
"list",
"(",
"match",
".",
"groups",
"(",
"None",
")",
")",
")",
"n",
"+=",
"1",
"last",
"=",
"state",
".",
"start",
"=",
"state",
".",
"string_position",
"splitlist",
".",
"append",
"(",
"string",
"[",
"last",
":",
"state",
".",
"end",
"]",
")",
"return",
"splitlist"
] |
Split string by the occurrences of pattern.
|
[
"Split",
"string",
"by",
"the",
"occurrences",
"of",
"pattern",
"."
] |
python
|
valid
|
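The pure-Python split above reproduces the behavior of the C sre engine, including interleaving captured groups with the split pieces. A quick stdlib comparison showing that grouping behavior:

    import re

    # With a capturing group, the captured text is interleaved with the pieces,
    # matching the "add groups (if any)" branch in the implementation above.
    print(re.split(r',', 'a,b,c'))              # ['a', 'b', 'c']
    print(re.split(r'(,)', 'a,b,c'))            # ['a', ',', 'b', ',', 'c']
    print(re.split(r',', 'a,b,c', maxsplit=1))  # ['a', 'b,c']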
tradenity/python-sdk
|
tradenity/resources/customer_group.py
|
https://github.com/tradenity/python-sdk/blob/d13fbe23f4d6ff22554c6d8d2deaf209371adaf1/tradenity/resources/customer_group.py#L375-L395
|
def delete_customer_group_by_id(cls, customer_group_id, **kwargs):
"""Delete CustomerGroup
Delete an instance of CustomerGroup by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_customer_group_by_id(customer_group_id, async=True)
>>> result = thread.get()
:param async bool
:param str customer_group_id: ID of customerGroup to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._delete_customer_group_by_id_with_http_info(customer_group_id, **kwargs)
else:
(data) = cls._delete_customer_group_by_id_with_http_info(customer_group_id, **kwargs)
return data
|
[
"def",
"delete_customer_group_by_id",
"(",
"cls",
",",
"customer_group_id",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"cls",
".",
"_delete_customer_group_by_id_with_http_info",
"(",
"customer_group_id",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"cls",
".",
"_delete_customer_group_by_id_with_http_info",
"(",
"customer_group_id",
",",
"*",
"*",
"kwargs",
")",
"return",
"data"
] |
Delete CustomerGroup
Delete an instance of CustomerGroup by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_customer_group_by_id(customer_group_id, async=True)
>>> result = thread.get()
:param async bool
:param str customer_group_id: ID of customerGroup to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
|
[
"Delete",
"CustomerGroup"
] |
python
|
train
|
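The wrapper above dispatches on an 'async' keyword; since Python 3.7 'async' is a reserved word, so on modern interpreters a call like the docstring example has to pass it via dict unpacking. A hedged sketch of that dispatch pattern using a stand-in worker rather than the tradenity client:

    import threading

    def delete_thing(thing_id, **kwargs):
        # Illustrative stand-in: return a thread when 'async' is requested,
        # otherwise return the result directly.
        def _work():
            return 'deleted %s' % thing_id
        if kwargs.get('async'):
            t = threading.Thread(target=_work)
            t.start()
            return t        # caller joins / polls the thread
        return _work()      # synchronous path returns the data

    print(delete_thing(42))                    # 'deleted 42'
    thread = delete_thing(42, **{'async': True})
    thread.join()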
emory-libraries/eulfedora
|
eulfedora/api.py
|
https://github.com/emory-libraries/eulfedora/blob/161826f3fdcdab4007f6fa7dfd9f1ecabc4bcbe4/eulfedora/api.py#L1077-L1085
|
def sparql_count(self, query, flush=None):
"""
Count results for a Sparql query.
:param query: sparql query string
:rtype: int
"""
return self.count_statements(query, language='sparql', type='tuples',
flush=flush)
|
[
"def",
"sparql_count",
"(",
"self",
",",
"query",
",",
"flush",
"=",
"None",
")",
":",
"return",
"self",
".",
"count_statements",
"(",
"query",
",",
"language",
"=",
"'sparql'",
",",
"type",
"=",
"'tuples'",
",",
"flush",
"=",
"flush",
")"
] |
Count results for a Sparql query.
:param query: sparql query string
:rtype: int
|
[
"Count",
"results",
"for",
"a",
"Sparql",
"query",
"."
] |
python
|
train
|
getnikola/coil
|
coil/web.py
|
https://github.com/getnikola/coil/blob/80ef1827460b0691cf2c98351a14d88e235c9899/coil/web.py#L925-L935
|
def serve_assets(path):
"""Serve Nikola assets.
This is meant to be used ONLY by the internal dev server.
Please configure your web server to handle requests to this URL::
/assets/ => output/assets
"""
res = os.path.join(app.config['NIKOLA_ROOT'],
_site.config["OUTPUT_FOLDER"], 'assets')
return send_from_directory(res, path)
|
[
"def",
"serve_assets",
"(",
"path",
")",
":",
"res",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app",
".",
"config",
"[",
"'NIKOLA_ROOT'",
"]",
",",
"_site",
".",
"config",
"[",
"\"OUTPUT_FOLDER\"",
"]",
",",
"'assets'",
")",
"return",
"send_from_directory",
"(",
"res",
",",
"path",
")"
] |
Serve Nikola assets.
This is meant to be used ONLY by the internal dev server.
Please configure your web server to handle requests to this URL::
/assets/ => output/assets
|
[
"Serve",
"Nikola",
"assets",
"."
] |
python
|
train
|
JoelBender/bacpypes
|
py25/bacpypes/constructeddata.py
|
https://github.com/JoelBender/bacpypes/blob/4111b8604a16fa2b7f80d8104a43b9f3e28dfc78/py25/bacpypes/constructeddata.py#L1483-L1504
|
def cast_out(self, klass):
"""Interpret the content as a particular class."""
if _debug: SequenceOfAny._debug("cast_out %r", klass)
# make sure it is a list
if not issubclass(klass, List):
raise DecodingError("%r is not a list" % (klass,))
# build a helper
helper = klass()
# make a copy of the tag list
t = TagList(self.tagList[:])
# let it decode itself
helper.decode(t)
# make sure everything was consumed
if len(t) != 0:
raise DecodingError("incomplete cast")
return helper.value
|
[
"def",
"cast_out",
"(",
"self",
",",
"klass",
")",
":",
"if",
"_debug",
":",
"SequenceOfAny",
".",
"_debug",
"(",
"\"cast_out %r\"",
",",
"klass",
")",
"# make sure it is a list",
"if",
"not",
"issubclass",
"(",
"klass",
",",
"List",
")",
":",
"raise",
"DecodingError",
"(",
"\"%r is not a list\"",
"%",
"(",
"klass",
",",
")",
")",
"# build a helper",
"helper",
"=",
"klass",
"(",
")",
"# make a copy of the tag list",
"t",
"=",
"TagList",
"(",
"self",
".",
"tagList",
"[",
":",
"]",
")",
"# let it decode itself",
"helper",
".",
"decode",
"(",
"t",
")",
"# make sure everything was consumed",
"if",
"len",
"(",
"t",
")",
"!=",
"0",
":",
"raise",
"DecodingError",
"(",
"\"incomplete cast\"",
")",
"return",
"helper",
".",
"value"
] |
Interpret the content as a particular class.
|
[
"Interpret",
"the",
"content",
"as",
"a",
"particular",
"class",
"."
] |
python
|
train
|
edx/XBlock
|
xblock/fields.py
|
https://github.com/edx/XBlock/blob/368bf46e2c0ee69bbb21817f428c4684936e18ee/xblock/fields.py#L110-L117
|
def scopes(cls):
"""
Return a list of valid/understood class scopes.
"""
# Why do we need this? This should either
# * Be bubbled to the places where it is used (AcidXBlock).
# * Be automatic. Look for all members of a type.
return [cls.USAGE, cls.DEFINITION, cls.TYPE, cls.ALL]
|
[
"def",
"scopes",
"(",
"cls",
")",
":",
"# Why do we need this? This should either",
"# * Be bubbled to the places where it is used (AcidXBlock).",
"# * Be automatic. Look for all members of a type.",
"return",
"[",
"cls",
".",
"USAGE",
",",
"cls",
".",
"DEFINITION",
",",
"cls",
".",
"TYPE",
",",
"cls",
".",
"ALL",
"]"
] |
Return a list of valid/understood class scopes.
|
[
"Return",
"a",
"list",
"of",
"valid",
"/",
"understood",
"class",
"scopes",
"."
] |
python
|
train
|
dwkim78/upsilon
|
upsilon/extract_features/extract_features.py
|
https://github.com/dwkim78/upsilon/blob/5f381453f26582ef56e62fb8fed7317ce67861af/upsilon/extract_features/extract_features.py#L510-L543
|
def slope_percentile(self, date, mag):
"""
Return 10% and 90% percentile of slope.
Parameters
----------
date : array_like
An array of phase-folded date. Sorted.
mag : array_like
An array of phase-folded magnitudes. Sorted by date.
Returns
-------
per_10 : float
10% percentile values of slope.
per_90 : float
90% percentile values of slope.
"""
date_diff = date[1:] - date[:len(date) - 1]
mag_diff = mag[1:] - mag[:len(mag) - 1]
# Remove zero mag_diff.
index = np.where(mag_diff != 0.)
date_diff = date_diff[index]
mag_diff = mag_diff[index]
# Derive slope.
slope = date_diff / mag_diff
percentile_10 = np.percentile(slope, 10.)
percentile_90 = np.percentile(slope, 90.)
return percentile_10, percentile_90
|
[
"def",
"slope_percentile",
"(",
"self",
",",
"date",
",",
"mag",
")",
":",
"date_diff",
"=",
"date",
"[",
"1",
":",
"]",
"-",
"date",
"[",
":",
"len",
"(",
"date",
")",
"-",
"1",
"]",
"mag_diff",
"=",
"mag",
"[",
"1",
":",
"]",
"-",
"mag",
"[",
":",
"len",
"(",
"mag",
")",
"-",
"1",
"]",
"# Remove zero mag_diff.",
"index",
"=",
"np",
".",
"where",
"(",
"mag_diff",
"!=",
"0.",
")",
"date_diff",
"=",
"date_diff",
"[",
"index",
"]",
"mag_diff",
"=",
"mag_diff",
"[",
"index",
"]",
"# Derive slope.",
"slope",
"=",
"date_diff",
"/",
"mag_diff",
"percentile_10",
"=",
"np",
".",
"percentile",
"(",
"slope",
",",
"10.",
")",
"percentile_90",
"=",
"np",
".",
"percentile",
"(",
"slope",
",",
"90.",
")",
"return",
"percentile_10",
",",
"percentile_90"
] |
Return 10% and 90% percentile of slope.
Parameters
----------
date : array_like
An array of phase-folded date. Sorted.
mag : array_like
An array of phase-folded magnitudes. Sorted by date.
Returns
-------
per_10 : float
10% percentile values of slope.
per_90 : float
90% percentile values of slope.
|
[
"Return",
"10%",
"and",
"90%",
"percentile",
"of",
"slope",
"."
] |
python
|
train
|
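A runnable numpy sketch of the same percentile computation on synthetic phase-folded data; note that the function above divides date differences by magnitude differences, and that is reproduced as-is here:

    import numpy as np

    date = np.linspace(0.0, 1.0, 50)             # sorted, phase-folded dates
    mag = 15.0 + 0.3 * np.sin(2 * np.pi * date)  # magnitudes sorted by date

    date_diff = date[1:] - date[:-1]
    mag_diff = mag[1:] - mag[:-1]
    keep = mag_diff != 0.0                        # drop zero magnitude differences
    slope = date_diff[keep] / mag_diff[keep]
    per_10, per_90 = np.percentile(slope, 10.), np.percentile(slope, 90.)
    print(per_10, per_90)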
ajyoon/blur
|
blur/markov/node.py
|
https://github.com/ajyoon/blur/blob/25fcf083af112bb003956a7a7e1c6ff7d8fef279/blur/markov/node.py#L200-L232
|
def add_reciprocal_link(self, target, weight):
"""
Add links pointing in either direction between ``self`` and ``target``.
This creates a ``Link`` from ``self`` to ``target`` and a ``Link``
from ``target`` to ``self`` of equal weight. If ``target`` is a list
of ``Node`` 's, repeat this for each one.
Args:
target (Node or list[Node]):
weight (int or float):
Returns: None
Example:
>>> node_1 = Node('One')
>>> node_2 = Node('Two')
>>> node_1.add_reciprocal_link(node_2, 5)
>>> new_link_1 = node_1.link_list[0]
>>> new_link_2 = node_2.link_list[0]
>>> print(new_link_1)
node.Link instance pointing to node with value "Two" with weight 5
>>> print(new_link_2)
node.Link instance pointing to node with value "One" with weight 5
"""
# Generalize ``target`` to a list
if not isinstance(target, list):
target_list = [target]
else:
target_list = target
for t in target_list:
self.add_link(t, weight)
t.add_link(self, weight)
|
[
"def",
"add_reciprocal_link",
"(",
"self",
",",
"target",
",",
"weight",
")",
":",
"# Generalize ``target`` to a list",
"if",
"not",
"isinstance",
"(",
"target",
",",
"list",
")",
":",
"target_list",
"=",
"[",
"target",
"]",
"else",
":",
"target_list",
"=",
"target",
"for",
"t",
"in",
"target_list",
":",
"self",
".",
"add_link",
"(",
"t",
",",
"weight",
")",
"t",
".",
"add_link",
"(",
"self",
",",
"weight",
")"
] |
Add links pointing in either direction between ``self`` and ``target``.
This creates a ``Link`` from ``self`` to ``target`` and a ``Link``
from ``target`` to ``self`` of equal weight. If ``target`` is a list
of ``Node`` 's, repeat this for each one.
Args:
target (Node or list[Node]):
weight (int or float):
Returns: None
Example:
>>> node_1 = Node('One')
>>> node_2 = Node('Two')
>>> node_1.add_reciprocal_link(node_2, 5)
>>> new_link_1 = node_1.link_list[0]
>>> new_link_2 = node_2.link_list[0]
>>> print(new_link_1)
node.Link instance pointing to node with value "Two" with weight 5
>>> print(new_link_2)
node.Link instance pointing to node with value "One" with weight 5
|
[
"Add",
"links",
"pointing",
"in",
"either",
"direction",
"between",
"self",
"and",
"target",
"."
] |
python
|
train
|
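A self-contained toy Node showing how add_reciprocal_link simply applies the one-way link in both directions; this mirrors the behavior above, not the full blur API:

    class ToyNode:
        def __init__(self, value):
            self.value = value
            self.link_list = []      # list of (target_node, weight) pairs
        def add_link(self, target, weight):
            self.link_list.append((target, weight))
        def add_reciprocal_link(self, target, weight):
            targets = target if isinstance(target, list) else [target]
            for t in targets:
                self.add_link(t, weight)
                t.add_link(self, weight)

    one, two = ToyNode('One'), ToyNode('Two')
    one.add_reciprocal_link(two, 5)
    print(one.link_list[0][0].value, two.link_list[0][0].value)  # Two One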
happyleavesaoc/python-motorparts
|
motorparts/__init__.py
|
https://github.com/happyleavesaoc/python-motorparts/blob/4a6b4dc72dd45524dd64a7a079478bd98c55215c/motorparts/__init__.py#L159-L165
|
def get_tow_guide(session, vehicle_index):
"""Get tow guide information."""
profile = get_profile(session)
_validate_vehicle(vehicle_index, profile)
return session.post(TOW_URL, {
'vin': profile['vehicles'][vehicle_index]['vin']
}).json()
|
[
"def",
"get_tow_guide",
"(",
"session",
",",
"vehicle_index",
")",
":",
"profile",
"=",
"get_profile",
"(",
"session",
")",
"_validate_vehicle",
"(",
"vehicle_index",
",",
"profile",
")",
"return",
"session",
".",
"post",
"(",
"TOW_URL",
",",
"{",
"'vin'",
":",
"profile",
"[",
"'vehicles'",
"]",
"[",
"vehicle_index",
"]",
"[",
"'vin'",
"]",
"}",
")",
".",
"json",
"(",
")"
] |
Get tow guide information.
|
[
"Get",
"tow",
"guide",
"information",
"."
] |
python
|
train
|
FNNDSC/med2image
|
med2image/systemMisc.py
|
https://github.com/FNNDSC/med2image/blob/638d5d230de47608af20f9764acf8e382c2bf2ff/med2image/systemMisc.py#L271-L324
|
def b10_convertFrom(anum10, aradix, *args):
"""
ARGS
anum10 in number in base 10
aradix in convert <anum10> to number in base
+ <aradix>
OPTIONAL
forcelength in if nonzero, indicates the length
+ of the return array. Useful if
+ array needs to be zero padded.
DESC
Converts a scalar from base 10 to base radix. Return
an array.
NOTE:
"Translated" from a MatLAB script of the same name.
"""
i = 0;
k = 0;
# Cycle up in powers of radix until the largest exponent is found.
# This is required to determine the word length
while (pow(aradix, i)) <= anum10:
i = i + 1;
forcelength = i
# Optionally, allow user to specify word length
if len(args): forcelength = args[0]
# Check that word length is valid
if(forcelength and (forcelength < i)):
error_exit('b10_convertFrom',
'checking on requested return array',
'specified length is too small',
1)
numm = anum10;
num_r = zeros((forcelength));
if(i):
k = forcelength - i;
else:
k = forcelength - 1;
if(anum10 == 1):
num_r[(k)] = 1;
return num_r;
for j in arange(i, 0, -1):
num_r[(k)] = fix(numm / pow(aradix, (j - 1)));
numm = numm % pow(aradix, (j - 1));
k = k + 1;
return num_r
|
[
"def",
"b10_convertFrom",
"(",
"anum10",
",",
"aradix",
",",
"*",
"args",
")",
":",
"i",
"=",
"0",
"k",
"=",
"0",
"# Cycle up in powers of radix until the largest exponent is found.",
"# This is required to determine the word length",
"while",
"(",
"pow",
"(",
"aradix",
",",
"i",
")",
")",
"<=",
"anum10",
":",
"i",
"=",
"i",
"+",
"1",
"forcelength",
"=",
"i",
"# Optionally, allow user to specify word length",
"if",
"len",
"(",
"args",
")",
":",
"forcelength",
"=",
"args",
"[",
"0",
"]",
"# Check that word length is valid",
"if",
"(",
"forcelength",
"and",
"(",
"forcelength",
"<",
"i",
")",
")",
":",
"error_exit",
"(",
"'b10_convertFrom'",
",",
"'checking on requested return array'",
",",
"'specified length is too small'",
",",
"1",
")",
"numm",
"=",
"anum10",
"num_r",
"=",
"zeros",
"(",
"(",
"forcelength",
")",
")",
"if",
"(",
"i",
")",
":",
"k",
"=",
"forcelength",
"-",
"i",
"else",
":",
"k",
"=",
"forcelength",
"-",
"1",
"if",
"(",
"anum10",
"==",
"1",
")",
":",
"num_r",
"[",
"(",
"k",
")",
"]",
"=",
"1",
"return",
"num_r",
"for",
"j",
"in",
"arange",
"(",
"i",
",",
"0",
",",
"-",
"1",
")",
":",
"num_r",
"[",
"(",
"k",
")",
"]",
"=",
"fix",
"(",
"numm",
"/",
"pow",
"(",
"aradix",
",",
"(",
"j",
"-",
"1",
")",
")",
")",
"numm",
"=",
"numm",
"%",
"pow",
"(",
"aradix",
",",
"(",
"j",
"-",
"1",
")",
")",
"k",
"=",
"k",
"+",
"1",
"return",
"num_r"
] |
ARGS
anum10 in number in base 10
aradix in convert <anum10> to number in base
+ <aradix>
OPTIONAL
forcelength in if nonzero, indicates the length
+ of the return array. Useful if
+ array needs to be zero padded.
DESC
Converts a scalar from base 10 to base radix. Return
an array.
NOTE:
"Translated" from a MatLAB script of the same name.
|
[
"ARGS",
"anum10",
"in",
"number",
"in",
"base",
"10",
"aradix",
"in",
"convert",
"<anum10",
">",
"to",
"number",
"in",
"base",
"+",
"<aradix",
">"
] |
python
|
train
|
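A compact, runnable rendering of the same base-10 to base-radix conversion, including the optional zero padding; plain Python lists stand in for the numpy arrays used above:

    def to_base(num, radix, forcelength=None):
        # Repeated division, most significant digit first.
        digits = []
        while num:
            digits.append(num % radix)
            num //= radix
        digits = digits[::-1] or [0]
        if forcelength is not None:
            if forcelength < len(digits):
                raise ValueError('specified length is too small')
            digits = [0] * (forcelength - len(digits)) + digits
        return digits

    print(to_base(11, 2))     # [1, 0, 1, 1]
    print(to_base(11, 2, 8))  # [0, 0, 0, 0, 1, 0, 1, 1]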
qualisys/qualisys_python_sdk
|
qtm/qrt.py
|
https://github.com/qualisys/qualisys_python_sdk/blob/127d7eeebc2b38b5cafdfa5d1d0198437fedd274/qtm/qrt.py#L230-L236
|
async def start(self, rtfromfile=False):
"""Start RT from file. You need to be in control of QTM to be able to do this.
"""
cmd = "start" + (" rtfromfile" if rtfromfile else "")
return await asyncio.wait_for(
self._protocol.send_command(cmd), timeout=self._timeout
)
|
[
"async",
"def",
"start",
"(",
"self",
",",
"rtfromfile",
"=",
"False",
")",
":",
"cmd",
"=",
"\"start\"",
"+",
"(",
"\" rtfromfile\"",
"if",
"rtfromfile",
"else",
"\"\"",
")",
"return",
"await",
"asyncio",
".",
"wait_for",
"(",
"self",
".",
"_protocol",
".",
"send_command",
"(",
"cmd",
")",
",",
"timeout",
"=",
"self",
".",
"_timeout",
")"
] |
Start RT from file. You need to be in control of QTM to be able to do this.
|
[
"Start",
"RT",
"from",
"file",
".",
"You",
"need",
"to",
"be",
"in",
"control",
"of",
"QTM",
"to",
"be",
"able",
"to",
"do",
"this",
"."
] |
python
|
valid
|
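The start command above is a coroutine wrapped in asyncio.wait_for so a stalled QTM connection raises a timeout. A generic, runnable sketch of that timeout wrapper; slow_command is a stand-in for the protocol call:

    import asyncio

    async def slow_command():
        await asyncio.sleep(2)     # stand-in for self._protocol.send_command(cmd)
        return 'ok'

    async def main():
        try:
            return await asyncio.wait_for(slow_command(), timeout=0.5)
        except asyncio.TimeoutError:
            return 'timed out'

    print(asyncio.run(main()))     # timed out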
junzis/pyModeS
|
pyModeS/decoder/bds/bds44.py
|
https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/bds/bds44.py#L121-L138
|
def p44(msg):
"""Static pressure.
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
int: static pressure in hPa
"""
d = hex2bin(data(msg))
if d[34] == '0':
return None
p = bin2int(d[35:46]) # hPa
return p
|
[
"def",
"p44",
"(",
"msg",
")",
":",
"d",
"=",
"hex2bin",
"(",
"data",
"(",
"msg",
")",
")",
"if",
"d",
"[",
"34",
"]",
"==",
"'0'",
":",
"return",
"None",
"p",
"=",
"bin2int",
"(",
"d",
"[",
"35",
":",
"46",
"]",
")",
"# hPa",
"return",
"p"
] |
Static pressure.
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
int: static pressure in hPa
|
[
"Static",
"pressure",
"."
] |
python
|
train
|
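The decoder above checks a status bit and then reads an 11-bit field out of the 56-bit payload. A standalone sketch of that bit slicing with stdlib only; the hex payload below is made up for illustration, not a real Mode S frame:

    def hex2bin(hexstr):
        # Zero-padded binary string, 4 bits per hex digit.
        return bin(int(hexstr, 16))[2:].zfill(len(hexstr) * 4)

    def bin2int(binstr):
        return int(binstr, 2)

    mb = hex2bin('FFFFFFFFFFFFFF')     # illustrative 56-bit payload, all ones
    if mb[34] == '1':                  # status bit must be set
        pressure = bin2int(mb[35:46])  # 11-bit field, hPa
        print(pressure)                # 2047 for this all-ones payload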
log2timeline/plaso
|
plaso/cli/psort_tool.py
|
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/cli/psort_tool.py#L102-L127
|
def _CheckStorageFile(self, storage_file_path): # pylint: disable=arguments-differ
"""Checks if the storage file path is valid.
Args:
storage_file_path (str): path of the storage file.
Raises:
BadConfigOption: if the storage file path is invalid.
"""
if os.path.exists(storage_file_path):
if not os.path.isfile(storage_file_path):
raise errors.BadConfigOption(
'Storage file: {0:s} already exists and is not a file.'.format(
storage_file_path))
logger.warning('Appending to an already existing storage file.')
dirname = os.path.dirname(storage_file_path)
if not dirname:
dirname = '.'
# TODO: add a more thorough check to see if the storage file really is
# a plaso storage file.
if not os.access(dirname, os.W_OK):
raise errors.BadConfigOption(
'Unable to write to storage file: {0:s}'.format(storage_file_path))
|
[
"def",
"_CheckStorageFile",
"(",
"self",
",",
"storage_file_path",
")",
":",
"# pylint: disable=arguments-differ",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"storage_file_path",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"storage_file_path",
")",
":",
"raise",
"errors",
".",
"BadConfigOption",
"(",
"'Storage file: {0:s} already exists and is not a file.'",
".",
"format",
"(",
"storage_file_path",
")",
")",
"logger",
".",
"warning",
"(",
"'Appending to an already existing storage file.'",
")",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"storage_file_path",
")",
"if",
"not",
"dirname",
":",
"dirname",
"=",
"'.'",
"# TODO: add a more thorough check to see if the storage file really is",
"# a plaso storage file.",
"if",
"not",
"os",
".",
"access",
"(",
"dirname",
",",
"os",
".",
"W_OK",
")",
":",
"raise",
"errors",
".",
"BadConfigOption",
"(",
"'Unable to write to storage file: {0:s}'",
".",
"format",
"(",
"storage_file_path",
")",
")"
] |
Checks if the storage file path is valid.
Args:
storage_file_path (str): path of the storage file.
Raises:
BadConfigOption: if the storage file path is invalid.
|
[
"Checks",
"if",
"the",
"storage",
"file",
"path",
"is",
"valid",
"."
] |
python
|
train
|
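A standalone sketch of the same validation: reject a path that exists but is not a regular file, and require the containing directory to be writable. Plain exceptions stand in for plaso's BadConfigOption:

    import os

    def check_storage_path(path):
        if os.path.exists(path) and not os.path.isfile(path):
            raise ValueError('{0:s} already exists and is not a file.'.format(path))
        dirname = os.path.dirname(path) or '.'
        if not os.access(dirname, os.W_OK):
            raise ValueError('Unable to write to: {0:s}'.format(path))

    check_storage_path('./example.plaso')  # passes in a writable working directory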
nuagenetworks/bambou
|
bambou/nurest_fetcher.py
|
https://github.com/nuagenetworks/bambou/blob/d334fea23e384d3df8e552fe1849ad707941c666/bambou/nurest_fetcher.py#L256-L291
|
def fetch(self, filter=None, order_by=None, group_by=[], page=None, page_size=None, query_parameters=None, commit=True, async=False, callback=None):
""" Fetch objects according to given filter and page.
Note:
This method fetches all managed class objects and store them
in local_name of the served object. which means that the parent
object will hold them in a list. You can prevent this behavior
by setting commit to False. In that case, the fetched children
won't be added in the parent object cache.
Args:
filter (string): string that represents a predicate filter
order_by (string): string that represents an order by clause
group_by (string): list of names for grouping
page (int): number of the page to load
page_size (int): number of results per page
commit (bool): boolean to update current object
callback (function): Callback that should be called in case of a async request
Returns:
tuple: Returns a tuple of information (fetcher, served object, fetched objects, connection)
Example:
>>> entity.children.fetch()
(<NUChildrenFetcher at aaaa>, <NUEntity at bbbb>, [<NUChildren at ccc>, <NUChildren at ddd>], <NURESTConnection at zzz>)
"""
request = NURESTRequest(method=HTTP_METHOD_GET, url=self._prepare_url(), params=query_parameters)
self._prepare_headers(request=request, filter=filter, order_by=order_by, group_by=group_by, page=page, page_size=page_size)
if async:
return self.parent_object.send_request(request=request, async=async, local_callback=self._did_fetch, remote_callback=callback, user_info={'commit': commit})
connection = self.parent_object.send_request(request=request, user_info={'commit': commit})
return self._did_fetch(connection=connection)
|
[
"def",
"fetch",
"(",
"self",
",",
"filter",
"=",
"None",
",",
"order_by",
"=",
"None",
",",
"group_by",
"=",
"[",
"]",
",",
"page",
"=",
"None",
",",
"page_size",
"=",
"None",
",",
"query_parameters",
"=",
"None",
",",
"commit",
"=",
"True",
",",
"async",
"=",
"False",
",",
"callback",
"=",
"None",
")",
":",
"request",
"=",
"NURESTRequest",
"(",
"method",
"=",
"HTTP_METHOD_GET",
",",
"url",
"=",
"self",
".",
"_prepare_url",
"(",
")",
",",
"params",
"=",
"query_parameters",
")",
"self",
".",
"_prepare_headers",
"(",
"request",
"=",
"request",
",",
"filter",
"=",
"filter",
",",
"order_by",
"=",
"order_by",
",",
"group_by",
"=",
"group_by",
",",
"page",
"=",
"page",
",",
"page_size",
"=",
"page_size",
")",
"if",
"async",
":",
"return",
"self",
".",
"parent_object",
".",
"send_request",
"(",
"request",
"=",
"request",
",",
"async",
"=",
"async",
",",
"local_callback",
"=",
"self",
".",
"_did_fetch",
",",
"remote_callback",
"=",
"callback",
",",
"user_info",
"=",
"{",
"'commit'",
":",
"commit",
"}",
")",
"connection",
"=",
"self",
".",
"parent_object",
".",
"send_request",
"(",
"request",
"=",
"request",
",",
"user_info",
"=",
"{",
"'commit'",
":",
"commit",
"}",
")",
"return",
"self",
".",
"_did_fetch",
"(",
"connection",
"=",
"connection",
")"
] |
Fetch objects according to given filter and page.
Note:
This method fetches all managed class objects and store them
in local_name of the served object. which means that the parent
object will hold them in a list. You can prevent this behavior
by setting commit to False. In that case, the fetched children
won't be added in the parent object cache.
Args:
filter (string): string that represents a predicate filter
order_by (string): string that represents an order by clause
group_by (string): list of names for grouping
page (int): number of the page to load
page_size (int): number of results per page
commit (bool): boolean to update current object
callback (function): Callback that should be called in case of a async request
Returns:
tuple: Returns a tuple of information (fetcher, served object, fetched objects, connection)
Example:
>>> entity.children.fetch()
(<NUChildrenFetcher at aaaa>, <NUEntity at bbbb>, [<NUChildren at ccc>, <NUChildren at ddd>], <NURESTConnection at zzz>)
|
[
"Fetch",
"objects",
"according",
"to",
"given",
"filter",
"and",
"page",
"."
] |
python
|
train
|
Azure/msrest-for-python
|
msrest/paging.py
|
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/paging.py#L96-L107
|
def get(self, url):
# type: (str) -> List[Model]
"""Get an arbitrary page.
This resets the iterator and then fully consumes it to return the
specific page **only**.
:param str url: URL to arbitrary page results.
"""
self.reset()
self.next_link = url
return self.advance_page()
|
[
"def",
"get",
"(",
"self",
",",
"url",
")",
":",
"# type: (str) -> List[Model]",
"self",
".",
"reset",
"(",
")",
"self",
".",
"next_link",
"=",
"url",
"return",
"self",
".",
"advance_page",
"(",
")"
] |
Get an arbitrary page.
This resets the iterator and then fully consumes it to return the
specific page **only**.
:param str url: URL to arbitrary page results.
|
[
"Get",
"an",
"arbitrary",
"page",
"."
] |
python
|
train
|
ctuning/ck
|
ck/kernel.py
|
https://github.com/ctuning/ck/blob/7e009814e975f8742790d3106340088a46223714/ck/kernel.py#L1135-L1186
|
def get_os_ck(i):
"""
Input: {
(bits) - force OS bits
}
Output: {
return - return code = 0
platform - 'win' or 'linux'
bits - OS bits in string: 32 or 64
python_bits - Python installation bits in string: 32 or 64
}
"""
import os
import platform
import struct
pbits=str(8 * struct.calcsize("P"))
plat='linux'
if platform.system().lower().startswith('win'): # pragma: no cover
plat='win'
obits=i.get('bits','')
if obits=='':
obits='32'
if plat=='win':
# Trying to get fast way to detect bits
if os.environ.get('ProgramW6432','')!='' or os.environ.get('ProgramFiles(x86)','')!='': # pragma: no cover
obits='64'
else:
# On Linux use first getconf LONG_BIT and if doesn't work use python bits
obits=pbits
r=gen_tmp_file({})
if r['return']>0: return r
fn=r['file_name']
cmd='getconf LONG_BIT > '+fn
rx=os.system(cmd)
if rx==0:
r=load_text_file({'text_file':fn,
'delete_after_read':'yes'})
if r['return']==0:
s=r['string'].strip()
if len(s)>0 and len(s)<4:
obits=s
return {'return':0, 'platform':plat, 'bits':obits, 'python_bits':pbits}
|
[
"def",
"get_os_ck",
"(",
"i",
")",
":",
"import",
"os",
"import",
"platform",
"import",
"struct",
"pbits",
"=",
"str",
"(",
"8",
"*",
"struct",
".",
"calcsize",
"(",
"\"P\"",
")",
")",
"plat",
"=",
"'linux'",
"if",
"platform",
".",
"system",
"(",
")",
".",
"lower",
"(",
")",
".",
"startswith",
"(",
"'win'",
")",
":",
"# pragma: no cover",
"plat",
"=",
"'win'",
"obits",
"=",
"i",
".",
"get",
"(",
"'bits'",
",",
"''",
")",
"if",
"obits",
"==",
"''",
":",
"obits",
"=",
"'32'",
"if",
"plat",
"==",
"'win'",
":",
"# Trying to get fast way to detect bits",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"'ProgramW6432'",
",",
"''",
")",
"!=",
"''",
"or",
"os",
".",
"environ",
".",
"get",
"(",
"'ProgramFiles(x86)'",
",",
"''",
")",
"!=",
"''",
":",
"# pragma: no cover",
"obits",
"=",
"'64'",
"else",
":",
"# On Linux use first getconf LONG_BIT and if doesn't work use python bits",
"obits",
"=",
"pbits",
"r",
"=",
"gen_tmp_file",
"(",
"{",
"}",
")",
"if",
"r",
"[",
"'return'",
"]",
">",
"0",
":",
"return",
"r",
"fn",
"=",
"r",
"[",
"'file_name'",
"]",
"cmd",
"=",
"'getconf LONG_BIT > '",
"+",
"fn",
"rx",
"=",
"os",
".",
"system",
"(",
"cmd",
")",
"if",
"rx",
"==",
"0",
":",
"r",
"=",
"load_text_file",
"(",
"{",
"'text_file'",
":",
"fn",
",",
"'delete_after_read'",
":",
"'yes'",
"}",
")",
"if",
"r",
"[",
"'return'",
"]",
"==",
"0",
":",
"s",
"=",
"r",
"[",
"'string'",
"]",
".",
"strip",
"(",
")",
"if",
"len",
"(",
"s",
")",
">",
"0",
"and",
"len",
"(",
"s",
")",
"<",
"4",
":",
"obits",
"=",
"s",
"return",
"{",
"'return'",
":",
"0",
",",
"'platform'",
":",
"plat",
",",
"'bits'",
":",
"obits",
",",
"'python_bits'",
":",
"pbits",
"}"
] |
Input: {
(bits) - force OS bits
}
Output: {
return - return code = 0
platform - 'win' or 'linux'
bits - OS bits in string: 32 or 64
python_bits - Python installation bits in string: 32 or 64
}
|
[
"Input",
":",
"{",
"(",
"bits",
")",
"-",
"force",
"OS",
"bits",
"}"
] |
python
|
train
|
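The pointer-size trick above is the portable core: struct.calcsize('P') is 4 on a 32-bit interpreter and 8 on a 64-bit one. A minimal stdlib sketch of that detection:

    import platform
    import struct

    python_bits = 8 * struct.calcsize('P')  # 32 or 64
    plat = 'win' if platform.system().lower().startswith('win') else 'linux'
    print(plat, python_bits)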
mitodl/pylti
|
pylti/common.py
|
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L311-L353
|
def generate_request_xml(message_identifier_id, operation,
lis_result_sourcedid, score):
# pylint: disable=too-many-locals
"""
Generates LTI 1.1 XML for posting result to LTI consumer.
:param message_identifier_id:
:param operation:
:param lis_result_sourcedid:
:param score:
:return: XML string
"""
root = etree.Element(u'imsx_POXEnvelopeRequest',
xmlns=u'http://www.imsglobal.org/services/'
u'ltiv1p1/xsd/imsoms_v1p0')
header = etree.SubElement(root, 'imsx_POXHeader')
header_info = etree.SubElement(header, 'imsx_POXRequestHeaderInfo')
version = etree.SubElement(header_info, 'imsx_version')
version.text = 'V1.0'
message_identifier = etree.SubElement(header_info,
'imsx_messageIdentifier')
message_identifier.text = message_identifier_id
body = etree.SubElement(root, 'imsx_POXBody')
xml_request = etree.SubElement(body, '%s%s' % (operation, 'Request'))
record = etree.SubElement(xml_request, 'resultRecord')
guid = etree.SubElement(record, 'sourcedGUID')
sourcedid = etree.SubElement(guid, 'sourcedId')
sourcedid.text = lis_result_sourcedid
if score is not None:
result = etree.SubElement(record, 'result')
result_score = etree.SubElement(result, 'resultScore')
language = etree.SubElement(result_score, 'language')
language.text = 'en'
text_string = etree.SubElement(result_score, 'textString')
text_string.text = score.__str__()
ret = "<?xml version='1.0' encoding='utf-8'?>\n{}".format(
etree.tostring(root, encoding='utf-8').decode('utf-8'))
log.debug("XML Response: \n%s", ret)
return ret
|
[
"def",
"generate_request_xml",
"(",
"message_identifier_id",
",",
"operation",
",",
"lis_result_sourcedid",
",",
"score",
")",
":",
"# pylint: disable=too-many-locals",
"root",
"=",
"etree",
".",
"Element",
"(",
"u'imsx_POXEnvelopeRequest'",
",",
"xmlns",
"=",
"u'http://www.imsglobal.org/services/'",
"u'ltiv1p1/xsd/imsoms_v1p0'",
")",
"header",
"=",
"etree",
".",
"SubElement",
"(",
"root",
",",
"'imsx_POXHeader'",
")",
"header_info",
"=",
"etree",
".",
"SubElement",
"(",
"header",
",",
"'imsx_POXRequestHeaderInfo'",
")",
"version",
"=",
"etree",
".",
"SubElement",
"(",
"header_info",
",",
"'imsx_version'",
")",
"version",
".",
"text",
"=",
"'V1.0'",
"message_identifier",
"=",
"etree",
".",
"SubElement",
"(",
"header_info",
",",
"'imsx_messageIdentifier'",
")",
"message_identifier",
".",
"text",
"=",
"message_identifier_id",
"body",
"=",
"etree",
".",
"SubElement",
"(",
"root",
",",
"'imsx_POXBody'",
")",
"xml_request",
"=",
"etree",
".",
"SubElement",
"(",
"body",
",",
"'%s%s'",
"%",
"(",
"operation",
",",
"'Request'",
")",
")",
"record",
"=",
"etree",
".",
"SubElement",
"(",
"xml_request",
",",
"'resultRecord'",
")",
"guid",
"=",
"etree",
".",
"SubElement",
"(",
"record",
",",
"'sourcedGUID'",
")",
"sourcedid",
"=",
"etree",
".",
"SubElement",
"(",
"guid",
",",
"'sourcedId'",
")",
"sourcedid",
".",
"text",
"=",
"lis_result_sourcedid",
"if",
"score",
"is",
"not",
"None",
":",
"result",
"=",
"etree",
".",
"SubElement",
"(",
"record",
",",
"'result'",
")",
"result_score",
"=",
"etree",
".",
"SubElement",
"(",
"result",
",",
"'resultScore'",
")",
"language",
"=",
"etree",
".",
"SubElement",
"(",
"result_score",
",",
"'language'",
")",
"language",
".",
"text",
"=",
"'en'",
"text_string",
"=",
"etree",
".",
"SubElement",
"(",
"result_score",
",",
"'textString'",
")",
"text_string",
".",
"text",
"=",
"score",
".",
"__str__",
"(",
")",
"ret",
"=",
"\"<?xml version='1.0' encoding='utf-8'?>\\n{}\"",
".",
"format",
"(",
"etree",
".",
"tostring",
"(",
"root",
",",
"encoding",
"=",
"'utf-8'",
")",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"log",
".",
"debug",
"(",
"\"XML Response: \\n%s\"",
",",
"ret",
")",
"return",
"ret"
] |
Generates LTI 1.1 XML for posting result to LTI consumer.
:param message_identifier_id:
:param operation:
:param lis_result_sourcedid:
:param score:
:return: XML string
|
[
"Generates",
"LTI",
"1",
".",
"1",
"XML",
"for",
"posting",
"result",
"to",
"LTI",
"consumer",
"."
] |
python
|
train
|
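A trimmed, runnable sketch of the same POX envelope using the standard library's ElementTree; only the header and sourcedId are built, the operation name and identifier values are examples, and element names follow the function above:

    import xml.etree.ElementTree as etree

    root = etree.Element('imsx_POXEnvelopeRequest',
                         xmlns='http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0')
    header = etree.SubElement(root, 'imsx_POXHeader')
    header_info = etree.SubElement(header, 'imsx_POXRequestHeaderInfo')
    etree.SubElement(header_info, 'imsx_version').text = 'V1.0'
    etree.SubElement(header_info, 'imsx_messageIdentifier').text = 'abc123'
    body = etree.SubElement(root, 'imsx_POXBody')
    request = etree.SubElement(body, 'replaceResultRequest')   # example operation
    record = etree.SubElement(request, 'resultRecord')
    guid = etree.SubElement(record, 'sourcedGUID')
    etree.SubElement(guid, 'sourcedId').text = 'lis-sourcedid-value'
    print(etree.tostring(root, encoding='utf-8').decode('utf-8'))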
PyCQA/pyflakes
|
pyflakes/checker.py
|
https://github.com/PyCQA/pyflakes/blob/232cb1d27ee134bf96adc8f37e53589dc259b159/pyflakes/checker.py#L712-L719
|
def runDeferred(self, deferred):
"""
Run the callables in C{deferred} using their associated scope stack.
"""
for handler, scope, offset in deferred:
self.scopeStack = scope
self.offset = offset
handler()
|
[
"def",
"runDeferred",
"(",
"self",
",",
"deferred",
")",
":",
"for",
"handler",
",",
"scope",
",",
"offset",
"in",
"deferred",
":",
"self",
".",
"scopeStack",
"=",
"scope",
"self",
".",
"offset",
"=",
"offset",
"handler",
"(",
")"
] |
Run the callables in C{deferred} using their associated scope stack.
|
[
"Run",
"the",
"callables",
"in",
"C",
"{",
"deferred",
"}",
"using",
"their",
"associated",
"scope",
"stack",
"."
] |
python
|
train
|
spyder-ide/spyder
|
spyder/plugins/variableexplorer/widgets/importwizard.py
|
https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/variableexplorer/widgets/importwizard.py#L558-L563
|
def _focus_tab(self, tab_idx):
"""Change tab focus"""
for i in range(self.tab_widget.count()):
self.tab_widget.setTabEnabled(i, False)
self.tab_widget.setTabEnabled(tab_idx, True)
self.tab_widget.setCurrentIndex(tab_idx)
|
[
"def",
"_focus_tab",
"(",
"self",
",",
"tab_idx",
")",
":",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"tab_widget",
".",
"count",
"(",
")",
")",
":",
"self",
".",
"tab_widget",
".",
"setTabEnabled",
"(",
"i",
",",
"False",
")",
"self",
".",
"tab_widget",
".",
"setTabEnabled",
"(",
"tab_idx",
",",
"True",
")",
"self",
".",
"tab_widget",
".",
"setCurrentIndex",
"(",
"tab_idx",
")"
] |
Change tab focus
|
[
"Change",
"tab",
"focus"
] |
python
|
train
|
ANTsX/ANTsPy
|
ants/registration/reorient_image.py
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/registration/reorient_image.py#L56-L82
|
def reorient_image2(image, orientation='RAS'):
"""
Reorient an image.
Example
-------
>>> import ants
>>> mni = ants.image_read(ants.get_data('mni'))
>>> mni2 = mni.reorient_image2()
"""
if image.dimension != 3:
raise ValueError('image must have 3 dimensions')
inpixeltype = image.pixeltype
ndim = image.dimension
if image.pixeltype != 'float':
image = image.clone('float')
libfn = utils.get_lib_fn('reorientImage2')
itkimage = libfn(image.pointer, orientation)
new_img = iio.ANTsImage(pixeltype='float', dimension=ndim,
components=image.components, pointer=itkimage)#.clone(inpixeltype)
if inpixeltype != 'float':
new_img = new_img.clone(inpixeltype)
return new_img
|
[
"def",
"reorient_image2",
"(",
"image",
",",
"orientation",
"=",
"'RAS'",
")",
":",
"if",
"image",
".",
"dimension",
"!=",
"3",
":",
"raise",
"ValueError",
"(",
"'image must have 3 dimensions'",
")",
"inpixeltype",
"=",
"image",
".",
"pixeltype",
"ndim",
"=",
"image",
".",
"dimension",
"if",
"image",
".",
"pixeltype",
"!=",
"'float'",
":",
"image",
"=",
"image",
".",
"clone",
"(",
"'float'",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'reorientImage2'",
")",
"itkimage",
"=",
"libfn",
"(",
"image",
".",
"pointer",
",",
"orientation",
")",
"new_img",
"=",
"iio",
".",
"ANTsImage",
"(",
"pixeltype",
"=",
"'float'",
",",
"dimension",
"=",
"ndim",
",",
"components",
"=",
"image",
".",
"components",
",",
"pointer",
"=",
"itkimage",
")",
"#.clone(inpixeltype)",
"if",
"inpixeltype",
"!=",
"'float'",
":",
"new_img",
"=",
"new_img",
".",
"clone",
"(",
"inpixeltype",
")",
"return",
"new_img"
] |
Reorient an image.
Example
-------
>>> import ants
>>> mni = ants.image_read(ants.get_data('mni'))
>>> mni2 = mni.reorient_image2()
|
[
"Reorient",
"an",
"image",
"."
] |
python
|
train
|
materialsproject/pymatgen
|
pymatgen/io/abinit/nodes.py
|
https://github.com/materialsproject/pymatgen/blob/4ca558cf72f8d5f8a1f21dfdfc0181a971c186da/pymatgen/io/abinit/nodes.py#L1189-L1191
|
def critical(self, msg, *args, **kwargs):
"""Log 'msg % args' with the critical severity level"""
self._log("CRITICAL", msg, args, kwargs)
|
[
"def",
"critical",
"(",
"self",
",",
"msg",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_log",
"(",
"\"CRITICAL\"",
",",
"msg",
",",
"args",
",",
"kwargs",
")"
] |
Log 'msg % args' with the critical severity level
|
[
"Log",
"msg",
"%",
"args",
"with",
"the",
"critical",
"severity",
"level"
] |
python
|
train
|
mushkevych/scheduler
|
synergy/db/model/queue_context_entry.py
|
https://github.com/mushkevych/scheduler/blob/6740331360f49083c208085fb5a60ce80ebf418b/synergy/db/model/queue_context_entry.py#L28-L38
|
def queue_context_entry(exchange,
queue_name,
routing=None):
""" forms queue's context entry """
if routing is None:
routing = queue_name
queue_entry = QueueContextEntry(mq_queue=queue_name,
mq_exchange=exchange,
mq_routing_key=routing)
return queue_entry
|
[
"def",
"queue_context_entry",
"(",
"exchange",
",",
"queue_name",
",",
"routing",
"=",
"None",
")",
":",
"if",
"routing",
"is",
"None",
":",
"routing",
"=",
"queue_name",
"queue_entry",
"=",
"QueueContextEntry",
"(",
"mq_queue",
"=",
"queue_name",
",",
"mq_exchange",
"=",
"exchange",
",",
"mq_routing_key",
"=",
"routing",
")",
"return",
"queue_entry"
] |
forms queue's context entry
|
[
"forms",
"queue",
"s",
"context",
"entry"
] |
python
|
train
|
d0c-s4vage/pfp
|
pfp/bitwrap.py
|
https://github.com/d0c-s4vage/pfp/blob/32f2d34fdec1c70019fa83c7006d5e3be0f92fcd/pfp/bitwrap.py#L82-L87
|
def close(self):
"""Close the stream
"""
self.closed = True
self._flush_bits_to_stream()
self._stream.close()
|
[
"def",
"close",
"(",
"self",
")",
":",
"self",
".",
"closed",
"=",
"True",
"self",
".",
"_flush_bits_to_stream",
"(",
")",
"self",
".",
"_stream",
".",
"close",
"(",
")"
] |
Close the stream
|
[
"Close",
"the",
"stream"
] |
python
|
train
|
dfm/george
|
george/modeling.py
|
https://github.com/dfm/george/blob/44819680036387625ee89f81c55104f3c1600759/george/modeling.py#L176-L188
|
def get_parameter_dict(self, include_frozen=False):
"""
Get an ordered dictionary of the parameters
Args:
include_frozen (Optional[bool]): Should the frozen parameters be
included in the returned value? (default: ``False``)
"""
return OrderedDict(zip(
self.get_parameter_names(include_frozen=include_frozen),
self.get_parameter_vector(include_frozen=include_frozen),
))
|
[
"def",
"get_parameter_dict",
"(",
"self",
",",
"include_frozen",
"=",
"False",
")",
":",
"return",
"OrderedDict",
"(",
"zip",
"(",
"self",
".",
"get_parameter_names",
"(",
"include_frozen",
"=",
"include_frozen",
")",
",",
"self",
".",
"get_parameter_vector",
"(",
"include_frozen",
"=",
"include_frozen",
")",
",",
")",
")"
] |
Get an ordered dictionary of the parameters
Args:
include_frozen (Optional[bool]): Should the frozen parameters be
included in the returned value? (default: ``False``)
|
[
"Get",
"an",
"ordered",
"dictionary",
"of",
"the",
"parameters"
] |
python
|
train
|
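The method above is just an ordered zip of parameter names and the parameter vector. A tiny runnable equivalent with made-up parameter names:

    from collections import OrderedDict

    names = ['kernel:log_amp', 'kernel:log_scale']   # illustrative names
    vector = [0.5, -1.2]
    print(OrderedDict(zip(names, vector)))
    # OrderedDict([('kernel:log_amp', 0.5), ('kernel:log_scale', -1.2)])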
phoebe-project/phoebe2
|
phoebe/backend/universe.py
|
https://github.com/phoebe-project/phoebe2/blob/e64b8be683977064e2d55dd1b3ac400f64c3e379/phoebe/backend/universe.py#L1608-L1622
|
def _fill_albedos(self, mesh=None, irrad_frac_refl=0.0):
"""
TODO: add documentation
"""
logger.debug("{}._fill_albedos".format(self.component))
if mesh is None:
mesh = self.mesh
mesh.update_columns(irrad_frac_refl=irrad_frac_refl)
if not self.needs_recompute_instantaneous:
logger.debug("{}._fill_albedos: copying albedos to standard mesh".format(self.component))
theta = 0.0
self._standard_meshes[theta].update_columns(irrad_frac_refl=irrad_frac_refl)
|
[
"def",
"_fill_albedos",
"(",
"self",
",",
"mesh",
"=",
"None",
",",
"irrad_frac_refl",
"=",
"0.0",
")",
":",
"logger",
".",
"debug",
"(",
"\"{}._fill_albedos\"",
".",
"format",
"(",
"self",
".",
"component",
")",
")",
"if",
"mesh",
"is",
"None",
":",
"mesh",
"=",
"self",
".",
"mesh",
"mesh",
".",
"update_columns",
"(",
"irrad_frac_refl",
"=",
"irrad_frac_refl",
")",
"if",
"not",
"self",
".",
"needs_recompute_instantaneous",
":",
"logger",
".",
"debug",
"(",
"\"{}._fill_albedos: copying albedos to standard mesh\"",
".",
"format",
"(",
"self",
".",
"component",
")",
")",
"theta",
"=",
"0.0",
"self",
".",
"_standard_meshes",
"[",
"theta",
"]",
".",
"update_columns",
"(",
"irrad_frac_refl",
"=",
"irrad_frac_refl",
")"
] |
TODO: add documentation
|
[
"TODO",
":",
"add",
"documentation"
] |
python
|
train
|
Yipit/ejson
|
ejson/__init__.py
|
https://github.com/Yipit/ejson/blob/6665703f1534923d1c30849e08339f0ff97d8230/ejson/__init__.py#L179-L194
|
def _converter(data):
"""Internal function that will be passed to the native `json.dumps`.
This function uses the `REGISTRY` of serializers and try to convert
a given instance to an object that json.dumps can understand.
"""
handler = REGISTRY.get(data.__class__)
if handler:
full_name = '{}.{}'.format(
data.__class__.__module__,
data.__class__.__name__)
return {
'__class__': full_name,
'__value__': handler(data),
}
raise TypeError(repr(data) + " is not JSON serializable")
|
[
"def",
"_converter",
"(",
"data",
")",
":",
"handler",
"=",
"REGISTRY",
".",
"get",
"(",
"data",
".",
"__class__",
")",
"if",
"handler",
":",
"full_name",
"=",
"'{}.{}'",
".",
"format",
"(",
"data",
".",
"__class__",
".",
"__module__",
",",
"data",
".",
"__class__",
".",
"__name__",
")",
"return",
"{",
"'__class__'",
":",
"full_name",
",",
"'__value__'",
":",
"handler",
"(",
"data",
")",
",",
"}",
"raise",
"TypeError",
"(",
"repr",
"(",
"data",
")",
"+",
"\" is not JSON serializable\"",
")"
] |
Internal function that will be passed to the native `json.dumps`.
This function uses the `REGISTRY` of serializers and tries to convert
a given instance to an object that json.dumps can understand.
|
[
"Internal",
"function",
"that",
"will",
"be",
"passed",
"to",
"the",
"native",
"json",
".",
"dumps",
"."
] |
python
|
train
|
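The _converter record shows a registry-driven fallback for json.dumps. A minimal sketch of how such a converter is wired up as a default= hook follows; the Money class, its handler, and the REGISTRY contents here are illustrative and not part of ejson's actual registration API.
import json

# Illustrative registry: maps a type to a function returning a JSON-safe value.
REGISTRY = {}


class Money:
    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency


REGISTRY[Money] = lambda m: {"amount": m.amount, "currency": m.currency}


def _converter(data):
    # Same lookup-then-wrap pattern as the record above.
    handler = REGISTRY.get(data.__class__)
    if handler:
        full_name = "{}.{}".format(data.__class__.__module__,
                                   data.__class__.__name__)
        return {"__class__": full_name, "__value__": handler(data)}
    raise TypeError(repr(data) + " is not JSON serializable")


if __name__ == "__main__":
    print(json.dumps({"price": Money(9.99, "USD")}, default=_converter))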
metric-learn/metric-learn
|
metric_learn/mmc.py
|
https://github.com/metric-learn/metric-learn/blob/d945df1342c69012608bb70b92520392a0853de6/metric_learn/mmc.py#L212-L271
|
def _fit_diag(self, pairs, y):
"""Learn diagonal metric using MMC.
Parameters
----------
X : (n x d) data matrix
each row corresponds to a single instance
constraints : 4-tuple of arrays
(a,b,c,d) indices into X, with (a,b) specifying similar and (c,d)
dissimilar pairs
"""
num_dim = pairs.shape[2]
pos_pairs, neg_pairs = pairs[y == 1], pairs[y == -1]
s_sum = np.sum((pos_pairs[:, 0, :] - pos_pairs[:, 1, :]) ** 2, axis=0)
it = 0
error = 1.0
eps = 1e-6
reduction = 2.0
w = np.diag(self.A_).copy()
while error > self.convergence_threshold and it < self.max_iter:
fD0, fD_1st_d, fD_2nd_d = self._D_constraint(neg_pairs, w)
obj_initial = np.dot(s_sum, w) + self.diagonal_c * fD0
fS_1st_d = s_sum # first derivative of the similarity constraints
gradient = fS_1st_d - self.diagonal_c * fD_1st_d # gradient of the objective
hessian = -self.diagonal_c * fD_2nd_d + eps * np.eye(num_dim) # Hessian of the objective
step = np.dot(np.linalg.inv(hessian), gradient)
# Newton-Rapshon update
# search over optimal lambda
lambd = 1 # initial step-size
w_tmp = np.maximum(0, w - lambd * step)
obj = (np.dot(s_sum, w_tmp) + self.diagonal_c *
self._D_objective(neg_pairs, w_tmp))
assert_all_finite(obj)
obj_previous = obj + 1 # just to get the while-loop started
inner_it = 0
while obj < obj_previous:
obj_previous = obj
w_previous = w_tmp.copy()
lambd /= reduction
w_tmp = np.maximum(0, w - lambd * step)
obj = (np.dot(s_sum, w_tmp) + self.diagonal_c *
self._D_objective(neg_pairs, w_tmp))
inner_it += 1
assert_all_finite(obj)
w[:] = w_previous
error = np.abs((obj_previous - obj_initial) / obj_previous)
if self.verbose:
print('mmc iter: %d, conv = %f' % (it, error))
it += 1
self.A_ = np.diag(w)
self.transformer_ = transformer_from_metric(self.A_)
return self
|
[
"def",
"_fit_diag",
"(",
"self",
",",
"pairs",
",",
"y",
")",
":",
"num_dim",
"=",
"pairs",
".",
"shape",
"[",
"2",
"]",
"pos_pairs",
",",
"neg_pairs",
"=",
"pairs",
"[",
"y",
"==",
"1",
"]",
",",
"pairs",
"[",
"y",
"==",
"-",
"1",
"]",
"s_sum",
"=",
"np",
".",
"sum",
"(",
"(",
"pos_pairs",
"[",
":",
",",
"0",
",",
":",
"]",
"-",
"pos_pairs",
"[",
":",
",",
"1",
",",
":",
"]",
")",
"**",
"2",
",",
"axis",
"=",
"0",
")",
"it",
"=",
"0",
"error",
"=",
"1.0",
"eps",
"=",
"1e-6",
"reduction",
"=",
"2.0",
"w",
"=",
"np",
".",
"diag",
"(",
"self",
".",
"A_",
")",
".",
"copy",
"(",
")",
"while",
"error",
">",
"self",
".",
"convergence_threshold",
"and",
"it",
"<",
"self",
".",
"max_iter",
":",
"fD0",
",",
"fD_1st_d",
",",
"fD_2nd_d",
"=",
"self",
".",
"_D_constraint",
"(",
"neg_pairs",
",",
"w",
")",
"obj_initial",
"=",
"np",
".",
"dot",
"(",
"s_sum",
",",
"w",
")",
"+",
"self",
".",
"diagonal_c",
"*",
"fD0",
"fS_1st_d",
"=",
"s_sum",
"# first derivative of the similarity constraints",
"gradient",
"=",
"fS_1st_d",
"-",
"self",
".",
"diagonal_c",
"*",
"fD_1st_d",
"# gradient of the objective",
"hessian",
"=",
"-",
"self",
".",
"diagonal_c",
"*",
"fD_2nd_d",
"+",
"eps",
"*",
"np",
".",
"eye",
"(",
"num_dim",
")",
"# Hessian of the objective",
"step",
"=",
"np",
".",
"dot",
"(",
"np",
".",
"linalg",
".",
"inv",
"(",
"hessian",
")",
",",
"gradient",
")",
"# Newton-Rapshon update",
"# search over optimal lambda",
"lambd",
"=",
"1",
"# initial step-size",
"w_tmp",
"=",
"np",
".",
"maximum",
"(",
"0",
",",
"w",
"-",
"lambd",
"*",
"step",
")",
"obj",
"=",
"(",
"np",
".",
"dot",
"(",
"s_sum",
",",
"w_tmp",
")",
"+",
"self",
".",
"diagonal_c",
"*",
"self",
".",
"_D_objective",
"(",
"neg_pairs",
",",
"w_tmp",
")",
")",
"assert_all_finite",
"(",
"obj",
")",
"obj_previous",
"=",
"obj",
"+",
"1",
"# just to get the while-loop started",
"inner_it",
"=",
"0",
"while",
"obj",
"<",
"obj_previous",
":",
"obj_previous",
"=",
"obj",
"w_previous",
"=",
"w_tmp",
".",
"copy",
"(",
")",
"lambd",
"/=",
"reduction",
"w_tmp",
"=",
"np",
".",
"maximum",
"(",
"0",
",",
"w",
"-",
"lambd",
"*",
"step",
")",
"obj",
"=",
"(",
"np",
".",
"dot",
"(",
"s_sum",
",",
"w_tmp",
")",
"+",
"self",
".",
"diagonal_c",
"*",
"self",
".",
"_D_objective",
"(",
"neg_pairs",
",",
"w_tmp",
")",
")",
"inner_it",
"+=",
"1",
"assert_all_finite",
"(",
"obj",
")",
"w",
"[",
":",
"]",
"=",
"w_previous",
"error",
"=",
"np",
".",
"abs",
"(",
"(",
"obj_previous",
"-",
"obj_initial",
")",
"/",
"obj_previous",
")",
"if",
"self",
".",
"verbose",
":",
"print",
"(",
"'mmc iter: %d, conv = %f'",
"%",
"(",
"it",
",",
"error",
")",
")",
"it",
"+=",
"1",
"self",
".",
"A_",
"=",
"np",
".",
"diag",
"(",
"w",
")",
"self",
".",
"transformer_",
"=",
"transformer_from_metric",
"(",
"self",
".",
"A_",
")",
"return",
"self"
] |
Learn diagonal metric using MMC.
Parameters
----------
X : (n x d) data matrix
each row corresponds to a single instance
constraints : 4-tuple of arrays
(a,b,c,d) indices into X, with (a,b) specifying similar and (c,d)
dissimilar pairs
|
[
"Learn",
"diagonal",
"metric",
"using",
"MMC",
".",
"Parameters",
"----------",
"X",
":",
"(",
"n",
"x",
"d",
")",
"data",
"matrix",
"each",
"row",
"corresponds",
"to",
"a",
"single",
"instance",
"constraints",
":",
"4",
"-",
"tuple",
"of",
"arrays",
"(",
"a",
"b",
"c",
"d",
")",
"indices",
"into",
"X",
"with",
"(",
"a",
"b",
")",
"specifying",
"similar",
"and",
"(",
"c",
"d",
")",
"dissimilar",
"pairs"
] |
python
|
train
|
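The _fit_diag record combines a Newton step with a shrink-the-step-size search under a nonnegativity projection. The sketch below isolates that update pattern on a simple quadratic objective; the objective and matrices are stand-ins, not the MMC similarity/dissimilarity terms.
import numpy as np


def objective(w, A, b):
    # Hypothetical smooth objective: 0.5 * w^T A w - b^T w.
    return 0.5 * w @ A @ w - b @ w


def projected_newton_step(w, A, b, reduction=2.0):
    """One Newton step with the shrinking step-size search used above."""
    grad = A @ w - b
    hess = A + 1e-6 * np.eye(len(w))       # small ridge, as in the record
    step = np.linalg.solve(hess, grad)

    lambd = 1.0
    w_tmp = np.maximum(0, w - lambd * step)
    obj = objective(w_tmp, A, b)
    obj_prev = obj + 1                      # prime the inner loop
    w_prev = w_tmp
    while obj < obj_prev:                   # keep shrinking while it improves
        obj_prev, w_prev = obj, w_tmp.copy()
        lambd /= reduction
        w_tmp = np.maximum(0, w - lambd * step)
        obj = objective(w_tmp, A, b)
    return w_prev


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    A = np.diag(rng.uniform(1.0, 3.0, size=4))
    b = rng.uniform(-1.0, 1.0, size=4)
    w = np.ones(4)
    for _ in range(5):
        w = projected_newton_step(w, A, b)
    print(w)  # approaches max(0, b_i / A_ii) componentwise for diagonal A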
gwastro/pycbc
|
pycbc/dq.py
|
https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/dq.py#L34-L92
|
def parse_veto_definer(veto_def_filename):
""" Parse a veto definer file from the filename and return a dictionary
indexed by ifo and veto definer category level.
Parameters
----------
veto_def_filename: str
The path to the veto definer file
Returns:
parsed_definition: dict
Returns a dictionary first indexed by ifo, then category level, and
finally a list of veto definitions.
"""
from glue.ligolw import table, lsctables, utils as ligolw_utils
from glue.ligolw.ligolw import LIGOLWContentHandler as h
lsctables.use_in(h)
indoc = ligolw_utils.load_filename(veto_def_filename, False,
contenthandler=h)
veto_table = table.get_table(indoc, 'veto_definer')
ifo = veto_table.getColumnByName('ifo')
name = veto_table.getColumnByName('name')
version = numpy.array(veto_table.getColumnByName('version'))
category = numpy.array(veto_table.getColumnByName('category'))
start = numpy.array(veto_table.getColumnByName('start_time'))
end = numpy.array(veto_table.getColumnByName('end_time'))
start_pad = numpy.array(veto_table.getColumnByName('start_pad'))
end_pad = numpy.array(veto_table.getColumnByName('end_pad'))
data = {}
for i in range(len(veto_table)):
if ifo[i] not in data:
data[ifo[i]] = {}
# The veto-definer categories are weird! Hardware injections are stored
# in "3" and numbers above that are bumped up by one (although not
# often used any more). So we remap 3 to H and anything above 3 to
# N-1. 2 and 1 correspond to 2 and 1 (YAY!)
if category[i] > 3:
curr_cat = "CAT_{}".format(category[i]-1)
elif category[i] == 3:
curr_cat = "CAT_H"
else:
curr_cat = "CAT_{}".format(category[i])
if curr_cat not in data[ifo[i]]:
data[ifo[i]][curr_cat] = []
veto_info = {'name': name[i],
'version': version[i],
'start': start[i],
'end': end[i],
'start_pad': start_pad[i],
'end_pad': end_pad[i],
}
data[ifo[i]][curr_cat].append(veto_info)
return data
|
[
"def",
"parse_veto_definer",
"(",
"veto_def_filename",
")",
":",
"from",
"glue",
".",
"ligolw",
"import",
"table",
",",
"lsctables",
",",
"utils",
"as",
"ligolw_utils",
"from",
"glue",
".",
"ligolw",
".",
"ligolw",
"import",
"LIGOLWContentHandler",
"as",
"h",
"lsctables",
".",
"use_in",
"(",
"h",
")",
"indoc",
"=",
"ligolw_utils",
".",
"load_filename",
"(",
"veto_def_filename",
",",
"False",
",",
"contenthandler",
"=",
"h",
")",
"veto_table",
"=",
"table",
".",
"get_table",
"(",
"indoc",
",",
"'veto_definer'",
")",
"ifo",
"=",
"veto_table",
".",
"getColumnByName",
"(",
"'ifo'",
")",
"name",
"=",
"veto_table",
".",
"getColumnByName",
"(",
"'name'",
")",
"version",
"=",
"numpy",
".",
"array",
"(",
"veto_table",
".",
"getColumnByName",
"(",
"'version'",
")",
")",
"category",
"=",
"numpy",
".",
"array",
"(",
"veto_table",
".",
"getColumnByName",
"(",
"'category'",
")",
")",
"start",
"=",
"numpy",
".",
"array",
"(",
"veto_table",
".",
"getColumnByName",
"(",
"'start_time'",
")",
")",
"end",
"=",
"numpy",
".",
"array",
"(",
"veto_table",
".",
"getColumnByName",
"(",
"'end_time'",
")",
")",
"start_pad",
"=",
"numpy",
".",
"array",
"(",
"veto_table",
".",
"getColumnByName",
"(",
"'start_pad'",
")",
")",
"end_pad",
"=",
"numpy",
".",
"array",
"(",
"veto_table",
".",
"getColumnByName",
"(",
"'end_pad'",
")",
")",
"data",
"=",
"{",
"}",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"veto_table",
")",
")",
":",
"if",
"ifo",
"[",
"i",
"]",
"not",
"in",
"data",
":",
"data",
"[",
"ifo",
"[",
"i",
"]",
"]",
"=",
"{",
"}",
"# The veto-definer categories are weird! Hardware injections are stored",
"# in \"3\" and numbers above that are bumped up by one (although not",
"# often used any more). So we remap 3 to H and anything above 3 to",
"# N-1. 2 and 1 correspond to 2 and 1 (YAY!)",
"if",
"category",
"[",
"i",
"]",
">",
"3",
":",
"curr_cat",
"=",
"\"CAT_{}\"",
".",
"format",
"(",
"category",
"[",
"i",
"]",
"-",
"1",
")",
"elif",
"category",
"[",
"i",
"]",
"==",
"3",
":",
"curr_cat",
"=",
"\"CAT_H\"",
"else",
":",
"curr_cat",
"=",
"\"CAT_{}\"",
".",
"format",
"(",
"category",
"[",
"i",
"]",
")",
"if",
"curr_cat",
"not",
"in",
"data",
"[",
"ifo",
"[",
"i",
"]",
"]",
":",
"data",
"[",
"ifo",
"[",
"i",
"]",
"]",
"[",
"curr_cat",
"]",
"=",
"[",
"]",
"veto_info",
"=",
"{",
"'name'",
":",
"name",
"[",
"i",
"]",
",",
"'version'",
":",
"version",
"[",
"i",
"]",
",",
"'start'",
":",
"start",
"[",
"i",
"]",
",",
"'end'",
":",
"end",
"[",
"i",
"]",
",",
"'start_pad'",
":",
"start_pad",
"[",
"i",
"]",
",",
"'end_pad'",
":",
"end_pad",
"[",
"i",
"]",
",",
"}",
"data",
"[",
"ifo",
"[",
"i",
"]",
"]",
"[",
"curr_cat",
"]",
".",
"append",
"(",
"veto_info",
")",
"return",
"data"
] |
Parse a veto definer file from the filename and return a dictionary
indexed by ifo and veto definer category level.
Parameters
----------
veto_def_filename: str
The path to the veto definer file
Returns:
parsed_definition: dict
Returns a dictionary first indexed by ifo, then category level, and
finally a list of veto definitions.
|
[
"Parse",
"a",
"veto",
"definer",
"file",
"from",
"the",
"filename",
"and",
"return",
"a",
"dictionary",
"indexed",
"by",
"ifo",
"and",
"veto",
"definer",
"category",
"level",
"."
] |
python
|
train
|
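The category remapping buried in parse_veto_definer (3 becomes CAT_H, anything above 3 shifts down by one) is easy to restate on its own. A small standalone sketch of just that rule:
def remap_category(category):
    """Map a raw veto-definer category number to the label used above.

    Mirrors the comment in parse_veto_definer: 3 holds hardware injections
    ("CAT_H") and anything above 3 is shifted down by one.
    """
    if category > 3:
        return "CAT_{}".format(category - 1)
    elif category == 3:
        return "CAT_H"
    return "CAT_{}".format(category)


if __name__ == "__main__":
    for cat in (1, 2, 3, 4, 5):
        print(cat, "->", remap_category(cat))
    # 1 -> CAT_1, 2 -> CAT_2, 3 -> CAT_H, 4 -> CAT_3, 5 -> CAT_4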
Jarn/jarn.mkrelease
|
jarn/mkrelease/mkrelease.py
|
https://github.com/Jarn/jarn.mkrelease/blob/844377f37a3cdc0a154148790a926f991019ec4a/jarn/mkrelease/mkrelease.py#L393-L418
|
def get_uploadflags(self, location):
"""Return uploadflags for the given server.
"""
uploadflags = []
server = self.defaults.servers[location]
if self.sign:
uploadflags.append('--sign')
elif server.sign is not None:
if server.sign:
uploadflags.append('--sign')
elif self.defaults.sign:
uploadflags.append('--sign')
if self.identity:
if '--sign' not in uploadflags:
uploadflags.append('--sign')
uploadflags.append('--identity="%s"' % self.identity)
elif '--sign' in uploadflags:
if server.identity is not None:
if server.identity:
uploadflags.append('--identity="%s"' % server.identity)
elif self.defaults.identity:
uploadflags.append('--identity="%s"' % self.defaults.identity)
return uploadflags
|
[
"def",
"get_uploadflags",
"(",
"self",
",",
"location",
")",
":",
"uploadflags",
"=",
"[",
"]",
"server",
"=",
"self",
".",
"defaults",
".",
"servers",
"[",
"location",
"]",
"if",
"self",
".",
"sign",
":",
"uploadflags",
".",
"append",
"(",
"'--sign'",
")",
"elif",
"server",
".",
"sign",
"is",
"not",
"None",
":",
"if",
"server",
".",
"sign",
":",
"uploadflags",
".",
"append",
"(",
"'--sign'",
")",
"elif",
"self",
".",
"defaults",
".",
"sign",
":",
"uploadflags",
".",
"append",
"(",
"'--sign'",
")",
"if",
"self",
".",
"identity",
":",
"if",
"'--sign'",
"not",
"in",
"uploadflags",
":",
"uploadflags",
".",
"append",
"(",
"'--sign'",
")",
"uploadflags",
".",
"append",
"(",
"'--identity=\"%s\"'",
"%",
"self",
".",
"identity",
")",
"elif",
"'--sign'",
"in",
"uploadflags",
":",
"if",
"server",
".",
"identity",
"is",
"not",
"None",
":",
"if",
"server",
".",
"identity",
":",
"uploadflags",
".",
"append",
"(",
"'--identity=\"%s\"'",
"%",
"server",
".",
"identity",
")",
"elif",
"self",
".",
"defaults",
".",
"identity",
":",
"uploadflags",
".",
"append",
"(",
"'--identity=\"%s\"'",
"%",
"self",
".",
"defaults",
".",
"identity",
")",
"return",
"uploadflags"
] |
Return uploadflags for the given server.
|
[
"Return",
"uploadflags",
"for",
"the",
"given",
"server",
"."
] |
python
|
train
|
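The precedence in get_uploadflags (instance setting, then per-server setting, then global default, with --identity only added when signing) is easier to see with concrete configuration objects. A hedged sketch using SimpleNamespace stand-ins for mkrelease's real defaults and server objects:
from types import SimpleNamespace


def get_uploadflags(self_opts, server, defaults):
    """Standalone rework of the precedence in the record above."""
    uploadflags = []
    if self_opts.sign:
        uploadflags.append('--sign')
    elif server.sign is not None:
        if server.sign:
            uploadflags.append('--sign')
    elif defaults.sign:
        uploadflags.append('--sign')
    if self_opts.identity:
        if '--sign' not in uploadflags:
            uploadflags.append('--sign')
        uploadflags.append('--identity="%s"' % self_opts.identity)
    elif '--sign' in uploadflags:
        if server.identity is not None:
            if server.identity:
                uploadflags.append('--identity="%s"' % server.identity)
        elif defaults.identity:
            uploadflags.append('--identity="%s"' % defaults.identity)
    return uploadflags


if __name__ == "__main__":
    me = SimpleNamespace(sign=False, identity=None)
    srv = SimpleNamespace(sign=True, identity='alice@example.com')
    dflt = SimpleNamespace(sign=False, identity=None)
    print(get_uploadflags(me, srv, dflt))
    # ['--sign', '--identity="alice@example.com"']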
watson-developer-cloud/python-sdk
|
ibm_watson/natural_language_classifier_v1.py
|
https://github.com/watson-developer-cloud/python-sdk/blob/4c2c9df4466fcde88975da9ecd834e6ba95eb353/ibm_watson/natural_language_classifier_v1.py#L415-L426
|
def _from_dict(cls, _dict):
"""Initialize a ClassificationCollection object from a json dictionary."""
args = {}
if 'classifier_id' in _dict:
args['classifier_id'] = _dict.get('classifier_id')
if 'url' in _dict:
args['url'] = _dict.get('url')
if 'collection' in _dict:
args['collection'] = [
CollectionItem._from_dict(x) for x in (_dict.get('collection'))
]
return cls(**args)
|
[
"def",
"_from_dict",
"(",
"cls",
",",
"_dict",
")",
":",
"args",
"=",
"{",
"}",
"if",
"'classifier_id'",
"in",
"_dict",
":",
"args",
"[",
"'classifier_id'",
"]",
"=",
"_dict",
".",
"get",
"(",
"'classifier_id'",
")",
"if",
"'url'",
"in",
"_dict",
":",
"args",
"[",
"'url'",
"]",
"=",
"_dict",
".",
"get",
"(",
"'url'",
")",
"if",
"'collection'",
"in",
"_dict",
":",
"args",
"[",
"'collection'",
"]",
"=",
"[",
"CollectionItem",
".",
"_from_dict",
"(",
"x",
")",
"for",
"x",
"in",
"(",
"_dict",
".",
"get",
"(",
"'collection'",
")",
")",
"]",
"return",
"cls",
"(",
"*",
"*",
"args",
")"
] |
Initialize a ClassificationCollection object from a json dictionary.
|
[
"Initialize",
"a",
"ClassificationCollection",
"object",
"from",
"a",
"json",
"dictionary",
"."
] |
python
|
train
|
scieloorg/publicationstatsapi
|
publicationstats/queries.py
|
https://github.com/scieloorg/publicationstatsapi/blob/118995d75ef9478f64d5707107f4ce1640a0aa8e/publicationstats/queries.py#L227-L286
|
def journals_status(collection, raw=False):
"""
    This method retrieves the total of documents, articles (citable documents),
    issues and bibliographic references of a journal
arguments
collection: SciELO 3 letters Acronym
issn: Journal ISSN
return for journal context
{
"citable": 12140,
"non_citable": 20,
"docs": 12160,
"issues": 120,
"references": 286619
}
"""
tc = ThriftClient()
body = {"query": {"filtered": {}}}
fltr = {}
query = {
"query": {
"bool": {
"must": [
{
"match": {
"collection": collection
}
}
]
}
}
}
body['query']['filtered'].update(fltr)
body['query']['filtered'].update(query)
query_parameters = [
('size', '0'),
('search_type', 'count')
]
body['aggs'] = {
"status": {
"terms": {
"field": "status"
}
}
}
query_result = tc.search('journal', json.dumps(body), query_parameters)
computed = _compute_journals_status(query_result)
return query_result if raw else computed
|
[
"def",
"journals_status",
"(",
"collection",
",",
"raw",
"=",
"False",
")",
":",
"tc",
"=",
"ThriftClient",
"(",
")",
"body",
"=",
"{",
"\"query\"",
":",
"{",
"\"filtered\"",
":",
"{",
"}",
"}",
"}",
"fltr",
"=",
"{",
"}",
"query",
"=",
"{",
"\"query\"",
":",
"{",
"\"bool\"",
":",
"{",
"\"must\"",
":",
"[",
"{",
"\"match\"",
":",
"{",
"\"collection\"",
":",
"collection",
"}",
"}",
"]",
"}",
"}",
"}",
"body",
"[",
"'query'",
"]",
"[",
"'filtered'",
"]",
".",
"update",
"(",
"fltr",
")",
"body",
"[",
"'query'",
"]",
"[",
"'filtered'",
"]",
".",
"update",
"(",
"query",
")",
"query_parameters",
"=",
"[",
"(",
"'size'",
",",
"'0'",
")",
",",
"(",
"'search_type'",
",",
"'count'",
")",
"]",
"body",
"[",
"'aggs'",
"]",
"=",
"{",
"\"status\"",
":",
"{",
"\"terms\"",
":",
"{",
"\"field\"",
":",
"\"status\"",
"}",
"}",
"}",
"query_result",
"=",
"tc",
".",
"search",
"(",
"'journal'",
",",
"json",
".",
"dumps",
"(",
"body",
")",
",",
"query_parameters",
")",
"computed",
"=",
"_compute_journals_status",
"(",
"query_result",
")",
"return",
"query_result",
"if",
"raw",
"else",
"computed"
] |
This method retrieves the total of documents, articles (citable documents),
issues and bibliographic references of a journal
arguments
collection: SciELO 3 letters Acronym
issn: Journal ISSN
return for journal context
{
"citable": 12140,
"non_citable": 20,
"docs": 12160,
"issues": 120,
"references": 286619
}
|
[
"This",
"method",
"retrieve",
"the",
"total",
"of",
"documents",
"articles",
"(",
"citable",
"documents",
")",
"issues",
"and",
"bibliografic",
"references",
"of",
"a",
"journal"
] |
python
|
train
|
django-parler/django-parler
|
parler/views.py
|
https://github.com/django-parler/django-parler/blob/11ae4af5e8faddb74c69c848870122df4006a54e/parler/views.py#L243-L253
|
def get_language_tabs(self):
"""
Determine the language tabs to show.
"""
current_language = self.get_current_language()
if self.object:
available_languages = list(self.object.get_available_languages())
else:
available_languages = []
return get_language_tabs(self.request, current_language, available_languages)
|
[
"def",
"get_language_tabs",
"(",
"self",
")",
":",
"current_language",
"=",
"self",
".",
"get_current_language",
"(",
")",
"if",
"self",
".",
"object",
":",
"available_languages",
"=",
"list",
"(",
"self",
".",
"object",
".",
"get_available_languages",
"(",
")",
")",
"else",
":",
"available_languages",
"=",
"[",
"]",
"return",
"get_language_tabs",
"(",
"self",
".",
"request",
",",
"current_language",
",",
"available_languages",
")"
] |
Determine the language tabs to show.
|
[
"Determine",
"the",
"language",
"tabs",
"to",
"show",
"."
] |
python
|
train
|
saltstack/salt
|
salt/modules/aws_sqs.py
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/aws_sqs.py#L71-L112
|
def receive_message(queue, region, num=1, opts=None, user=None):
'''
Receive one or more messages from a queue in a region
queue
The name of the queue to receive messages from
region
    Region where SQS queues exist
num : 1
The max number of messages to receive
opts : None
Any additional options to add to the command line
user : None
Run as a user other than what the minion runs as
CLI Example:
.. code-block:: bash
salt '*' aws_sqs.receive_message <sqs queue> <region>
salt '*' aws_sqs.receive_message <sqs queue> <region> num=10
.. versionadded:: 2014.7.0
'''
ret = {
'Messages': None,
}
queues = list_queues(region, opts, user)
url_map = _parse_queue_list(queues)
if queue not in url_map:
log.info('"%s" queue does not exist.', queue)
return ret
out = _run_aws('receive-message', region, opts, user, queue=url_map[queue],
num=num)
ret['Messages'] = out['Messages']
return ret
|
[
"def",
"receive_message",
"(",
"queue",
",",
"region",
",",
"num",
"=",
"1",
",",
"opts",
"=",
"None",
",",
"user",
"=",
"None",
")",
":",
"ret",
"=",
"{",
"'Messages'",
":",
"None",
",",
"}",
"queues",
"=",
"list_queues",
"(",
"region",
",",
"opts",
",",
"user",
")",
"url_map",
"=",
"_parse_queue_list",
"(",
"queues",
")",
"if",
"queue",
"not",
"in",
"url_map",
":",
"log",
".",
"info",
"(",
"'\"%s\" queue does not exist.'",
",",
"queue",
")",
"return",
"ret",
"out",
"=",
"_run_aws",
"(",
"'receive-message'",
",",
"region",
",",
"opts",
",",
"user",
",",
"queue",
"=",
"url_map",
"[",
"queue",
"]",
",",
"num",
"=",
"num",
")",
"ret",
"[",
"'Messages'",
"]",
"=",
"out",
"[",
"'Messages'",
"]",
"return",
"ret"
] |
Receive one or more messages from a queue in a region
queue
The name of the queue to receive messages from
region
Region where SQS queues exist
num : 1
The max number of messages to receive
opts : None
Any additional options to add to the command line
user : None
Run as a user other than what the minion runs as
CLI Example:
.. code-block:: bash
salt '*' aws_sqs.receive_message <sqs queue> <region>
salt '*' aws_sqs.receive_message <sqs queue> <region> num=10
.. versionadded:: 2014.7.0
|
[
"Receive",
"one",
"or",
"more",
"messages",
"from",
"a",
"queue",
"in",
"a",
"region"
] |
python
|
train
|
Spirent/py-stcrestclient
|
stcrestclient/stchttp.py
|
https://github.com/Spirent/py-stcrestclient/blob/80ee82bddf2fb2808f3da8ff2c80b7d588e165e8/stcrestclient/stchttp.py#L142-L154
|
def join_session(self, sid):
"""Attach to an existing session."""
self._rest.add_header('X-STC-API-Session', sid)
self._sid = sid
try:
status, data = self._rest.get_request('objects', 'system1',
['version', 'name'])
except resthttp.RestHttpError as e:
self._rest.del_header('X-STC-API-Session')
self._sid = None
raise RuntimeError('failed to join session "%s": %s' % (sid, e))
return data['version']
|
[
"def",
"join_session",
"(",
"self",
",",
"sid",
")",
":",
"self",
".",
"_rest",
".",
"add_header",
"(",
"'X-STC-API-Session'",
",",
"sid",
")",
"self",
".",
"_sid",
"=",
"sid",
"try",
":",
"status",
",",
"data",
"=",
"self",
".",
"_rest",
".",
"get_request",
"(",
"'objects'",
",",
"'system1'",
",",
"[",
"'version'",
",",
"'name'",
"]",
")",
"except",
"resthttp",
".",
"RestHttpError",
"as",
"e",
":",
"self",
".",
"_rest",
".",
"del_header",
"(",
"'X-STC-API-Session'",
")",
"self",
".",
"_sid",
"=",
"None",
"raise",
"RuntimeError",
"(",
"'failed to join session \"%s\": %s'",
"%",
"(",
"sid",
",",
"e",
")",
")",
"return",
"data",
"[",
"'version'",
"]"
] |
Attach to an existing session.
|
[
"Attach",
"to",
"an",
"existing",
"session",
"."
] |
python
|
train
|
apple/turicreate
|
src/external/xgboost/subtree/rabit/wrapper/rabit.py
|
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/external/xgboost/subtree/rabit/wrapper/rabit.py#L283-L314
|
def checkpoint(global_model, local_model=None):
"""Checkpoint the model.
This means we finished a stage of execution.
Every time we call check point, there is a version number which will increase by one.
Parameters
----------
global_model: anytype that can be pickled
globally shared model/state when calling this function,
        the caller needs to guarantee that global_model is the same in all nodes
local_model: anytype that can be pickled
Local model, that is specific to current node/rank.
This can be None when no local state is needed.
Notes
-----
local_model requires explicit replication of the model for fault-tolerance.
This will bring replication cost in checkpoint function.
while global_model do not need explicit replication.
It is recommended to use global_model if possible.
"""
sglobal = pickle.dumps(global_model)
if local_model is None:
_LIB.RabitCheckPoint(sglobal, len(sglobal), None, 0)
del sglobal
else:
slocal = pickle.dumps(local_model)
_LIB.RabitCheckPoint(sglobal, len(sglobal), slocal, len(slocal))
del slocal
del sglobal
|
[
"def",
"checkpoint",
"(",
"global_model",
",",
"local_model",
"=",
"None",
")",
":",
"sglobal",
"=",
"pickle",
".",
"dumps",
"(",
"global_model",
")",
"if",
"local_model",
"is",
"None",
":",
"_LIB",
".",
"RabitCheckPoint",
"(",
"sglobal",
",",
"len",
"(",
"sglobal",
")",
",",
"None",
",",
"0",
")",
"del",
"sglobal",
"else",
":",
"slocal",
"=",
"pickle",
".",
"dumps",
"(",
"local_model",
")",
"_LIB",
".",
"RabitCheckPoint",
"(",
"sglobal",
",",
"len",
"(",
"sglobal",
")",
",",
"slocal",
",",
"len",
"(",
"slocal",
")",
")",
"del",
"slocal",
"del",
"sglobal"
] |
Checkpoint the model.
This means we finished a stage of execution.
Every time we call check point, there is a version number which will increase by one.
Parameters
----------
global_model: anytype that can be pickled
globally shared model/state when calling this function,
the caller needs to guarantee that global_model is the same in all nodes
local_model: anytype that can be pickled
Local model, that is specific to current node/rank.
This can be None when no local state is needed.
Notes
-----
local_model requires explicit replication of the model for fault-tolerance.
This will bring replication cost in checkpoint function.
while global_model do not need explicit replication.
It is recommended to use global_model if possible.
|
[
"Checkpoint",
"the",
"model",
"."
] |
python
|
train
|
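The checkpoint record pickles the globally shared state and bumps an internal version number. The toy sketch below mimics those semantics with an in-memory store so it runs without the rabit engine; the load_checkpoint helper and the store are assumptions made for illustration only.
import pickle

# Toy in-memory stand-in for the checkpoint store; rabit keeps this state
# inside its C++ engine, not in Python.
_STORE = {"version": 0, "global": None, "local": None}


def checkpoint(global_model, local_model=None):
    # Mirrors the record above: pickle the global (and optional local)
    # state and bump the version number by one.
    _STORE["global"] = pickle.dumps(global_model)
    _STORE["local"] = None if local_model is None else pickle.dumps(local_model)
    _STORE["version"] += 1


def load_checkpoint():
    # Returns (version, global_model); (0, None) means a fresh start.
    if _STORE["version"] == 0:
        return 0, None
    return _STORE["version"], pickle.loads(_STORE["global"])


if __name__ == "__main__":
    version, model = load_checkpoint()
    if model is None:
        model = {"weights": [0.0] * 4}
    for epoch in range(version, 3):
        model["weights"] = [w + 1.0 for w in model["weights"]]  # fake work
        checkpoint(model)
    print(load_checkpoint())  # (3, {'weights': [3.0, 3.0, 3.0, 3.0]})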
noahbenson/neuropythy
|
neuropythy/vision/retinotopy.py
|
https://github.com/noahbenson/neuropythy/blob/b588889f6db36ddb9602ae4a72c1c0d3f41586b2/neuropythy/vision/retinotopy.py#L1929-L2166
|
def clean_retinotopy_potential(hemi, retinotopy=Ellipsis, mask=Ellipsis, weight=Ellipsis,
surface='midgray', min_weight=Ellipsis, min_eccentricity=0.75,
visual_area=None, map_visual_areas=Ellipsis,
visual_area_field_signs=Ellipsis,
measurement_uncertainty=0.3, measurement_knob=1,
magnification_knob=0, fieldsign_knob=6, edge_knob=0):
'''
clean_retinotopy_potential(hemi) yields a retinotopic potential function for the given
    hemisphere that, when minimized, should yield a cleaned/smoothed version of the retinotopic
maps.
The potential function f returned by clean_retinotopy_potential() is a PotentialFunction object,
as defined in neuropythy.optimize. The potential function consists of four terms that are summed
with weights derived from the four '*_knob' options (see below). The function f as well as the
three terms that it comprises require as input a matrix X of the pRF centers of mesh or the
masked part of the mesh (X0 is the initial measurement matrix). These four potential terms are:
* The measurement potential. The retinotopic map that is being minimized is referred to as the
measured map, and the measurement potential function, fm(X), increases as X becomes farther
from X0. Explicitly, fm(X) is the sum over all pRF centers (x,y) in X (with initial position
(x0,y0) in X0) of exp(-0.5 * ((x - x0)**2 + (y - y0)**2) / s**2). The parameter s is the
initial eccentricity (sqrt(x0**2 + y0**2)) times the measurement_uncertainty option.
* The magnification potential. The retinotopy cleaning is performed in part by attempting to
smooth the visual magnification (the inverse of cortical magnification: deg**2 / mm**2)
across the cortical surface; the magnification potential fg(X) specifies how the visual
magnification contributes to the overall potential: it decreases as the magnification
becomes smoother and increases as it becomes less smooth. Explicitly, fg(X) is equal to the
sum over all pairs of faces (s,t) sharing one edge e of
(vmag(s) - sgn(vmag(t))*vmag(e))**2 + (vmag(t) - sgn(vmag(s))*vmag(e))**2. Note that the
function vmag(s) yields the areal visual magnification (deg**2 / mm**2) of any face s and
vmag(e) is the square of the linear magnification of any edge e; additionally, the sign of
vmag(s) for a face s is always equal to the fieldsign of the face (while for edges vmag(e)
is always positive).
* The fieldsign potential. The next way in which the potential function attempts to clean the
retinotopy is via the use of fieldsign: adjacent faces should have the same fieldsign under
      most circumstances. This is modeled by the function fs(X), which is 0 for any pair of faces
that have the same fieldsign and non-zero for faces that have different fieldsigns. The
form of fs(X) is the sum over all pairs of adjacent triangles (s,t) of -vmag(s)*vmag(t) if
vmag(s) and vmag(t) have different fieldsigns, otherwise 0.
* The edge potential. Finally, the potential function attempts to force edges to be smooth by
penalizing edges whose endpoints are far apart in the visual field. The edge potential
function fe(X) is equal to the sum for all edges (u,v) of
(x[u] - x[v])**2 + (y[u] - y[v])**2 / mean(eccen(u), eccen(v)).
Note additionally that all four potential functions are normalized by a factor intended to keep
them on similar scales (this factor is not mentioned above or below, but it is automatically
applied to all potential terms). For the magnification, fieldsign, and edge potential terms, the
normalization factor is 1/m where m is the number of non-perimeter edges (or, alternately, the
number of adjacent face pairs) in the mesh. For the measurement potential term, the
normalization factor is 1/W where W is the sum of the weights on the measurement vertices (if
no weights are given, they are considered to be 1 for each vertex).
The following options may be given:
* retinotopy (default: Ellipsis) specifies the retinotopy data to use for the hemisphere;
the argument may be a map from retinotopy_data or a valid argument to it. The default
indicates that the result of calling retinotopy_data(hemi) is used.
* mask (default: Ellipsis) specifies that the specific mask should be used; by default, the
mask is made using the vertices kept in to_flatmap('occipital_pole', hemi, radius=pi/2.75).
* weight (default: Ellipsis) specifies the weight to use; the default indicates that the
weight found in the retinotopy data, if any, should be used. If None, then all values
in the mask are equally weighted.
* visual_area (default: Ellipsis) specifies the visual area labels to use; the default
indicates that the visual area property found in the retinotopy data should be used, if any.
If None then no visual area splitting is done. This property is only important if
map_visual_areas is not False or None; otherwise it is ignored.
* map_visual_areas (default: Ellipsis) specifies whether the return value should be a lazy map
whose keys are visual area labels and whose values are recursed calls to this function for
only the subset of the mesh with the associated label. May be False or None to specify that
a single potential should be yielded. May be a list of labels to specify that only those
visual areas should be mapped; the default value (Ellipsis) uses all labels in visual_areas
except for 0.
* min_weight (default: Ellipsis) specifies the minimum weight to include, after the
weights have been normalized such that sum(weights) == 1. If the value is a list or
tuple containing a single item [p] then p is taken to be a percentile below which
vertices should be excluded. The default, Ellipsis, is equivalent to [5].
* min_eccentricity (default: 0.75) specifies the eccentricity below which no measurement-based
potential is applied; i.e., by default, vertices with eccentricity below 0.75 degrees will
be considered as having 0 weight.
* surface (default: 'midgray') specifies which surface should be used to establish cortical
magnification; may be 'pial', 'midgray', or 'white'.
* measurement_uncertainty (default: 0.3) is used to determine the standard deviation of the
Gaussian potential well used to prevent individual vertices with valid retinotopic
measurements from straying too far from their initial measured positions. In other words, if
a vertex has a weight that is above threshold and a pRF center of (x0,y0), then the
measurement-potential for that vertex is exp(-0.5 * ((x - x0)**2 + (y - y0)**2)/s**2) where
(x,y) is the center of the pRF during minimization and s is equal to
measurement_uncertainty * sqrt(x0**2 + y0**2).
* measurement_knob, magnification_knob, fieldsign_knob, and edge_knob (defaults: 1, 0, 12, 0,
respectively) specify the relative weights of the terms of the potential function on a log2
scale. In other words, if the measurement, magnification, fieldsign, and edge potential
terms are fm, fg, fs, and fe while the knobs are km, kg, ks, and ke, then the overall
potential function f is equal to:
f(X) = (2**km * fm(X) + 2**kg * fg(X) + 2**ks * fs(X) + 2**ke * fe(X)) / q
where w = (2**km + 2**kg + 2**ks + 2**ke)
If any knob is set to None, then its value is 0 instead of 2**k.
'''
from neuropythy.util import curry
import neuropythy.optimize as op
# first, get the mesh and the retinotopy data
mesh = geo.to_mesh((hemi, surface))
rdat = (retinotopy_data(mesh) if retinotopy is Ellipsis else
retinotopy if pimms.is_map(retinotopy) else
retinotopy_data(mesh, retinotopy))
lbls = (rdat.get('visual_area') if visual_area is Ellipsis else
None if visual_area is None else
hemi.property(visual_area))
wght = (rdat.get('variance_explained') if weight is Ellipsis else
weight if pimms.is_vector(weight) else
rdat.get(weight) if pimms.is_str(weight) and weight in rdat else
hemi.property(weight) if weight is not None else
None)
# figure out the mask
if mask is Ellipsis:
if mesh.coordinates.shape[0] == 2: mask = mesh.indices
else: mask = geo.to_flatmap('occipital_pole', hemi, radius=np.pi/2.75).labels
else: mask = hemi.mask(mask, indices=True)
global_field_sign = None
# if we are splitting on visual area, we should do that here:
if map_visual_areas and lbls is not None:
# get the visual areas
vas = (np.unique(lbls) if map_visual_areas == 'all' else
np.setdiff1d(np.unique(lbls), [0]) if map_visual_areas in [True,Ellipsis] else
np.unique(map_visual_areas))
# we also want to have the field-sign map handy if provided
if visual_area_field_signs is None: visual_area_field_signs = {}
elif visual_area_field_signs is Ellipsis: visual_area_field_signs = {1:-1, 2:1, 3:-1, 4:1}
# special case when map_visual_areas is an integer/string (label)
if pimms.is_int(map_visual_areas) or pimms.is_str(map_visual_areas):
mask = np.intersect1d(mask, np.where(lbls == map_visual_areas)[0])
global_field_sign = visual_area_field_signs.get(map_visual_areas)
else: # otherwise we return a lazy map
kw = dict(retinotopy=rdat, mask=mask, weight=wght,
surface=surface, min_weight=min_weight, min_eccentricity=min_eccentricity,
visual_area=lbls, measurement_uncertainty=measurement_uncertainty,
measurement_knob=measurement_knob,
magnification_knob=magnification_knob, fieldsign_knob=fieldsign_knob,
edge_knob=edge_knob, visual_area_field_signs=visual_area_field_signs)
return pimms.lazy_map(
{lbl: curry(clean_retinotopy_potential, hemi, map_visual_areas=lbl, **kw)
for lbl in vas})
# fix rdat, weight, and mesh to match the mask
(supermesh, orig_mask) = (mesh, mask)
rdat = {k:(v[mask] if len(v) > len(mask) else v) for (k,v) in six.iteritems(rdat)}
mesh = supermesh.submesh(mask)
# possible that the mask got further downsampled:
mask = supermesh.tess.index(mesh.labels)
if len(mask) == len(orig_mask): smsk = np.arange(len(mask))
else:
tmp = set(mask)
smsk = np.asarray([k for (k,u) in enumerate(orig_mask) if u in tmp])
n = mesh.vertex_count # number vertices
N = 2*n # number parameters
if wght is None: wght = np.ones(n)
elif len(wght) == len(orig_mask): wght = np.array(wght)[smsk]
elif len(wght) > n: wght = np.array(wght)[mask]
else: wght = np.array(wght)
wght[~np.isfinite(wght)] = 0
if min_eccentricity < 0 or np.isclose(min_eccentricity, 0):
raise ValueError('min_eccentricity should be small but must be > 0')
# we'll need a potential function...
# The input to the potential function will be a 2 x N matrix of (x,y) visual field coordinates:
xy = op.identity
(x,y) = (xy[np.arange(0,N,2)],xy[np.arange(1,N,2)])
# We have a few components to the potential function
# [1] Deviation from measurements:
# These are the initial measurements we will use
xy0 = np.array(as_retinotopy(rdat, 'geographical')).T
if len(xy0) == len(orig_mask): xy0 = xy0[smsk]
elif len(xy0) > n: xy0 = xy0[mask]
(x0,y0) = xy0.T
xy0 = xy0.flatten()
ecc0 = np.sqrt(x0**2 + y0**2)
ii = np.where(ecc0 > min_eccentricity)[0]
minw = (0 if min_weight is None else
np.percentile(wght[ii], 5) if min_weight is Ellipsis else
min_weight if pimms.is_number(min_weight) else
0 if np.std(wght[ii]) < 0.00001 else
np.percentile(wght[ii], min_weight[0]))
ii = np.intersect1d(ii, np.where(wght > minw)[0])
wsum = np.sum(wght[ii])
if wsum < 0 or np.isclose(wsum, 0): raise ValueError('all-zero weights given')
wght = wght / wsum
s2_meas = (measurement_uncertainty * ecc0[ii])**2
d2_meas = (x[ii] - x0[ii])**2 + (y[ii] - y0[ii])**2
f_meas = (1 - op.exp(-0.5*d2_meas/s2_meas)) * wght[ii]
f_meas = op.sum(f_meas)
# [2] For adjacent triangles, how different are the cortical magnifications?
sarea = mesh.face_areas
faces = mesh.tess.indexed_faces.T
selen = mesh.edge_lengths
# we want to ensure that vmag is locally smooth across triangles, but we need
# to make sure there aren't any edges or faces with 0 surface-areas (so that
# we don't divide by zero)
mnden = 0.0001
(e,s,t) = np.transpose([(i,e[0],e[1]) for (i,e) in enumerate(mesh.tess.edge_faces)
if len(e) == 2 and selen[i] > mnden
if sarea[e[0]] > mnden and sarea[e[1]] > mnden])
m = len(e)
(fis,q) = np.unique(np.concatenate([s,t]), return_inverse=True)
(s,t) = np.reshape(q, (2,-1))
faces = faces[fis]
sarea = sarea[fis]
selen = selen[e]
(u,v) = mesh.tess.indexed_edges[:,e]
# we will use visual mag instead of cortical mag: this way we aren't worried about
# varea going to 0 and creating a singularity, and it should have a linear
# relationship with eccentricity
velen2 = (x[u] - x[v])**2 + (y[u] - y[v])**2
vme = velen2 / selen**2 # visual magnification: edges
varea = op.signed_face_areas(faces)
vmf = varea / sarea # visual magnification: faces
vms = vmf[s]
vmt = vmf[t]
vsgns = op.sign(vmf)
f_magn = (1.0 / m) * op.sum((vms - vsgns[t]*vme)**2 + (vmt - vsgns[s]*vme)**2)
# [3] we want a special function for faces whose vmags are different signs
if global_field_sign is None:
f_sign = op.compose(op.piecewise(0, ((-np.inf, 0), -op.identity)), vms*vmt)
f_sign = (1.0 / m) * op.sum(f_sign)
else:
vmfsgn = vmf * global_field_sign
f_sign = op.compose(op.piecewise(0, ((-np.inf, 0), -op.identity)), vmfsgn)
f_sign = (1.0 / m) * op.sum(f_sign)
# and the edge potential...
ex = 0.5*(x[u] + x[v])
ey = 0.5*(y[u] + y[v])
eecc2 = (ex**2 + ey**2)
f_edge = (1.0 / m) * op.sum(((x[u] - x[v])**2 + (y[u] - y[v])**2) / (eecc2 + 0.05))
# This is the potential function:
(k_meas, k_magn, k_sign, k_edge) = [
0 if knob is None else (2**knob)
for knob in (measurement_knob, magnification_knob, fieldsign_knob, edge_knob)]
fs = (k_meas*f_meas, k_magn*f_magn, k_sign*f_sign, k_edge*f_edge)
f = (fs[0] + fs[1] + fs[2] + fs[3]) / (k_meas + k_magn + k_sign + k_edge)
xy0 = np.reshape(xy0, (-1,2))
object.__setattr__(f, 'meta_data',
pyr.m(f_meas=f_meas, f_magn=f_magn, f_sign=f_sign, f_edge=f_edge,
mesh=mesh, X0=xy0))
return f
|
[
"def",
"clean_retinotopy_potential",
"(",
"hemi",
",",
"retinotopy",
"=",
"Ellipsis",
",",
"mask",
"=",
"Ellipsis",
",",
"weight",
"=",
"Ellipsis",
",",
"surface",
"=",
"'midgray'",
",",
"min_weight",
"=",
"Ellipsis",
",",
"min_eccentricity",
"=",
"0.75",
",",
"visual_area",
"=",
"None",
",",
"map_visual_areas",
"=",
"Ellipsis",
",",
"visual_area_field_signs",
"=",
"Ellipsis",
",",
"measurement_uncertainty",
"=",
"0.3",
",",
"measurement_knob",
"=",
"1",
",",
"magnification_knob",
"=",
"0",
",",
"fieldsign_knob",
"=",
"6",
",",
"edge_knob",
"=",
"0",
")",
":",
"from",
"neuropythy",
".",
"util",
"import",
"curry",
"import",
"neuropythy",
".",
"optimize",
"as",
"op",
"# first, get the mesh and the retinotopy data",
"mesh",
"=",
"geo",
".",
"to_mesh",
"(",
"(",
"hemi",
",",
"surface",
")",
")",
"rdat",
"=",
"(",
"retinotopy_data",
"(",
"mesh",
")",
"if",
"retinotopy",
"is",
"Ellipsis",
"else",
"retinotopy",
"if",
"pimms",
".",
"is_map",
"(",
"retinotopy",
")",
"else",
"retinotopy_data",
"(",
"mesh",
",",
"retinotopy",
")",
")",
"lbls",
"=",
"(",
"rdat",
".",
"get",
"(",
"'visual_area'",
")",
"if",
"visual_area",
"is",
"Ellipsis",
"else",
"None",
"if",
"visual_area",
"is",
"None",
"else",
"hemi",
".",
"property",
"(",
"visual_area",
")",
")",
"wght",
"=",
"(",
"rdat",
".",
"get",
"(",
"'variance_explained'",
")",
"if",
"weight",
"is",
"Ellipsis",
"else",
"weight",
"if",
"pimms",
".",
"is_vector",
"(",
"weight",
")",
"else",
"rdat",
".",
"get",
"(",
"weight",
")",
"if",
"pimms",
".",
"is_str",
"(",
"weight",
")",
"and",
"weight",
"in",
"rdat",
"else",
"hemi",
".",
"property",
"(",
"weight",
")",
"if",
"weight",
"is",
"not",
"None",
"else",
"None",
")",
"# figure out the mask",
"if",
"mask",
"is",
"Ellipsis",
":",
"if",
"mesh",
".",
"coordinates",
".",
"shape",
"[",
"0",
"]",
"==",
"2",
":",
"mask",
"=",
"mesh",
".",
"indices",
"else",
":",
"mask",
"=",
"geo",
".",
"to_flatmap",
"(",
"'occipital_pole'",
",",
"hemi",
",",
"radius",
"=",
"np",
".",
"pi",
"/",
"2.75",
")",
".",
"labels",
"else",
":",
"mask",
"=",
"hemi",
".",
"mask",
"(",
"mask",
",",
"indices",
"=",
"True",
")",
"global_field_sign",
"=",
"None",
"# if we are splitting on visual area, we should do that here:",
"if",
"map_visual_areas",
"and",
"lbls",
"is",
"not",
"None",
":",
"# get the visual areas",
"vas",
"=",
"(",
"np",
".",
"unique",
"(",
"lbls",
")",
"if",
"map_visual_areas",
"==",
"'all'",
"else",
"np",
".",
"setdiff1d",
"(",
"np",
".",
"unique",
"(",
"lbls",
")",
",",
"[",
"0",
"]",
")",
"if",
"map_visual_areas",
"in",
"[",
"True",
",",
"Ellipsis",
"]",
"else",
"np",
".",
"unique",
"(",
"map_visual_areas",
")",
")",
"# we also want to have the field-sign map handy if provided",
"if",
"visual_area_field_signs",
"is",
"None",
":",
"visual_area_field_signs",
"=",
"{",
"}",
"elif",
"visual_area_field_signs",
"is",
"Ellipsis",
":",
"visual_area_field_signs",
"=",
"{",
"1",
":",
"-",
"1",
",",
"2",
":",
"1",
",",
"3",
":",
"-",
"1",
",",
"4",
":",
"1",
"}",
"# special case when map_visual_areas is an integer/string (label)",
"if",
"pimms",
".",
"is_int",
"(",
"map_visual_areas",
")",
"or",
"pimms",
".",
"is_str",
"(",
"map_visual_areas",
")",
":",
"mask",
"=",
"np",
".",
"intersect1d",
"(",
"mask",
",",
"np",
".",
"where",
"(",
"lbls",
"==",
"map_visual_areas",
")",
"[",
"0",
"]",
")",
"global_field_sign",
"=",
"visual_area_field_signs",
".",
"get",
"(",
"map_visual_areas",
")",
"else",
":",
"# otherwise we return a lazy map",
"kw",
"=",
"dict",
"(",
"retinotopy",
"=",
"rdat",
",",
"mask",
"=",
"mask",
",",
"weight",
"=",
"wght",
",",
"surface",
"=",
"surface",
",",
"min_weight",
"=",
"min_weight",
",",
"min_eccentricity",
"=",
"min_eccentricity",
",",
"visual_area",
"=",
"lbls",
",",
"measurement_uncertainty",
"=",
"measurement_uncertainty",
",",
"measurement_knob",
"=",
"measurement_knob",
",",
"magnification_knob",
"=",
"magnification_knob",
",",
"fieldsign_knob",
"=",
"fieldsign_knob",
",",
"edge_knob",
"=",
"edge_knob",
",",
"visual_area_field_signs",
"=",
"visual_area_field_signs",
")",
"return",
"pimms",
".",
"lazy_map",
"(",
"{",
"lbl",
":",
"curry",
"(",
"clean_retinotopy_potential",
",",
"hemi",
",",
"map_visual_areas",
"=",
"lbl",
",",
"*",
"*",
"kw",
")",
"for",
"lbl",
"in",
"vas",
"}",
")",
"# fix rdat, weight, and mesh to match the mask",
"(",
"supermesh",
",",
"orig_mask",
")",
"=",
"(",
"mesh",
",",
"mask",
")",
"rdat",
"=",
"{",
"k",
":",
"(",
"v",
"[",
"mask",
"]",
"if",
"len",
"(",
"v",
")",
">",
"len",
"(",
"mask",
")",
"else",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"six",
".",
"iteritems",
"(",
"rdat",
")",
"}",
"mesh",
"=",
"supermesh",
".",
"submesh",
"(",
"mask",
")",
"# possible that the mask got further downsampled:",
"mask",
"=",
"supermesh",
".",
"tess",
".",
"index",
"(",
"mesh",
".",
"labels",
")",
"if",
"len",
"(",
"mask",
")",
"==",
"len",
"(",
"orig_mask",
")",
":",
"smsk",
"=",
"np",
".",
"arange",
"(",
"len",
"(",
"mask",
")",
")",
"else",
":",
"tmp",
"=",
"set",
"(",
"mask",
")",
"smsk",
"=",
"np",
".",
"asarray",
"(",
"[",
"k",
"for",
"(",
"k",
",",
"u",
")",
"in",
"enumerate",
"(",
"orig_mask",
")",
"if",
"u",
"in",
"tmp",
"]",
")",
"n",
"=",
"mesh",
".",
"vertex_count",
"# number vertices",
"N",
"=",
"2",
"*",
"n",
"# number parameters",
"if",
"wght",
"is",
"None",
":",
"wght",
"=",
"np",
".",
"ones",
"(",
"n",
")",
"elif",
"len",
"(",
"wght",
")",
"==",
"len",
"(",
"orig_mask",
")",
":",
"wght",
"=",
"np",
".",
"array",
"(",
"wght",
")",
"[",
"smsk",
"]",
"elif",
"len",
"(",
"wght",
")",
">",
"n",
":",
"wght",
"=",
"np",
".",
"array",
"(",
"wght",
")",
"[",
"mask",
"]",
"else",
":",
"wght",
"=",
"np",
".",
"array",
"(",
"wght",
")",
"wght",
"[",
"~",
"np",
".",
"isfinite",
"(",
"wght",
")",
"]",
"=",
"0",
"if",
"min_eccentricity",
"<",
"0",
"or",
"np",
".",
"isclose",
"(",
"min_eccentricity",
",",
"0",
")",
":",
"raise",
"ValueError",
"(",
"'min_eccentricity should be small but must be > 0'",
")",
"# we'll need a potential function...",
"# The input to the potential function will be a 2 x N matrix of (x,y) visual field coordinates:",
"xy",
"=",
"op",
".",
"identity",
"(",
"x",
",",
"y",
")",
"=",
"(",
"xy",
"[",
"np",
".",
"arange",
"(",
"0",
",",
"N",
",",
"2",
")",
"]",
",",
"xy",
"[",
"np",
".",
"arange",
"(",
"1",
",",
"N",
",",
"2",
")",
"]",
")",
"# We have a few components to the potential function",
"# [1] Deviation from measurements:",
"# These are the initial measurements we will use",
"xy0",
"=",
"np",
".",
"array",
"(",
"as_retinotopy",
"(",
"rdat",
",",
"'geographical'",
")",
")",
".",
"T",
"if",
"len",
"(",
"xy0",
")",
"==",
"len",
"(",
"orig_mask",
")",
":",
"xy0",
"=",
"xy0",
"[",
"smsk",
"]",
"elif",
"len",
"(",
"xy0",
")",
">",
"n",
":",
"xy0",
"=",
"xy0",
"[",
"mask",
"]",
"(",
"x0",
",",
"y0",
")",
"=",
"xy0",
".",
"T",
"xy0",
"=",
"xy0",
".",
"flatten",
"(",
")",
"ecc0",
"=",
"np",
".",
"sqrt",
"(",
"x0",
"**",
"2",
"+",
"y0",
"**",
"2",
")",
"ii",
"=",
"np",
".",
"where",
"(",
"ecc0",
">",
"min_eccentricity",
")",
"[",
"0",
"]",
"minw",
"=",
"(",
"0",
"if",
"min_weight",
"is",
"None",
"else",
"np",
".",
"percentile",
"(",
"wght",
"[",
"ii",
"]",
",",
"5",
")",
"if",
"min_weight",
"is",
"Ellipsis",
"else",
"min_weight",
"if",
"pimms",
".",
"is_number",
"(",
"min_weight",
")",
"else",
"0",
"if",
"np",
".",
"std",
"(",
"wght",
"[",
"ii",
"]",
")",
"<",
"0.00001",
"else",
"np",
".",
"percentile",
"(",
"wght",
"[",
"ii",
"]",
",",
"min_weight",
"[",
"0",
"]",
")",
")",
"ii",
"=",
"np",
".",
"intersect1d",
"(",
"ii",
",",
"np",
".",
"where",
"(",
"wght",
">",
"minw",
")",
"[",
"0",
"]",
")",
"wsum",
"=",
"np",
".",
"sum",
"(",
"wght",
"[",
"ii",
"]",
")",
"if",
"wsum",
"<",
"0",
"or",
"np",
".",
"isclose",
"(",
"wsum",
",",
"0",
")",
":",
"raise",
"ValueError",
"(",
"'all-zero weights given'",
")",
"wght",
"=",
"wght",
"/",
"wsum",
"s2_meas",
"=",
"(",
"measurement_uncertainty",
"*",
"ecc0",
"[",
"ii",
"]",
")",
"**",
"2",
"d2_meas",
"=",
"(",
"x",
"[",
"ii",
"]",
"-",
"x0",
"[",
"ii",
"]",
")",
"**",
"2",
"+",
"(",
"y",
"[",
"ii",
"]",
"-",
"y0",
"[",
"ii",
"]",
")",
"**",
"2",
"f_meas",
"=",
"(",
"1",
"-",
"op",
".",
"exp",
"(",
"-",
"0.5",
"*",
"d2_meas",
"/",
"s2_meas",
")",
")",
"*",
"wght",
"[",
"ii",
"]",
"f_meas",
"=",
"op",
".",
"sum",
"(",
"f_meas",
")",
"# [2] For adjacent triangles, how different are the cortical magnifications?",
"sarea",
"=",
"mesh",
".",
"face_areas",
"faces",
"=",
"mesh",
".",
"tess",
".",
"indexed_faces",
".",
"T",
"selen",
"=",
"mesh",
".",
"edge_lengths",
"# we want to ensure that vmag is locally smooth across triangles, but we need",
"# to make sure there aren't any edges or faces with 0 surface-areas (so that",
"# we don't divide by zero)",
"mnden",
"=",
"0.0001",
"(",
"e",
",",
"s",
",",
"t",
")",
"=",
"np",
".",
"transpose",
"(",
"[",
"(",
"i",
",",
"e",
"[",
"0",
"]",
",",
"e",
"[",
"1",
"]",
")",
"for",
"(",
"i",
",",
"e",
")",
"in",
"enumerate",
"(",
"mesh",
".",
"tess",
".",
"edge_faces",
")",
"if",
"len",
"(",
"e",
")",
"==",
"2",
"and",
"selen",
"[",
"i",
"]",
">",
"mnden",
"if",
"sarea",
"[",
"e",
"[",
"0",
"]",
"]",
">",
"mnden",
"and",
"sarea",
"[",
"e",
"[",
"1",
"]",
"]",
">",
"mnden",
"]",
")",
"m",
"=",
"len",
"(",
"e",
")",
"(",
"fis",
",",
"q",
")",
"=",
"np",
".",
"unique",
"(",
"np",
".",
"concatenate",
"(",
"[",
"s",
",",
"t",
"]",
")",
",",
"return_inverse",
"=",
"True",
")",
"(",
"s",
",",
"t",
")",
"=",
"np",
".",
"reshape",
"(",
"q",
",",
"(",
"2",
",",
"-",
"1",
")",
")",
"faces",
"=",
"faces",
"[",
"fis",
"]",
"sarea",
"=",
"sarea",
"[",
"fis",
"]",
"selen",
"=",
"selen",
"[",
"e",
"]",
"(",
"u",
",",
"v",
")",
"=",
"mesh",
".",
"tess",
".",
"indexed_edges",
"[",
":",
",",
"e",
"]",
"# we will use visual mag instead of cortical mag: this way we aren't worried about",
"# varea going to 0 and creating a singularity, and it should have a linear ",
"# relationship with eccentricity",
"velen2",
"=",
"(",
"x",
"[",
"u",
"]",
"-",
"x",
"[",
"v",
"]",
")",
"**",
"2",
"+",
"(",
"y",
"[",
"u",
"]",
"-",
"y",
"[",
"v",
"]",
")",
"**",
"2",
"vme",
"=",
"velen2",
"/",
"selen",
"**",
"2",
"# visual magnification: edges",
"varea",
"=",
"op",
".",
"signed_face_areas",
"(",
"faces",
")",
"vmf",
"=",
"varea",
"/",
"sarea",
"# visual magnification: faces",
"vms",
"=",
"vmf",
"[",
"s",
"]",
"vmt",
"=",
"vmf",
"[",
"t",
"]",
"vsgns",
"=",
"op",
".",
"sign",
"(",
"vmf",
")",
"f_magn",
"=",
"(",
"1.0",
"/",
"m",
")",
"*",
"op",
".",
"sum",
"(",
"(",
"vms",
"-",
"vsgns",
"[",
"t",
"]",
"*",
"vme",
")",
"**",
"2",
"+",
"(",
"vmt",
"-",
"vsgns",
"[",
"s",
"]",
"*",
"vme",
")",
"**",
"2",
")",
"# [3] we want a special function for faces whose vmags are different signs",
"if",
"global_field_sign",
"is",
"None",
":",
"f_sign",
"=",
"op",
".",
"compose",
"(",
"op",
".",
"piecewise",
"(",
"0",
",",
"(",
"(",
"-",
"np",
".",
"inf",
",",
"0",
")",
",",
"-",
"op",
".",
"identity",
")",
")",
",",
"vms",
"*",
"vmt",
")",
"f_sign",
"=",
"(",
"1.0",
"/",
"m",
")",
"*",
"op",
".",
"sum",
"(",
"f_sign",
")",
"else",
":",
"vmfsgn",
"=",
"vmf",
"*",
"global_field_sign",
"f_sign",
"=",
"op",
".",
"compose",
"(",
"op",
".",
"piecewise",
"(",
"0",
",",
"(",
"(",
"-",
"np",
".",
"inf",
",",
"0",
")",
",",
"-",
"op",
".",
"identity",
")",
")",
",",
"vmfsgn",
")",
"f_sign",
"=",
"(",
"1.0",
"/",
"m",
")",
"*",
"op",
".",
"sum",
"(",
"f_sign",
")",
"# and the edge potential...",
"ex",
"=",
"0.5",
"*",
"(",
"x",
"[",
"u",
"]",
"+",
"x",
"[",
"v",
"]",
")",
"ey",
"=",
"0.5",
"*",
"(",
"y",
"[",
"u",
"]",
"+",
"y",
"[",
"v",
"]",
")",
"eecc2",
"=",
"(",
"ex",
"**",
"2",
"+",
"ey",
"**",
"2",
")",
"f_edge",
"=",
"(",
"1.0",
"/",
"m",
")",
"*",
"op",
".",
"sum",
"(",
"(",
"(",
"x",
"[",
"u",
"]",
"-",
"x",
"[",
"v",
"]",
")",
"**",
"2",
"+",
"(",
"y",
"[",
"u",
"]",
"-",
"y",
"[",
"v",
"]",
")",
"**",
"2",
")",
"/",
"(",
"eecc2",
"+",
"0.05",
")",
")",
"# This is the potential function:",
"(",
"k_meas",
",",
"k_magn",
",",
"k_sign",
",",
"k_edge",
")",
"=",
"[",
"0",
"if",
"knob",
"is",
"None",
"else",
"(",
"2",
"**",
"knob",
")",
"for",
"knob",
"in",
"(",
"measurement_knob",
",",
"magnification_knob",
",",
"fieldsign_knob",
",",
"edge_knob",
")",
"]",
"fs",
"=",
"(",
"k_meas",
"*",
"f_meas",
",",
"k_magn",
"*",
"f_magn",
",",
"k_sign",
"*",
"f_sign",
",",
"k_edge",
"*",
"f_edge",
")",
"f",
"=",
"(",
"fs",
"[",
"0",
"]",
"+",
"fs",
"[",
"1",
"]",
"+",
"fs",
"[",
"2",
"]",
"+",
"fs",
"[",
"3",
"]",
")",
"/",
"(",
"k_meas",
"+",
"k_magn",
"+",
"k_sign",
"+",
"k_edge",
")",
"xy0",
"=",
"np",
".",
"reshape",
"(",
"xy0",
",",
"(",
"-",
"1",
",",
"2",
")",
")",
"object",
".",
"__setattr__",
"(",
"f",
",",
"'meta_data'",
",",
"pyr",
".",
"m",
"(",
"f_meas",
"=",
"f_meas",
",",
"f_magn",
"=",
"f_magn",
",",
"f_sign",
"=",
"f_sign",
",",
"f_edge",
"=",
"f_edge",
",",
"mesh",
"=",
"mesh",
",",
"X0",
"=",
"xy0",
")",
")",
"return",
"f"
] |
clean_retinotopy_potential(hemi) yields a retinotopic potential function for the given
hemisphere that, when minimized, should yield a cleaned/smoothed version of the retinotopic
maps.
The potential function f returned by clean_retinotopy_potential() is a PotentialFunction object,
as defined in neuropythy.optimize. The potential function consists of four terms that are summed
with weights derived from the four '*_knob' options (see below). The function f as well as the
three terms that it comprises require as input a matrix X of the pRF centers of mesh or the
masked part of the mesh (X0 is the initial measurement matrix). These four potential terms are:
* The measurement potential. The retinotopic map that is being minimized is referred to as the
measured map, and the measurement potential function, fm(X), increases as X becomes farther
from X0. Explicitly, fm(X) is the sum over all pRF centers (x,y) in X (with initial position
(x0,y0) in X0) of exp(-0.5 * ((x - x0)**2 + (y - y0)**2) / s**2). The parameter s is the
initial eccentricity (sqrt(x0**2 + y0**2)) times the measurement_uncertainty option.
* The magnification potential. The retinotopy cleaning is performed in part by attempting to
smooth the visual magnification (the inverse of cortical magnification: deg**2 / mm**2)
across the cortical surface; the magnification potential fg(X) specifies how the visual
magnification contributes to the overall potential: it decreases as the magnification
becomes smoother and increases as it becomes less smooth. Explicitly, fg(X) is equal to the
sum over all pairs of faces (s,t) sharing one edge e of
(vmag(s) - sgn(vmag(t))*vmag(e))**2 + (vmag(t) - sgn(vmag(s))*vmag(e))**2. Note that the
function vmag(s) yields the areal visual magnification (deg**2 / mm**2) of any face s and
vmag(e) is the square of the linear magnification of any edge e; additionally, the sign of
vmag(s) for a face s is always equal to the fieldsign of the face (while for edges vmag(e)
is always positive).
* The fieldsign potential. The next way in which the potential function attempts to clean the
retinotopy is via the use of fieldsign: adjacent faces should have the same fieldsign under
  most circumstances. This is modeled by the function fs(X), which is 0 for any pair of faces
that have the same fieldsign and non-zero for faces that have different fieldsigns. The
form of fs(X) is the sum over all pairs of adjacent triangles (s,t) of -vmag(s)*vmag(t) if
vmag(s) and vmag(t) have different fieldsigns, otherwise 0.
* The edge potential. Finally, the potential function attempts to force edges to be smooth by
penalizing edges whose endpoints are far apart in the visual field. The edge potential
function fe(X) is equal to the sum for all edges (u,v) of
(x[u] - x[v])**2 + (y[u] - y[v])**2 / mean(eccen(u), eccen(v)).
Note additionally that all four potential functions are normalized by a factor intended to keep
them on similar scales (this factor is not mentioned above or below, but it is automatically
applied to all potential terms). For the magnification, fieldsign, and edge potential terms, the
normalization factor is 1/m where m is the number of non-perimeter edges (or, alternately, the
number of adjacent face pairs) in the mesh. For the measurement potential term, the
normalization factor is 1/W where W is the sum of the weights on the measurement vertices (if
no weights are given, they are considered to be 1 for each vertex).
The following options may be given:
* retinotopy (default: Ellipsis) specifies the retinotopy data to use for the hemisphere;
the argument may be a map from retinotopy_data or a valid argument to it. The default
indicates that the result of calling retinotopy_data(hemi) is used.
* mask (default: Ellipsis) specifies that the specific mask should be used; by default, the
mask is made using the vertices kept in to_flatmap('occipital_pole', hemi, radius=pi/2.75).
* weight (default: Ellipsis) specifies the weight to use; the default indicates that the
weight found in the retinotopy data, if any, should be used. If None, then all values
in the mask are equally weighted.
* visual_area (default: Ellipsis) specifies the visual area labels to use; the default
indicates that the visual area property found in the retinotopy data should be used, if any.
If None then no visual area splitting is done. This property is only important if
map_visual_areas is not False or None; otherwise it is ignored.
* map_visual_areas (default: Ellipsis) specifies whether the return value should be a lazy map
whose keys are visual area labels and whose values are recursed calls to this function for
only the subset of the mesh with the associated label. May be False or None to specify that
a single potential should be yielded. May be a list of labels to specify that only those
visual areas should be mapped; the default value (Ellipsis) uses all labels in visual_areas
except for 0.
* min_weight (default: Ellipsis) specifies the minimum weight to include, after the
weights have been normalized such that sum(weights) == 1. If the value is a list or
tuple containing a single item [p] then p is taken to be a percentile below which
vertices should be excluded. The default, Ellipsis, is equivalent to [5].
* min_eccentricity (default: 0.75) specifies the eccentricity below which no measurement-based
potential is applied; i.e., by default, vertices with eccentricity below 0.75 degrees will
be considered as having 0 weight.
* surface (default: 'midgray') specifies which surface should be used to establish cortical
magnification; may be 'pial', 'midgray', or 'white'.
* measurement_uncertainty (default: 0.3) is used to determine the standard deviation of the
Gaussian potential well used to prevent individual vertices with valid retinotopic
measurements from straying too far from their initial measured positions. In other words, if
a vertex has a weight that is above threshold and a pRF center of (x0,y0), then the
measurement-potential for that vertex is exp(-0.5 * ((x - x0)**2 + (y - y0)**2)/s**2) where
(x,y) is the center of the pRF during minimization and s is equal to
measurement_uncertainty * sqrt(x0**2 + y0**2).
* measurement_knob, magnification_knob, fieldsign_knob, and edge_knob (defaults: 1, 0, 12, 0,
respectively) specify the relative weights of the terms of the potential function on a log2
scale. In other words, if the measurement, magnification, fieldsign, and edge potential
terms are fm, fg, fs, and fe while the knobs are km, kg, ks, and ke, then the overall
potential function f is equal to:
f(X) = (2**km * fm(X) + 2**kg * fg(X) + 2**ks * fs(X) + 2**ke * fe(X)) / w
where w = (2**km + 2**kg + 2**ks + 2**ke)
If any knob is set to None, then its value is 0 instead of 2**k.
|
[
"clean_retinotopy_potential",
"(",
"hemi",
")",
"yields",
"a",
"retinotopic",
"potential",
"function",
"for",
"the",
"given",
"hemisphere",
"that",
"when",
"minimized",
"should",
"yeild",
"a",
"cleaned",
"/",
"smoothed",
"version",
"of",
"the",
"retinotopic",
"maps",
"."
] |
python
|
train
|
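A minimal sketch of the knob-weighting rule described in the record above; the helper name knob_weights is hypothetical and only reproduces the stated arithmetic:
def knob_weights(km=1, kg=0, ks=12, ke=0):
    # A knob of None contributes 0 rather than 2**k, per the rule above.
    raw = [0 if k is None else 2 ** k for k in (km, kg, ks, ke)]
    w = sum(raw)
    return [r / w for r in raw]

print(knob_weights())         # default knobs: the fieldsign term dominates
print(knob_weights(ks=None))  # disabling the fieldsign term entirely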
openego/eDisGo
|
edisgo/grid/network.py
|
https://github.com/openego/eDisGo/blob/e6245bdaf236f9c49dbda5a18c1c458290f41e2b/edisgo/grid/network.py#L3391-L3434
|
def v_res(self, nodes=None, level=None):
"""
Get resulting voltage level at node.
Parameters
----------
nodes : :obj:`list`
List of string representatives of grid topology components, e.g.
:class:`~.grid.components.Generator`. If not provided defaults to
all nodes available in grid level `level`.
level : :obj:`str`
Either 'mv' or 'lv' or None (default). Depending on which grid
level results you are interested in. It is required to provide this
argument in order to distinguish voltage levels at primary and
secondary side of the transformer/LV station.
If not provided (respectively None) defaults to ['mv', 'lv'].
Returns
-------
:pandas:`pandas.DataFrame<dataframe>`
Resulting voltage levels obtained from power flow analysis
"""
# check if voltages are available:
if hasattr(self, 'pfa_v_mag_pu'):
self.pfa_v_mag_pu.sort_index(axis=1, inplace=True)
else:
message = "No voltage results available."
raise AttributeError(message)
if level is None:
level = ['mv', 'lv']
if nodes is None:
return self.pfa_v_mag_pu.loc[:, (level, slice(None))]
else:
not_included = [_ for _ in nodes
if _ not in list(self.pfa_v_mag_pu[level].columns)]
labels_included = [_ for _ in nodes if _ not in not_included]
if not_included:
logging.warning("Voltage levels for {nodes} are not returned "
"from PFA".format(nodes=not_included))
return self.pfa_v_mag_pu[level][labels_included]
|
[
"def",
"v_res",
"(",
"self",
",",
"nodes",
"=",
"None",
",",
"level",
"=",
"None",
")",
":",
"# check if voltages are available:",
"if",
"hasattr",
"(",
"self",
",",
"'pfa_v_mag_pu'",
")",
":",
"self",
".",
"pfa_v_mag_pu",
".",
"sort_index",
"(",
"axis",
"=",
"1",
",",
"inplace",
"=",
"True",
")",
"else",
":",
"message",
"=",
"\"No voltage results available.\"",
"raise",
"AttributeError",
"(",
"message",
")",
"if",
"level",
"is",
"None",
":",
"level",
"=",
"[",
"'mv'",
",",
"'lv'",
"]",
"if",
"nodes",
"is",
"None",
":",
"return",
"self",
".",
"pfa_v_mag_pu",
".",
"loc",
"[",
":",
",",
"(",
"level",
",",
"slice",
"(",
"None",
")",
")",
"]",
"else",
":",
"not_included",
"=",
"[",
"_",
"for",
"_",
"in",
"nodes",
"if",
"_",
"not",
"in",
"list",
"(",
"self",
".",
"pfa_v_mag_pu",
"[",
"level",
"]",
".",
"columns",
")",
"]",
"labels_included",
"=",
"[",
"_",
"for",
"_",
"in",
"nodes",
"if",
"_",
"not",
"in",
"not_included",
"]",
"if",
"not_included",
":",
"logging",
".",
"warning",
"(",
"\"Voltage levels for {nodes} are not returned \"",
"\"from PFA\"",
".",
"format",
"(",
"nodes",
"=",
"not_included",
")",
")",
"return",
"self",
".",
"pfa_v_mag_pu",
"[",
"level",
"]",
"[",
"labels_included",
"]"
] |
Get resulting voltage level at node.
Parameters
----------
nodes : :obj:`list`
List of string representatives of grid topology components, e.g.
:class:`~.grid.components.Generator`. If not provided defaults to
all nodes available in grid level `level`.
level : :obj:`str`
Either 'mv' or 'lv' or None (default). Depending on which grid
level results you are interested in. It is required to provide this
argument in order to distinguish voltage levels at primary and
secondary side of the transformer/LV station.
If not provided (respectively None) defaults to ['mv', 'lv'].
Returns
-------
:pandas:`pandas.DataFrame<dataframe>`
Resulting voltage levels obtained from power flow analysis
|
[
"Get",
"resulting",
"voltage",
"level",
"at",
"node",
"."
] |
python
|
train
|
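A runnable sketch of the column selection that v_res performs, using a toy DataFrame in place of eDisGo's power-flow results (the column labels are made up):
import pandas as pd

cols = pd.MultiIndex.from_tuples([('mv', 'Bus_1'), ('lv', 'Load_1')])
pfa_v_mag_pu = pd.DataFrame([[1.01, 0.98], [1.02, 0.97]], columns=cols).sort_index(axis=1)

print(pfa_v_mag_pu.loc[:, (['mv', 'lv'], slice(None))])  # nodes=None path: everything
print(pfa_v_mag_pu['lv'][['Load_1']])                    # nodes given, level='lv'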
mitsei/dlkit
|
dlkit/json_/grading/objects.py
|
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/grading/objects.py#L596-L607
|
def get_based_on_grades_metadata(self):
"""Gets the metadata for a grade-based designation.
return: (osid.Metadata) - metadata for the grade-based
designation
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template
metadata = dict(self._mdata['based_on_grades'])
metadata.update({'existing_boolean_values': self._my_map['basedOnGrades']})
return Metadata(**metadata)
|
[
"def",
"get_based_on_grades_metadata",
"(",
"self",
")",
":",
"# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template",
"metadata",
"=",
"dict",
"(",
"self",
".",
"_mdata",
"[",
"'based_on_grades'",
"]",
")",
"metadata",
".",
"update",
"(",
"{",
"'existing_boolean_values'",
":",
"self",
".",
"_my_map",
"[",
"'basedOnGrades'",
"]",
"}",
")",
"return",
"Metadata",
"(",
"*",
"*",
"metadata",
")"
] |
Gets the metadata for a grade-based designation.
return: (osid.Metadata) - metadata for the grade-based
designation
*compliance: mandatory -- This method must be implemented.*
|
[
"Gets",
"the",
"metadata",
"for",
"a",
"grade",
"-",
"based",
"designation",
"."
] |
python
|
train
|
KyleWpppd/css-audit
|
cssaudit/parser.py
|
https://github.com/KyleWpppd/css-audit/blob/cab4d4204cf30d54bc1881deee6ad92ae6aacc56/cssaudit/parser.py#L44-L82
|
def handle_starttag(self, tag, attrs):
"""
This method handles any HTML tags that have a matching
closing tag. So elements like <p> and <div> are handled
by this method.
@param <string> tag
An html tag that has a separate closing tag such as <p>
<div> or <body>
@param <tuple> attrs
A tuple of HTML element attributes such as 'class', 'id',
'style', etc. The tuple is of the form ('html_attribute',
'attr1', 'attr2', 'attr3' ... 'attrN')
"""
dattrs = dict(attrs)
# look for '<link type='text/css' rel='stylesheet' href='...' > tags
# to see if looking for link tags makes sense here, we need to know
# a little more about the implementation. Whether HTML parser looks for
# the trailing slash at the end of an element, or just knows which elements
# should be paired or not.
if tag.lower() == 'link':
print "Found link"
if all (k in dattrs for k in ('rel', 'href', 'type')):
if ( dattrs['rel'].lower() == 'stylesheet' and
dattrs['type'].lower() == 'text/css' ):
# Add the url to the stack
if (dattrs['href'][:5].lower() == 'http:' or
dattrs['href'][:6].lower() == 'https:'):
self.linked_sheets.append(dattrs['href'])
else:
self.linked_sheets.append(self.url_root+dattrs['href'])
# Look for <style type='text/css' ... /> tags and add their rules
# into the list.
elif (tag.lower() == 'style' and
'type' in dattrs and dattrs['type'].lower() == 'text/css'):
#print "Found CSS inline defs"
self.get_data = True
self.append_styles(tag, attrs)
|
[
"def",
"handle_starttag",
"(",
"self",
",",
"tag",
",",
"attrs",
")",
":",
"dattrs",
"=",
"dict",
"(",
"attrs",
")",
"# look for '<link type='text/css' rel='stylesheet' href='...' > tags",
"# to see if looking for link tags makes sense here, we need to know",
"# a little more about the implementation. Whether HTML parser looks for ",
"# the trailing slash at the end of an element, or just knows which elements",
"# should be paired or not. ",
"if",
"tag",
".",
"lower",
"(",
")",
"==",
"'link'",
":",
"print",
"\"Found link\"",
"if",
"all",
"(",
"k",
"in",
"dattrs",
"for",
"k",
"in",
"(",
"'rel'",
",",
"'href'",
",",
"'type'",
")",
")",
":",
"if",
"(",
"dattrs",
"[",
"'rel'",
"]",
".",
"lower",
"(",
")",
"==",
"'stylesheet'",
"and",
"dattrs",
"[",
"'type'",
"]",
".",
"lower",
"(",
")",
"==",
"'text/css'",
")",
":",
"# Add the url to the stack",
"if",
"(",
"dattrs",
"[",
"'href'",
"]",
"[",
":",
"5",
"]",
".",
"lower",
"(",
")",
"==",
"'http:'",
"or",
"dattrs",
"[",
"'href'",
"]",
"[",
":",
"6",
"]",
".",
"lower",
"(",
")",
"==",
"'https:'",
")",
":",
"self",
".",
"linked_sheets",
".",
"append",
"(",
"dattrs",
"[",
"'href'",
"]",
")",
"else",
":",
"self",
".",
"linked_sheets",
".",
"append",
"(",
"self",
".",
"url_root",
"+",
"dattrs",
"[",
"'href'",
"]",
")",
"# Look for <style type='text/css' ... /> tags and add their rules",
"# into the list.",
"elif",
"(",
"tag",
".",
"lower",
"(",
")",
"==",
"'style'",
"and",
"'type'",
"in",
"dattrs",
"and",
"dattrs",
"[",
"'type'",
"]",
".",
"lower",
"(",
")",
"==",
"'text/css'",
")",
":",
"#print \"Found CSS inline defs\"",
"self",
".",
"get_data",
"=",
"True",
"self",
".",
"append_styles",
"(",
"tag",
",",
"attrs",
")"
] |
This method handles any HTML tags that have a matching
closing tag. So elements like <p> and <div> are handled
by this method.
@param <string> tag
An html tag that has a separate closing tag such as <p>
<div> or <body>
@param <tuple> attrs
A tuple of HTML element attributes such as 'class', 'id',
'style', etc. The tuple is of the form ('html_attribute',
'attr1', 'attr2', 'attr3' ... 'attrN')
|
[
"This",
"method",
"handles",
"any",
"HTML",
"tags",
"that",
"have",
"a",
"matching",
"closing",
"tag",
".",
"So",
"elements",
"like",
"<p",
">",
"and",
"<div",
">",
"are",
"handled",
"by",
"this",
"method",
"."
] |
python
|
train
|
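A Python 3 sketch of the same link-collecting idea (the record above is Python 2, note the bare print statement); class and attribute names here are illustrative:
from html.parser import HTMLParser

class StylesheetCollector(HTMLParser):
    def __init__(self, url_root=''):
        super().__init__()
        self.url_root = url_root
        self.linked_sheets = []

    def handle_starttag(self, tag, attrs):
        dattrs = dict(attrs)
        if tag.lower() == 'link' and dattrs.get('rel', '').lower() == 'stylesheet':
            href = dattrs.get('href', '')
            # Absolute URLs are kept as-is; relative ones are joined to the site root.
            full = href if href.lower().startswith(('http:', 'https:')) else self.url_root + href
            self.linked_sheets.append(full)

p = StylesheetCollector('https://example.com')
p.feed('<link rel="stylesheet" type="text/css" href="/main.css">')
print(p.linked_sheets)  # ['https://example.com/main.css']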
google/grumpy
|
third_party/pythonparser/parser.py
|
https://github.com/google/grumpy/blob/3ec87959189cfcdeae82eb68a47648ac25ceb10b/third_party/pythonparser/parser.py#L987-L998
|
def raise_stmt__30(self, raise_loc, exc_opt):
"""(3.0-) raise_stmt: 'raise' [test ['from' test]]"""
exc = from_loc = cause = None
loc = raise_loc
if exc_opt:
exc, cause_opt = exc_opt
loc = loc.join(exc.loc)
if cause_opt:
from_loc, cause = cause_opt
loc = loc.join(cause.loc)
return ast.Raise(exc=exc, inst=None, tback=None, cause=cause,
keyword_loc=raise_loc, from_loc=from_loc, loc=loc)
|
[
"def",
"raise_stmt__30",
"(",
"self",
",",
"raise_loc",
",",
"exc_opt",
")",
":",
"exc",
"=",
"from_loc",
"=",
"cause",
"=",
"None",
"loc",
"=",
"raise_loc",
"if",
"exc_opt",
":",
"exc",
",",
"cause_opt",
"=",
"exc_opt",
"loc",
"=",
"loc",
".",
"join",
"(",
"exc",
".",
"loc",
")",
"if",
"cause_opt",
":",
"from_loc",
",",
"cause",
"=",
"cause_opt",
"loc",
"=",
"loc",
".",
"join",
"(",
"cause",
".",
"loc",
")",
"return",
"ast",
".",
"Raise",
"(",
"exc",
"=",
"exc",
",",
"inst",
"=",
"None",
",",
"tback",
"=",
"None",
",",
"cause",
"=",
"cause",
",",
"keyword_loc",
"=",
"raise_loc",
",",
"from_loc",
"=",
"from_loc",
",",
"loc",
"=",
"loc",
")"
] |
(3.0-) raise_stmt: 'raise' [test ['from' test]]
|
[
"(",
"3",
".",
"0",
"-",
")",
"raise_stmt",
":",
"raise",
"[",
"test",
"[",
"from",
"test",
"]]"
] |
python
|
valid
|
mdickinson/bigfloat
|
bigfloat/core.py
|
https://github.com/mdickinson/bigfloat/blob/e5fdd1048615191ed32a2b7460e14b3b3ff24662/bigfloat/core.py#L2411-L2429
|
def trunc(x, context=None):
"""
Return the next integer towards zero.
If the result is not exactly representable, it will be rounded according to
the current context.
.. note::
This function corresponds to the MPFR function ``mpfr_rint_trunc``,
not to ``mpfr_trunc``.
"""
return _apply_function_in_current_context(
BigFloat,
mpfr.mpfr_rint_trunc,
(BigFloat._implicit_convert(x),),
context,
)
|
[
"def",
"trunc",
"(",
"x",
",",
"context",
"=",
"None",
")",
":",
"return",
"_apply_function_in_current_context",
"(",
"BigFloat",
",",
"mpfr",
".",
"mpfr_rint_trunc",
",",
"(",
"BigFloat",
".",
"_implicit_convert",
"(",
"x",
")",
",",
")",
",",
"context",
",",
")"
] |
Return the next integer towards zero.
If the result is not exactly representable, it will be rounded according to
the current context.
.. note::
This function corresponds to the MPFR function ``mpfr_rint_trunc``,
not to ``mpfr_trunc``.
|
[
"Return",
"the",
"next",
"integer",
"towards",
"zero",
"."
] |
python
|
train
|
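A brief usage sketch, assuming the bigfloat package (and MPFR) is installed:
from bigfloat import trunc, precision

print(trunc(2.7))        # a BigFloat equal to 2: rounded toward zero
with precision(24):      # the ambient context controls rounding of the result
    print(trunc(-3.9))   # a BigFloat equal to -3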
seanpar203/event-bus
|
event_bus/bus.py
|
https://github.com/seanpar203/event-bus/blob/60319b9eb4e38c348e80f3ec625312eda75da765/event_bus/bus.py#L73-L92
|
def on(self, event: str) -> Callable:
""" Decorator for subscribing a function to a specific event.
:param event: Name of the event to subscribe to.
:type event: str
:return: The outer function.
:rtype: Callable
"""
def outer(func):
self.add_event(func, event)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return outer
|
[
"def",
"on",
"(",
"self",
",",
"event",
":",
"str",
")",
"->",
"Callable",
":",
"def",
"outer",
"(",
"func",
")",
":",
"self",
".",
"add_event",
"(",
"func",
",",
"event",
")",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper",
"return",
"outer"
] |
Decorator for subscribing a function to a specific event.
:param event: Name of the event to subscribe to.
:type event: str
:return: The outer function.
:rtype: Callable
|
[
"Decorator",
"for",
"subscribing",
"a",
"function",
"to",
"a",
"specific",
"event",
"."
] |
python
|
train
|
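A usage sketch for the decorator, assuming the package's EventBus class and an emit method as documented upstream:
from event_bus import EventBus

bus = EventBus()

@bus.on(event='user:registered')
def greet(name):
    print('Welcome, ' + name)

bus.emit('user:registered', 'Ada')  # calls every subscriber of the event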
thefactory/marathon-python
|
marathon/client.py
|
https://github.com/thefactory/marathon-python/blob/592b253aa8edf2475c97ca438ad7b6936652caf2/marathon/client.py#L301-L325
|
def update_apps(self, apps, force=False, minimal=True):
"""Update multiple apps.
Applies writable settings in elements of apps either by upgrading existing ones or creating new ones
:param apps: sequence of application settings
:param bool force: apply even if a deployment is in progress
:param bool minimal: ignore nulls and empty collections
:returns: a dict containing the deployment id and version
:rtype: dict
"""
json_repr_apps = []
for app in apps:
# Changes won't take if version is set - blank it for convenience
app.version = None
json_repr_apps.append(app.json_repr(minimal=minimal))
params = {'force': force}
encoder = MarathonMinimalJsonEncoder if minimal else MarathonJsonEncoder
data = json.dumps(json_repr_apps, cls=encoder, sort_keys=True)
response = self._do_request(
'PUT', '/v2/apps', params=params, data=data)
return response.json()
|
[
"def",
"update_apps",
"(",
"self",
",",
"apps",
",",
"force",
"=",
"False",
",",
"minimal",
"=",
"True",
")",
":",
"json_repr_apps",
"=",
"[",
"]",
"for",
"app",
"in",
"apps",
":",
"# Changes won't take if version is set - blank it for convenience",
"app",
".",
"version",
"=",
"None",
"json_repr_apps",
".",
"append",
"(",
"app",
".",
"json_repr",
"(",
"minimal",
"=",
"minimal",
")",
")",
"params",
"=",
"{",
"'force'",
":",
"force",
"}",
"encoder",
"=",
"MarathonMinimalJsonEncoder",
"if",
"minimal",
"else",
"MarathonJsonEncoder",
"data",
"=",
"json",
".",
"dumps",
"(",
"json_repr_apps",
",",
"cls",
"=",
"encoder",
",",
"sort_keys",
"=",
"True",
")",
"response",
"=",
"self",
".",
"_do_request",
"(",
"'PUT'",
",",
"'/v2/apps'",
",",
"params",
"=",
"params",
",",
"data",
"=",
"data",
")",
"return",
"response",
".",
"json",
"(",
")"
] |
Update multiple apps.
Applies writable settings in elements of apps either by upgrading existing ones or creating new ones
:param apps: sequence of application settings
:param bool force: apply even if a deployment is in progress
:param bool minimal: ignore nulls and empty collections
:returns: a dict containing the deployment id and version
:rtype: dict
|
[
"Update",
"multiple",
"apps",
"."
] |
python
|
train
|
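A hedged usage sketch; it assumes a reachable Marathon master and the client's list_apps method:
from marathon import MarathonClient

client = MarathonClient('http://localhost:8080')
apps = client.list_apps()
for app in apps:
    app.instances = 2                      # change a writable setting
result = client.update_apps(apps, force=True)
print(result)                              # e.g. a dict with the deployment id and version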
Kane610/deconz
|
pydeconz/__init__.py
|
https://github.com/Kane610/deconz/blob/8a9498dbbc8c168d4a081173ad6c3b1e17fffdf6/pydeconz/__init__.py#L200-L225
|
def update_group_color(self, lights: list) -> None:
"""Update group colors based on light states.
deCONZ group updates don't contain any information about the current
state of the lights in the group. This method updates the color
properties of the group to the current color of the lights in the
group.
For groups where the lights have different colors the group color will
only reflect the color of the latest changed light in the group.
"""
for group in self.groups.values():
# Skip group if there are no common light ids.
if not any({*lights} & {*group.lights}):
continue
# More than one light means load_parameters called this method.
# Then we take first best light to be available.
light_ids = lights
if len(light_ids) > 1:
light_ids = group.lights
for light_id in light_ids:
if self.lights[light_id].reachable:
group.update_color_state(self.lights[light_id])
break
|
[
"def",
"update_group_color",
"(",
"self",
",",
"lights",
":",
"list",
")",
"->",
"None",
":",
"for",
"group",
"in",
"self",
".",
"groups",
".",
"values",
"(",
")",
":",
"# Skip group if there are no common light ids.",
"if",
"not",
"any",
"(",
"{",
"*",
"lights",
"}",
"&",
"{",
"*",
"group",
".",
"lights",
"}",
")",
":",
"continue",
"# More than one light means load_parameters called this method.",
"# Then we take first best light to be available.",
"light_ids",
"=",
"lights",
"if",
"len",
"(",
"light_ids",
")",
">",
"1",
":",
"light_ids",
"=",
"group",
".",
"lights",
"for",
"light_id",
"in",
"light_ids",
":",
"if",
"self",
".",
"lights",
"[",
"light_id",
"]",
".",
"reachable",
":",
"group",
".",
"update_color_state",
"(",
"self",
".",
"lights",
"[",
"light_id",
"]",
")",
"break"
] |
Update group colors based on light states.
deCONZ group updates don't contain any information about the current
state of the lights in the group. This method updates the color
properties of the group to the current color of the lights in the
group.
For groups where the lights have different colors the group color will
only reflect the color of the latest changed light in the group.
|
[
"Update",
"group",
"colors",
"based",
"on",
"light",
"states",
"."
] |
python
|
train
|
teepark/greenhouse
|
greenhouse/io/ssl.py
|
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/ssl.py#L242-L261
|
def do_handshake(self, timeout):
'perform a SSL/TLS handshake'
tout = _timeout(timeout)
if not self._blocking:
return self._sslobj.do_handshake()
while 1:
try:
return self._sslobj.do_handshake()
except ssl.SSLError, exc:
if exc.args[0] == ssl.SSL_ERROR_WANT_READ:
self._wait_event(tout.now)
continue
elif exc.args[0] == ssl.SSL_ERROR_WANT_WRITE:
self._wait_event(tout.now, write=True)
continue
raise
self._wait_event(timeout)
self._sslobj.do_handshake()
|
[
"def",
"do_handshake",
"(",
"self",
",",
"timeout",
")",
":",
"tout",
"=",
"_timeout",
"(",
"timeout",
")",
"if",
"not",
"self",
".",
"_blocking",
":",
"return",
"self",
".",
"_sslobj",
".",
"do_handshake",
"(",
")",
"while",
"1",
":",
"try",
":",
"return",
"self",
".",
"_sslobj",
".",
"do_handshake",
"(",
")",
"except",
"ssl",
".",
"SSLError",
",",
"exc",
":",
"if",
"exc",
".",
"args",
"[",
"0",
"]",
"==",
"ssl",
".",
"SSL_ERROR_WANT_READ",
":",
"self",
".",
"_wait_event",
"(",
"tout",
".",
"now",
")",
"continue",
"elif",
"exc",
".",
"args",
"[",
"0",
"]",
"==",
"ssl",
".",
"SSL_ERROR_WANT_WRITE",
":",
"self",
".",
"_wait_event",
"(",
"tout",
".",
"now",
",",
"write",
"=",
"True",
")",
"continue",
"raise",
"self",
".",
"_wait_event",
"(",
"timeout",
")",
"self",
".",
"_sslobj",
".",
"do_handshake",
"(",
")"
] |
perform a SSL/TLS handshake
|
[
"perform",
"a",
"SSL",
"/",
"TLS",
"handshake"
] |
python
|
train
|
buildbot/buildbot
|
master/buildbot/steps/cmake.py
|
https://github.com/buildbot/buildbot/blob/5df3cfae6d760557d99156633c32b1822a1e130c/master/buildbot/steps/cmake.py#L62-L87
|
def run(self):
"""
run CMake
"""
command = [self.cmake]
if self.generator:
command.extend([
'-G', self.generator
])
if self.path:
command.append(self.path)
if self.definitions is not None:
for item in self.definitions.items():
command.append('-D%s=%s' % item)
if self.options is not None:
command.extend(self.options)
cmd = yield self.makeRemoteShellCommand(command=command)
yield self.runCommand(cmd)
return cmd.results()
|
[
"def",
"run",
"(",
"self",
")",
":",
"command",
"=",
"[",
"self",
".",
"cmake",
"]",
"if",
"self",
".",
"generator",
":",
"command",
".",
"extend",
"(",
"[",
"'-G'",
",",
"self",
".",
"generator",
"]",
")",
"if",
"self",
".",
"path",
":",
"command",
".",
"append",
"(",
"self",
".",
"path",
")",
"if",
"self",
".",
"definitions",
"is",
"not",
"None",
":",
"for",
"item",
"in",
"self",
".",
"definitions",
".",
"items",
"(",
")",
":",
"command",
".",
"append",
"(",
"'-D%s=%s'",
"%",
"item",
")",
"if",
"self",
".",
"options",
"is",
"not",
"None",
":",
"command",
".",
"extend",
"(",
"self",
".",
"options",
")",
"cmd",
"=",
"yield",
"self",
".",
"makeRemoteShellCommand",
"(",
"command",
"=",
"command",
")",
"yield",
"self",
".",
"runCommand",
"(",
"cmd",
")",
"return",
"cmd",
".",
"results",
"(",
")"
] |
run CMake
|
[
"run",
"CMake"
] |
python
|
train
|
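A configuration sketch, assuming the step is exposed through buildbot.plugins.steps as CMake; the generator, definitions, and options shown are illustrative:
from buildbot.plugins import steps, util

factory = util.BuildFactory()
factory.addStep(steps.CMake(
    path='..',
    generator='Ninja',
    definitions={'CMAKE_BUILD_TYPE': 'Release'},
    options=['-Wno-dev'],
))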
tensorflow/mesh
|
mesh_tensorflow/auto_mtf/layout.py
|
https://github.com/tensorflow/mesh/blob/3921196e5e43302e820da0a87329f25d7e2a3016/mesh_tensorflow/auto_mtf/layout.py#L47-L63
|
def layout(mtf_graph, mesh_shape, mtf_outputs=()):
"""Compute layout rules based on a computational graph and mesh shape.
Args:
mtf_graph: a mtf.Graph.
mesh_shape: an mtf.Shape, str, or listlike of mtf.Dimension.
mtf_outputs: an optional iterable of mtf.Tensor, representing the outputs
of the computation.
Returns:
a mtf.LayoutRules
"""
mesh_shape = mtf.convert_to_shape(mesh_shape)
estimator = memory_estimator.MemoryEstimator(mtf_graph, mesh_shape,
mtf_outputs)
optimizer = layout_optimizer.LayoutOptimizer(estimator)
return mtf.convert_to_layout_rules(optimizer.solve())
|
[
"def",
"layout",
"(",
"mtf_graph",
",",
"mesh_shape",
",",
"mtf_outputs",
"=",
"(",
")",
")",
":",
"mesh_shape",
"=",
"mtf",
".",
"convert_to_shape",
"(",
"mesh_shape",
")",
"estimator",
"=",
"memory_estimator",
".",
"MemoryEstimator",
"(",
"mtf_graph",
",",
"mesh_shape",
",",
"mtf_outputs",
")",
"optimizer",
"=",
"layout_optimizer",
".",
"LayoutOptimizer",
"(",
"estimator",
")",
"return",
"mtf",
".",
"convert_to_layout_rules",
"(",
"optimizer",
".",
"solve",
"(",
")",
")"
] |
Compute layout rules based on a computational graph and mesh shape.
Args:
mtf_graph: a mtf.Graph.
mesh_shape: an mtf.Shape, str, or listlike of mtf.Dimension.
mtf_outputs: an optional iterable of mtf.Tensor, representing the outputs
of the computation.
Returns:
a mtf.LayoutRules
|
[
"Compute",
"layout",
"rules",
"based",
"on",
"a",
"computational",
"graph",
"and",
"mesh",
"shape",
"."
] |
python
|
train
|
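A usage sketch, assuming graph and logits were built elsewhere with Mesh TensorFlow:
from mesh_tensorflow.auto_mtf import layout as auto_layout

# graph: an mtf.Graph; logits: an mtf.Tensor produced within it (built elsewhere)
layout_rules = auto_layout.layout(graph, 'rows:2;cols:4', mtf_outputs=[logits])
print(layout_rules)  # an mtf.LayoutRules assigning tensor dimensions to mesh dimensions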
mdiener/grace
|
grace/py27/cssmin.py
|
https://github.com/mdiener/grace/blob/2dab13a2cf636da5da989904c5885166fc94d36d/grace/py27/cssmin.py#L19-L50
|
def remove_comments(css):
"""Remove all CSS comment blocks."""
iemac = False
preserve = False
comment_start = css.find("/*")
while comment_start >= 0:
# Preserve comments that look like `/*!...*/`.
# Slicing is used to make sure we don"t get an IndexError.
preserve = css[comment_start + 2:comment_start + 3] == "!"
comment_end = css.find("*/", comment_start + 2)
if comment_end < 0:
if not preserve:
css = css[:comment_start]
break
elif comment_end >= (comment_start + 2):
if css[comment_end - 1] == "\\":
# This is an IE Mac-specific comment; leave this one and the
# following one alone.
comment_start = comment_end + 2
iemac = True
elif iemac:
comment_start = comment_end + 2
iemac = False
elif not preserve:
css = css[:comment_start] + css[comment_end + 2:]
else:
comment_start = comment_end + 2
comment_start = css.find("/*", comment_start)
return css
|
[
"def",
"remove_comments",
"(",
"css",
")",
":",
"iemac",
"=",
"False",
"preserve",
"=",
"False",
"comment_start",
"=",
"css",
".",
"find",
"(",
"\"/*\"",
")",
"while",
"comment_start",
">=",
"0",
":",
"# Preserve comments that look like `/*!...*/`.",
"# Slicing is used to make sure we don\"t get an IndexError.",
"preserve",
"=",
"css",
"[",
"comment_start",
"+",
"2",
":",
"comment_start",
"+",
"3",
"]",
"==",
"\"!\"",
"comment_end",
"=",
"css",
".",
"find",
"(",
"\"*/\"",
",",
"comment_start",
"+",
"2",
")",
"if",
"comment_end",
"<",
"0",
":",
"if",
"not",
"preserve",
":",
"css",
"=",
"css",
"[",
":",
"comment_start",
"]",
"break",
"elif",
"comment_end",
">=",
"(",
"comment_start",
"+",
"2",
")",
":",
"if",
"css",
"[",
"comment_end",
"-",
"1",
"]",
"==",
"\"\\\\\"",
":",
"# This is an IE Mac-specific comment; leave this one and the",
"# following one alone.",
"comment_start",
"=",
"comment_end",
"+",
"2",
"iemac",
"=",
"True",
"elif",
"iemac",
":",
"comment_start",
"=",
"comment_end",
"+",
"2",
"iemac",
"=",
"False",
"elif",
"not",
"preserve",
":",
"css",
"=",
"css",
"[",
":",
"comment_start",
"]",
"+",
"css",
"[",
"comment_end",
"+",
"2",
":",
"]",
"else",
":",
"comment_start",
"=",
"comment_end",
"+",
"2",
"comment_start",
"=",
"css",
".",
"find",
"(",
"\"/*\"",
",",
"comment_start",
")",
"return",
"css"
] |
Remove all CSS comment blocks.
|
[
"Remove",
"all",
"CSS",
"comment",
"blocks",
"."
] |
python
|
train
|
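A runnable example of the comment stripper above, showing that /*! comments are preserved:
css = "a{color:red}/* drop me */b{color:blue}/*! keep me */"
print(remove_comments(css))  # a{color:red}b{color:blue}/*! keep me */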
wmayner/pyphi
|
pyphi/subsystem.py
|
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L259-L274
|
def indices2nodes(self, indices):
"""Return |Nodes| for these indices.
Args:
indices (tuple[int]): The indices in question.
Returns:
tuple[Node]: The |Node| objects corresponding to these indices.
Raises:
ValueError: If requested indices are not in the subsystem.
"""
if set(indices) - set(self.node_indices):
raise ValueError(
"`indices` must be a subset of the Subsystem's indices.")
return tuple(self._index2node[n] for n in indices)
|
[
"def",
"indices2nodes",
"(",
"self",
",",
"indices",
")",
":",
"if",
"set",
"(",
"indices",
")",
"-",
"set",
"(",
"self",
".",
"node_indices",
")",
":",
"raise",
"ValueError",
"(",
"\"`indices` must be a subset of the Subsystem's indices.\"",
")",
"return",
"tuple",
"(",
"self",
".",
"_index2node",
"[",
"n",
"]",
"for",
"n",
"in",
"indices",
")"
] |
Return |Nodes| for these indices.
Args:
indices (tuple[int]): The indices in question.
Returns:
tuple[Node]: The |Node| objects corresponding to these indices.
Raises:
ValueError: If requested indices are not in the subsystem.
|
[
"Return",
"|Nodes|",
"for",
"these",
"indices",
"."
] |
python
|
train
|
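A usage sketch, assuming one of pyphi's bundled example networks; constructor signatures vary slightly across pyphi versions:
import pyphi

network = pyphi.examples.basic_network()
subsystem = pyphi.Subsystem(network, (0, 0, 0), network.node_indices)
print(subsystem.indices2nodes((0, 2)))  # the Node objects for indices 0 and 2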
RudolfCardinal/pythonlib
|
cardinal_pythonlib/sqlalchemy/schema.py
|
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/schema.py#L825-L923
|
def convert_sqla_type_for_dialect(
coltype: TypeEngine,
dialect: Dialect,
strip_collation: bool = True,
convert_mssql_timestamp: bool = True,
expand_for_scrubbing: bool = False) -> TypeEngine:
"""
Converts an SQLAlchemy column type from one SQL dialect to another.
Args:
coltype: SQLAlchemy column type in the source dialect
dialect: destination :class:`Dialect`
strip_collation: remove any ``COLLATION`` information?
convert_mssql_timestamp:
since you cannot write to a SQL Server ``TIMESTAMP`` field, setting
this option to ``True`` (the default) converts such types to
something equivalent but writable.
expand_for_scrubbing:
The purpose of expand_for_scrubbing is that, for example, a
``VARCHAR(200)`` field containing one or more instances of
``Jones``, where ``Jones`` is to be replaced with ``[XXXXXX]``,
will get longer (by an unpredictable amount). So, better to expand
to unlimited length.
Returns:
an SQLAlchemy column type instance, in the destination dialect
"""
assert coltype is not None
# noinspection PyUnresolvedReferences
to_mysql = dialect.name == SqlaDialectName.MYSQL
# noinspection PyUnresolvedReferences
to_mssql = dialect.name == SqlaDialectName.MSSQL
typeclass = type(coltype)
# -------------------------------------------------------------------------
# Text
# -------------------------------------------------------------------------
if isinstance(coltype, sqltypes.Enum):
return sqltypes.String(length=coltype.length)
if isinstance(coltype, sqltypes.UnicodeText):
# Unbounded Unicode text.
# Includes derived classes such as mssql.base.NTEXT.
return sqltypes.UnicodeText()
if isinstance(coltype, sqltypes.Text):
# Unbounded text, more generally. (UnicodeText inherits from Text.)
# Includes sqltypes.TEXT.
return sqltypes.Text()
# Everything inheriting from String has a length property, but can be None.
# There are types that can be unlimited in SQL Server, e.g. VARCHAR(MAX)
# and NVARCHAR(MAX), that MySQL needs a length for. (Failure to convert
# gives e.g.: 'NVARCHAR requires a length on dialect mysql'.)
if isinstance(coltype, sqltypes.Unicode):
# Includes NVARCHAR(MAX) in SQL -> NVARCHAR() in SQLAlchemy.
if (coltype.length is None and to_mysql) or expand_for_scrubbing:
return sqltypes.UnicodeText()
# The most general case; will pick up any other string types.
if isinstance(coltype, sqltypes.String):
# Includes VARCHAR(MAX) in SQL -> VARCHAR() in SQLAlchemy
if (coltype.length is None and to_mysql) or expand_for_scrubbing:
return sqltypes.Text()
if strip_collation:
return remove_collation(coltype)
return coltype
# -------------------------------------------------------------------------
# Binary
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# BIT
# -------------------------------------------------------------------------
if typeclass == mssql.base.BIT and to_mysql:
# MySQL BIT objects have a length attribute.
return mysql.base.BIT()
# -------------------------------------------------------------------------
# TIMESTAMP
# -------------------------------------------------------------------------
is_mssql_timestamp = isinstance(coltype, MSSQL_TIMESTAMP)
if is_mssql_timestamp and to_mssql and convert_mssql_timestamp:
# You cannot write explicitly to a TIMESTAMP field in SQL Server; it's
# used for autogenerated values only.
# - http://stackoverflow.com/questions/10262426/sql-server-cannot-insert-an-explicit-value-into-a-timestamp-column # noqa
# - https://social.msdn.microsoft.com/Forums/sqlserver/en-US/5167204b-ef32-4662-8e01-00c9f0f362c2/how-to-tranfer-a-column-with-timestamp-datatype?forum=transactsql # noqa
# ... suggesting BINARY(8) to store the value.
# MySQL is more helpful:
# - http://stackoverflow.com/questions/409286/should-i-use-field-datetime-or-timestamp # noqa
return mssql.base.BINARY(8)
# -------------------------------------------------------------------------
# Some other type
# -------------------------------------------------------------------------
return coltype
|
[
"def",
"convert_sqla_type_for_dialect",
"(",
"coltype",
":",
"TypeEngine",
",",
"dialect",
":",
"Dialect",
",",
"strip_collation",
":",
"bool",
"=",
"True",
",",
"convert_mssql_timestamp",
":",
"bool",
"=",
"True",
",",
"expand_for_scrubbing",
":",
"bool",
"=",
"False",
")",
"->",
"TypeEngine",
":",
"assert",
"coltype",
"is",
"not",
"None",
"# noinspection PyUnresolvedReferences",
"to_mysql",
"=",
"dialect",
".",
"name",
"==",
"SqlaDialectName",
".",
"MYSQL",
"# noinspection PyUnresolvedReferences",
"to_mssql",
"=",
"dialect",
".",
"name",
"==",
"SqlaDialectName",
".",
"MSSQL",
"typeclass",
"=",
"type",
"(",
"coltype",
")",
"# -------------------------------------------------------------------------",
"# Text",
"# -------------------------------------------------------------------------",
"if",
"isinstance",
"(",
"coltype",
",",
"sqltypes",
".",
"Enum",
")",
":",
"return",
"sqltypes",
".",
"String",
"(",
"length",
"=",
"coltype",
".",
"length",
")",
"if",
"isinstance",
"(",
"coltype",
",",
"sqltypes",
".",
"UnicodeText",
")",
":",
"# Unbounded Unicode text.",
"# Includes derived classes such as mssql.base.NTEXT.",
"return",
"sqltypes",
".",
"UnicodeText",
"(",
")",
"if",
"isinstance",
"(",
"coltype",
",",
"sqltypes",
".",
"Text",
")",
":",
"# Unbounded text, more generally. (UnicodeText inherits from Text.)",
"# Includes sqltypes.TEXT.",
"return",
"sqltypes",
".",
"Text",
"(",
")",
"# Everything inheriting from String has a length property, but can be None.",
"# There are types that can be unlimited in SQL Server, e.g. VARCHAR(MAX)",
"# and NVARCHAR(MAX), that MySQL needs a length for. (Failure to convert",
"# gives e.g.: 'NVARCHAR requires a length on dialect mysql'.)",
"if",
"isinstance",
"(",
"coltype",
",",
"sqltypes",
".",
"Unicode",
")",
":",
"# Includes NVARCHAR(MAX) in SQL -> NVARCHAR() in SQLAlchemy.",
"if",
"(",
"coltype",
".",
"length",
"is",
"None",
"and",
"to_mysql",
")",
"or",
"expand_for_scrubbing",
":",
"return",
"sqltypes",
".",
"UnicodeText",
"(",
")",
"# The most general case; will pick up any other string types.",
"if",
"isinstance",
"(",
"coltype",
",",
"sqltypes",
".",
"String",
")",
":",
"# Includes VARCHAR(MAX) in SQL -> VARCHAR() in SQLAlchemy",
"if",
"(",
"coltype",
".",
"length",
"is",
"None",
"and",
"to_mysql",
")",
"or",
"expand_for_scrubbing",
":",
"return",
"sqltypes",
".",
"Text",
"(",
")",
"if",
"strip_collation",
":",
"return",
"remove_collation",
"(",
"coltype",
")",
"return",
"coltype",
"# -------------------------------------------------------------------------",
"# Binary",
"# -------------------------------------------------------------------------",
"# -------------------------------------------------------------------------",
"# BIT",
"# -------------------------------------------------------------------------",
"if",
"typeclass",
"==",
"mssql",
".",
"base",
".",
"BIT",
"and",
"to_mysql",
":",
"# MySQL BIT objects have a length attribute.",
"return",
"mysql",
".",
"base",
".",
"BIT",
"(",
")",
"# -------------------------------------------------------------------------",
"# TIMESTAMP",
"# -------------------------------------------------------------------------",
"is_mssql_timestamp",
"=",
"isinstance",
"(",
"coltype",
",",
"MSSQL_TIMESTAMP",
")",
"if",
"is_mssql_timestamp",
"and",
"to_mssql",
"and",
"convert_mssql_timestamp",
":",
"# You cannot write explicitly to a TIMESTAMP field in SQL Server; it's",
"# used for autogenerated values only.",
"# - http://stackoverflow.com/questions/10262426/sql-server-cannot-insert-an-explicit-value-into-a-timestamp-column # noqa",
"# - https://social.msdn.microsoft.com/Forums/sqlserver/en-US/5167204b-ef32-4662-8e01-00c9f0f362c2/how-to-tranfer-a-column-with-timestamp-datatype?forum=transactsql # noqa",
"# ... suggesting BINARY(8) to store the value.",
"# MySQL is more helpful:",
"# - http://stackoverflow.com/questions/409286/should-i-use-field-datetime-or-timestamp # noqa",
"return",
"mssql",
".",
"base",
".",
"BINARY",
"(",
"8",
")",
"# -------------------------------------------------------------------------",
"# Some other type",
"# -------------------------------------------------------------------------",
"return",
"coltype"
] |
Converts an SQLAlchemy column type from one SQL dialect to another.
Args:
coltype: SQLAlchemy column type in the source dialect
dialect: destination :class:`Dialect`
strip_collation: remove any ``COLLATION`` information?
convert_mssql_timestamp:
since you cannot write to a SQL Server ``TIMESTAMP`` field, setting
this option to ``True`` (the default) converts such types to
something equivalent but writable.
expand_for_scrubbing:
The purpose of expand_for_scrubbing is that, for example, a
``VARCHAR(200)`` field containing one or more instances of
``Jones``, where ``Jones`` is to be replaced with ``[XXXXXX]``,
will get longer (by an unpredictable amount). So, better to expand
to unlimited length.
Returns:
an SQLAlchemy column type instance, in the destination dialect
|
[
"Converts",
"an",
"SQLAlchemy",
"column",
"type",
"from",
"one",
"SQL",
"dialect",
"to",
"another",
"."
] |
python
|
train
|
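A usage sketch, assuming the function is importable as below; the unbounded Unicode type stands in for an NVARCHAR(MAX) column reflected from SQL Server:
from cardinal_pythonlib.sqlalchemy.schema import convert_sqla_type_for_dialect
from sqlalchemy import Unicode
from sqlalchemy.dialects import mysql

coltype = Unicode()  # length is None, as for NVARCHAR(MAX)
converted = convert_sqla_type_for_dialect(coltype, mysql.dialect())
print(type(converted).__name__)  # UnicodeText: MySQL would otherwise require a length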
barrust/pyspellchecker
|
spellchecker/spellchecker.py
|
https://github.com/barrust/pyspellchecker/blob/fa96024c0cdeba99e10e11060d5fd7aba796b271/spellchecker/spellchecker.py#L413-L420
|
def remove_words(self, words):
""" Remove a list of words from the word frequency list
Args:
words (list): The list of words to remove """
for word in words:
self._dictionary.pop(word.lower())
self._update_dictionary()
|
[
"def",
"remove_words",
"(",
"self",
",",
"words",
")",
":",
"for",
"word",
"in",
"words",
":",
"self",
".",
"_dictionary",
".",
"pop",
"(",
"word",
".",
"lower",
"(",
")",
")",
"self",
".",
"_update_dictionary",
"(",
")"
] |
Remove a list of words from the word frequency list
Args:
words (list): The list of words to remove
|
[
"Remove",
"a",
"list",
"of",
"words",
"from",
"the",
"word",
"frequency",
"list"
] |
python
|
train
|
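A usage sketch, assuming the word-frequency list is reachable as SpellChecker().word_frequency (per the library's documentation); note that pop raises if a word is absent:
from spellchecker import SpellChecker

spell = SpellChecker()
spell.word_frequency.remove_words(['Apple'])  # words are lower-cased before removal
print(spell.known(['apple']))                 # set(): the word is gone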
huyingxi/Synonyms
|
synonyms/synonyms.py
|
https://github.com/huyingxi/Synonyms/blob/fe7450d51d9ad825fdba86b9377da9dc76ae26a4/synonyms/synonyms.py#L125-L133
|
def _load_w2v(model_file=_f_model, binary=True):
'''
load word2vec model
'''
if not os.path.exists(model_file):
print("os.path : ", os.path)
raise Exception("Model file [%s] does not exist." % model_file)
return KeyedVectors.load_word2vec_format(
model_file, binary=binary, unicode_errors='ignore')
|
[
"def",
"_load_w2v",
"(",
"model_file",
"=",
"_f_model",
",",
"binary",
"=",
"True",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"model_file",
")",
":",
"print",
"(",
"\"os.path : \"",
",",
"os",
".",
"path",
")",
"raise",
"Exception",
"(",
"\"Model file [%s] does not exist.\"",
"%",
"model_file",
")",
"return",
"KeyedVectors",
".",
"load_word2vec_format",
"(",
"model_file",
",",
"binary",
"=",
"binary",
",",
"unicode_errors",
"=",
"'ignore'",
")"
] |
load word2vec model
|
[
"load",
"word2vec",
"model"
] |
python
|
train
|
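A sketch of what this private loader wraps; the gensim call is real, but the model path is a placeholder:
from gensim.models import KeyedVectors

kv = KeyedVectors.load_word2vec_format('words.vector.gz', binary=True,
                                        unicode_errors='ignore')
print(kv.most_similar('人脸', topn=3))  # nearest neighbours of a query word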
Galarzaa90/tibia.py
|
tibiapy/utils.py
|
https://github.com/Galarzaa90/tibia.py/blob/02ba1a8f1e18177ef5c7dcd44affc8d761d59e12/tibiapy/utils.py#L242-L262
|
def parse_tibiacom_content(content, *, html_class="BoxContent", tag="div", builder="lxml"):
"""Parses HTML content from Tibia.com into a BeautifulSoup object.
Parameters
----------
content: :class:`str`
The raw HTML content from Tibia.com
html_class: :class:`str`
The HTML class of the parsed element. The default value is ``BoxContent``.
tag: :class:`str`
The HTML tag select. The default value is ``div``.
builder: :class:`str`
The builder to use. The default value is ``lxml``.
Returns
-------
:class:`bs4.BeautifulSoup`, optional
The parsed content.
"""
return bs4.BeautifulSoup(content.replace('ISO-8859-1', 'utf-8'), builder,
parse_only=bs4.SoupStrainer(tag, class_=html_class))
|
[
"def",
"parse_tibiacom_content",
"(",
"content",
",",
"*",
",",
"html_class",
"=",
"\"BoxContent\"",
",",
"tag",
"=",
"\"div\"",
",",
"builder",
"=",
"\"lxml\"",
")",
":",
"return",
"bs4",
".",
"BeautifulSoup",
"(",
"content",
".",
"replace",
"(",
"'ISO-8859-1'",
",",
"'utf-8'",
")",
",",
"builder",
",",
"parse_only",
"=",
"bs4",
".",
"SoupStrainer",
"(",
"tag",
",",
"class_",
"=",
"html_class",
")",
")"
] |
Parses HTML content from Tibia.com into a BeautifulSoup object.
Parameters
----------
content: :class:`str`
The raw HTML content from Tibia.com
html_class: :class:`str`
The HTML class of the parsed element. The default value is ``BoxContent``.
tag: :class:`str`
The HTML tag select. The default value is ``div``.
builder: :class:`str`
The builder to use. The default value is ``lxml``.
Returns
-------
:class:`bs4.BeautifulSoup`, optional
The parsed content.
|
[
"Parses",
"HTML",
"content",
"from",
"Tibia",
".",
"com",
"into",
"a",
"BeautifulSoup",
"object",
"."
] |
python
|
train
|
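A runnable sketch of the same strainer-based parsing on a literal HTML string; assumes bs4 and lxml are installed:
import bs4

html = '<div class="BoxContent"><b>Hello</b></div><div class="Other">ignored</div>'
parsed = bs4.BeautifulSoup(html.replace('ISO-8859-1', 'utf-8'), 'lxml',
                           parse_only=bs4.SoupStrainer('div', class_='BoxContent'))
print(parsed.find('b').text)  # Hello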
arcticfoxnv/slackminion
|
slackminion/dispatcher.py
|
https://github.com/arcticfoxnv/slackminion/blob/62ea77aba5ac5ba582793e578a379a76f7d26cdb/slackminion/dispatcher.py#L77-L86
|
def _ignore_event(self, message):
"""
message_replied event is not truly a message event and does not have a message.text
don't process such events
commands may not be idempotent, so ignore message_changed events.
"""
if hasattr(message, 'subtype') and message.subtype in self.ignored_events:
return True
return False
|
[
"def",
"_ignore_event",
"(",
"self",
",",
"message",
")",
":",
"if",
"hasattr",
"(",
"message",
",",
"'subtype'",
")",
"and",
"message",
".",
"subtype",
"in",
"self",
".",
"ignored_events",
":",
"return",
"True",
"return",
"False"
] |
message_replied event is not truly a message event and does not have a message.text
don't process such events
commands may not be idempotent, so ignore message_changed events.
|
[
"message_replied",
"event",
"is",
"not",
"truly",
"a",
"message",
"event",
"and",
"does",
"not",
"have",
"a",
"message",
".",
"text",
"don",
"t",
"process",
"such",
"events"
] |
python
|
valid
|