Dataset schema (each record below lists these fields in this order, with `|` as the field separator):

| column | type | length / distinct values |
|---|---|---|
| nwo | string | 5–106 chars |
| sha | string | 40 chars |
| path | string | 4–174 chars |
| language | string | 1 distinct value |
| identifier | string | 1–140 chars |
| parameters | string | 0–87.7k chars |
| argument_list | string | 1 distinct value |
| return_statement | string | 0–426k chars |
| docstring | string | 0–64.3k chars |
| docstring_summary | string | 0–26.3k chars |
| docstring_tokens | list | |
| function | string | 18–4.83M chars |
| function_tokens | list | |
| url | string | 83–304 chars |
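The rows that follow are easiest to consume programmatically. Below is a minimal sketch, assuming the records have been exported as JSON Lines with one object per record and the fields named in the schema above; the file name `functions.jsonl` is hypothetical.

```python
import json

# Hypothetical export of the records below: one JSON object per line,
# with the fields listed in the schema above.
DUMP_PATH = "functions.jsonl"

def iter_records(path):
    """Yield one dict per dataset record."""
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            line = line.strip()
            if line:
                yield json.loads(line)

if __name__ == "__main__":
    for record in iter_records(DUMP_PATH):
        # Each record pairs a function's source with its docstring and origin.
        print(record["nwo"], record["identifier"], record["url"])
```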
Exa-Networks/exaproxy
|
5dc732760d811fd4986f83e6dd78d29228927aec
|
lib/exaproxy/network/async/interface.py
|
python
|
IPoller.clearRead
|
(self, name)
|
Flush all sockets currently watched for the event
|
Flush all sockets currently watched for the event
|
[
"Flush",
"all",
"sockets",
"currently",
"watched",
"for",
"the",
"event"
] |
def clearRead(self, name):
"""Flush all sockets currently watched for the event"""
raise NotImplementedError
|
[
"def",
"clearRead",
"(",
"self",
",",
"name",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/Exa-Networks/exaproxy/blob/5dc732760d811fd4986f83e6dd78d29228927aec/lib/exaproxy/network/async/interface.py#L34-L36
|
||
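For orientation, here is the record above (`IPoller.clearRead`) re-expressed as a Python dict. This is a sketch only: the values are copied verbatim from the row, and the empty `argument_list` and `return_statement` fields are omitted.

```python
# The first record ("IPoller.clearRead"), written out as a plain dict.
record = {
    "nwo": "Exa-Networks/exaproxy",
    "sha": "5dc732760d811fd4986f83e6dd78d29228927aec",
    "path": "lib/exaproxy/network/async/interface.py",
    "language": "python",
    "identifier": "IPoller.clearRead",
    "parameters": "(self, name)",
    "docstring": "Flush all sockets currently watched for the event",
    "docstring_summary": "Flush all sockets currently watched for the event",
    "docstring_tokens": ["Flush", "all", "sockets", "currently", "watched",
                         "for", "the", "event"],
    "function": 'def clearRead(self, name):\n'
                '    """Flush all sockets currently watched for the event"""\n'
                '    raise NotImplementedError',
    "function_tokens": ["def", "clearRead", "(", "self", ",", "name", ")", ":",
                        "raise", "NotImplementedError"],
    "url": "https://github.com/Exa-Networks/exaproxy/blob/"
           "5dc732760d811fd4986f83e6dd78d29228927aec/"
           "lib/exaproxy/network/async/interface.py#L34-L36",
}
```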
theotherp/nzbhydra
|
4b03d7f769384b97dfc60dade4806c0fc987514e
|
libs/bs4/element.py
|
python
|
Tag._all_strings
|
(self, strip=False, types=(NavigableString, CData))
|
Yield all strings of certain classes, possibly stripping them.
By default, yields only NavigableString and CData objects. So
no comments, processing instructions, etc.
|
Yield all strings of certain classes, possibly stripping them.
|
[
"Yield",
"all",
"strings",
"of",
"certain",
"classes",
"possibly",
"stripping",
"them",
"."
] |
def _all_strings(self, strip=False, types=(NavigableString, CData)):
"""Yield all strings of certain classes, possibly stripping them.
By default, yields only NavigableString and CData objects. So
no comments, processing instructions, etc.
"""
for descendant in self.descendants:
if (
(types is None and not isinstance(descendant, NavigableString))
or
(types is not None and type(descendant) not in types)):
continue
if strip:
descendant = descendant.strip()
if len(descendant) == 0:
continue
yield descendant
|
[
"def",
"_all_strings",
"(",
"self",
",",
"strip",
"=",
"False",
",",
"types",
"=",
"(",
"NavigableString",
",",
"CData",
")",
")",
":",
"for",
"descendant",
"in",
"self",
".",
"descendants",
":",
"if",
"(",
"(",
"types",
"is",
"None",
"and",
"not",
"isinstance",
"(",
"descendant",
",",
"NavigableString",
")",
")",
"or",
"(",
"types",
"is",
"not",
"None",
"and",
"type",
"(",
"descendant",
")",
"not",
"in",
"types",
")",
")",
":",
"continue",
"if",
"strip",
":",
"descendant",
"=",
"descendant",
".",
"strip",
"(",
")",
"if",
"len",
"(",
"descendant",
")",
"==",
"0",
":",
"continue",
"yield",
"descendant"
] |
https://github.com/theotherp/nzbhydra/blob/4b03d7f769384b97dfc60dade4806c0fc987514e/libs/bs4/element.py#L913-L929
|
||
chb/indivo_server
|
9826c67ab17d7fc0df935db327344fb0c7d237e5
|
indivo/views/shares/shares_pha.py
|
python
|
carenet_apps_create
|
(request, carenet, pha)
|
return DONE
|
Add an app to a carenet
Read/write capability is determined by the user who uses the app,
not by the app itself, so no permissions are assigned here.
Apps can only be added to carenets if they have already been shared with
the carenet's record (i.e. the user has agreed to use the app).
Autonomous apps cannot be added to carenets, as they require a full-record
scope.
Will return :http:statuscode:`200` on success, :http:statuscode:`404` if
there is no existing share between *pha* and *carenet's* record, or
:http:statuscode:`400` if *pha* is autonomous.
|
Add an app to a carenet
|
[
"Add",
"an",
"app",
"to",
"a",
"carenet"
] |
def carenet_apps_create(request, carenet, pha):
""" Add an app to a carenet
Read/write capability is determined by the user who uses the app,
not by the app itself, so no permissions are assigned here.
Apps can only be added to carenets if they have already been shared with
the carenet's record (i.e. the user has agreed to use the app).
Autonomous apps cannot be added to carenets, as they require a full-record
scope.
Will return :http:statuscode:`200` on success, :http:statuscode:`404` if
there is no existing share between *pha* and *carenet's* record, or
:http:statuscode:`400` if *pha* is autonomous.
"""
# make sure the PHA already has access to record
try:
pha = carenet.record.pha_shares.get(with_pha__email = pha.email).with_pha
except PHAShare.DoesNotExist:
raise Http404
if not pha.is_autonomous:
CarenetPHA.objects.get_or_create(carenet=carenet, pha=pha)
else:
return HttpResponseBadRequest('Autonomous apps may not be linked to individual carenets: they always access the entire record')
return DONE
|
[
"def",
"carenet_apps_create",
"(",
"request",
",",
"carenet",
",",
"pha",
")",
":",
"# make sure the PHA already has access to record",
"try",
":",
"pha",
"=",
"carenet",
".",
"record",
".",
"pha_shares",
".",
"get",
"(",
"with_pha__email",
"=",
"pha",
".",
"email",
")",
".",
"with_pha",
"except",
"PHAShare",
".",
"DoesNotExist",
":",
"raise",
"Http404",
"if",
"not",
"pha",
".",
"is_autonomous",
":",
"CarenetPHA",
".",
"objects",
".",
"get_or_create",
"(",
"carenet",
"=",
"carenet",
",",
"pha",
"=",
"pha",
")",
"else",
":",
"return",
"HttpResponseBadRequest",
"(",
"'Autonomous apps may not be linked to individual carenets: they always access the entire record'",
")",
"return",
"DONE"
] |
https://github.com/chb/indivo_server/blob/9826c67ab17d7fc0df935db327344fb0c7d237e5/indivo/views/shares/shares_pha.py#L25-L54
|
|
alan-turing-institute/sktime
|
79cc513346b1257a6f3fa8e4ed855b5a2a7de716
|
sktime/benchmarking/metrics.py
|
python
|
AggregateMetric._compute_jackknife_stderr
|
(x)
|
return np.sqrt(n_instances - 1) * np.std(x)
|
Compute standard error of jacknife samples.
References
----------
.. [1] Efron and Stein, (1981), "The jackknife estimate of variance.
|
Compute standard error of jacknife samples.
|
[
"Compute",
"standard",
"error",
"of",
"jacknife",
"samples",
"."
] |
def _compute_jackknife_stderr(x):
"""Compute standard error of jacknife samples.
References
----------
.. [1] Efron and Stein, (1981), "The jackknife estimate of variance.
"""
n_instances = x.shape[0]
# np.sqrt((((n - 1) / n) * np.sum((x - x.mean()) ** 2)))
return np.sqrt(n_instances - 1) * np.std(x)
|
[
"def",
"_compute_jackknife_stderr",
"(",
"x",
")",
":",
"n_instances",
"=",
"x",
".",
"shape",
"[",
"0",
"]",
"# np.sqrt((((n - 1) / n) * np.sum((x - x.mean()) ** 2)))",
"return",
"np",
".",
"sqrt",
"(",
"n_instances",
"-",
"1",
")",
"*",
"np",
".",
"std",
"(",
"x",
")"
] |
https://github.com/alan-turing-institute/sktime/blob/79cc513346b1257a6f3fa8e4ed855b5a2a7de716/sktime/benchmarking/metrics.py#L109-L118
|
|
opherlieber/rltime
|
504c8405fed93aaa3cfe2ebcee7dda064bc5be14
|
rltime/env_wrappers/common.py
|
python
|
make_env_creator
|
(env_type, wrappers=[], imports=[],
max_episode_steps=None, **kwargs)
|
return create
|
Returns a function for creating a given ENV
Args:
env_type: The ENV to use. If it's a string it creates a registered GYM
env, otherwise it should be a python callable used to create the
ENV (e.g. a python function or class)
wrappers: A list of wrappers to apply to the ENV. Each wrapper should
have the format {'type' : type, 'args' : {}} where 'type' is a
callable (e.g. function or class) and args are optional arguments
to the callable (In addition to the env being wrapped)
imports: Python modules to import before creating the ENV (e.g.
'gym_ple', 'retro'), for example these modules may register the ENV
with gym
max_episode_steps: If set configures/overrides the max steps for each
episode
kwargs: Additional args which will be passed to the env creation itself
(Valid only if env_type is a callable and not a string)
Returns: A function which creates a new instance of the ENV
|
Returns a function for creating a given ENV
|
[
"Returns",
"a",
"function",
"for",
"creating",
"a",
"given",
"ENV"
] |
def make_env_creator(env_type, wrappers=[], imports=[],
max_episode_steps=None, **kwargs):
"""Returns a function for creating a given ENV
Args:
env_type: The ENV to use. If it's a string it creates a registered GYM
env, otherwise it should be a python callable used to create the
ENV (e.g. a python function or class)
wrappers: A list of wrappers to apply to the ENV. Each wrapper should
have the format {'type' : type, 'args' : {}} where 'type' is a
callable (e.g. function or class) and args are optional arguments
to the callable (In addition to the env being wrapped)
imports: Python modules to import before creating the ENV (e.g.
'gym_ple', 'retro'), for example these modules may register the ENV
with gym
max_episode_steps: If set configures/overrides the max steps for each
episode
kwargs: Additional args which will be passed to the env creation itself
(Valid only if env_type is a callable and not a string)
Returns: A function which creates a new instance of the ENV
"""
def create():
for import_name in imports:
importlib.import_module(import_name)
if isinstance(env_type, str):
assert(not kwargs), "ENV kwargs not supported for gym envs"
env = gym.make(env_type)
elif callable(env_type):
env = env_type(**kwargs)
else:
raise ValueError(
"make_env_creator() expected either a string or "
f"callable for 'env_type', got {type(env_type)}")
# Limit the max steps per episode if requested
if max_episode_steps is not None:
if hasattr(env, "_max_episode_steps"):
# Use the '_max_episode_steps' if available from gym. This is
# to allow increasing the limit for example in cartpole.
# (The TimeLimit option can only decrease the limit)
env._max_episode_steps = max_episode_steps
else:
env = TimeLimit(env, max_episode_steps)
# Always begin with EpisodeTracker so that the training gets the real
# rewards/dones before any additional wrappers process them
env = EpisodeTracker(env)
# Apply all requested wrappers
for wrapper in wrappers:
wrapper_type = wrapper.get("type")
wrapper_args = wrapper.get("args", {})
env = wrapper_type(env, **wrapper_args)
return env
return create
|
[
"def",
"make_env_creator",
"(",
"env_type",
",",
"wrappers",
"=",
"[",
"]",
",",
"imports",
"=",
"[",
"]",
",",
"max_episode_steps",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"create",
"(",
")",
":",
"for",
"import_name",
"in",
"imports",
":",
"importlib",
".",
"import_module",
"(",
"import_name",
")",
"if",
"isinstance",
"(",
"env_type",
",",
"str",
")",
":",
"assert",
"(",
"not",
"kwargs",
")",
",",
"\"ENV kwargs not supported for gym envs\"",
"env",
"=",
"gym",
".",
"make",
"(",
"env_type",
")",
"elif",
"callable",
"(",
"env_type",
")",
":",
"env",
"=",
"env_type",
"(",
"*",
"*",
"kwargs",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"make_env_creator() expected either a string or \"",
"f\"callable for 'env_type', got {type(env_type)}\"",
")",
"# Limit the max steps per episode if requested",
"if",
"max_episode_steps",
"is",
"not",
"None",
":",
"if",
"hasattr",
"(",
"env",
",",
"\"_max_episode_steps\"",
")",
":",
"# Use the '_max_episode_steps' if available from gym. This is",
"# to allow increasing the limit for example in cartpole.",
"# (The TimeLimit option can only decrease the limit)",
"env",
".",
"_max_episode_steps",
"=",
"max_episode_steps",
"else",
":",
"env",
"=",
"TimeLimit",
"(",
"env",
",",
"max_episode_steps",
")",
"# Always begin with EpisodeTracker so that the training gets the real",
"# rewards/dones before any additional wrappers process them",
"env",
"=",
"EpisodeTracker",
"(",
"env",
")",
"# Apply all requested wrappers",
"for",
"wrapper",
"in",
"wrappers",
":",
"wrapper_type",
"=",
"wrapper",
".",
"get",
"(",
"\"type\"",
")",
"wrapper_args",
"=",
"wrapper",
".",
"get",
"(",
"\"args\"",
",",
"{",
"}",
")",
"env",
"=",
"wrapper_type",
"(",
"env",
",",
"*",
"*",
"wrapper_args",
")",
"return",
"env",
"return",
"create"
] |
https://github.com/opherlieber/rltime/blob/504c8405fed93aaa3cfe2ebcee7dda064bc5be14/rltime/env_wrappers/common.py#L419-L474
|
|
CuriousAI/mean-teacher
|
546348ff863c998c26be4339021425df973b4a36
|
tensorflow/experiments/run_context.py
|
python
|
TrainLog.save
|
(self)
|
[] |
def save(self):
df = self._as_dataframe()
df.to_msgpack(self.log_file_path, compress='zlib')
|
[
"def",
"save",
"(",
"self",
")",
":",
"df",
"=",
"self",
".",
"_as_dataframe",
"(",
")",
"df",
".",
"to_msgpack",
"(",
"self",
".",
"log_file_path",
",",
"compress",
"=",
"'zlib'",
")"
] |
https://github.com/CuriousAI/mean-teacher/blob/546348ff863c998c26be4339021425df973b4a36/tensorflow/experiments/run_context.py#L36-L38
|
||||
RexYing/diffpool
|
8dfb97cf60c2376ac804761837b9966f1d302acb
|
encoders.py
|
python
|
GcnEncoderGraph.loss
|
(self, pred, label, type='softmax')
|
[] |
def loss(self, pred, label, type='softmax'):
# softmax + CE
if type == 'softmax':
return F.cross_entropy(pred, label, reduction='mean')
elif type == 'margin':
batch_size = pred.size()[0]
label_onehot = torch.zeros(batch_size, self.label_dim).long().cuda()
label_onehot.scatter_(1, label.view(-1,1), 1)
return torch.nn.MultiLabelMarginLoss()(pred, label_onehot)
|
[
"def",
"loss",
"(",
"self",
",",
"pred",
",",
"label",
",",
"type",
"=",
"'softmax'",
")",
":",
"# softmax + CE",
"if",
"type",
"==",
"'softmax'",
":",
"return",
"F",
".",
"cross_entropy",
"(",
"pred",
",",
"label",
",",
"reduction",
"=",
"'mean'",
")",
"elif",
"type",
"==",
"'margin'",
":",
"batch_size",
"=",
"pred",
".",
"size",
"(",
")",
"[",
"0",
"]",
"label_onehot",
"=",
"torch",
".",
"zeros",
"(",
"batch_size",
",",
"self",
".",
"label_dim",
")",
".",
"long",
"(",
")",
".",
"cuda",
"(",
")",
"label_onehot",
".",
"scatter_",
"(",
"1",
",",
"label",
".",
"view",
"(",
"-",
"1",
",",
"1",
")",
",",
"1",
")",
"return",
"torch",
".",
"nn",
".",
"MultiLabelMarginLoss",
"(",
")",
"(",
"pred",
",",
"label_onehot",
")"
] |
https://github.com/RexYing/diffpool/blob/8dfb97cf60c2376ac804761837b9966f1d302acb/encoders.py#L190-L198
|
||||
daniilidis-group/neural_renderer
|
b2a1e6ce16a54f94f26f86fe1dc3814637e15251
|
neural_renderer/rasterize.py
|
python
|
rasterize
|
(
faces,
textures,
image_size=DEFAULT_IMAGE_SIZE,
anti_aliasing=DEFAULT_ANTI_ALIASING,
near=DEFAULT_NEAR,
far=DEFAULT_FAR,
eps=DEFAULT_EPS,
background_color=DEFAULT_BACKGROUND_COLOR,
)
|
return rasterize_rgbad(
faces, textures, image_size, anti_aliasing, near, far, eps, background_color, True, False, False)['rgb']
|
Generate RGB images from faces and textures.
Args:
faces: see `rasterize_rgbad`.
textures: see `rasterize_rgbad`.
image_size: see `rasterize_rgbad`.
anti_aliasing: see `rasterize_rgbad`.
near: see `rasterize_rgbad`.
far: see `rasterize_rgbad`.
eps: see `rasterize_rgbad`.
background_color: see `rasterize_rgbad`.
Returns:
~torch.Tensor: RGB images. The shape is [batch size, 3, image_size, image_size].
|
Generate RGB images from faces and textures.
|
[
"Generate",
"RGB",
"images",
"from",
"faces",
"and",
"textures",
"."
] |
def rasterize(
faces,
textures,
image_size=DEFAULT_IMAGE_SIZE,
anti_aliasing=DEFAULT_ANTI_ALIASING,
near=DEFAULT_NEAR,
far=DEFAULT_FAR,
eps=DEFAULT_EPS,
background_color=DEFAULT_BACKGROUND_COLOR,
):
"""
Generate RGB images from faces and textures.
Args:
faces: see `rasterize_rgbad`.
textures: see `rasterize_rgbad`.
image_size: see `rasterize_rgbad`.
anti_aliasing: see `rasterize_rgbad`.
near: see `rasterize_rgbad`.
far: see `rasterize_rgbad`.
eps: see `rasterize_rgbad`.
background_color: see `rasterize_rgbad`.
Returns:
~torch.Tensor: RGB images. The shape is [batch size, 3, image_size, image_size].
"""
return rasterize_rgbad(
faces, textures, image_size, anti_aliasing, near, far, eps, background_color, True, False, False)['rgb']
|
[
"def",
"rasterize",
"(",
"faces",
",",
"textures",
",",
"image_size",
"=",
"DEFAULT_IMAGE_SIZE",
",",
"anti_aliasing",
"=",
"DEFAULT_ANTI_ALIASING",
",",
"near",
"=",
"DEFAULT_NEAR",
",",
"far",
"=",
"DEFAULT_FAR",
",",
"eps",
"=",
"DEFAULT_EPS",
",",
"background_color",
"=",
"DEFAULT_BACKGROUND_COLOR",
",",
")",
":",
"return",
"rasterize_rgbad",
"(",
"faces",
",",
"textures",
",",
"image_size",
",",
"anti_aliasing",
",",
"near",
",",
"far",
",",
"eps",
",",
"background_color",
",",
"True",
",",
"False",
",",
"False",
")",
"[",
"'rgb'",
"]"
] |
https://github.com/daniilidis-group/neural_renderer/blob/b2a1e6ce16a54f94f26f86fe1dc3814637e15251/neural_renderer/rasterize.py#L337-L365
|
|
dropbox/pygerduty
|
ef0f6c64737d38bbd1cb709d434595b1e2892d72
|
pygerduty/__init__.py
|
python
|
PagerDuty.trigger_incident
|
(self, service_key, description,
incident_key=None, details=None,
client=None, client_url=None, contexts=None)
|
return self.create_event(service_key, description, "trigger",
details, incident_key,
client=client, client_url=client_url, contexts=contexts)
|
Report a new or ongoing problem. When PagerDuty receives a trigger,
it will either open a new incident, or add a new log entry to an
existing incident.
|
Report a new or ongoing problem. When PagerDuty receives a trigger,
it will either open a new incident, or add a new log entry to an
existing incident.
|
[
"Report",
"a",
"new",
"or",
"ongoing",
"problem",
".",
"When",
"PagerDuty",
"receives",
"a",
"trigger",
"it",
"will",
"either",
"open",
"a",
"new",
"incident",
"or",
"add",
"a",
"new",
"log",
"entry",
"to",
"an",
"existing",
"incident",
"."
] |
def trigger_incident(self, service_key, description,
incident_key=None, details=None,
client=None, client_url=None, contexts=None):
""" Report a new or ongoing problem. When PagerDuty receives a trigger,
it will either open a new incident, or add a new log entry to an
existing incident.
"""
return self.create_event(service_key, description, "trigger",
details, incident_key,
client=client, client_url=client_url, contexts=contexts)
|
[
"def",
"trigger_incident",
"(",
"self",
",",
"service_key",
",",
"description",
",",
"incident_key",
"=",
"None",
",",
"details",
"=",
"None",
",",
"client",
"=",
"None",
",",
"client_url",
"=",
"None",
",",
"contexts",
"=",
"None",
")",
":",
"return",
"self",
".",
"create_event",
"(",
"service_key",
",",
"description",
",",
"\"trigger\"",
",",
"details",
",",
"incident_key",
",",
"client",
"=",
"client",
",",
"client_url",
"=",
"client_url",
",",
"contexts",
"=",
"contexts",
")"
] |
https://github.com/dropbox/pygerduty/blob/ef0f6c64737d38bbd1cb709d434595b1e2892d72/pygerduty/__init__.py#L583-L593
|
|
llSourcell/AI_Artist
|
3038c06c2e389b9c919c881c9a169efe2fd7810e
|
lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/sjisprober.py
|
python
|
SJISProber.reset
|
(self)
|
[] |
def reset(self):
MultiByteCharSetProber.reset(self)
self._mContextAnalyzer.reset()
|
[
"def",
"reset",
"(",
"self",
")",
":",
"MultiByteCharSetProber",
".",
"reset",
"(",
"self",
")",
"self",
".",
"_mContextAnalyzer",
".",
"reset",
"(",
")"
] |
https://github.com/llSourcell/AI_Artist/blob/3038c06c2e389b9c919c881c9a169efe2fd7810e/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/sjisprober.py#L45-L47
|
||||
osmr/imgclsmob
|
f2993d3ce73a2f7ddba05da3891defb08547d504
|
pytorch/pytorchcv/models/drn.py
|
python
|
drnd38
|
(**kwargs)
|
return get_drn(blocks=38, simplified=True, model_name="drnd38", **kwargs)
|
DRN-D-38 model from 'Dilated Residual Networks,' https://arxiv.org/abs/1705.09914.
Parameters:
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.torch/models'
Location for keeping the model parameters.
|
DRN-D-38 model from 'Dilated Residual Networks,' https://arxiv.org/abs/1705.09914.
|
[
"DRN",
"-",
"D",
"-",
"38",
"model",
"from",
"Dilated",
"Residual",
"Networks",
"https",
":",
"//",
"arxiv",
".",
"org",
"/",
"abs",
"/",
"1705",
".",
"09914",
"."
] |
def drnd38(**kwargs):
"""
DRN-D-38 model from 'Dilated Residual Networks,' https://arxiv.org/abs/1705.09914.
Parameters:
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
root : str, default '~/.torch/models'
Location for keeping the model parameters.
"""
return get_drn(blocks=38, simplified=True, model_name="drnd38", **kwargs)
|
[
"def",
"drnd38",
"(",
"*",
"*",
"kwargs",
")",
":",
"return",
"get_drn",
"(",
"blocks",
"=",
"38",
",",
"simplified",
"=",
"True",
",",
"model_name",
"=",
"\"drnd38\"",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/osmr/imgclsmob/blob/f2993d3ce73a2f7ddba05da3891defb08547d504/pytorch/pytorchcv/models/drn.py#L544-L555
|
|
ztosec/hunter
|
4ee5cca8dc5fc5d7e631e935517bd0f493c30a37
|
HunterCelery/parser/base_traffic_parser.py
|
python
|
BaseTrafficParser._get_json_parameter
|
(str)
|
return result
|
str1 = '{"name":{"pass": {"bb": 12222, "aa": {"hello": "xxx"}}}, "hello": "ssss"}'
str2 = ```
{"video":{"id":"29BA6ACE7A9427489C33DC5901307461","title":"体验课01","desp":"","tags":" ","duration":503,"category":"07AD1E11DBE6FDFC","image":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0.jpg","imageindex":0,"image-alternate":[{"index":0,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/0.jpg"},{"index":1,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/1.jpg"},{"index":2,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/2.jpg"},{"index":3,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/3.jpg"}]}}
```
str3 = '{"name":{"pass": [{"bb":"xxx", "aaa": "bb"}, {"bb":"xxx34444444", "aaa": "bb"}]}, "hello": "ssss"}'
str = '{"name":[{"bb":"xxx"}]}'
str4 = '{"name":"chenming","whoamo":"11123333"}'
递归解决json解析问题,解析多重json
:param str:
:return:
|
str1 = '{"name":{"pass": {"bb": 12222, "aa": {"hello": "xxx"}}}, "hello": "ssss"}'
str2 = ```
{"video":{"id":"29BA6ACE7A9427489C33DC5901307461","title":"体验课01","desp":"","tags":" ","duration":503,"category":"07AD1E11DBE6FDFC","image":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0.jpg","imageindex":0,"image-alternate":[{"index":0,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/0.jpg"},{"index":1,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/1.jpg"},{"index":2,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/2.jpg"},{"index":3,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/3.jpg"}]}}
```
str3 = '{"name":{"pass": [{"bb":"xxx", "aaa": "bb"}, {"bb":"xxx34444444", "aaa": "bb"}]}, "hello": "ssss"}'
str = '{"name":[{"bb":"xxx"}]}'
str4 = '{"name":"chenming","whoamo":"11123333"}'
递归解决json解析问题,解析多重json
:param str:
:return:
|
[
"str1",
"=",
"{",
"name",
":",
"{",
"pass",
":",
"{",
"bb",
":",
"12222",
"aa",
":",
"{",
"hello",
":",
"xxx",
"}}}",
"hello",
":",
"ssss",
"}",
"str2",
"=",
"{",
"video",
":",
"{",
"id",
":",
"29BA6ACE7A9427489C33DC5901307461",
"title",
":",
"体验课01",
"desp",
":",
"tags",
":",
"duration",
":",
"503",
"category",
":",
"07AD1E11DBE6FDFC",
"image",
":",
"http",
":",
"//",
"2",
".",
"img",
".",
"bokecc",
".",
"com",
"/",
"comimage",
"/",
"0DD1F081022C163E",
"/",
"2016",
"-",
"03",
"-",
"09",
"/",
"29BA6ACE7A9427489C33DC5901307461",
"-",
"0",
".",
"jpg",
"imageindex",
":",
"0",
"image",
"-",
"alternate",
":",
"[",
"{",
"index",
":",
"0",
"url",
":",
"http",
":",
"//",
"2",
".",
"img",
".",
"bokecc",
".",
"com",
"/",
"comimage",
"/",
"0DD1F081022C163E",
"/",
"2016",
"-",
"03",
"-",
"09",
"/",
"29BA6ACE7A9427489C33DC5901307461",
"-",
"0",
"/",
"0",
".",
"jpg",
"}",
"{",
"index",
":",
"1",
"url",
":",
"http",
":",
"//",
"2",
".",
"img",
".",
"bokecc",
".",
"com",
"/",
"comimage",
"/",
"0DD1F081022C163E",
"/",
"2016",
"-",
"03",
"-",
"09",
"/",
"29BA6ACE7A9427489C33DC5901307461",
"-",
"0",
"/",
"1",
".",
"jpg",
"}",
"{",
"index",
":",
"2",
"url",
":",
"http",
":",
"//",
"2",
".",
"img",
".",
"bokecc",
".",
"com",
"/",
"comimage",
"/",
"0DD1F081022C163E",
"/",
"2016",
"-",
"03",
"-",
"09",
"/",
"29BA6ACE7A9427489C33DC5901307461",
"-",
"0",
"/",
"2",
".",
"jpg",
"}",
"{",
"index",
":",
"3",
"url",
":",
"http",
":",
"//",
"2",
".",
"img",
".",
"bokecc",
".",
"com",
"/",
"comimage",
"/",
"0DD1F081022C163E",
"/",
"2016",
"-",
"03",
"-",
"09",
"/",
"29BA6ACE7A9427489C33DC5901307461",
"-",
"0",
"/",
"3",
".",
"jpg",
"}",
"]",
"}}",
"str3",
"=",
"{",
"name",
":",
"{",
"pass",
":",
"[",
"{",
"bb",
":",
"xxx",
"aaa",
":",
"bb",
"}",
"{",
"bb",
":",
"xxx34444444",
"aaa",
":",
"bb",
"}",
"]",
"}",
"hello",
":",
"ssss",
"}",
"str",
"=",
"{",
"name",
":",
"[",
"{",
"bb",
":",
"xxx",
"}",
"]",
"}",
"str4",
"=",
"{",
"name",
":",
"chenming",
"whoamo",
":",
"11123333",
"}",
"递归解决json解析问题,解析多重json",
":",
"param",
"str",
":",
":",
"return",
":"
] |
def _get_json_parameter(str):
"""
str1 = '{"name":{"pass": {"bb": 12222, "aa": {"hello": "xxx"}}}, "hello": "ssss"}'
str2 = ```
{"video":{"id":"29BA6ACE7A9427489C33DC5901307461","title":"体验课01","desp":"","tags":" ","duration":503,"category":"07AD1E11DBE6FDFC","image":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0.jpg","imageindex":0,"image-alternate":[{"index":0,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/0.jpg"},{"index":1,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/1.jpg"},{"index":2,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/2.jpg"},{"index":3,"url":"http://2.img.bokecc.com/comimage/0DD1F081022C163E/2016-03-09/29BA6ACE7A9427489C33DC5901307461-0/3.jpg"}]}}
```
str3 = '{"name":{"pass": [{"bb":"xxx", "aaa": "bb"}, {"bb":"xxx34444444", "aaa": "bb"}]}, "hello": "ssss"}'
str = '{"name":[{"bb":"xxx"}]}'
str4 = '{"name":"chenming","whoamo":"11123333"}'
递归解决json解析问题,解析多重json
:param str:
:return:
"""
result = {}
temp_jsons = BaseTrafficParser.loads(str)
if temp_jsons is not None:
if isinstance(temp_jsons, list):
temp_result = dict()
temp_result_list = list()
for temp_json in temp_jsons:
BaseTrafficParser.set_type(temp_result, temp_json)
temp_result_list.append(temp_result)
return temp_result_list
else:
BaseTrafficParser._set_type(result, temp_jsons)
return result
return result
|
[
"def",
"_get_json_parameter",
"(",
"str",
")",
":",
"result",
"=",
"{",
"}",
"temp_jsons",
"=",
"BaseTrafficParser",
".",
"loads",
"(",
"str",
")",
"if",
"temp_jsons",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"temp_jsons",
",",
"list",
")",
":",
"temp_result",
"=",
"dict",
"(",
")",
"temp_result_list",
"=",
"list",
"(",
")",
"for",
"temp_json",
"in",
"temp_jsons",
":",
"BaseTrafficParser",
".",
"set_type",
"(",
"temp_result",
",",
"temp_json",
")",
"temp_result_list",
".",
"append",
"(",
"temp_result",
")",
"return",
"temp_result_list",
"else",
":",
"BaseTrafficParser",
".",
"_set_type",
"(",
"result",
",",
"temp_jsons",
")",
"return",
"result",
"return",
"result"
] |
https://github.com/ztosec/hunter/blob/4ee5cca8dc5fc5d7e631e935517bd0f493c30a37/HunterCelery/parser/base_traffic_parser.py#L253-L279
|
|
volatilityfoundation/volatility3
|
168b0d0b053ab97a7cb096ef2048795cc54d885f
|
volatility3/framework/layers/registry.py
|
python
|
RegistryHive.visit_nodes
|
(self,
visitor: Callable[[objects.StructType], None],
node: Optional[objects.StructType] = None)
|
Applies a callable (visitor) to all nodes within the registry tree
from a given node.
|
Applies a callable (visitor) to all nodes within the registry tree
from a given node.
|
[
"Applies",
"a",
"callable",
"(",
"visitor",
")",
"to",
"all",
"nodes",
"within",
"the",
"registry",
"tree",
"from",
"a",
"given",
"node",
"."
] |
def visit_nodes(self,
visitor: Callable[[objects.StructType], None],
node: Optional[objects.StructType] = None) -> None:
"""Applies a callable (visitor) to all nodes within the registry tree
from a given node."""
if not node:
node = self.get_node(self.root_cell_offset)
visitor(node)
for node in node.get_subkeys():
self.visit_nodes(visitor, node)
|
[
"def",
"visit_nodes",
"(",
"self",
",",
"visitor",
":",
"Callable",
"[",
"[",
"objects",
".",
"StructType",
"]",
",",
"None",
"]",
",",
"node",
":",
"Optional",
"[",
"objects",
".",
"StructType",
"]",
"=",
"None",
")",
"->",
"None",
":",
"if",
"not",
"node",
":",
"node",
"=",
"self",
".",
"get_node",
"(",
"self",
".",
"root_cell_offset",
")",
"visitor",
"(",
"node",
")",
"for",
"node",
"in",
"node",
".",
"get_subkeys",
"(",
")",
":",
"self",
".",
"visit_nodes",
"(",
"visitor",
",",
"node",
")"
] |
https://github.com/volatilityfoundation/volatility3/blob/168b0d0b053ab97a7cb096ef2048795cc54d885f/volatility3/framework/layers/registry.py#L162-L171
|
||
FanfouAPI/FanFouAPIDoc
|
ba7c391323330b2ba70b4db79ef0452dca4e3b1a
|
django-fanfou/contrib/fanfouapi/oauth.py
|
python
|
_utf8_str
|
(s)
|
Convert unicode to utf-8.
|
Convert unicode to utf-8.
|
[
"Convert",
"unicode",
"to",
"utf",
"-",
"8",
"."
] |
def _utf8_str(s):
"""Convert unicode to utf-8."""
if isinstance(s, unicode):
return s.encode("utf-8")
else:
return str(s)
|
[
"def",
"_utf8_str",
"(",
"s",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"unicode",
")",
":",
"return",
"s",
".",
"encode",
"(",
"\"utf-8\"",
")",
"else",
":",
"return",
"str",
"(",
"s",
")"
] |
https://github.com/FanfouAPI/FanFouAPIDoc/blob/ba7c391323330b2ba70b4db79ef0452dca4e3b1a/django-fanfou/contrib/fanfouapi/oauth.py#L52-L57
|
||
naftaliharris/tauthon
|
5587ceec329b75f7caf6d65a036db61ac1bae214
|
Lib/lib2to3/fixer_base.py
|
python
|
BaseFix.set_filename
|
(self, filename)
|
Set the filename, and a logger derived from it.
The main refactoring tool should call this.
|
Set the filename, and a logger derived from it.
|
[
"Set",
"the",
"filename",
"and",
"a",
"logger",
"derived",
"from",
"it",
"."
] |
def set_filename(self, filename):
"""Set the filename, and a logger derived from it.
The main refactoring tool should call this.
"""
self.filename = filename
|
[
"def",
"set_filename",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"filename",
"=",
"filename"
] |
https://github.com/naftaliharris/tauthon/blob/5587ceec329b75f7caf6d65a036db61ac1bae214/Lib/lib2to3/fixer_base.py#L71-L76
|
||
pyparallel/pyparallel
|
11e8c6072d48c8f13641925d17b147bf36ee0ba3
|
Lib/site-packages/pip-7.1.2-py3.3.egg/pip/_vendor/requests/packages/urllib3/filepost.py
|
python
|
encode_multipart_formdata
|
(fields, boundary=None)
|
return body.getvalue(), content_type
|
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
:param boundary:
If not specified, then a random boundary will be generated using
:func:`mimetools.choose_boundary`.
|
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
|
[
"Encode",
"a",
"dictionary",
"of",
"fields",
"using",
"the",
"multipart",
"/",
"form",
"-",
"data",
"MIME",
"format",
"."
] |
def encode_multipart_formdata(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
:param boundary:
If not specified, then a random boundary will be generated using
:func:`mimetools.choose_boundary`.
"""
body = BytesIO()
if boundary is None:
boundary = choose_boundary()
for field in iter_field_objects(fields):
body.write(b('--%s\r\n' % (boundary)))
writer(body).write(field.render_headers())
data = field.data
if isinstance(data, int):
data = str(data) # Backwards compatibility
if isinstance(data, six.text_type):
writer(body).write(data)
else:
body.write(data)
body.write(b'\r\n')
body.write(b('--%s--\r\n' % (boundary)))
content_type = str('multipart/form-data; boundary=%s' % boundary)
return body.getvalue(), content_type
|
[
"def",
"encode_multipart_formdata",
"(",
"fields",
",",
"boundary",
"=",
"None",
")",
":",
"body",
"=",
"BytesIO",
"(",
")",
"if",
"boundary",
"is",
"None",
":",
"boundary",
"=",
"choose_boundary",
"(",
")",
"for",
"field",
"in",
"iter_field_objects",
"(",
"fields",
")",
":",
"body",
".",
"write",
"(",
"b",
"(",
"'--%s\\r\\n'",
"%",
"(",
"boundary",
")",
")",
")",
"writer",
"(",
"body",
")",
".",
"write",
"(",
"field",
".",
"render_headers",
"(",
")",
")",
"data",
"=",
"field",
".",
"data",
"if",
"isinstance",
"(",
"data",
",",
"int",
")",
":",
"data",
"=",
"str",
"(",
"data",
")",
"# Backwards compatibility",
"if",
"isinstance",
"(",
"data",
",",
"six",
".",
"text_type",
")",
":",
"writer",
"(",
"body",
")",
".",
"write",
"(",
"data",
")",
"else",
":",
"body",
".",
"write",
"(",
"data",
")",
"body",
".",
"write",
"(",
"b'\\r\\n'",
")",
"body",
".",
"write",
"(",
"b",
"(",
"'--%s--\\r\\n'",
"%",
"(",
"boundary",
")",
")",
")",
"content_type",
"=",
"str",
"(",
"'multipart/form-data; boundary=%s'",
"%",
"boundary",
")",
"return",
"body",
".",
"getvalue",
"(",
")",
",",
"content_type"
] |
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/site-packages/pip-7.1.2-py3.3.egg/pip/_vendor/requests/packages/urllib3/filepost.py#L58-L93
|
|
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/sympy/core/numbers.py
|
python
|
AlgebraicNumber.to_algebraic_integer
|
(self)
|
return AlgebraicNumber((minpoly, root), self.coeffs())
|
Convert ``self`` to an algebraic integer.
|
Convert ``self`` to an algebraic integer.
|
[
"Convert",
"self",
"to",
"an",
"algebraic",
"integer",
"."
] |
def to_algebraic_integer(self):
"""Convert ``self`` to an algebraic integer. """
from sympy import Poly
f = self.minpoly
if f.LC() == 1:
return self
coeff = f.LC()**(f.degree() - 1)
poly = f.compose(Poly(f.gen/f.LC()))
minpoly = poly*coeff
root = f.LC()*self.root
return AlgebraicNumber((minpoly, root), self.coeffs())
|
[
"def",
"to_algebraic_integer",
"(",
"self",
")",
":",
"from",
"sympy",
"import",
"Poly",
"f",
"=",
"self",
".",
"minpoly",
"if",
"f",
".",
"LC",
"(",
")",
"==",
"1",
":",
"return",
"self",
"coeff",
"=",
"f",
".",
"LC",
"(",
")",
"**",
"(",
"f",
".",
"degree",
"(",
")",
"-",
"1",
")",
"poly",
"=",
"f",
".",
"compose",
"(",
"Poly",
"(",
"f",
".",
"gen",
"/",
"f",
".",
"LC",
"(",
")",
")",
")",
"minpoly",
"=",
"poly",
"*",
"coeff",
"root",
"=",
"f",
".",
"LC",
"(",
")",
"*",
"self",
".",
"root",
"return",
"AlgebraicNumber",
"(",
"(",
"minpoly",
",",
"root",
")",
",",
"self",
".",
"coeffs",
"(",
")",
")"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/core/numbers.py#L2528-L2542
|
|
twilio/twilio-python
|
6e1e811ea57a1edfadd5161ace87397c563f6915
|
twilio/rest/chat/v2/service/binding.py
|
python
|
BindingInstance.credential_sid
|
(self)
|
return self._properties['credential_sid']
|
:returns: The SID of the Credential for the binding
:rtype: unicode
|
:returns: The SID of the Credential for the binding
:rtype: unicode
|
[
":",
"returns",
":",
"The",
"SID",
"of",
"the",
"Credential",
"for",
"the",
"binding",
":",
"rtype",
":",
"unicode"
] |
def credential_sid(self):
"""
:returns: The SID of the Credential for the binding
:rtype: unicode
"""
return self._properties['credential_sid']
|
[
"def",
"credential_sid",
"(",
"self",
")",
":",
"return",
"self",
".",
"_properties",
"[",
"'credential_sid'",
"]"
] |
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/chat/v2/service/binding.py#L369-L374
|
|
TheAlgorithms/Python
|
9af2eef9b3761bf51580dedfb6fa7136ca0c5c2c
|
other/dijkstra_bankers_algorithm.py
|
python
|
BankersAlgorithm.__pretty_data
|
(self)
|
Properly align display of the algorithm's solution
|
Properly align display of the algorithm's solution
|
[
"Properly",
"align",
"display",
"of",
"the",
"algorithm",
"s",
"solution"
] |
def __pretty_data(self):
"""
Properly align display of the algorithm's solution
"""
print(" " * 9 + "Allocated Resource Table")
for item in self.__allocated_resources_table:
print(
f"P{self.__allocated_resources_table.index(item) + 1}"
+ " ".join(f"{it:>8}" for it in item)
+ "\n"
)
print(" " * 9 + "System Resource Table")
for item in self.__maximum_claim_table:
print(
f"P{self.__maximum_claim_table.index(item) + 1}"
+ " ".join(f"{it:>8}" for it in item)
+ "\n"
)
print(
"Current Usage by Active Processes: "
+ " ".join(str(x) for x in self.__claim_vector)
)
print(
"Initial Available Resources: "
+ " ".join(str(x) for x in self.__available_resources())
)
time.sleep(1)
|
[
"def",
"__pretty_data",
"(",
"self",
")",
":",
"print",
"(",
"\" \"",
"*",
"9",
"+",
"\"Allocated Resource Table\"",
")",
"for",
"item",
"in",
"self",
".",
"__allocated_resources_table",
":",
"print",
"(",
"f\"P{self.__allocated_resources_table.index(item) + 1}\"",
"+",
"\" \"",
".",
"join",
"(",
"f\"{it:>8}\"",
"for",
"it",
"in",
"item",
")",
"+",
"\"\\n\"",
")",
"print",
"(",
"\" \"",
"*",
"9",
"+",
"\"System Resource Table\"",
")",
"for",
"item",
"in",
"self",
".",
"__maximum_claim_table",
":",
"print",
"(",
"f\"P{self.__maximum_claim_table.index(item) + 1}\"",
"+",
"\" \"",
".",
"join",
"(",
"f\"{it:>8}\"",
"for",
"it",
"in",
"item",
")",
"+",
"\"\\n\"",
")",
"print",
"(",
"\"Current Usage by Active Processes: \"",
"+",
"\" \"",
".",
"join",
"(",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"__claim_vector",
")",
")",
"print",
"(",
"\"Initial Available Resources: \"",
"+",
"\" \"",
".",
"join",
"(",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"__available_resources",
"(",
")",
")",
")",
"time",
".",
"sleep",
"(",
"1",
")"
] |
https://github.com/TheAlgorithms/Python/blob/9af2eef9b3761bf51580dedfb6fa7136ca0c5c2c/other/dijkstra_bankers_algorithm.py#L193-L219
|
||
pyansys/pymapdl
|
c07291fc062b359abf0e92b95a92d753a95ef3d7
|
ansys/mapdl/core/mapdl_grpc.py
|
python
|
MapdlGrpc.arinqr
|
(self, anmi, key, **kwargs)
|
return self.scalar_param(TMP_VAR)
|
Wrap the ``arinqr`` method to take advantage of the gRPC methods.
|
Wrap the ``arinqr`` method to take advantage of the gRPC methods.
|
[
"Wrap",
"the",
"arinqr",
"method",
"to",
"take",
"advantage",
"of",
"the",
"gRPC",
"methods",
"."
] |
def arinqr(self, anmi, key, **kwargs):
"""Wrap the ``arinqr`` method to take advantage of the gRPC methods."""
super().arinqr(anmi, key, pname=TMP_VAR, mute=True, **kwargs)
return self.scalar_param(TMP_VAR)
|
[
"def",
"arinqr",
"(",
"self",
",",
"anmi",
",",
"key",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
")",
".",
"arinqr",
"(",
"anmi",
",",
"key",
",",
"pname",
"=",
"TMP_VAR",
",",
"mute",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"scalar_param",
"(",
"TMP_VAR",
")"
] |
https://github.com/pyansys/pymapdl/blob/c07291fc062b359abf0e92b95a92d753a95ef3d7/ansys/mapdl/core/mapdl_grpc.py#L1913-L1916
|
|
mirumee/ariadne
|
1b8b7ef0ed65cde95a6bd9e25500584a38393b71
|
ariadne/contrib/federation/utils.py
|
python
|
add_typename_to_possible_return
|
(obj: Any, typename: str)
|
return None
|
[] |
def add_typename_to_possible_return(obj: Any, typename: str) -> Any:
if obj is not None:
if isinstance(obj, dict):
obj["__typename"] = typename
else:
setattr(obj, f"_{obj.__class__.__name__}__typename", typename)
return obj
return None
|
[
"def",
"add_typename_to_possible_return",
"(",
"obj",
":",
"Any",
",",
"typename",
":",
"str",
")",
"->",
"Any",
":",
"if",
"obj",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"obj",
"[",
"\"__typename\"",
"]",
"=",
"typename",
"else",
":",
"setattr",
"(",
"obj",
",",
"f\"_{obj.__class__.__name__}__typename\"",
",",
"typename",
")",
"return",
"obj",
"return",
"None"
] |
https://github.com/mirumee/ariadne/blob/1b8b7ef0ed65cde95a6bd9e25500584a38393b71/ariadne/contrib/federation/utils.py#L142-L149
|
|||
cloudera/hue
|
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
|
desktop/core/ext-py/thrift-0.13.0/src/transport/sslcompat.py
|
python
|
legacy_validate_callback
|
(cert, hostname)
|
legacy method to validate the peer's SSL certificate, and to check
the commonName of the certificate to ensure it matches the hostname we
used to make this connection. Does not support subjectAltName records
in certificates.
raises TTransportException if the certificate fails validation.
|
legacy method to validate the peer's SSL certificate, and to check
the commonName of the certificate to ensure it matches the hostname we
used to make this connection. Does not support subjectAltName records
in certificates.
|
[
"legacy",
"method",
"to",
"validate",
"the",
"peer",
"s",
"SSL",
"certificate",
"and",
"to",
"check",
"the",
"commonName",
"of",
"the",
"certificate",
"to",
"ensure",
"it",
"matches",
"the",
"hostname",
"we",
"used",
"to",
"make",
"this",
"connection",
".",
"Does",
"not",
"support",
"subjectAltName",
"records",
"in",
"certificates",
"."
] |
def legacy_validate_callback(cert, hostname):
"""legacy method to validate the peer's SSL certificate, and to check
the commonName of the certificate to ensure it matches the hostname we
used to make this connection. Does not support subjectAltName records
in certificates.
raises TTransportException if the certificate fails validation.
"""
if 'subject' not in cert:
raise TTransportException(
TTransportException.NOT_OPEN,
'No SSL certificate found from %s' % hostname)
fields = cert['subject']
for field in fields:
# ensure structure we get back is what we expect
if not isinstance(field, tuple):
continue
cert_pair = field[0]
if len(cert_pair) < 2:
continue
cert_key, cert_value = cert_pair[0:2]
if cert_key != 'commonName':
continue
certhost = cert_value
# this check should be performed by some sort of Access Manager
if certhost == hostname:
# success, cert commonName matches desired hostname
return
else:
raise TTransportException(
TTransportException.UNKNOWN,
'Hostname we connected to "%s" doesn\'t match certificate '
'provided commonName "%s"' % (hostname, certhost))
raise TTransportException(
TTransportException.UNKNOWN,
'Could not validate SSL certificate from host "%s". Cert=%s'
% (hostname, cert))
|
[
"def",
"legacy_validate_callback",
"(",
"cert",
",",
"hostname",
")",
":",
"if",
"'subject'",
"not",
"in",
"cert",
":",
"raise",
"TTransportException",
"(",
"TTransportException",
".",
"NOT_OPEN",
",",
"'No SSL certificate found from %s'",
"%",
"hostname",
")",
"fields",
"=",
"cert",
"[",
"'subject'",
"]",
"for",
"field",
"in",
"fields",
":",
"# ensure structure we get back is what we expect",
"if",
"not",
"isinstance",
"(",
"field",
",",
"tuple",
")",
":",
"continue",
"cert_pair",
"=",
"field",
"[",
"0",
"]",
"if",
"len",
"(",
"cert_pair",
")",
"<",
"2",
":",
"continue",
"cert_key",
",",
"cert_value",
"=",
"cert_pair",
"[",
"0",
":",
"2",
"]",
"if",
"cert_key",
"!=",
"'commonName'",
":",
"continue",
"certhost",
"=",
"cert_value",
"# this check should be performed by some sort of Access Manager",
"if",
"certhost",
"==",
"hostname",
":",
"# success, cert commonName matches desired hostname",
"return",
"else",
":",
"raise",
"TTransportException",
"(",
"TTransportException",
".",
"UNKNOWN",
",",
"'Hostname we connected to \"%s\" doesn\\'t match certificate '",
"'provided commonName \"%s\"'",
"%",
"(",
"hostname",
",",
"certhost",
")",
")",
"raise",
"TTransportException",
"(",
"TTransportException",
".",
"UNKNOWN",
",",
"'Could not validate SSL certificate from host \"%s\". Cert=%s'",
"%",
"(",
"hostname",
",",
"cert",
")",
")"
] |
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/thrift-0.13.0/src/transport/sslcompat.py#L28-L64
|
||
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/ntheory/modular.py
|
python
|
symmetric_residue
|
(a, m)
|
return a - m
|
Return the residual mod m such that it is within half of the modulus.
>>> from sympy.ntheory.modular import symmetric_residue
>>> symmetric_residue(1, 6)
1
>>> symmetric_residue(4, 6)
-2
|
Return the residual mod m such that it is within half of the modulus.
|
[
"Return",
"the",
"residual",
"mod",
"m",
"such",
"that",
"it",
"is",
"within",
"half",
"of",
"the",
"modulus",
"."
] |
def symmetric_residue(a, m):
"""Return the residual mod m such that it is within half of the modulus.
>>> from sympy.ntheory.modular import symmetric_residue
>>> symmetric_residue(1, 6)
1
>>> symmetric_residue(4, 6)
-2
"""
if a <= m // 2:
return a
return a - m
|
[
"def",
"symmetric_residue",
"(",
"a",
",",
"m",
")",
":",
"if",
"a",
"<=",
"m",
"//",
"2",
":",
"return",
"a",
"return",
"a",
"-",
"m"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/ntheory/modular.py#L11-L22
|
|
justdoit0823/pywxclient
|
9a61c4c0c26d6566e6121641ab37c35b176d8e20
|
pywxclient/core/client.py
|
python
|
SyncClient.get_batch_contact
|
(self, user_list)
|
return contact_res['ContactList']
|
Batch getting contact.
|
Batch getting contact.
|
[
"Batch",
"getting",
"contact",
"."
] |
def get_batch_contact(self, user_list):
"""Batch getting contact."""
contact_res = self._api_cls.mget_contact_list(self.session, user_list)
return contact_res['ContactList']
|
[
"def",
"get_batch_contact",
"(",
"self",
",",
"user_list",
")",
":",
"contact_res",
"=",
"self",
".",
"_api_cls",
".",
"mget_contact_list",
"(",
"self",
".",
"session",
",",
"user_list",
")",
"return",
"contact_res",
"[",
"'ContactList'",
"]"
] |
https://github.com/justdoit0823/pywxclient/blob/9a61c4c0c26d6566e6121641ab37c35b176d8e20/pywxclient/core/client.py#L181-L184
|
|
openvax/pyensembl
|
3fd9948ec6157db3061f74aa448fce1f1f914e98
|
pyensembl/genome.py
|
python
|
Genome.genes
|
(self, contig=None, strand=None)
|
return [self.gene_by_id(gene_id) for gene_id in gene_ids]
|
Returns all Gene objects in the database. Can be restricted to a
particular contig/chromosome and strand by the following arguments:
Parameters
----------
contig : str
Only return genes on the given contig.
strand : str
Only return genes on this strand.
|
Returns all Gene objects in the database. Can be restricted to a
particular contig/chromosome and strand by the following arguments:
|
[
"Returns",
"all",
"Gene",
"objects",
"in",
"the",
"database",
".",
"Can",
"be",
"restricted",
"to",
"a",
"particular",
"contig",
"/",
"chromosome",
"and",
"strand",
"by",
"the",
"following",
"arguments",
":"
] |
def genes(self, contig=None, strand=None):
"""
Returns all Gene objects in the database. Can be restricted to a
particular contig/chromosome and strand by the following arguments:
Parameters
----------
contig : str
Only return genes on the given contig.
strand : str
Only return genes on this strand.
"""
gene_ids = self.gene_ids(contig=contig, strand=strand)
return [self.gene_by_id(gene_id) for gene_id in gene_ids]
|
[
"def",
"genes",
"(",
"self",
",",
"contig",
"=",
"None",
",",
"strand",
"=",
"None",
")",
":",
"gene_ids",
"=",
"self",
".",
"gene_ids",
"(",
"contig",
"=",
"contig",
",",
"strand",
"=",
"strand",
")",
"return",
"[",
"self",
".",
"gene_by_id",
"(",
"gene_id",
")",
"for",
"gene_id",
"in",
"gene_ids",
"]"
] |
https://github.com/openvax/pyensembl/blob/3fd9948ec6157db3061f74aa448fce1f1f914e98/pyensembl/genome.py#L649-L663
|
|
urwid/urwid
|
e2423b5069f51d318ea1ac0f355a0efe5448f7eb
|
urwid/treetools.py
|
python
|
TreeNode.is_root
|
(self)
|
return self.get_depth() == 0
|
[] |
def is_root(self):
return self.get_depth() == 0
|
[
"def",
"is_root",
"(",
"self",
")",
":",
"return",
"self",
".",
"get_depth",
"(",
")",
"==",
"0"
] |
https://github.com/urwid/urwid/blob/e2423b5069f51d318ea1ac0f355a0efe5448f7eb/urwid/treetools.py#L259-L260
|
|||
dropbox/dropbox-sdk-python
|
015437429be224732990041164a21a0501235db1
|
dropbox/team_log.py
|
python
|
EventType.note_acl_invite_only
|
(cls, val)
|
return cls('note_acl_invite_only', val)
|
Create an instance of this class set to the ``note_acl_invite_only`` tag
with value ``val``.
:param NoteAclInviteOnlyType val:
:rtype: EventType
|
Create an instance of this class set to the ``note_acl_invite_only`` tag
with value ``val``.
|
[
"Create",
"an",
"instance",
"of",
"this",
"class",
"set",
"to",
"the",
"note_acl_invite_only",
"tag",
"with",
"value",
"val",
"."
] |
def note_acl_invite_only(cls, val):
"""
Create an instance of this class set to the ``note_acl_invite_only`` tag
with value ``val``.
:param NoteAclInviteOnlyType val:
:rtype: EventType
"""
return cls('note_acl_invite_only', val)
|
[
"def",
"note_acl_invite_only",
"(",
"cls",
",",
"val",
")",
":",
"return",
"cls",
"(",
"'note_acl_invite_only'",
",",
"val",
")"
] |
https://github.com/dropbox/dropbox-sdk-python/blob/015437429be224732990041164a21a0501235db1/dropbox/team_log.py#L25724-L25732
|
|
rgerum/pylustrator
|
b01825bc3de75ac127291647729fa7b0e6f8b821
|
pylustrator/QtShortCuts.py
|
python
|
QDragableColor.openDialog
|
(self)
|
open a color choosed dialog
|
open a color choosed dialog
|
[
"open",
"a",
"color",
"choosed",
"dialog"
] |
def openDialog(self):
""" open a color choosed dialog """
if self.color in self.maps:
dialog = ColorMapChoose(self.parent(), self.color)
colormap, selected = dialog.exec()
if selected is False:
return
self.setColor(colormap)
else:
# get new color from color picker
qcolor = QtGui.QColor(*np.array(mpl.colors.to_rgb(self.getColor())) * 255)
color = QtWidgets.QColorDialog.getColor(qcolor, self.parent())
# if a color is set, apply it
if color.isValid():
color = "#%02x%02x%02x" % color.getRgb()[:3]
self.setColor(color)
|
[
"def",
"openDialog",
"(",
"self",
")",
":",
"if",
"self",
".",
"color",
"in",
"self",
".",
"maps",
":",
"dialog",
"=",
"ColorMapChoose",
"(",
"self",
".",
"parent",
"(",
")",
",",
"self",
".",
"color",
")",
"colormap",
",",
"selected",
"=",
"dialog",
".",
"exec",
"(",
")",
"if",
"selected",
"is",
"False",
":",
"return",
"self",
".",
"setColor",
"(",
"colormap",
")",
"else",
":",
"# get new color from color picker",
"qcolor",
"=",
"QtGui",
".",
"QColor",
"(",
"*",
"np",
".",
"array",
"(",
"mpl",
".",
"colors",
".",
"to_rgb",
"(",
"self",
".",
"getColor",
"(",
")",
")",
")",
"*",
"255",
")",
"color",
"=",
"QtWidgets",
".",
"QColorDialog",
".",
"getColor",
"(",
"qcolor",
",",
"self",
".",
"parent",
"(",
")",
")",
"# if a color is set, apply it",
"if",
"color",
".",
"isValid",
"(",
")",
":",
"color",
"=",
"\"#%02x%02x%02x\"",
"%",
"color",
".",
"getRgb",
"(",
")",
"[",
":",
"3",
"]",
"self",
".",
"setColor",
"(",
"color",
")"
] |
https://github.com/rgerum/pylustrator/blob/b01825bc3de75ac127291647729fa7b0e6f8b821/pylustrator/QtShortCuts.py#L122-L137
|
||
apache/libcloud
|
90971e17bfd7b6bb97b2489986472c531cc8e140
|
libcloud/compute/drivers/cloudstack.py
|
python
|
CloudStackNodeDriver.ex_create_tags
|
(self, resource_ids, resource_type, tags)
|
return True
|
Create tags for a resource (Node/StorageVolume/etc).
A list of resource types can be found at http://goo.gl/6OKphH
:param resource_ids: Resource IDs to be tagged. The resource IDs must
all be associated with the resource_type.
For example, for virtual machines (UserVm) you
can only specify a list of virtual machine IDs.
:type resource_ids: ``list`` of resource IDs
:param resource_type: Resource type (eg: UserVm)
:type resource_type: ``str``
:param tags: A dictionary or other mapping of strings to strings,
associating tag names with tag values.
:type tags: ``dict``
:rtype: ``bool``
|
Create tags for a resource (Node/StorageVolume/etc).
A list of resource types can be found at http://goo.gl/6OKphH
|
[
"Create",
"tags",
"for",
"a",
"resource",
"(",
"Node",
"/",
"StorageVolume",
"/",
"etc",
")",
".",
"A",
"list",
"of",
"resource",
"types",
"can",
"be",
"found",
"at",
"http",
":",
"//",
"goo",
".",
"gl",
"/",
"6OKphH"
] |
def ex_create_tags(self, resource_ids, resource_type, tags):
"""
Create tags for a resource (Node/StorageVolume/etc).
A list of resource types can be found at http://goo.gl/6OKphH
:param resource_ids: Resource IDs to be tagged. The resource IDs must
all be associated with the resource_type.
For example, for virtual machines (UserVm) you
can only specify a list of virtual machine IDs.
:type resource_ids: ``list`` of resource IDs
:param resource_type: Resource type (eg: UserVm)
:type resource_type: ``str``
:param tags: A dictionary or other mapping of strings to strings,
associating tag names with tag values.
:type tags: ``dict``
:rtype: ``bool``
"""
params = {"resourcetype": resource_type, "resourceids": ",".join(resource_ids)}
for i, key in enumerate(tags):
params["tags[%d].key" % i] = key
params["tags[%d].value" % i] = tags[key]
self._async_request(command="createTags", params=params, method="GET")
return True
|
[
"def",
"ex_create_tags",
"(",
"self",
",",
"resource_ids",
",",
"resource_type",
",",
"tags",
")",
":",
"params",
"=",
"{",
"\"resourcetype\"",
":",
"resource_type",
",",
"\"resourceids\"",
":",
"\",\"",
".",
"join",
"(",
"resource_ids",
")",
"}",
"for",
"i",
",",
"key",
"in",
"enumerate",
"(",
"tags",
")",
":",
"params",
"[",
"\"tags[%d].key\"",
"%",
"i",
"]",
"=",
"key",
"params",
"[",
"\"tags[%d].value\"",
"%",
"i",
"]",
"=",
"tags",
"[",
"key",
"]",
"self",
".",
"_async_request",
"(",
"command",
"=",
"\"createTags\"",
",",
"params",
"=",
"params",
",",
"method",
"=",
"\"GET\"",
")",
"return",
"True"
] |
https://github.com/apache/libcloud/blob/90971e17bfd7b6bb97b2489986472c531cc8e140/libcloud/compute/drivers/cloudstack.py#L4082-L4109
|
|
AIChallenger/AI_Challenger_2017
|
52014e0defbbdd85bf94ab05d308300d5764022f
|
Baselines/caption_baseline/im2txt/im2txt/inference_wrapper.py
|
python
|
InferenceWrapper.__init__
|
(self)
|
[] |
def __init__(self):
super(InferenceWrapper, self).__init__()
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"super",
"(",
"InferenceWrapper",
",",
"self",
")",
".",
"__init__",
"(",
")"
] |
https://github.com/AIChallenger/AI_Challenger_2017/blob/52014e0defbbdd85bf94ab05d308300d5764022f/Baselines/caption_baseline/im2txt/im2txt/inference_wrapper.py#L31-L32
|
||||
robhagemans/pcbasic
|
c3a043b46af66623a801e18a38175be077251ada
|
pcbasic/basic/devices/disk.py
|
python
|
dos_is_legal_name
|
(dos_name)
|
return (
# enforce lengths
(len(trunk) <= 8 and len(ext) <= 3) and
# no leading or trailing spaces
(trunk == trunk.strip() and ext == ext.strip()) and
# enforce allowable characters
((set(trunk) | set(ext)) <= ALLOWABLE_CHARS)
)
|
Check if a (bytes) name is a legal DOS name.
|
Check if a (bytes) name is a legal DOS name.
|
[
"Check",
"if",
"a",
"(",
"bytes",
")",
"name",
"is",
"a",
"legal",
"DOS",
"name",
"."
] |
def dos_is_legal_name(dos_name):
"""Check if a (bytes) name is a legal DOS name."""
assert isinstance(dos_name, bytes), type(dos_name)
if dos_name in (b'.', b'..'):
return True
trunk, ext = dos_splitext(dos_name)
return (
# enforce lengths
(len(trunk) <= 8 and len(ext) <= 3) and
# no leading or trailing spaces
(trunk == trunk.strip() and ext == ext.strip()) and
# enforce allowable characters
((set(trunk) | set(ext)) <= ALLOWABLE_CHARS)
)
|
[
"def",
"dos_is_legal_name",
"(",
"dos_name",
")",
":",
"assert",
"isinstance",
"(",
"dos_name",
",",
"bytes",
")",
",",
"type",
"(",
"dos_name",
")",
"if",
"dos_name",
"in",
"(",
"b'.'",
",",
"b'..'",
")",
":",
"return",
"True",
"trunk",
",",
"ext",
"=",
"dos_splitext",
"(",
"dos_name",
")",
"return",
"(",
"# enforce lengths",
"(",
"len",
"(",
"trunk",
")",
"<=",
"8",
"and",
"len",
"(",
"ext",
")",
"<=",
"3",
")",
"and",
"# no leading or trailing spaces",
"(",
"trunk",
"==",
"trunk",
".",
"strip",
"(",
")",
"and",
"ext",
"==",
"ext",
".",
"strip",
"(",
")",
")",
"and",
"# enforce allowable characters",
"(",
"(",
"set",
"(",
"trunk",
")",
"|",
"set",
"(",
"ext",
")",
")",
"<=",
"ALLOWABLE_CHARS",
")",
")"
] |
https://github.com/robhagemans/pcbasic/blob/c3a043b46af66623a801e18a38175be077251ada/pcbasic/basic/devices/disk.py#L164-L177
|
|
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/modular/modform_hecketriangle/series_constructor.py
|
python
|
MFSeriesConstructor.G_inv_ZZ
|
(self)
|
r"""
Return the rational Fourier expansion of ``G_inv``,
where the parameter ``d`` is replaced by ``1``.
.. NOTE::
The Fourier expansion of ``G_inv`` for ``d!=1``
is given by ``d*G_inv_ZZ(q/d)``.
EXAMPLES::
sage: from sage.modular.modform_hecketriangle.series_constructor import MFSeriesConstructor
sage: MFSeriesConstructor(group=4, prec=3).G_inv_ZZ()
q^-1 - 3/32 - 955/16384*q + O(q^2)
sage: MFSeriesConstructor(group=8, prec=3).G_inv_ZZ()
q^-1 - 15/128 - 15139/262144*q + O(q^2)
sage: MFSeriesConstructor(group=8, prec=3).G_inv_ZZ().parent()
Laurent Series Ring in q over Rational Field
sage: MFSeriesConstructor(group=infinity, prec=3).G_inv_ZZ()
q^-1 - 1/8 - 59/1024*q + O(q^2)
|
r"""
Return the rational Fourier expansion of ``G_inv``,
where the parameter ``d`` is replaced by ``1``.
|
[
"r",
"Return",
"the",
"rational",
"Fourier",
"expansion",
"of",
"G_inv",
"where",
"the",
"parameter",
"d",
"is",
"replaced",
"by",
"1",
"."
] |
def G_inv_ZZ(self):
r"""
Return the rational Fourier expansion of ``G_inv``,
where the parameter ``d`` is replaced by ``1``.
.. NOTE::
The Fourier expansion of ``G_inv`` for ``d!=1``
is given by ``d*G_inv_ZZ(q/d)``.
EXAMPLES::
sage: from sage.modular.modform_hecketriangle.series_constructor import MFSeriesConstructor
sage: MFSeriesConstructor(group=4, prec=3).G_inv_ZZ()
q^-1 - 3/32 - 955/16384*q + O(q^2)
sage: MFSeriesConstructor(group=8, prec=3).G_inv_ZZ()
q^-1 - 15/128 - 15139/262144*q + O(q^2)
sage: MFSeriesConstructor(group=8, prec=3).G_inv_ZZ().parent()
Laurent Series Ring in q over Rational Field
sage: MFSeriesConstructor(group=infinity, prec=3).G_inv_ZZ()
q^-1 - 1/8 - 59/1024*q + O(q^2)
"""
n = self.hecke_n()
# Note that G_inv is not a weakly holomorphic form (because of the behavior at -1)
if (n == infinity):
q = self._series_ring.gen()
temp_expr = (self.J_inv_ZZ()/self.f_inf_ZZ()*q**2).power_series()
return 1/q*self.f_i_ZZ()*(temp_expr.log()/2).exp()
elif (ZZ(2).divides(n)):
return self.f_i_ZZ()*(self.f_rho_ZZ()**(ZZ(n/ZZ(2))))/self.f_inf_ZZ()
else:
#return self._qseries_ring([])
raise ValueError("G_inv doesn't exist for n={}.".format(self.hecke_n()))
|
[
"def",
"G_inv_ZZ",
"(",
"self",
")",
":",
"n",
"=",
"self",
".",
"hecke_n",
"(",
")",
"# Note that G_inv is not a weakly holomorphic form (because of the behavior at -1)",
"if",
"(",
"n",
"==",
"infinity",
")",
":",
"q",
"=",
"self",
".",
"_series_ring",
".",
"gen",
"(",
")",
"temp_expr",
"=",
"(",
"self",
".",
"J_inv_ZZ",
"(",
")",
"/",
"self",
".",
"f_inf_ZZ",
"(",
")",
"*",
"q",
"**",
"2",
")",
".",
"power_series",
"(",
")",
"return",
"1",
"/",
"q",
"*",
"self",
".",
"f_i_ZZ",
"(",
")",
"*",
"(",
"temp_expr",
".",
"log",
"(",
")",
"/",
"2",
")",
".",
"exp",
"(",
")",
"elif",
"(",
"ZZ",
"(",
"2",
")",
".",
"divides",
"(",
"n",
")",
")",
":",
"return",
"self",
".",
"f_i_ZZ",
"(",
")",
"*",
"(",
"self",
".",
"f_rho_ZZ",
"(",
")",
"**",
"(",
"ZZ",
"(",
"n",
"/",
"ZZ",
"(",
"2",
")",
")",
")",
")",
"/",
"self",
".",
"f_inf_ZZ",
"(",
")",
"else",
":",
"#return self._qseries_ring([])",
"raise",
"ValueError",
"(",
"\"G_inv doesn't exist for n={}.\"",
".",
"format",
"(",
"self",
".",
"hecke_n",
"(",
")",
")",
")"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/modular/modform_hecketriangle/series_constructor.py#L352-L386
|
||
HuguesTHOMAS/KPConv-PyTorch
|
73e444d486cd6cb56122c3dd410e51c734064cfe
|
utils/mayavi_visu.py
|
python
|
show_neighbors
|
(query, supports, neighbors)
|
[] |
def show_neighbors(query, supports, neighbors):
from mayavi import mlab
###########################
# Interactive visualization
###########################
# Create figure for features
fig1 = mlab.figure('Models', bgcolor=(1, 1, 1), size=(1000, 800))
fig1.scene.parallel_projection = False
# Indices
global file_i
file_i = 0
def update_scene():
# clear figure
mlab.clf(fig1)
# Rescale points for visu
p1 = (query * 1.5 + np.array([1.0, 1.0, 1.0])) * 50.0
p2 = (supports * 1.5 + np.array([1.0, 1.0, 1.0])) * 50.0
l1 = p1[:, 2]*0
l1[file_i] = 1
l2 = p2[:, 2]*0 + 2
l2[neighbors[file_i]] = 3
# Show point clouds colorized with activations
activations = mlab.points3d(p1[:, 0],
p1[:, 1],
p1[:, 2],
l1,
scale_factor=2.0,
scale_mode='none',
vmin=0.0,
vmax=3.0,
figure=fig1)
activations = mlab.points3d(p2[:, 0],
p2[:, 1],
p2[:, 2],
l2,
scale_factor=3.0,
scale_mode='none',
vmin=0.0,
vmax=3.0,
figure=fig1)
# New title
mlab.title(str(file_i), color=(0, 0, 0), size=0.3, height=0.01)
text = '<--- (press g for previous)' + 50 * ' ' + '(press h for next) --->'
mlab.text(0.01, 0.01, text, color=(0, 0, 0), width=0.98)
mlab.orientation_axes()
return
def keyboard_callback(vtk_obj, event):
global file_i
if vtk_obj.GetKeyCode() in ['g', 'G']:
file_i = (file_i - 1) % len(query)
update_scene()
elif vtk_obj.GetKeyCode() in ['h', 'H']:
file_i = (file_i + 1) % len(query)
update_scene()
return
# Draw a first plot
update_scene()
fig1.scene.interactor.add_observer('KeyPressEvent', keyboard_callback)
mlab.show()
|
[
"def",
"show_neighbors",
"(",
"query",
",",
"supports",
",",
"neighbors",
")",
":",
"from",
"mayavi",
"import",
"mlab",
"###########################",
"# Interactive visualization",
"###########################",
"# Create figure for features",
"fig1",
"=",
"mlab",
".",
"figure",
"(",
"'Models'",
",",
"bgcolor",
"=",
"(",
"1",
",",
"1",
",",
"1",
")",
",",
"size",
"=",
"(",
"1000",
",",
"800",
")",
")",
"fig1",
".",
"scene",
".",
"parallel_projection",
"=",
"False",
"# Indices",
"global",
"file_i",
"file_i",
"=",
"0",
"def",
"update_scene",
"(",
")",
":",
"# clear figure",
"mlab",
".",
"clf",
"(",
"fig1",
")",
"# Rescale points for visu",
"p1",
"=",
"(",
"query",
"*",
"1.5",
"+",
"np",
".",
"array",
"(",
"[",
"1.0",
",",
"1.0",
",",
"1.0",
"]",
")",
")",
"*",
"50.0",
"p2",
"=",
"(",
"supports",
"*",
"1.5",
"+",
"np",
".",
"array",
"(",
"[",
"1.0",
",",
"1.0",
",",
"1.0",
"]",
")",
")",
"*",
"50.0",
"l1",
"=",
"p1",
"[",
":",
",",
"2",
"]",
"*",
"0",
"l1",
"[",
"file_i",
"]",
"=",
"1",
"l2",
"=",
"p2",
"[",
":",
",",
"2",
"]",
"*",
"0",
"+",
"2",
"l2",
"[",
"neighbors",
"[",
"file_i",
"]",
"]",
"=",
"3",
"# Show point clouds colorized with activations",
"activations",
"=",
"mlab",
".",
"points3d",
"(",
"p1",
"[",
":",
",",
"0",
"]",
",",
"p1",
"[",
":",
",",
"1",
"]",
",",
"p1",
"[",
":",
",",
"2",
"]",
",",
"l1",
",",
"scale_factor",
"=",
"2.0",
",",
"scale_mode",
"=",
"'none'",
",",
"vmin",
"=",
"0.0",
",",
"vmax",
"=",
"3.0",
",",
"figure",
"=",
"fig1",
")",
"activations",
"=",
"mlab",
".",
"points3d",
"(",
"p2",
"[",
":",
",",
"0",
"]",
",",
"p2",
"[",
":",
",",
"1",
"]",
",",
"p2",
"[",
":",
",",
"2",
"]",
",",
"l2",
",",
"scale_factor",
"=",
"3.0",
",",
"scale_mode",
"=",
"'none'",
",",
"vmin",
"=",
"0.0",
",",
"vmax",
"=",
"3.0",
",",
"figure",
"=",
"fig1",
")",
"# New title",
"mlab",
".",
"title",
"(",
"str",
"(",
"file_i",
")",
",",
"color",
"=",
"(",
"0",
",",
"0",
",",
"0",
")",
",",
"size",
"=",
"0.3",
",",
"height",
"=",
"0.01",
")",
"text",
"=",
"'<--- (press g for previous)'",
"+",
"50",
"*",
"' '",
"+",
"'(press h for next) --->'",
"mlab",
".",
"text",
"(",
"0.01",
",",
"0.01",
",",
"text",
",",
"color",
"=",
"(",
"0",
",",
"0",
",",
"0",
")",
",",
"width",
"=",
"0.98",
")",
"mlab",
".",
"orientation_axes",
"(",
")",
"return",
"def",
"keyboard_callback",
"(",
"vtk_obj",
",",
"event",
")",
":",
"global",
"file_i",
"if",
"vtk_obj",
".",
"GetKeyCode",
"(",
")",
"in",
"[",
"'g'",
",",
"'G'",
"]",
":",
"file_i",
"=",
"(",
"file_i",
"-",
"1",
")",
"%",
"len",
"(",
"query",
")",
"update_scene",
"(",
")",
"elif",
"vtk_obj",
".",
"GetKeyCode",
"(",
")",
"in",
"[",
"'h'",
",",
"'H'",
"]",
":",
"file_i",
"=",
"(",
"file_i",
"+",
"1",
")",
"%",
"len",
"(",
"query",
")",
"update_scene",
"(",
")",
"return",
"# Draw a first plot",
"update_scene",
"(",
")",
"fig1",
".",
"scene",
".",
"interactor",
".",
"add_observer",
"(",
"'KeyPressEvent'",
",",
"keyboard_callback",
")",
"mlab",
".",
"show",
"(",
")"
] |
https://github.com/HuguesTHOMAS/KPConv-PyTorch/blob/73e444d486cd6cb56122c3dd410e51c734064cfe/utils/mayavi_visu.py#L191-L268
|
||||
bleachbit/bleachbit
|
88fc4452936d02b56a76f07ce2142306bb47262b
|
bleachbit/markovify/text.py
|
python
|
Text.to_json
|
(self)
|
Returns the underlying data as a JSON string.
|
Returns the underlying data as a JSON string.
|
[
"Returns",
"the",
"underlying",
"data",
"as",
"a",
"JSON",
"string",
"."
] |
def to_json(self):
"""
Returns the underlying data as a JSON string.
"""
# not used in BleachBit
pass
|
[
"def",
"to_json",
"(",
"self",
")",
":",
"# not used in BleachBit",
"pass"
] |
https://github.com/bleachbit/bleachbit/blob/88fc4452936d02b56a76f07ce2142306bb47262b/bleachbit/markovify/text.py#L53-L58
|
||
tp4a/teleport
|
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
|
server/www/packages/packages-linux/x64/mako/cache.py
|
python
|
CacheImpl.invalidate
|
(self, key, **kw)
|
r"""Invalidate a value in the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments.
|
r"""Invalidate a value in the cache.
|
[
"r",
"Invalidate",
"a",
"value",
"in",
"the",
"cache",
"."
] |
def invalidate(self, key, **kw):
r"""Invalidate a value in the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments.
"""
raise NotImplementedError()
|
[
"def",
"invalidate",
"(",
"self",
",",
"key",
",",
"*",
"*",
"kw",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/mako/cache.py#L233-L240
|
||
cowrie/cowrie
|
86488fa4eed1559574653eee4c76b4a33d1b42db
|
src/cowrie/commands/iptables.py
|
python
|
Command_iptables.list
|
(self, chain)
|
List current rules
|
List current rules
|
[
"List",
"current",
"rules"
] |
def list(self, chain):
"""
List current rules
"""
if self.user_is_root():
if len(chain) > 0:
print(chain)
# Check chain
if not self.is_valid_chain(chain):
return
chains = [chain]
else:
chains = iter(self.current_table.keys())
# Output buffer
output = []
for chain in chains:
# Chain table header
chain_output = [
"Chain %s (policy ACCEPT)" % chain,
"target prot opt source destination",
]
# Format the rules
for rule in self.current_table[chain]:
chain_output.append(
"%-10s %-4s %-3s %-20s %-20s %s %s" % rule,
)
# Create one string
output.append("\n".join(chain_output))
# Done
self.write("{}\n".format("\n\n".join(output)))
self.exit()
else:
self.no_permission()
|
[
"def",
"list",
"(",
"self",
",",
"chain",
")",
":",
"if",
"self",
".",
"user_is_root",
"(",
")",
":",
"if",
"len",
"(",
"chain",
")",
">",
"0",
":",
"print",
"(",
"chain",
")",
"# Check chain",
"if",
"not",
"self",
".",
"is_valid_chain",
"(",
"chain",
")",
":",
"return",
"chains",
"=",
"[",
"chain",
"]",
"else",
":",
"chains",
"=",
"iter",
"(",
"self",
".",
"current_table",
".",
"keys",
"(",
")",
")",
"# Output buffer",
"output",
"=",
"[",
"]",
"for",
"chain",
"in",
"chains",
":",
"# Chain table header",
"chain_output",
"=",
"[",
"\"Chain %s (policy ACCEPT)\"",
"%",
"chain",
",",
"\"target prot opt source destination\"",
",",
"]",
"# Format the rules",
"for",
"rule",
"in",
"self",
".",
"current_table",
"[",
"chain",
"]",
":",
"chain_output",
".",
"append",
"(",
"\"%-10s %-4s %-3s %-20s %-20s %s %s\"",
"%",
"rule",
",",
")",
"# Create one string",
"output",
".",
"append",
"(",
"\"\\n\"",
".",
"join",
"(",
"chain_output",
")",
")",
"# Done",
"self",
".",
"write",
"(",
"\"{}\\n\"",
".",
"format",
"(",
"\"\\n\\n\"",
".",
"join",
"(",
"output",
")",
")",
")",
"self",
".",
"exit",
"(",
")",
"else",
":",
"self",
".",
"no_permission",
"(",
")"
] |
https://github.com/cowrie/cowrie/blob/86488fa4eed1559574653eee4c76b4a33d1b42db/src/cowrie/commands/iptables.py#L385-L424
|
||
lazylibrarian/LazyLibrarian
|
ae3c14e9db9328ce81765e094ab2a14ed7155624
|
lib/requests/packages/idna/intranges.py
|
python
|
intranges_from_list
|
(list_)
|
return tuple(ranges)
|
Represent a list of integers as a sequence of ranges:
((start_0, end_0), (start_1, end_1), ...), such that the original
integers are exactly those x such that start_i <= x < end_i for some i.
|
Represent a list of integers as a sequence of ranges:
((start_0, end_0), (start_1, end_1), ...), such that the original
integers are exactly those x such that start_i <= x < end_i for some i.
|
[
"Represent",
"a",
"list",
"of",
"integers",
"as",
"a",
"sequence",
"of",
"ranges",
":",
"((",
"start_0",
"end_0",
")",
"(",
"start_1",
"end_1",
")",
"...",
")",
"such",
"that",
"the",
"original",
"integers",
"are",
"exactly",
"those",
"x",
"such",
"that",
"start_i",
"<",
"=",
"x",
"<",
"end_i",
"for",
"some",
"i",
"."
] |
def intranges_from_list(list_):
"""Represent a list of integers as a sequence of ranges:
((start_0, end_0), (start_1, end_1), ...), such that the original
integers are exactly those x such that start_i <= x < end_i for some i.
"""
sorted_list = sorted(list_)
ranges = []
last_write = -1
for i in range(len(sorted_list)):
if i+1 < len(sorted_list):
if sorted_list[i] == sorted_list[i+1]-1:
continue
current_range = sorted_list[last_write+1:i+1]
range_tuple = (current_range[0], current_range[-1] + 1)
ranges.append(range_tuple)
last_write = i
return tuple(ranges)
|
[
"def",
"intranges_from_list",
"(",
"list_",
")",
":",
"sorted_list",
"=",
"sorted",
"(",
"list_",
")",
"ranges",
"=",
"[",
"]",
"last_write",
"=",
"-",
"1",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"sorted_list",
")",
")",
":",
"if",
"i",
"+",
"1",
"<",
"len",
"(",
"sorted_list",
")",
":",
"if",
"sorted_list",
"[",
"i",
"]",
"==",
"sorted_list",
"[",
"i",
"+",
"1",
"]",
"-",
"1",
":",
"continue",
"current_range",
"=",
"sorted_list",
"[",
"last_write",
"+",
"1",
":",
"i",
"+",
"1",
"]",
"range_tuple",
"=",
"(",
"current_range",
"[",
"0",
"]",
",",
"current_range",
"[",
"-",
"1",
"]",
"+",
"1",
")",
"ranges",
".",
"append",
"(",
"range_tuple",
")",
"last_write",
"=",
"i",
"return",
"tuple",
"(",
"ranges",
")"
] |
https://github.com/lazylibrarian/LazyLibrarian/blob/ae3c14e9db9328ce81765e094ab2a14ed7155624/lib/requests/packages/idna/intranges.py#L10-L28
|
|
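A worked example for the intranges_from_list record above. The function is self-contained, so the sketch below simply copies it and prints the half-open (start, end) ranges described in its docstring.

def intranges_from_list(list_):
    """Represent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.
    """
    sorted_list = sorted(list_)
    ranges = []
    last_write = -1
    for i in range(len(sorted_list)):
        if i + 1 < len(sorted_list):
            if sorted_list[i] == sorted_list[i + 1] - 1:
                continue
        current_range = sorted_list[last_write + 1:i + 1]
        range_tuple = (current_range[0], current_range[-1] + 1)
        ranges.append(range_tuple)
        last_write = i
    return tuple(ranges)

# 1, 2, 3 collapse into the half-open range [1, 4); 7 and 10, 11 become [7, 8) and [10, 12).
print(intranges_from_list([1, 2, 3, 7, 10, 11]))  # ((1, 4), (7, 8), (10, 12))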
nlloyd/SubliminalCollaborator
|
5c619e17ddbe8acb9eea8996ec038169ddcd50a1
|
libs/twisted/conch/ssh/agent.py
|
python
|
SSHAgentClient.signData
|
(self, blob, data)
|
return self.sendRequest(AGENTC_SIGN_REQUEST, req).addCallback(self._cbSignData)
|
Request that the agent sign the given C{data} with the private key
which corresponds to the public key given by C{blob}. The private
key should have been added to the agent already.
@type blob: C{str}
@type data: C{str}
@return: A L{Deferred} which fires with a signature for given data
created with the given key.
|
Request that the agent sign the given C{data} with the private key
which corresponds to the public key given by C{blob}. The private
key should have been added to the agent already.
|
[
"Request",
"that",
"the",
"agent",
"sign",
"the",
"given",
"C",
"{",
"data",
"}",
"with",
"the",
"private",
"key",
"which",
"corresponds",
"to",
"the",
"public",
"key",
"given",
"by",
"C",
"{",
"blob",
"}",
".",
"The",
"private",
"key",
"should",
"have",
"been",
"added",
"to",
"the",
"agent",
"already",
"."
] |
def signData(self, blob, data):
"""
Request that the agent sign the given C{data} with the private key
which corresponds to the public key given by C{blob}. The private
key should have been added to the agent already.
@type blob: C{str}
@type data: C{str}
@return: A L{Deferred} which fires with a signature for given data
created with the given key.
"""
req = NS(blob)
req += NS(data)
req += '\000\000\000\000' # flags
return self.sendRequest(AGENTC_SIGN_REQUEST, req).addCallback(self._cbSignData)
|
[
"def",
"signData",
"(",
"self",
",",
"blob",
",",
"data",
")",
":",
"req",
"=",
"NS",
"(",
"blob",
")",
"req",
"+=",
"NS",
"(",
"data",
")",
"req",
"+=",
"'\\000\\000\\000\\000'",
"# flags",
"return",
"self",
".",
"sendRequest",
"(",
"AGENTC_SIGN_REQUEST",
",",
"req",
")",
".",
"addCallback",
"(",
"self",
".",
"_cbSignData",
")"
] |
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/conch/ssh/agent.py#L97-L111
|
|
Tramac/Lightweight-Segmentation
|
e257478e20eb0e1f4476b19a36101694be208f65
|
light/utils/distributed.py
|
python
|
synchronize
|
()
|
Helper function to synchronize (barrier) among all processes when
using distributed training
|
Helper function to synchronize (barrier) among all processes when
using distributed training
|
[
"Helper",
"function",
"to",
"synchronize",
"(",
"barrier",
")",
"among",
"all",
"processes",
"when",
"using",
"distributed",
"training"
] |
def synchronize():
"""
Helper function to synchronize (barrier) among all processes when
using distributed training
"""
if not dist.is_available():
return
if not dist.is_initialized():
return
world_size = dist.get_world_size()
if world_size == 1:
return
dist.barrier()
|
[
"def",
"synchronize",
"(",
")",
":",
"if",
"not",
"dist",
".",
"is_available",
"(",
")",
":",
"return",
"if",
"not",
"dist",
".",
"is_initialized",
"(",
")",
":",
"return",
"world_size",
"=",
"dist",
".",
"get_world_size",
"(",
")",
"if",
"world_size",
"==",
"1",
":",
"return",
"dist",
".",
"barrier",
"(",
")"
] |
https://github.com/Tramac/Lightweight-Segmentation/blob/e257478e20eb0e1f4476b19a36101694be208f65/light/utils/distributed.py#L39-L51
|
||
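A hedged usage sketch for the synchronize barrier above: the helper is copied from the record, while the checkpoint-saving call site is hypothetical (the function name, path handling, and the assumption that dist.init_process_group was already called are mine, not from Lightweight-Segmentation).

import torch
import torch.distributed as dist

def synchronize():
    """Barrier across all processes; a no-op outside distributed training."""
    if not dist.is_available():
        return
    if not dist.is_initialized():
        return
    if dist.get_world_size() == 1:
        return
    dist.barrier()

# Hypothetical call site: rank 0 writes the checkpoint, every rank waits for it
# before continuing (e.g. before reloading or validating from that file).
def save_checkpoint_rank0(model, path):
    if not dist.is_initialized() or dist.get_rank() == 0:
        torch.save(model.state_dict(), path)
    synchronize()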
pytorch/examples
|
151944ecaf9ba2c8288ee550143ae7ffdaa90a80
|
super_resolution/data.py
|
python
|
calculate_valid_crop_size
|
(crop_size, upscale_factor)
|
return crop_size - (crop_size % upscale_factor)
|
[] |
def calculate_valid_crop_size(crop_size, upscale_factor):
return crop_size - (crop_size % upscale_factor)
|
[
"def",
"calculate_valid_crop_size",
"(",
"crop_size",
",",
"upscale_factor",
")",
":",
"return",
"crop_size",
"-",
"(",
"crop_size",
"%",
"upscale_factor",
")"
] |
https://github.com/pytorch/examples/blob/151944ecaf9ba2c8288ee550143ae7ffdaa90a80/super_resolution/data.py#L34-L35
|
|||
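A quick worked example of the one-line formula in the calculate_valid_crop_size record above: the crop size is rounded down to the nearest multiple of the upscale factor, so the downscaled low-resolution patch has an integer size.

def calculate_valid_crop_size(crop_size, upscale_factor):
    return crop_size - (crop_size % upscale_factor)

# 255 is not divisible by 4, so it is trimmed to 252; 252 / 4 gives a 63-pixel LR patch.
print(calculate_valid_crop_size(255, 4))  # 252
print(calculate_valid_crop_size(256, 4))  # 256 (already a multiple)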
numba/numba
|
bf480b9e0da858a65508c2b17759a72ee6a44c51
|
numba/np/npyfuncs.py
|
python
|
np_int_sdiv_impl
|
(context, builder, sig, args)
|
return result
|
[] |
def np_int_sdiv_impl(context, builder, sig, args):
# based on the actual code in NumPy loops.c.src for signed integer types
_check_arity_and_homogeneity(sig, args, 2)
num, den = args
ty = sig.args[0] # any arg type will do, homogeneous
ZERO = context.get_constant(ty, 0)
MINUS_ONE = context.get_constant(ty, -1)
MIN_INT = context.get_constant(ty, 1 << (den.type.width-1))
den_is_zero = builder.icmp(lc.ICMP_EQ, ZERO, den)
den_is_minus_one = builder.icmp(lc.ICMP_EQ, MINUS_ONE, den)
num_is_min_int = builder.icmp(lc.ICMP_EQ, MIN_INT, num)
could_cause_sigfpe = builder.and_(den_is_minus_one, num_is_min_int)
force_zero = builder.or_(den_is_zero, could_cause_sigfpe)
with builder.if_else(force_zero, likely=False) as (then, otherwise):
with then:
bb_then = builder.basic_block
with otherwise:
bb_otherwise = builder.basic_block
div = builder.sdiv(num, den)
mod = builder.srem(num, den)
num_gt_zero = builder.icmp(lc.ICMP_SGT, num, ZERO)
den_gt_zero = builder.icmp(lc.ICMP_SGT, den, ZERO)
not_same_sign = builder.xor(num_gt_zero, den_gt_zero)
mod_not_zero = builder.icmp(lc.ICMP_NE, mod, ZERO)
needs_fixing = builder.and_(not_same_sign, mod_not_zero)
fix_value = builder.select(needs_fixing, MINUS_ONE, ZERO)
result_otherwise = builder.add(div, fix_value)
result = builder.phi(ZERO.type)
result.add_incoming(ZERO, bb_then)
result.add_incoming(result_otherwise, bb_otherwise)
return result
|
[
"def",
"np_int_sdiv_impl",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
")",
":",
"# based on the actual code in NumPy loops.c.src for signed integer types",
"_check_arity_and_homogeneity",
"(",
"sig",
",",
"args",
",",
"2",
")",
"num",
",",
"den",
"=",
"args",
"ty",
"=",
"sig",
".",
"args",
"[",
"0",
"]",
"# any arg type will do, homogeneous",
"ZERO",
"=",
"context",
".",
"get_constant",
"(",
"ty",
",",
"0",
")",
"MINUS_ONE",
"=",
"context",
".",
"get_constant",
"(",
"ty",
",",
"-",
"1",
")",
"MIN_INT",
"=",
"context",
".",
"get_constant",
"(",
"ty",
",",
"1",
"<<",
"(",
"den",
".",
"type",
".",
"width",
"-",
"1",
")",
")",
"den_is_zero",
"=",
"builder",
".",
"icmp",
"(",
"lc",
".",
"ICMP_EQ",
",",
"ZERO",
",",
"den",
")",
"den_is_minus_one",
"=",
"builder",
".",
"icmp",
"(",
"lc",
".",
"ICMP_EQ",
",",
"MINUS_ONE",
",",
"den",
")",
"num_is_min_int",
"=",
"builder",
".",
"icmp",
"(",
"lc",
".",
"ICMP_EQ",
",",
"MIN_INT",
",",
"num",
")",
"could_cause_sigfpe",
"=",
"builder",
".",
"and_",
"(",
"den_is_minus_one",
",",
"num_is_min_int",
")",
"force_zero",
"=",
"builder",
".",
"or_",
"(",
"den_is_zero",
",",
"could_cause_sigfpe",
")",
"with",
"builder",
".",
"if_else",
"(",
"force_zero",
",",
"likely",
"=",
"False",
")",
"as",
"(",
"then",
",",
"otherwise",
")",
":",
"with",
"then",
":",
"bb_then",
"=",
"builder",
".",
"basic_block",
"with",
"otherwise",
":",
"bb_otherwise",
"=",
"builder",
".",
"basic_block",
"div",
"=",
"builder",
".",
"sdiv",
"(",
"num",
",",
"den",
")",
"mod",
"=",
"builder",
".",
"srem",
"(",
"num",
",",
"den",
")",
"num_gt_zero",
"=",
"builder",
".",
"icmp",
"(",
"lc",
".",
"ICMP_SGT",
",",
"num",
",",
"ZERO",
")",
"den_gt_zero",
"=",
"builder",
".",
"icmp",
"(",
"lc",
".",
"ICMP_SGT",
",",
"den",
",",
"ZERO",
")",
"not_same_sign",
"=",
"builder",
".",
"xor",
"(",
"num_gt_zero",
",",
"den_gt_zero",
")",
"mod_not_zero",
"=",
"builder",
".",
"icmp",
"(",
"lc",
".",
"ICMP_NE",
",",
"mod",
",",
"ZERO",
")",
"needs_fixing",
"=",
"builder",
".",
"and_",
"(",
"not_same_sign",
",",
"mod_not_zero",
")",
"fix_value",
"=",
"builder",
".",
"select",
"(",
"needs_fixing",
",",
"MINUS_ONE",
",",
"ZERO",
")",
"result_otherwise",
"=",
"builder",
".",
"add",
"(",
"div",
",",
"fix_value",
")",
"result",
"=",
"builder",
".",
"phi",
"(",
"ZERO",
".",
"type",
")",
"result",
".",
"add_incoming",
"(",
"ZERO",
",",
"bb_then",
")",
"result",
".",
"add_incoming",
"(",
"result_otherwise",
",",
"bb_otherwise",
")",
"return",
"result"
] |
https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/np/npyfuncs.py#L125-L159
|
|||
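The np_int_sdiv_impl record above emits LLVM IR, which is hard to follow without knowing the arithmetic it encodes. The sketch below is my pure-Python rendering of what that IR computes (floor division, with 0 returned both for division by zero and for the overflowing INT_MIN / -1 case); it is an illustration of the semantics, not code from Numba.

def np_int_sdiv_reference(num, den, bits=64):
    """Pure-Python reading of the signed-integer division lowered above."""
    min_int = -(1 << (bits - 1))
    # Both hazardous cases are forced to zero instead of trapping (SIGFPE).
    if den == 0 or (den == -1 and num == min_int):
        return 0
    # The IR computes a truncating sdiv and then subtracts 1 when the operands
    # have different signs and a remainder exists; Python's // already floors.
    return num // den

print(np_int_sdiv_reference(7, -2))           # -4
print(np_int_sdiv_reference(7, 0))            # 0
print(np_int_sdiv_reference(-(1 << 63), -1))  # 0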
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/icloud/device_tracker.py
|
python
|
IcloudTrackerEntity.latitude
|
(self)
|
return self._device.location[DEVICE_LOCATION_LATITUDE]
|
Return latitude value of the device.
|
Return latitude value of the device.
|
[
"Return",
"latitude",
"value",
"of",
"the",
"device",
"."
] |
def latitude(self):
"""Return latitude value of the device."""
return self._device.location[DEVICE_LOCATION_LATITUDE]
|
[
"def",
"latitude",
"(",
"self",
")",
":",
"return",
"self",
".",
"_device",
".",
"location",
"[",
"DEVICE_LOCATION_LATITUDE",
"]"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/icloud/device_tracker.py#L94-L96
|
|
playframework/play1
|
0ecac3bc2421ae2dbec27a368bf671eda1c9cba5
|
python/Lib/calendar.py
|
python
|
Calendar.monthdayscalendar
|
(self, year, month)
|
return [ days[i:i+7] for i in range(0, len(days), 7) ]
|
Return a matrix representing a month's calendar.
Each row represents a week; days outside this month are zero.
|
Return a matrix representing a month's calendar.
Each row represents a week; days outside this month are zero.
|
[
"Return",
"a",
"matrix",
"representing",
"a",
"month",
"s",
"calendar",
".",
"Each",
"row",
"represents",
"a",
"week",
";",
"days",
"outside",
"this",
"month",
"are",
"zero",
"."
] |
def monthdayscalendar(self, year, month):
"""
Return a matrix representing a month's calendar.
Each row represents a week; days outside this month are zero.
"""
days = list(self.itermonthdays(year, month))
return [ days[i:i+7] for i in range(0, len(days), 7) ]
|
[
"def",
"monthdayscalendar",
"(",
"self",
",",
"year",
",",
"month",
")",
":",
"days",
"=",
"list",
"(",
"self",
".",
"itermonthdays",
"(",
"year",
",",
"month",
")",
")",
"return",
"[",
"days",
"[",
"i",
":",
"i",
"+",
"7",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"days",
")",
",",
"7",
")",
"]"
] |
https://github.com/playframework/play1/blob/0ecac3bc2421ae2dbec27a368bf671eda1c9cba5/python/Lib/calendar.py#L213-L219
|
|
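The monthdayscalendar record above comes from a bundled copy of the standard library, so the same method can be exercised with calendar.Calendar directly; the example below shows the zero-padded week rows for July 2021 (weeks start on Monday by default).

import calendar

cal = calendar.Calendar()  # firstweekday=0, i.e. Monday
for week in cal.monthdayscalendar(2021, 7):
    print(week)
# [0, 0, 0, 1, 2, 3, 4]   <- July 1st 2021 fell on a Thursday
# [5, 6, 7, 8, 9, 10, 11]
# ... remaining weeks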
beeware/ouroboros
|
a29123c6fab6a807caffbb7587cf548e0c370296
|
ouroboros/lib2to3/pgen2/grammar.py
|
python
|
Grammar.copy
|
(self)
|
return new
|
Copy the grammar.
|
Copy the grammar.
|
[
"Copy",
"the",
"grammar",
"."
] |
def copy(self):
"""
Copy the grammar.
"""
new = self.__class__()
for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords",
"tokens", "symbol2label"):
setattr(new, dict_attr, getattr(self, dict_attr).copy())
new.labels = self.labels[:]
new.states = self.states[:]
new.start = self.start
return new
|
[
"def",
"copy",
"(",
"self",
")",
":",
"new",
"=",
"self",
".",
"__class__",
"(",
")",
"for",
"dict_attr",
"in",
"(",
"\"symbol2number\"",
",",
"\"number2symbol\"",
",",
"\"dfas\"",
",",
"\"keywords\"",
",",
"\"tokens\"",
",",
"\"symbol2label\"",
")",
":",
"setattr",
"(",
"new",
",",
"dict_attr",
",",
"getattr",
"(",
"self",
",",
"dict_attr",
")",
".",
"copy",
"(",
")",
")",
"new",
".",
"labels",
"=",
"self",
".",
"labels",
"[",
":",
"]",
"new",
".",
"states",
"=",
"self",
".",
"states",
"[",
":",
"]",
"new",
".",
"start",
"=",
"self",
".",
"start",
"return",
"new"
] |
https://github.com/beeware/ouroboros/blob/a29123c6fab6a807caffbb7587cf548e0c370296/ouroboros/lib2to3/pgen2/grammar.py#L98-L109
|
|
crflynn/stochastic
|
a563762682a784a24e8b8705995b0e4cafad469f
|
stochastic/processes/continuous/mixed_poisson.py
|
python
|
MixedPoissonProcess.rate_args
|
(self)
|
return self._rate_args
|
Positional arguments for the rate function.
|
Positional arguments for the rate function.
|
[
"Positional",
"arguments",
"for",
"the",
"rate",
"function",
"."
] |
def rate_args(self):
"""Positional arguments for the rate function."""
return self._rate_args
|
[
"def",
"rate_args",
"(",
"self",
")",
":",
"return",
"self",
".",
"_rate_args"
] |
https://github.com/crflynn/stochastic/blob/a563762682a784a24e8b8705995b0e4cafad469f/stochastic/processes/continuous/mixed_poisson.py#L53-L55
|
|
oracle/oci-python-sdk
|
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
|
src/oci/vulnerability_scanning/vulnerability_scanning_client_composite_operations.py
|
python
|
VulnerabilityScanningClientCompositeOperations.create_container_scan_recipe_and_wait_for_state
|
(self, create_container_scan_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={})
|
Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
to enter the given state(s).
:param oci.vulnerability_scanning.models.CreateContainerScanRecipeDetails create_container_scan_recipe_details: (required)
Details for the new ContainerScanRecipe
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
|
Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
to enter the given state(s).
|
[
"Calls",
":",
"py",
":",
"func",
":",
"~oci",
".",
"vulnerability_scanning",
".",
"VulnerabilityScanningClient",
".",
"create_container_scan_recipe",
"and",
"waits",
"for",
"the",
":",
"py",
":",
"class",
":",
"~oci",
".",
"vulnerability_scanning",
".",
"models",
".",
"WorkRequest",
"to",
"enter",
"the",
"given",
"state",
"(",
"s",
")",
"."
] |
def create_container_scan_recipe_and_wait_for_state(self, create_container_scan_recipe_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_recipe` and waits for the :py:class:`~oci.vulnerability_scanning.models.WorkRequest`
to enter the given state(s).
:param oci.vulnerability_scanning.models.CreateContainerScanRecipeDetails create_container_scan_recipe_details: (required)
Details for the new ContainerScanRecipe
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.vulnerability_scanning.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.vulnerability_scanning.VulnerabilityScanningClient.create_container_scan_recipe`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_container_scan_recipe(create_container_scan_recipe_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
|
[
"def",
"create_container_scan_recipe_and_wait_for_state",
"(",
"self",
",",
"create_container_scan_recipe_details",
",",
"wait_for_states",
"=",
"[",
"]",
",",
"operation_kwargs",
"=",
"{",
"}",
",",
"waiter_kwargs",
"=",
"{",
"}",
")",
":",
"operation_result",
"=",
"self",
".",
"client",
".",
"create_container_scan_recipe",
"(",
"create_container_scan_recipe_details",
",",
"*",
"*",
"operation_kwargs",
")",
"if",
"not",
"wait_for_states",
":",
"return",
"operation_result",
"lowered_wait_for_states",
"=",
"[",
"w",
".",
"lower",
"(",
")",
"for",
"w",
"in",
"wait_for_states",
"]",
"wait_for_resource_id",
"=",
"operation_result",
".",
"headers",
"[",
"'opc-work-request-id'",
"]",
"try",
":",
"waiter_result",
"=",
"oci",
".",
"wait_until",
"(",
"self",
".",
"client",
",",
"self",
".",
"client",
".",
"get_work_request",
"(",
"wait_for_resource_id",
")",
",",
"evaluate_response",
"=",
"lambda",
"r",
":",
"getattr",
"(",
"r",
".",
"data",
",",
"'status'",
")",
"and",
"getattr",
"(",
"r",
".",
"data",
",",
"'status'",
")",
".",
"lower",
"(",
")",
"in",
"lowered_wait_for_states",
",",
"*",
"*",
"waiter_kwargs",
")",
"result_to_return",
"=",
"waiter_result",
"return",
"result_to_return",
"except",
"Exception",
"as",
"e",
":",
"raise",
"oci",
".",
"exceptions",
".",
"CompositeOperationError",
"(",
"partial_results",
"=",
"[",
"operation_result",
"]",
",",
"cause",
"=",
"e",
")"
] |
https://github.com/oracle/oci-python-sdk/blob/3c1604e4e212008fb6718e2f68cdb5ef71fd5793/src/oci/vulnerability_scanning/vulnerability_scanning_client_composite_operations.py#L26-L62
|
||
redis/redis-py
|
0affa0ed3f3cbcb6dec29b34a580f769f69ae9f7
|
redis/connection.py
|
python
|
Connection.set_parser
|
(self, parser_class)
|
Creates a new instance of parser_class with socket size:
_socket_read_size and assigns it to the parser for the connection
:param parser_class: The required parser class
|
Creates a new instance of parser_class with socket size:
_socket_read_size and assigns it to the parser for the connection
:param parser_class: The required parser class
|
[
"Creates",
"a",
"new",
"instance",
"of",
"parser_class",
"with",
"socket",
"size",
":",
"_socket_read_size",
"and",
"assigns",
"it",
"to",
"the",
"parser",
"for",
"the",
"connection",
":",
"param",
"parser_class",
":",
"The",
"required",
"parser",
"class"
] |
def set_parser(self, parser_class):
"""
Creates a new instance of parser_class with socket size:
_socket_read_size and assigns it to the parser for the connection
:param parser_class: The required parser class
"""
self._parser = parser_class(socket_read_size=self._socket_read_size)
|
[
"def",
"set_parser",
"(",
"self",
",",
"parser_class",
")",
":",
"self",
".",
"_parser",
"=",
"parser_class",
"(",
"socket_read_size",
"=",
"self",
".",
"_socket_read_size",
")"
] |
https://github.com/redis/redis-py/blob/0affa0ed3f3cbcb6dec29b34a580f769f69ae9f7/redis/connection.py#L594-L600
|
||
jgagneastro/coffeegrindsize
|
22661ebd21831dba4cf32bfc6ba59fe3d49f879c
|
App/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/response.py
|
python
|
HTTPResponse._flush_decoder
|
(self)
|
return b''
|
Flushes the decoder. Should only be called if the decoder is actually
being used.
|
Flushes the decoder. Should only be called if the decoder is actually
being used.
|
[
"Flushes",
"the",
"decoder",
".",
"Should",
"only",
"be",
"called",
"if",
"the",
"decoder",
"is",
"actually",
"being",
"used",
"."
] |
def _flush_decoder(self):
"""
Flushes the decoder. Should only be called if the decoder is actually
being used.
"""
if self._decoder:
buf = self._decoder.decompress(b'')
return buf + self._decoder.flush()
return b''
|
[
"def",
"_flush_decoder",
"(",
"self",
")",
":",
"if",
"self",
".",
"_decoder",
":",
"buf",
"=",
"self",
".",
"_decoder",
".",
"decompress",
"(",
"b''",
")",
"return",
"buf",
"+",
"self",
".",
"_decoder",
".",
"flush",
"(",
")",
"return",
"b''"
] |
https://github.com/jgagneastro/coffeegrindsize/blob/22661ebd21831dba4cf32bfc6ba59fe3d49f879c/App/venv/lib/python3.7/site-packages/pip/_vendor/urllib3/response.py#L336-L345
|
|
dbt-labs/dbt-core
|
e943b9fc842535e958ef4fd0b8703adc91556bc6
|
core/dbt/adapters/protocol.py
|
python
|
AdapterProtocol.get_thread_identifier
|
()
|
[] |
def get_thread_identifier() -> Hashable:
...
|
[
"def",
"get_thread_identifier",
"(",
")",
"->",
"Hashable",
":",
"..."
] |
https://github.com/dbt-labs/dbt-core/blob/e943b9fc842535e958ef4fd0b8703adc91556bc6/core/dbt/adapters/protocol.py#L109-L110
|
||||
pytroll/satpy
|
09e51f932048f98cce7919a4ff8bd2ec01e1ae98
|
satpy/readers/fci_l1c_nc.py
|
python
|
FCIL1cNCFileHandler.calibrate_counts_to_rad
|
(self, data, key)
|
return data
|
Calibrate counts to radiances.
|
Calibrate counts to radiances.
|
[
"Calibrate",
"counts",
"to",
"radiances",
"."
] |
def calibrate_counts_to_rad(self, data, key):
"""Calibrate counts to radiances."""
if key['name'] == 'ir_38':
data = xr.where(((2 ** 12 - 1 < data) & (data <= 2 ** 13 - 1)),
(data * data.attrs.get("warm_scale_factor", 1) +
data.attrs.get("warm_add_offset", 0)),
(data * data.attrs.get("scale_factor", 1) +
data.attrs.get("add_offset", 0))
)
else:
data = (data * data.attrs.get("scale_factor", 1) +
data.attrs.get("add_offset", 0))
measured = self.get_channel_measured_group_path(key['name'])
data.attrs.update({'radiance_unit_conversion_coefficient': self[measured +
'/radiance_unit_conversion_coefficient']})
return data
|
[
"def",
"calibrate_counts_to_rad",
"(",
"self",
",",
"data",
",",
"key",
")",
":",
"if",
"key",
"[",
"'name'",
"]",
"==",
"'ir_38'",
":",
"data",
"=",
"xr",
".",
"where",
"(",
"(",
"(",
"2",
"**",
"12",
"-",
"1",
"<",
"data",
")",
"&",
"(",
"data",
"<=",
"2",
"**",
"13",
"-",
"1",
")",
")",
",",
"(",
"data",
"*",
"data",
".",
"attrs",
".",
"get",
"(",
"\"warm_scale_factor\"",
",",
"1",
")",
"+",
"data",
".",
"attrs",
".",
"get",
"(",
"\"warm_add_offset\"",
",",
"0",
")",
")",
",",
"(",
"data",
"*",
"data",
".",
"attrs",
".",
"get",
"(",
"\"scale_factor\"",
",",
"1",
")",
"+",
"data",
".",
"attrs",
".",
"get",
"(",
"\"add_offset\"",
",",
"0",
")",
")",
")",
"else",
":",
"data",
"=",
"(",
"data",
"*",
"data",
".",
"attrs",
".",
"get",
"(",
"\"scale_factor\"",
",",
"1",
")",
"+",
"data",
".",
"attrs",
".",
"get",
"(",
"\"add_offset\"",
",",
"0",
")",
")",
"measured",
"=",
"self",
".",
"get_channel_measured_group_path",
"(",
"key",
"[",
"'name'",
"]",
")",
"data",
".",
"attrs",
".",
"update",
"(",
"{",
"'radiance_unit_conversion_coefficient'",
":",
"self",
"[",
"measured",
"+",
"'/radiance_unit_conversion_coefficient'",
"]",
"}",
")",
"return",
"data"
] |
https://github.com/pytroll/satpy/blob/09e51f932048f98cce7919a4ff8bd2ec01e1ae98/satpy/readers/fci_l1c_nc.py#L466-L482
|
|
HymanLiuTS/flaskTs
|
286648286976e85d9b9a5873632331efcafe0b21
|
flasky/lib/python2.7/warnings.py
|
python
|
warn
|
(message, category=None, stacklevel=1)
|
Issue a warning, or maybe ignore it or raise an exception.
|
Issue a warning, or maybe ignore it or raise an exception.
|
[
"Issue",
"a",
"warning",
"or",
"maybe",
"ignore",
"it",
"or",
"raise",
"an",
"exception",
"."
] |
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
|
[
"def",
"warn",
"(",
"message",
",",
"category",
"=",
"None",
",",
"stacklevel",
"=",
"1",
")",
":",
"# Check if message is already a Warning object",
"if",
"isinstance",
"(",
"message",
",",
"Warning",
")",
":",
"category",
"=",
"message",
".",
"__class__",
"# Check category argument",
"if",
"category",
"is",
"None",
":",
"category",
"=",
"UserWarning",
"assert",
"issubclass",
"(",
"category",
",",
"Warning",
")",
"# Get context information",
"try",
":",
"caller",
"=",
"sys",
".",
"_getframe",
"(",
"stacklevel",
")",
"except",
"ValueError",
":",
"globals",
"=",
"sys",
".",
"__dict__",
"lineno",
"=",
"1",
"else",
":",
"globals",
"=",
"caller",
".",
"f_globals",
"lineno",
"=",
"caller",
".",
"f_lineno",
"if",
"'__name__'",
"in",
"globals",
":",
"module",
"=",
"globals",
"[",
"'__name__'",
"]",
"else",
":",
"module",
"=",
"\"<string>\"",
"filename",
"=",
"globals",
".",
"get",
"(",
"'__file__'",
")",
"if",
"filename",
":",
"fnl",
"=",
"filename",
".",
"lower",
"(",
")",
"if",
"fnl",
".",
"endswith",
"(",
"(",
"\".pyc\"",
",",
"\".pyo\"",
")",
")",
":",
"filename",
"=",
"filename",
"[",
":",
"-",
"1",
"]",
"else",
":",
"if",
"module",
"==",
"\"__main__\"",
":",
"try",
":",
"filename",
"=",
"sys",
".",
"argv",
"[",
"0",
"]",
"except",
"AttributeError",
":",
"# embedded interpreters don't have sys.argv, see bug #839151",
"filename",
"=",
"'__main__'",
"if",
"not",
"filename",
":",
"filename",
"=",
"module",
"registry",
"=",
"globals",
".",
"setdefault",
"(",
"\"__warningregistry__\"",
",",
"{",
"}",
")",
"warn_explicit",
"(",
"message",
",",
"category",
",",
"filename",
",",
"lineno",
",",
"module",
",",
"registry",
",",
"globals",
")"
] |
https://github.com/HymanLiuTS/flaskTs/blob/286648286976e85d9b9a5873632331efcafe0b21/flasky/lib/python2.7/warnings.py#L197-L235
|
||
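A small usage sketch for the warn record above (the call signature matches the modern warnings.warn), showing how category and stacklevel are typically used; the deprecated-function scenario is invented for illustration.

import warnings

def load_config(path):
    # stacklevel=2 attributes the warning to the caller of load_config, not to this line.
    warnings.warn("load_config is deprecated, use load_settings instead",
                  DeprecationWarning, stacklevel=2)
    return {}

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    load_config("settings.ini")

print(caught[0].category.__name__)  # DeprecationWarning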
paulwinex/pw_MultiScriptEditor
|
e447e99f87cb07e238baf693b7e124e50efdbc51
|
multi_script_editor/managers/nuke/callbacks.py
|
python
|
addOnDestroy
|
(call, args=(), kwargs={}, nodeClass='*')
|
Add code to execute when a node is destroyed
|
Add code to execute when a node is destroyed
|
[
"Add",
"code",
"to",
"execute",
"when",
"a",
"node",
"is",
"destroyed"
] |
def addOnDestroy(call, args=(), kwargs={}, nodeClass='*'):
"""Add code to execute when a node is destroyed"""
pass
|
[
"def",
"addOnDestroy",
"(",
"call",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"{",
"}",
",",
"nodeClass",
"=",
"'*'",
")",
":",
"pass"
] |
https://github.com/paulwinex/pw_MultiScriptEditor/blob/e447e99f87cb07e238baf693b7e124e50efdbc51/multi_script_editor/managers/nuke/callbacks.py#L75-L77
|
||
Source-Python-Dev-Team/Source.Python
|
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
|
addons/source-python/packages/site-packages/babel/messages/checkers.py
|
python
|
num_plurals
|
(catalog, message)
|
Verify the number of plurals in the translation.
|
Verify the number of plurals in the translation.
|
[
"Verify",
"the",
"number",
"of",
"plurals",
"in",
"the",
"translation",
"."
] |
def num_plurals(catalog, message):
"""Verify the number of plurals in the translation."""
if not message.pluralizable:
if not isinstance(message.string, string_types):
raise TranslationError("Found plural forms for non-pluralizable "
"message")
return
# skip further tests if no catalog is provided.
elif catalog is None:
return
msgstrs = message.string
if not isinstance(msgstrs, (list, tuple)):
msgstrs = (msgstrs,)
if len(msgstrs) != catalog.num_plurals:
raise TranslationError("Wrong number of plural forms (expected %d)" %
catalog.num_plurals)
|
[
"def",
"num_plurals",
"(",
"catalog",
",",
"message",
")",
":",
"if",
"not",
"message",
".",
"pluralizable",
":",
"if",
"not",
"isinstance",
"(",
"message",
".",
"string",
",",
"string_types",
")",
":",
"raise",
"TranslationError",
"(",
"\"Found plural forms for non-pluralizable \"",
"\"message\"",
")",
"return",
"# skip further tests if no catalog is provided.",
"elif",
"catalog",
"is",
"None",
":",
"return",
"msgstrs",
"=",
"message",
".",
"string",
"if",
"not",
"isinstance",
"(",
"msgstrs",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"msgstrs",
"=",
"(",
"msgstrs",
",",
")",
"if",
"len",
"(",
"msgstrs",
")",
"!=",
"catalog",
".",
"num_plurals",
":",
"raise",
"TranslationError",
"(",
"\"Wrong number of plural forms (expected %d)\"",
"%",
"catalog",
".",
"num_plurals",
")"
] |
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/packages/site-packages/babel/messages/checkers.py#L26-L43
|
||
laughingman7743/PyAthena
|
417749914247cabca2325368c6eda337b28b47f0
|
pyathena/formatter.py
|
python
|
Formatter.remove
|
(self, type_: Type[Any])
|
[] |
def remove(self, type_: Type[Any]) -> None:
self.mappings.pop(type_, None)
|
[
"def",
"remove",
"(",
"self",
",",
"type_",
":",
"Type",
"[",
"Any",
"]",
")",
"->",
"None",
":",
"self",
".",
"mappings",
".",
"pop",
"(",
"type_",
",",
"None",
")"
] |
https://github.com/laughingman7743/PyAthena/blob/417749914247cabca2325368c6eda337b28b47f0/pyathena/formatter.py#L40-L41
|
||||
ARM-DOE/pyart
|
72affe5b669f1996cd3cc39ec7d8dd29b838bd48
|
pyart/io/nexrad_level2.py
|
python
|
NEXRADLevel2File.location
|
(self)
|
Find the location of the radar.
Returns all zeros if location is not available.
Returns
-------
latitude : float
Latitude of the radar in degrees.
longitude : float
Longitude of the radar in degrees.
height : int
Height of radar and feedhorn in meters above mean sea level.
|
Find the location of the radar.
|
[
"Find",
"the",
"location",
"of",
"the",
"radar",
"."
] |
def location(self):
"""
Find the location of the radar.
Returns all zeros if location is not available.
Returns
-------
latitude : float
Latitude of the radar in degrees.
longitude : float
Longitude of the radar in degrees.
height : int
Height of radar and feedhorn in meters above mean sea level.
"""
if self._msg_type == '31':
dic = self.radial_records[0]['VOL']
height = dic['height'] + dic['feedhorn_height']
return dic['lat'], dic['lon'], height
else:
return 0.0, 0.0, 0.0
|
[
"def",
"location",
"(",
"self",
")",
":",
"if",
"self",
".",
"_msg_type",
"==",
"'31'",
":",
"dic",
"=",
"self",
".",
"radial_records",
"[",
"0",
"]",
"[",
"'VOL'",
"]",
"height",
"=",
"dic",
"[",
"'height'",
"]",
"+",
"dic",
"[",
"'feedhorn_height'",
"]",
"return",
"dic",
"[",
"'lat'",
"]",
",",
"dic",
"[",
"'lon'",
"]",
",",
"height",
"else",
":",
"return",
"0.0",
",",
"0.0",
",",
"0.0"
] |
https://github.com/ARM-DOE/pyart/blob/72affe5b669f1996cd3cc39ec7d8dd29b838bd48/pyart/io/nexrad_level2.py#L180-L201
|
||
conjure-up/conjure-up
|
d2bf8ab8e71ff01321d0e691a8d3e3833a047678
|
conjureup/ui/views/applicationconfigure.py
|
python
|
ApplicationConfigureView.get_non_whitelisted_option_widgets
|
(self)
|
return self._get_option_widgets(hidden, options['Options'])
|
[] |
async def get_non_whitelisted_option_widgets(self):
options = await app.juju.charmstore.config(self.application.charm)
svc_opts_whitelist = utils.get_options_whitelist(
self.application.name)
hidden = [n for n in options['Options'].keys()
if n not in svc_opts_whitelist]
return self._get_option_widgets(hidden, options['Options'])
|
[
"async",
"def",
"get_non_whitelisted_option_widgets",
"(",
"self",
")",
":",
"options",
"=",
"await",
"app",
".",
"juju",
".",
"charmstore",
".",
"config",
"(",
"self",
".",
"application",
".",
"charm",
")",
"svc_opts_whitelist",
"=",
"utils",
".",
"get_options_whitelist",
"(",
"self",
".",
"application",
".",
"name",
")",
"hidden",
"=",
"[",
"n",
"for",
"n",
"in",
"options",
"[",
"'Options'",
"]",
".",
"keys",
"(",
")",
"if",
"n",
"not",
"in",
"svc_opts_whitelist",
"]",
"return",
"self",
".",
"_get_option_widgets",
"(",
"hidden",
",",
"options",
"[",
"'Options'",
"]",
")"
] |
https://github.com/conjure-up/conjure-up/blob/d2bf8ab8e71ff01321d0e691a8d3e3833a047678/conjureup/ui/views/applicationconfigure.py#L91-L98
|
|||
rhinstaller/anaconda
|
63edc8680f1b05cbfe11bef28703acba808c5174
|
pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_manager.py
|
python
|
BaseRefsList.get_refs_full_format
|
(self)
|
return result
|
Get list of refs in full format.
This formatting is used for example for installation.
The format looks like:
[app|runtime]/ref/arch/branch
examples:
runtime/org.videolan.VLC.Plugin.bdj/x86_64/3-18.08
app/org.gnome.Gnote/x86_64/stable
:return: list of refs in the full format
:rtype: [str]
|
Get list of refs in full format.
|
[
"Get",
"list",
"of",
"refs",
"in",
"full",
"format",
"."
] |
def get_refs_full_format(self):
"""Get list of refs in full format.
This formatting is used for example for installation.
The format looks like:
[app|runtime]/ref/arch/branch
examples:
runtime/org.videolan.VLC.Plugin.bdj/x86_64/3-18.08
app/org.gnome.Gnote/x86_64/stable
:return: list of refs in the full format
:rtype: [str]
"""
result = []
for ref in self.refs:
# create ref string in format "runtime/org.example.app/x86_64/f30"
result.append(ref.format_ref())
return result
|
[
"def",
"get_refs_full_format",
"(",
"self",
")",
":",
"result",
"=",
"[",
"]",
"for",
"ref",
"in",
"self",
".",
"refs",
":",
"# create ref string in format \"runtime/org.example.app/x86_64/f30\"",
"result",
".",
"append",
"(",
"ref",
".",
"format_ref",
"(",
")",
")",
"return",
"result"
] |
https://github.com/rhinstaller/anaconda/blob/63edc8680f1b05cbfe11bef28703acba808c5174/pyanaconda/modules/payloads/payload/rpm_ostree/flatpak_manager.py#L270-L290
|
|
jupyter/nbdime
|
17d103c5102f98fb83417fc54a99237795110b3a
|
nbdime/diffing/notebooks.py
|
python
|
diff_attachments
|
(a, b, path="/cells/*/attachments",
predicates=None, differs=None)
|
return di.validated()
|
Diff a pair of attachment collections
|
Diff a pair of attachment collections
|
[
"Diff",
"a",
"pair",
"of",
"attachment",
"collections"
] |
def diff_attachments(a, b, path="/cells/*/attachments",
predicates=None, differs=None):
"""Diff a pair of attachment collections"""
assert path == "/cells/*/attachments", 'Invalid path for attachment: %r' % path
# Two events can happen:
# 1: An attachment is added/removed/patched
# 2: An attachment is renamed (key change)
# Currently, #2 is handled as two ops (an add and a remove)
# keys here are 'filenames' of the attachments
if not isinstance(a, dict) or not isinstance(b, dict):
raise TypeError('Attachments stores should be dictionaries. Got %r and %r' % (a, b))
akeys = set(a.keys())
bkeys = set(b.keys())
di = MappingDiffBuilder()
# Sorting keys in loops to get a deterministic diff result
for key in sorted(akeys - bkeys):
di.remove(key)
# Handle values for keys in both a and b
for key in sorted(akeys & bkeys):
avalue = a[key]
bvalue = b[key]
dd = diff_mime_bundle(avalue, bvalue)
if dd:
di.patch(key, dd)
for key in sorted(bkeys - akeys):
di.add(key, b[key])
return di.validated()
|
[
"def",
"diff_attachments",
"(",
"a",
",",
"b",
",",
"path",
"=",
"\"/cells/*/attachments\"",
",",
"predicates",
"=",
"None",
",",
"differs",
"=",
"None",
")",
":",
"assert",
"path",
"==",
"\"/cells/*/attachments\"",
",",
"'Invalid path for attachment: %r'",
"%",
"path",
"# Two events can happen:",
"# 1: An attachment is added/removed/patched",
"# 2: An attachment is renamed (key change)",
"# Currently, #2 is handled as two ops (an add and a remove)",
"# keys here are 'filenames' of the attachments",
"if",
"not",
"isinstance",
"(",
"a",
",",
"dict",
")",
"or",
"not",
"isinstance",
"(",
"b",
",",
"dict",
")",
":",
"raise",
"TypeError",
"(",
"'Attachments stores should be dictionaries. Got %r and %r'",
"%",
"(",
"a",
",",
"b",
")",
")",
"akeys",
"=",
"set",
"(",
"a",
".",
"keys",
"(",
")",
")",
"bkeys",
"=",
"set",
"(",
"b",
".",
"keys",
"(",
")",
")",
"di",
"=",
"MappingDiffBuilder",
"(",
")",
"# Sorting keys in loops to get a deterministic diff result",
"for",
"key",
"in",
"sorted",
"(",
"akeys",
"-",
"bkeys",
")",
":",
"di",
".",
"remove",
"(",
"key",
")",
"# Handle values for keys in both a and b",
"for",
"key",
"in",
"sorted",
"(",
"akeys",
"&",
"bkeys",
")",
":",
"avalue",
"=",
"a",
"[",
"key",
"]",
"bvalue",
"=",
"b",
"[",
"key",
"]",
"dd",
"=",
"diff_mime_bundle",
"(",
"avalue",
",",
"bvalue",
")",
"if",
"dd",
":",
"di",
".",
"patch",
"(",
"key",
",",
"dd",
")",
"for",
"key",
"in",
"sorted",
"(",
"bkeys",
"-",
"akeys",
")",
":",
"di",
".",
"add",
"(",
"key",
",",
"b",
"[",
"key",
"]",
")",
"return",
"di",
".",
"validated",
"(",
")"
] |
https://github.com/jupyter/nbdime/blob/17d103c5102f98fb83417fc54a99237795110b3a/nbdime/diffing/notebooks.py#L421-L454
|
|
pret/pokemon-reverse-engineering-tools
|
5e0715f2579adcfeb683448c9a7826cfd3afa57d
|
redtools/insert_texts.py
|
python
|
txt_to_tx_far_pretty_printer
|
(address, label, target_label, include_byte=False)
|
return output
|
[] |
def txt_to_tx_far_pretty_printer(address, label, target_label, include_byte=False):
output = "\n" + label + ": ; " + hex(address) + "\n"
output += spacing + "TX_FAR " + target_label + "\n"
if include_byte:
output += spacing + "db $50\n"
return output
|
[
"def",
"txt_to_tx_far_pretty_printer",
"(",
"address",
",",
"label",
",",
"target_label",
",",
"include_byte",
"=",
"False",
")",
":",
"output",
"=",
"\"\\n\"",
"+",
"label",
"+",
"\": ; \"",
"+",
"hex",
"(",
"address",
")",
"+",
"\"\\n\"",
"output",
"+=",
"spacing",
"+",
"\"TX_FAR \"",
"+",
"target_label",
"+",
"\"\\n\"",
"if",
"include_byte",
":",
"output",
"+=",
"spacing",
"+",
"\"db $50\\n\"",
"return",
"output"
] |
https://github.com/pret/pokemon-reverse-engineering-tools/blob/5e0715f2579adcfeb683448c9a7826cfd3afa57d/redtools/insert_texts.py#L237-L242
|
|||
lohriialo/photoshop-scripting-python
|
6b97da967a5d0a45e54f7c99631b29773b923f09
|
api_reference/photoshop_2020.py
|
python
|
ColorSampler.Move
|
(self, Position=defaultNamedNotOptArg)
|
return self._oleobj_.InvokeTypes(1129534261, LCID, 1, (24, 0), ((12, 1),),Position
)
|
move the color sampler to a new location
|
move the color sampler to a new location
|
[
"move",
"the",
"color",
"sampler",
"to",
"a",
"new",
"location"
] |
def Move(self, Position=defaultNamedNotOptArg):
'move the color sampler to a new location'
return self._oleobj_.InvokeTypes(1129534261, LCID, 1, (24, 0), ((12, 1),),Position
)
|
[
"def",
"Move",
"(",
"self",
",",
"Position",
"=",
"defaultNamedNotOptArg",
")",
":",
"return",
"self",
".",
"_oleobj_",
".",
"InvokeTypes",
"(",
"1129534261",
",",
"LCID",
",",
"1",
",",
"(",
"24",
",",
"0",
")",
",",
"(",
"(",
"12",
",",
"1",
")",
",",
")",
",",
"Position",
")"
] |
https://github.com/lohriialo/photoshop-scripting-python/blob/6b97da967a5d0a45e54f7c99631b29773b923f09/api_reference/photoshop_2020.py#L1381-L1384
|
|
openwisp/django-freeradius
|
3369ab046637cc4e2f9756926b98e01e1ab100d6
|
django_freeradius/base/admin.py
|
python
|
AbstractRadiusCheckAdmin.get_fields
|
(self, request, obj=None)
|
return fields
|
do not show raw value (readonly) when adding a new item
|
do not show raw value (readonly) when adding a new item
|
[
"do",
"not",
"show",
"raw",
"value",
"(",
"readonly",
")",
"when",
"adding",
"a",
"new",
"item"
] |
def get_fields(self, request, obj=None):
""" do not show raw value (readonly) when adding a new item """
fields = self.fields[:]
if not obj:
fields.remove('value')
return fields
|
[
"def",
"get_fields",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
")",
":",
"fields",
"=",
"self",
".",
"fields",
"[",
":",
"]",
"if",
"not",
"obj",
":",
"fields",
".",
"remove",
"(",
"'value'",
")",
"return",
"fields"
] |
https://github.com/openwisp/django-freeradius/blob/3369ab046637cc4e2f9756926b98e01e1ab100d6/django_freeradius/base/admin.py#L55-L60
|
|
numenta/nupic
|
b9ebedaf54f49a33de22d8d44dff7c765cdb5548
|
src/nupic/data/generators/pattern_machine.py
|
python
|
PatternMachine.numbersForBit
|
(self, bit)
|
return numbers
|
Return the set of pattern numbers that match a bit.
@param bit (int) Index of bit
@return (set) Indices of numbers
|
Return the set of pattern numbers that match a bit.
|
[
"Return",
"the",
"set",
"of",
"pattern",
"numbers",
"that",
"match",
"a",
"bit",
"."
] |
def numbersForBit(self, bit):
"""
Return the set of pattern numbers that match a bit.
@param bit (int) Index of bit
@return (set) Indices of numbers
"""
if bit >= self._n:
raise IndexError("Invalid bit")
numbers = set()
for index, pattern in self._patterns.iteritems():
if bit in pattern:
numbers.add(index)
return numbers
|
[
"def",
"numbersForBit",
"(",
"self",
",",
"bit",
")",
":",
"if",
"bit",
">=",
"self",
".",
"_n",
":",
"raise",
"IndexError",
"(",
"\"Invalid bit\"",
")",
"numbers",
"=",
"set",
"(",
")",
"for",
"index",
",",
"pattern",
"in",
"self",
".",
"_patterns",
".",
"iteritems",
"(",
")",
":",
"if",
"bit",
"in",
"pattern",
":",
"numbers",
".",
"add",
"(",
"index",
")",
"return",
"numbers"
] |
https://github.com/numenta/nupic/blob/b9ebedaf54f49a33de22d8d44dff7c765cdb5548/src/nupic/data/generators/pattern_machine.py#L95-L112
|
|
viewfinderco/viewfinder
|
453845b5d64ab5b3b826c08b02546d1ca0a07c14
|
backend/logs/logs_util.py
|
python
|
UserAnalyticsLogsPaths.MergedDirectory
|
(self)
|
return self.kMergedLogsPrefix + '/'
|
Base directory for the merged logs.
|
Base directory for the merged logs.
|
[
"Base",
"directory",
"for",
"the",
"merged",
"logs",
"."
] |
def MergedDirectory(self):
"""Base directory for the merged logs."""
return self.kMergedLogsPrefix + '/'
|
[
"def",
"MergedDirectory",
"(",
"self",
")",
":",
"return",
"self",
".",
"kMergedLogsPrefix",
"+",
"'/'"
] |
https://github.com/viewfinderco/viewfinder/blob/453845b5d64ab5b3b826c08b02546d1ca0a07c14/backend/logs/logs_util.py#L148-L150
|
|
IronLanguages/ironpython3
|
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
|
Src/StdLib/Lib/tarfile.py
|
python
|
TarFile.extract
|
(self, member, path="", set_attrs=True)
|
Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a TarInfo object. You can
specify a different directory using `path'. File attributes (owner,
mtime, mode) are set unless `set_attrs' is False.
|
Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a TarInfo object. You can
specify a different directory using `path'. File attributes (owner,
mtime, mode) are set unless `set_attrs' is False.
|
[
"Extract",
"a",
"member",
"from",
"the",
"archive",
"to",
"the",
"current",
"working",
"directory",
"using",
"its",
"full",
"name",
".",
"Its",
"file",
"information",
"is",
"extracted",
"as",
"accurately",
"as",
"possible",
".",
"member",
"may",
"be",
"a",
"filename",
"or",
"a",
"TarInfo",
"object",
".",
"You",
"can",
"specify",
"a",
"different",
"directory",
"using",
"path",
".",
"File",
"attributes",
"(",
"owner",
"mtime",
"mode",
")",
"are",
"set",
"unless",
"set_attrs",
"is",
"False",
"."
] |
def extract(self, member, path="", set_attrs=True):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a TarInfo object. You can
specify a different directory using `path'. File attributes (owner,
mtime, mode) are set unless `set_attrs' is False.
"""
self._check("r")
if isinstance(member, str):
tarinfo = self.getmember(member)
else:
tarinfo = member
# Prepare the link target for makelink().
if tarinfo.islnk():
tarinfo._link_target = os.path.join(path, tarinfo.linkname)
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs)
except OSError as e:
if self.errorlevel > 0:
raise
else:
if e.filename is None:
self._dbg(1, "tarfile: %s" % e.strerror)
else:
self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
except ExtractError as e:
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
|
[
"def",
"extract",
"(",
"self",
",",
"member",
",",
"path",
"=",
"\"\"",
",",
"set_attrs",
"=",
"True",
")",
":",
"self",
".",
"_check",
"(",
"\"r\"",
")",
"if",
"isinstance",
"(",
"member",
",",
"str",
")",
":",
"tarinfo",
"=",
"self",
".",
"getmember",
"(",
"member",
")",
"else",
":",
"tarinfo",
"=",
"member",
"# Prepare the link target for makelink().",
"if",
"tarinfo",
".",
"islnk",
"(",
")",
":",
"tarinfo",
".",
"_link_target",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"tarinfo",
".",
"linkname",
")",
"try",
":",
"self",
".",
"_extract_member",
"(",
"tarinfo",
",",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"tarinfo",
".",
"name",
")",
",",
"set_attrs",
"=",
"set_attrs",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"self",
".",
"errorlevel",
">",
"0",
":",
"raise",
"else",
":",
"if",
"e",
".",
"filename",
"is",
"None",
":",
"self",
".",
"_dbg",
"(",
"1",
",",
"\"tarfile: %s\"",
"%",
"e",
".",
"strerror",
")",
"else",
":",
"self",
".",
"_dbg",
"(",
"1",
",",
"\"tarfile: %s %r\"",
"%",
"(",
"e",
".",
"strerror",
",",
"e",
".",
"filename",
")",
")",
"except",
"ExtractError",
"as",
"e",
":",
"if",
"self",
".",
"errorlevel",
">",
"1",
":",
"raise",
"else",
":",
"self",
".",
"_dbg",
"(",
"1",
",",
"\"tarfile: %s\"",
"%",
"e",
")"
] |
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/tarfile.py#L2005-L2038
|
||
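A minimal usage sketch for the TarFile.extract API documented in the record above; the archive name, member path, and destination directory are hypothetical.

import tarfile

# Hypothetical archive and member names, purely for illustration.
with tarfile.open("backup.tar.gz", "r:gz") as tar:
    # Extract one member into ./restore without restoring owner/mtime/mode,
    # mirroring the set_attrs=False behaviour described in the docstring.
    tar.extract("data/config.ini", path="restore", set_attrs=False)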
GoogleCloudPlatform/professional-services
|
0c707aa97437f3d154035ef8548109b7882f71da
|
examples/ml-audio-content-profiling/perspective_api_function/main.py
|
python
|
format_api_results
|
(response: dict, text: dict)
|
Extracts relevant fields from Perspective API
Args:
response: Dict holding perspective API results
        text: Dict holding transcript, start_time, and end_time
Returns:
Dict with text, start_time, end_time, toxicity
|
Extracts relevant fields from Perspective API
|
[
"Extracts",
"relevant",
"fields",
"from",
"Perspective",
"API"
] |
def format_api_results(response: dict, text: dict) -> Union[dict, None]:
"""Extracts relevant fields from Perspective API
Args:
response: Dict holding perspective API results
      text: Dict holding transcript, start_time, and end_time
Returns:
Dict with text, start_time, end_time, toxicity
"""
logging.info(f'Starting format_api_results with {json.dumps(response)} '
f'and {text}.')
try:
toxicity = response['attributeScores']['TOXICITY']['summaryScore']['value']
return {'text': text['transcript'],
'start_time': text['start_time'] if 'start_time' in text else '',
'end_time': text['end_time'] if 'end_time' in text else '',
'toxicity': round(toxicity, 2)}
except Exception as e:
logging.error(f'Extracting toxicity fields failed for '
f'{json.dumps(response)}')
logging.error(e)
|
[
"def",
"format_api_results",
"(",
"response",
":",
"dict",
",",
"text",
":",
"dict",
")",
"->",
"Union",
"[",
"dict",
",",
"None",
"]",
":",
"logging",
".",
"info",
"(",
"f'Starting format_api_results with {json.dumps(response)} '",
"f'and {text}.'",
")",
"try",
":",
"toxicity",
"=",
"response",
"[",
"'attributeScores'",
"]",
"[",
"'TOXICITY'",
"]",
"[",
"'summaryScore'",
"]",
"[",
"'value'",
"]",
"return",
"{",
"'text'",
":",
"text",
"[",
"'transcript'",
"]",
",",
"'start_time'",
":",
"text",
"[",
"'start_time'",
"]",
"if",
"'start_time'",
"in",
"text",
"else",
"''",
",",
"'end_time'",
":",
"text",
"[",
"'end_time'",
"]",
"if",
"'end_time'",
"in",
"text",
"else",
"''",
",",
"'toxicity'",
":",
"round",
"(",
"toxicity",
",",
"2",
")",
"}",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"error",
"(",
"f'Extracting toxicity fields failed for '",
"f'{json.dumps(response)}'",
")",
"logging",
".",
"error",
"(",
"e",
")"
] |
https://github.com/GoogleCloudPlatform/professional-services/blob/0c707aa97437f3d154035ef8548109b7882f71da/examples/ml-audio-content-profiling/perspective_api_function/main.py#L90-L113
|
||
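A small sketch of how the format_api_results helper above could be called; the response and transcript values are invented, but their shape follows the fields the function reads (attributeScores.TOXICITY.summaryScore.value, transcript, start_time, end_time), and the function is assumed to be importable from the main.py module shown in the record.

# Hypothetical inputs shaped like the fields format_api_results() expects.
sample_response = {
    'attributeScores': {
        'TOXICITY': {'summaryScore': {'value': 0.1234}}
    }
}
sample_text = {'transcript': 'hello world', 'start_time': '0.0', 'end_time': '1.5'}

# Expected: {'text': 'hello world', 'start_time': '0.0', 'end_time': '1.5', 'toxicity': 0.12}
print(format_api_results(sample_response, sample_text))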
realpython/book2-exercises
|
cde325eac8e6d8cff2316601c2e5b36bb46af7d0
|
web2py/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py
|
python
|
OrderedDict.keys
|
(self)
|
return list(self)
|
od.keys() -> list of keys in od
|
od.keys() -> list of keys in od
|
[
"od",
".",
"keys",
"()",
"-",
">",
"list",
"of",
"keys",
"in",
"od"
] |
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
|
[
"def",
"keys",
"(",
"self",
")",
":",
"return",
"list",
"(",
"self",
")"
] |
https://github.com/realpython/book2-exercises/blob/cde325eac8e6d8cff2316601c2e5b36bb46af7d0/web2py/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py#L116-L118
|
|
magenta/ddsp
|
8536a366c7834908f418a6721547268e8f2083cc
|
ddsp/colab/colab_utils.py
|
python
|
record
|
(seconds=3, sample_rate=DEFAULT_SAMPLE_RATE, normalize_db=0.1)
|
return audio_bytes_to_np(audio_bytes,
sample_rate=sample_rate,
normalize_db=normalize_db)
|
Record audio from the browser in colab using javascript.
Based on: https://gist.github.com/korakot/c21c3476c024ad6d56d5f48b0bca92be
Args:
seconds: Number of seconds to record.
sample_rate: Resample recorded audio to this sample rate.
normalize_db: Normalize the audio to this many decibels. Set to None to skip
normalization step.
Returns:
An array of the recorded audio at sample_rate.
|
Record audio from the browser in colab using javascript.
|
[
"Record",
"audio",
"from",
"the",
"browser",
"in",
"colab",
"using",
"javascript",
"."
] |
def record(seconds=3, sample_rate=DEFAULT_SAMPLE_RATE, normalize_db=0.1):
"""Record audio from the browser in colab using javascript.
Based on: https://gist.github.com/korakot/c21c3476c024ad6d56d5f48b0bca92be
Args:
seconds: Number of seconds to record.
sample_rate: Resample recorded audio to this sample rate.
normalize_db: Normalize the audio to this many decibels. Set to None to skip
normalization step.
Returns:
An array of the recorded audio at sample_rate.
"""
# Use Javascript to record audio.
record_js_code = """
const sleep = time => new Promise(resolve => setTimeout(resolve, time))
const b2text = blob => new Promise(resolve => {
const reader = new FileReader()
reader.onloadend = e => resolve(e.srcElement.result)
reader.readAsDataURL(blob)
})
var record = time => new Promise(async resolve => {
stream = await navigator.mediaDevices.getUserMedia({ audio: true })
recorder = new MediaRecorder(stream)
chunks = []
recorder.ondataavailable = e => chunks.push(e.data)
recorder.start()
await sleep(time)
recorder.onstop = async ()=>{
blob = new Blob(chunks)
text = await b2text(blob)
resolve(text)
}
recorder.stop()
})
"""
print('Starting recording for {} seconds...'.format(seconds))
display.display(display.Javascript(record_js_code))
audio_string = output.eval_js('record(%d)' % (seconds * 1000.0))
print('Finished recording!')
audio_bytes = base64.b64decode(audio_string.split(',')[1])
return audio_bytes_to_np(audio_bytes,
sample_rate=sample_rate,
normalize_db=normalize_db)
|
[
"def",
"record",
"(",
"seconds",
"=",
"3",
",",
"sample_rate",
"=",
"DEFAULT_SAMPLE_RATE",
",",
"normalize_db",
"=",
"0.1",
")",
":",
"# Use Javascript to record audio.",
"record_js_code",
"=",
"\"\"\"\n const sleep = time => new Promise(resolve => setTimeout(resolve, time))\n const b2text = blob => new Promise(resolve => {\n const reader = new FileReader()\n reader.onloadend = e => resolve(e.srcElement.result)\n reader.readAsDataURL(blob)\n })\n\n var record = time => new Promise(async resolve => {\n stream = await navigator.mediaDevices.getUserMedia({ audio: true })\n recorder = new MediaRecorder(stream)\n chunks = []\n recorder.ondataavailable = e => chunks.push(e.data)\n recorder.start()\n await sleep(time)\n recorder.onstop = async ()=>{\n blob = new Blob(chunks)\n text = await b2text(blob)\n resolve(text)\n }\n recorder.stop()\n })\n \"\"\"",
"print",
"(",
"'Starting recording for {} seconds...'",
".",
"format",
"(",
"seconds",
")",
")",
"display",
".",
"display",
"(",
"display",
".",
"Javascript",
"(",
"record_js_code",
")",
")",
"audio_string",
"=",
"output",
".",
"eval_js",
"(",
"'record(%d)'",
"%",
"(",
"seconds",
"*",
"1000.0",
")",
")",
"print",
"(",
"'Finished recording!'",
")",
"audio_bytes",
"=",
"base64",
".",
"b64decode",
"(",
"audio_string",
".",
"split",
"(",
"','",
")",
"[",
"1",
"]",
")",
"return",
"audio_bytes_to_np",
"(",
"audio_bytes",
",",
"sample_rate",
"=",
"sample_rate",
",",
"normalize_db",
"=",
"normalize_db",
")"
] |
https://github.com/magenta/ddsp/blob/8536a366c7834908f418a6721547268e8f2083cc/ddsp/colab/colab_utils.py#L93-L137
|
|
ShreyAmbesh/Traffic-Rule-Violation-Detection-System
|
ae0c327ce014ce6a427da920b5798a0d4bbf001e
|
openalpr_api/apis/default_api.py
|
python
|
DefaultApi.recognize_url_with_http_info
|
(self, image_url, secret_key, country, **kwargs)
|
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse200',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
|
Send an image for OpenALPR to analyze and provide metadata back The image is sent as a URL. The OpenALPR service will download the image and process it
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.recognize_url_with_http_info(image_url, secret_key, country, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_url: A URL to an image that you wish to analyze (required)
:param str secret_key: The secret key used to authenticate your account. You can view your secret key by visiting https://cloud.openalpr.com/ (required)
:param str country: Defines the training data used by OpenALPR. \"us\" analyzes North-American style plates. \"eu\" analyzes European-style plates. This field is required if using the \"plate\" task You may use multiple datasets by using commas between the country codes. For example, 'au,auwide' would analyze using both the Australian plate styles. A full list of supported country codes can be found here https://github.com/openalpr/openalpr/tree/master/runtime_data/config (required)
:param int recognize_vehicle: If set to 1, the vehicle will also be recognized in the image This requires an additional credit per request
:param str state: Corresponds to a US state or EU country code used by OpenALPR pattern recognition. For example, using \"md\" matches US plates against the Maryland plate patterns. Using \"fr\" matches European plates against the French plate patterns.
:param int return_image: If set to 1, the image you uploaded will be encoded in base64 and sent back along with the response
:param int topn: The number of results you would like to be returned for plate candidates and vehicle classifications
:param str prewarp: Prewarp configuration is used to calibrate the analyses for the angle of a particular camera. More information is available here http://doc.openalpr.com/accuracy_improvements.html#calibration
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
|
Send an image for OpenALPR to analyze and provide metadata back The image is sent as a URL. The OpenALPR service will download the image and process it
|
[
"Send",
"an",
"image",
"for",
"OpenALPR",
"to",
"analyze",
"and",
"provide",
"metadata",
"back",
"The",
"image",
"is",
"sent",
"as",
"a",
"URL",
".",
"The",
"OpenALPR",
"service",
"will",
"download",
"the",
"image",
"and",
"process",
"it"
] |
def recognize_url_with_http_info(self, image_url, secret_key, country, **kwargs):
"""
Send an image for OpenALPR to analyze and provide metadata back The image is sent as a URL. The OpenALPR service will download the image and process it
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.recognize_url_with_http_info(image_url, secret_key, country, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_url: A URL to an image that you wish to analyze (required)
:param str secret_key: The secret key used to authenticate your account. You can view your secret key by visiting https://cloud.openalpr.com/ (required)
:param str country: Defines the training data used by OpenALPR. \"us\" analyzes North-American style plates. \"eu\" analyzes European-style plates. This field is required if using the \"plate\" task You may use multiple datasets by using commas between the country codes. For example, 'au,auwide' would analyze using both the Australian plate styles. A full list of supported country codes can be found here https://github.com/openalpr/openalpr/tree/master/runtime_data/config (required)
:param int recognize_vehicle: If set to 1, the vehicle will also be recognized in the image This requires an additional credit per request
:param str state: Corresponds to a US state or EU country code used by OpenALPR pattern recognition. For example, using \"md\" matches US plates against the Maryland plate patterns. Using \"fr\" matches European plates against the French plate patterns.
:param int return_image: If set to 1, the image you uploaded will be encoded in base64 and sent back along with the response
:param int topn: The number of results you would like to be returned for plate candidates and vehicle classifications
:param str prewarp: Prewarp configuration is used to calibrate the analyses for the angle of a particular camera. More information is available here http://doc.openalpr.com/accuracy_improvements.html#calibration
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_url', 'secret_key', 'country', 'recognize_vehicle', 'state', 'return_image', 'topn', 'prewarp']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method recognize_url" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_url' is set
if ('image_url' not in params) or (params['image_url'] is None):
raise ValueError("Missing the required parameter `image_url` when calling `recognize_url`")
# verify the required parameter 'secret_key' is set
if ('secret_key' not in params) or (params['secret_key'] is None):
raise ValueError("Missing the required parameter `secret_key` when calling `recognize_url`")
# verify the required parameter 'country' is set
if ('country' not in params) or (params['country'] is None):
raise ValueError("Missing the required parameter `country` when calling `recognize_url`")
if 'topn' in params and params['topn'] > 1000.0:
raise ValueError("Invalid value for parameter `topn` when calling `recognize_url`, must be a value less than or equal to `1000.0`")
if 'topn' in params and params['topn'] < 1.0:
raise ValueError("Invalid value for parameter `topn` when calling `recognize_url`, must be a value greater than or equal to `1.0`")
resource_path = '/recognize_url'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'image_url' in params:
query_params['image_url'] = params['image_url']
if 'secret_key' in params:
query_params['secret_key'] = params['secret_key']
if 'recognize_vehicle' in params:
query_params['recognize_vehicle'] = params['recognize_vehicle']
if 'country' in params:
query_params['country'] = params['country']
if 'state' in params:
query_params['state'] = params['state']
if 'return_image' in params:
query_params['return_image'] = params['return_image']
if 'topn' in params:
query_params['topn'] = params['topn']
if 'prewarp' in params:
query_params['prewarp'] = params['prewarp']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse200',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
|
[
"def",
"recognize_url_with_http_info",
"(",
"self",
",",
"image_url",
",",
"secret_key",
",",
"country",
",",
"*",
"*",
"kwargs",
")",
":",
"all_params",
"=",
"[",
"'image_url'",
",",
"'secret_key'",
",",
"'country'",
",",
"'recognize_vehicle'",
",",
"'state'",
",",
"'return_image'",
",",
"'topn'",
",",
"'prewarp'",
"]",
"all_params",
".",
"append",
"(",
"'callback'",
")",
"all_params",
".",
"append",
"(",
"'_return_http_data_only'",
")",
"params",
"=",
"locals",
"(",
")",
"for",
"key",
",",
"val",
"in",
"iteritems",
"(",
"params",
"[",
"'kwargs'",
"]",
")",
":",
"if",
"key",
"not",
"in",
"all_params",
":",
"raise",
"TypeError",
"(",
"\"Got an unexpected keyword argument '%s'\"",
"\" to method recognize_url\"",
"%",
"key",
")",
"params",
"[",
"key",
"]",
"=",
"val",
"del",
"params",
"[",
"'kwargs'",
"]",
"# verify the required parameter 'image_url' is set",
"if",
"(",
"'image_url'",
"not",
"in",
"params",
")",
"or",
"(",
"params",
"[",
"'image_url'",
"]",
"is",
"None",
")",
":",
"raise",
"ValueError",
"(",
"\"Missing the required parameter `image_url` when calling `recognize_url`\"",
")",
"# verify the required parameter 'secret_key' is set",
"if",
"(",
"'secret_key'",
"not",
"in",
"params",
")",
"or",
"(",
"params",
"[",
"'secret_key'",
"]",
"is",
"None",
")",
":",
"raise",
"ValueError",
"(",
"\"Missing the required parameter `secret_key` when calling `recognize_url`\"",
")",
"# verify the required parameter 'country' is set",
"if",
"(",
"'country'",
"not",
"in",
"params",
")",
"or",
"(",
"params",
"[",
"'country'",
"]",
"is",
"None",
")",
":",
"raise",
"ValueError",
"(",
"\"Missing the required parameter `country` when calling `recognize_url`\"",
")",
"if",
"'topn'",
"in",
"params",
"and",
"params",
"[",
"'topn'",
"]",
">",
"1000.0",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for parameter `topn` when calling `recognize_url`, must be a value less than or equal to `1000.0`\"",
")",
"if",
"'topn'",
"in",
"params",
"and",
"params",
"[",
"'topn'",
"]",
"<",
"1.0",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for parameter `topn` when calling `recognize_url`, must be a value greater than or equal to `1.0`\"",
")",
"resource_path",
"=",
"'/recognize_url'",
".",
"replace",
"(",
"'{format}'",
",",
"'json'",
")",
"path_params",
"=",
"{",
"}",
"query_params",
"=",
"{",
"}",
"if",
"'image_url'",
"in",
"params",
":",
"query_params",
"[",
"'image_url'",
"]",
"=",
"params",
"[",
"'image_url'",
"]",
"if",
"'secret_key'",
"in",
"params",
":",
"query_params",
"[",
"'secret_key'",
"]",
"=",
"params",
"[",
"'secret_key'",
"]",
"if",
"'recognize_vehicle'",
"in",
"params",
":",
"query_params",
"[",
"'recognize_vehicle'",
"]",
"=",
"params",
"[",
"'recognize_vehicle'",
"]",
"if",
"'country'",
"in",
"params",
":",
"query_params",
"[",
"'country'",
"]",
"=",
"params",
"[",
"'country'",
"]",
"if",
"'state'",
"in",
"params",
":",
"query_params",
"[",
"'state'",
"]",
"=",
"params",
"[",
"'state'",
"]",
"if",
"'return_image'",
"in",
"params",
":",
"query_params",
"[",
"'return_image'",
"]",
"=",
"params",
"[",
"'return_image'",
"]",
"if",
"'topn'",
"in",
"params",
":",
"query_params",
"[",
"'topn'",
"]",
"=",
"params",
"[",
"'topn'",
"]",
"if",
"'prewarp'",
"in",
"params",
":",
"query_params",
"[",
"'prewarp'",
"]",
"=",
"params",
"[",
"'prewarp'",
"]",
"header_params",
"=",
"{",
"}",
"form_params",
"=",
"[",
"]",
"local_var_files",
"=",
"{",
"}",
"body_params",
"=",
"None",
"# HTTP header `Accept`",
"header_params",
"[",
"'Accept'",
"]",
"=",
"self",
".",
"api_client",
".",
"select_header_accept",
"(",
"[",
"'application/json'",
"]",
")",
"if",
"not",
"header_params",
"[",
"'Accept'",
"]",
":",
"del",
"header_params",
"[",
"'Accept'",
"]",
"# HTTP header `Content-Type`",
"header_params",
"[",
"'Content-Type'",
"]",
"=",
"self",
".",
"api_client",
".",
"select_header_content_type",
"(",
"[",
"'application/json'",
"]",
")",
"# Authentication setting",
"auth_settings",
"=",
"[",
"]",
"return",
"self",
".",
"api_client",
".",
"call_api",
"(",
"resource_path",
",",
"'POST'",
",",
"path_params",
",",
"query_params",
",",
"header_params",
",",
"body",
"=",
"body_params",
",",
"post_params",
"=",
"form_params",
",",
"files",
"=",
"local_var_files",
",",
"response_type",
"=",
"'InlineResponse200'",
",",
"auth_settings",
"=",
"auth_settings",
",",
"callback",
"=",
"params",
".",
"get",
"(",
"'callback'",
")",
",",
"_return_http_data_only",
"=",
"params",
".",
"get",
"(",
"'_return_http_data_only'",
")",
")"
] |
https://github.com/ShreyAmbesh/Traffic-Rule-Violation-Detection-System/blob/ae0c327ce014ce6a427da920b5798a0d4bbf001e/openalpr_api/apis/default_api.py#L372-L478
|
|
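A hedged usage sketch for the recognize_url_with_http_info call above, following the synchronous (callback-free) form from its docstring; the credentials and image URL are placeholders, and the no-argument DefaultApi() constructor is assumed from the generated swagger client.

from openalpr_api.apis.default_api import DefaultApi

api = DefaultApi()  # assumed default constructor of the generated client
# Placeholder credentials and image URL; country='us' selects North-American plates.
response = api.recognize_url_with_http_info(
    image_url='http://example.com/plate.jpg',
    secret_key='sk_DEMO',
    country='us',
    topn=5)
print(response)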
mozillazg/pypy
|
2ff5cd960c075c991389f842c6d59e71cf0cb7d0
|
pypy/module/cpyext/iterator.py
|
python
|
PyCallIter_New
|
(space, w_callable, w_sentinel)
|
return operation.iter_sentinel(space, w_callable, w_sentinel)
|
Return a new iterator. The first parameter, callable, can be any Python
callable object that can be called with no parameters; each call to it should
return the next item in the iteration. When callable returns a value equal to
sentinel, the iteration will be terminated.
|
Return a new iterator. The first parameter, callable, can be any Python
callable object that can be called with no parameters; each call to it should
return the next item in the iteration. When callable returns a value equal to
sentinel, the iteration will be terminated.
|
[
"Return",
"a",
"new",
"iterator",
".",
"The",
"first",
"parameter",
"callable",
"can",
"be",
"any",
"Python",
"callable",
"object",
"that",
"can",
"be",
"called",
"with",
"no",
"parameters",
";",
"each",
"call",
"to",
"it",
"should",
"return",
"the",
"next",
"item",
"in",
"the",
"iteration",
".",
"When",
"callable",
"returns",
"a",
"value",
"equal",
"to",
"sentinel",
"the",
"iteration",
"will",
"be",
"terminated",
"."
] |
def PyCallIter_New(space, w_callable, w_sentinel):
"""Return a new iterator. The first parameter, callable, can be any Python
callable object that can be called with no parameters; each call to it should
return the next item in the iteration. When callable returns a value equal to
sentinel, the iteration will be terminated.
"""
return operation.iter_sentinel(space, w_callable, w_sentinel)
|
[
"def",
"PyCallIter_New",
"(",
"space",
",",
"w_callable",
",",
"w_sentinel",
")",
":",
"return",
"operation",
".",
"iter_sentinel",
"(",
"space",
",",
"w_callable",
",",
"w_sentinel",
")"
] |
https://github.com/mozillazg/pypy/blob/2ff5cd960c075c991389f842c6d59e71cf0cb7d0/pypy/module/cpyext/iterator.py#L9-L15
|
|
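The sentinel semantics described above match Python's two-argument iter(callable, sentinel); a short pure-Python illustration of that behaviour (not PyPy's C-API layer itself):

import random

random.seed(0)
roll = lambda: random.randint(1, 6)

# Call roll() repeatedly, stopping when it returns the sentinel value 6 --
# the same termination rule PyCallIter_New gives to C extension code.
for value in iter(roll, 6):
    print(value)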
KalleHallden/AutoTimer
|
2d954216700c4930baa154e28dbddc34609af7ce
|
env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py
|
python
|
StringStart.parseImpl
|
( self, instring, loc, doActions=True )
|
return loc, []
|
[] |
def parseImpl( self, instring, loc, doActions=True ):
if loc != 0:
# see if entire string up to here is just whitespace and ignoreables
if loc != self.preParse( instring, 0 ):
raise ParseException(instring, loc, self.errmsg, self)
return loc, []
|
[
"def",
"parseImpl",
"(",
"self",
",",
"instring",
",",
"loc",
",",
"doActions",
"=",
"True",
")",
":",
"if",
"loc",
"!=",
"0",
":",
"# see if entire string up to here is just whitespace and ignoreables",
"if",
"loc",
"!=",
"self",
".",
"preParse",
"(",
"instring",
",",
"0",
")",
":",
"raise",
"ParseException",
"(",
"instring",
",",
"loc",
",",
"self",
".",
"errmsg",
",",
"self",
")",
"return",
"loc",
",",
"[",
"]"
] |
https://github.com/KalleHallden/AutoTimer/blob/2d954216700c4930baa154e28dbddc34609af7ce/env/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.py#L3188-L3193
|
|||
MontrealCorpusTools/Montreal-Forced-Aligner
|
63473f9a4fabd31eec14e1e5022882f85cfdaf31
|
montreal_forced_aligner/helper.py
|
python
|
TerminalPrinter.print_sub_header
|
(self, header: str)
|
Print a subsection header
Parameters
----------
header: str
Subsection header string
|
Print a subsection header
|
[
"Print",
"a",
"subsection",
"header"
] |
def print_sub_header(self, header: str) -> None:
"""
Print a subsection header
Parameters
----------
header: str
Subsection header string
"""
underline = "=" * len(header)
print(self.indent_string + self.colorize(header, "bright"))
print(self.indent_string + self.colorize(underline, "bright"))
print()
self.indent_level += 1
|
[
"def",
"print_sub_header",
"(",
"self",
",",
"header",
":",
"str",
")",
"->",
"None",
":",
"underline",
"=",
"\"=\"",
"*",
"len",
"(",
"header",
")",
"print",
"(",
"self",
".",
"indent_string",
"+",
"self",
".",
"colorize",
"(",
"header",
",",
"\"bright\"",
")",
")",
"print",
"(",
"self",
".",
"indent_string",
"+",
"self",
".",
"colorize",
"(",
"underline",
",",
"\"bright\"",
")",
")",
"print",
"(",
")",
"self",
".",
"indent_level",
"+=",
"1"
] |
https://github.com/MontrealCorpusTools/Montreal-Forced-Aligner/blob/63473f9a4fabd31eec14e1e5022882f85cfdaf31/montreal_forced_aligner/helper.py#L205-L218
|
||
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/polys/rootisolation.py
|
python
|
_reverse_intervals
|
(intervals)
|
return [ ((b, a), indices, f) for (a, b), indices, f in reversed(intervals) ]
|
Reverse intervals for traversal from right to left and from top to bottom.
|
Reverse intervals for traversal from right to left and from top to bottom.
|
[
"Reverse",
"intervals",
"for",
"traversal",
"from",
"right",
"to",
"left",
"and",
"from",
"top",
"to",
"bottom",
"."
] |
def _reverse_intervals(intervals):
"""Reverse intervals for traversal from right to left and from top to bottom. """
return [ ((b, a), indices, f) for (a, b), indices, f in reversed(intervals) ]
|
[
"def",
"_reverse_intervals",
"(",
"intervals",
")",
":",
"return",
"[",
"(",
"(",
"b",
",",
"a",
")",
",",
"indices",
",",
"f",
")",
"for",
"(",
"a",
",",
"b",
")",
",",
"indices",
",",
"f",
"in",
"reversed",
"(",
"intervals",
")",
"]"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/polys/rootisolation.py#L1182-L1184
|
|
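A tiny standalone illustration of the interval reversal above; the interval tuples are made up, and the transform is restated locally rather than imported from sympy.

def _reverse_intervals(intervals):
    # Same one-liner as the record above: reverse the list and swap each (a, b).
    return [((b, a), indices, f) for (a, b), indices, f in reversed(intervals)]

intervals = [((0, 1), {0: 1}, 'f0'), ((1, 2), {1: 1}, 'f1')]
assert _reverse_intervals(intervals) == [((2, 1), {1: 1}, 'f1'), ((1, 0), {0: 1}, 'f0')]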
smart-mobile-software/gitstack
|
d9fee8f414f202143eb6e620529e8e5539a2af56
|
python/Lib/email/mime/audio.py
|
python
|
MIMEAudio.__init__
|
(self, _audiodata, _subtype=None,
_encoder=encoders.encode_base64, **_params)
|
Create an audio/* type MIME document.
_audiodata is a string containing the raw audio data. If this data
can be decoded by the standard Python `sndhdr' module, then the
subtype will be automatically included in the Content-Type header.
Otherwise, you can specify the specific audio subtype via the
_subtype parameter. If _subtype is not given, and no subtype can be
guessed, a TypeError is raised.
_encoder is a function which will perform the actual encoding for
transport of the image data. It takes one argument, which is this
Image instance. It should use get_payload() and set_payload() to
change the payload to the encoded form. It should also add any
Content-Transfer-Encoding or other headers to the message as
necessary. The default encoding is Base64.
Any additional keyword arguments are passed to the base class
constructor, which turns them into parameters on the Content-Type
header.
|
Create an audio/* type MIME document.
|
[
"Create",
"an",
"audio",
"/",
"*",
"type",
"MIME",
"document",
"."
] |
def __init__(self, _audiodata, _subtype=None,
_encoder=encoders.encode_base64, **_params):
"""Create an audio/* type MIME document.
_audiodata is a string containing the raw audio data. If this data
can be decoded by the standard Python `sndhdr' module, then the
subtype will be automatically included in the Content-Type header.
Otherwise, you can specify the specific audio subtype via the
_subtype parameter. If _subtype is not given, and no subtype can be
guessed, a TypeError is raised.
_encoder is a function which will perform the actual encoding for
transport of the image data. It takes one argument, which is this
Image instance. It should use get_payload() and set_payload() to
change the payload to the encoded form. It should also add any
Content-Transfer-Encoding or other headers to the message as
necessary. The default encoding is Base64.
Any additional keyword arguments are passed to the base class
constructor, which turns them into parameters on the Content-Type
header.
"""
if _subtype is None:
_subtype = _whatsnd(_audiodata)
if _subtype is None:
raise TypeError('Could not find audio MIME subtype')
MIMENonMultipart.__init__(self, 'audio', _subtype, **_params)
self.set_payload(_audiodata)
_encoder(self)
|
[
"def",
"__init__",
"(",
"self",
",",
"_audiodata",
",",
"_subtype",
"=",
"None",
",",
"_encoder",
"=",
"encoders",
".",
"encode_base64",
",",
"*",
"*",
"_params",
")",
":",
"if",
"_subtype",
"is",
"None",
":",
"_subtype",
"=",
"_whatsnd",
"(",
"_audiodata",
")",
"if",
"_subtype",
"is",
"None",
":",
"raise",
"TypeError",
"(",
"'Could not find audio MIME subtype'",
")",
"MIMENonMultipart",
".",
"__init__",
"(",
"self",
",",
"'audio'",
",",
"_subtype",
",",
"*",
"*",
"_params",
")",
"self",
".",
"set_payload",
"(",
"_audiodata",
")",
"_encoder",
"(",
"self",
")"
] |
https://github.com/smart-mobile-software/gitstack/blob/d9fee8f414f202143eb6e620529e8e5539a2af56/python/Lib/email/mime/audio.py#L45-L73
|
||
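A minimal usage sketch of the MIMEAudio constructor documented above; clip.wav is a hypothetical file whose subtype the sndhdr-based detection can usually guess, so _subtype is omitted.

from email.mime.audio import MIMEAudio

# Hypothetical WAV file; a TypeError is raised if no subtype can be guessed
# and none is passed, as the docstring above states.
with open('clip.wav', 'rb') as f:
    audio = MIMEAudio(f.read())
print(audio['Content-Type'])                # e.g. audio/x-wav
print(audio['Content-Transfer-Encoding'])   # base64 from the default encoder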
tensorflow/models
|
6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3
|
official/vision/utils/object_detection/visualization_utils.py
|
python
|
save_image_array_as_png
|
(image, output_path)
|
Saves an image (represented as a numpy array) to PNG.
Args:
image: a numpy array with shape [height, width, 3].
output_path: path to which image should be written.
|
Saves an image (represented as a numpy array) to PNG.
|
[
"Saves",
"an",
"image",
"(",
"represented",
"as",
"a",
"numpy",
"array",
")",
"to",
"PNG",
"."
] |
def save_image_array_as_png(image, output_path):
"""Saves an image (represented as a numpy array) to PNG.
Args:
image: a numpy array with shape [height, width, 3].
output_path: path to which image should be written.
"""
image_pil = Image.fromarray(np.uint8(image)).convert('RGB')
with tf.io.gfile.GFile(output_path, 'w') as fid:
image_pil.save(fid, 'PNG')
|
[
"def",
"save_image_array_as_png",
"(",
"image",
",",
"output_path",
")",
":",
"image_pil",
"=",
"Image",
".",
"fromarray",
"(",
"np",
".",
"uint8",
"(",
"image",
")",
")",
".",
"convert",
"(",
"'RGB'",
")",
"with",
"tf",
".",
"io",
".",
"gfile",
".",
"GFile",
"(",
"output_path",
",",
"'w'",
")",
"as",
"fid",
":",
"image_pil",
".",
"save",
"(",
"fid",
",",
"'PNG'",
")"
] |
https://github.com/tensorflow/models/blob/6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3/official/vision/utils/object_detection/visualization_utils.py#L69-L78
|
||
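A short sketch of calling the save_image_array_as_png helper above; the gradient image is synthetic, and the function is assumed to be imported from the visualization_utils module shown in the record (TensorFlow and PIL must be installed).

import numpy as np

# Synthetic 64x64x3 uint8 gradient, matching the [height, width, 3] shape the helper expects.
image = np.tile(np.linspace(0, 255, 64, dtype=np.uint8)[:, None, None], (1, 64, 3))
save_image_array_as_png(image, '/tmp/gradient.png')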
m-labs/artiq
|
eaa1505c947c7987cdbd31c24056823c740e84e0
|
artiq/coredevice/rtio.py
|
python
|
rtio_input_timestamped_data
|
(timeout_mu: TInt64,
channel: TInt32)
|
Wait for an input event up to timeout_mu on the given channel, and
return a tuple of timestamp and attached data, or (-1, 0) if the timeout is
reached.
|
Wait for an input event up to timeout_mu on the given channel, and
return a tuple of timestamp and attached data, or (-1, 0) if the timeout is
reached.
|
[
"Wait",
"for",
"an",
"input",
"event",
"up",
"to",
"timeout_mu",
"on",
"the",
"given",
"channel",
"and",
"return",
"a",
"tuple",
"of",
"timestamp",
"and",
"attached",
"data",
"or",
"(",
"-",
"1",
"0",
")",
"if",
"the",
"timeout",
"is",
"reached",
"."
] |
def rtio_input_timestamped_data(timeout_mu: TInt64,
channel: TInt32) -> TTuple([TInt64, TInt32]):
"""Wait for an input event up to timeout_mu on the given channel, and
return a tuple of timestamp and attached data, or (-1, 0) if the timeout is
reached."""
raise NotImplementedError("syscall not simulated")
|
[
"def",
"rtio_input_timestamped_data",
"(",
"timeout_mu",
":",
"TInt64",
",",
"channel",
":",
"TInt32",
")",
"->",
"TTuple",
"(",
"[",
"TInt64",
",",
"TInt32",
"]",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"syscall not simulated\"",
")"
] |
https://github.com/m-labs/artiq/blob/eaa1505c947c7987cdbd31c24056823c740e84e0/artiq/coredevice/rtio.py#L26-L31
|
||
psychopy/psychopy
|
01b674094f38d0e0bd51c45a6f66f671d7041696
|
psychopy/visual/window.py
|
python
|
Window.waitBlanking
|
(self, value)
|
After a call to :py:attr:`~Window.flip()` should we wait for the
blank before the script continues.
|
After a call to :py:attr:`~Window.flip()` should we wait for the
blank before the script continues.
|
[
"After",
"a",
"call",
"to",
":",
"py",
":",
"attr",
":",
"~Window",
".",
"flip",
"()",
"should",
"we",
"wait",
"for",
"the",
"blank",
"before",
"the",
"script",
"continues",
"."
] |
def waitBlanking(self, value):
"""After a call to :py:attr:`~Window.flip()` should we wait for the
blank before the script continues.
"""
self.__dict__['waitBlanking'] = value
|
[
"def",
"waitBlanking",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"__dict__",
"[",
"'waitBlanking'",
"]",
"=",
"value"
] |
https://github.com/psychopy/psychopy/blob/01b674094f38d0e0bd51c45a6f66f671d7041696/psychopy/visual/window.py#L699-L704
|
||
llSourcell/AI_Artist
|
3038c06c2e389b9c919c881c9a169efe2fd7810e
|
lib/python2.7/site-packages/pip/vcs/git.py
|
python
|
Git.check_version
|
(self, dest, rev_options)
|
return self.get_revision(dest).startswith(rev_options[0])
|
Compare the current sha to the ref. ref may be a branch or tag name,
but current rev will always point to a sha. This means that a branch
or tag will never compare as True. So this ultimately only matches
against exact shas.
|
Compare the current sha to the ref. ref may be a branch or tag name,
but current rev will always point to a sha. This means that a branch
or tag will never compare as True. So this ultimately only matches
against exact shas.
|
[
"Compare",
"the",
"current",
"sha",
"to",
"the",
"ref",
".",
"ref",
"may",
"be",
"a",
"branch",
"or",
"tag",
"name",
"but",
"current",
"rev",
"will",
"always",
"point",
"to",
"a",
"sha",
".",
"This",
"means",
"that",
"a",
"branch",
"or",
"tag",
"will",
"never",
"compare",
"as",
"True",
".",
"So",
"this",
"ultimately",
"only",
"matches",
"against",
"exact",
"shas",
"."
] |
def check_version(self, dest, rev_options):
"""
Compare the current sha to the ref. ref may be a branch or tag name,
but current rev will always point to a sha. This means that a branch
or tag will never compare as True. So this ultimately only matches
against exact shas.
"""
return self.get_revision(dest).startswith(rev_options[0])
|
[
"def",
"check_version",
"(",
"self",
",",
"dest",
",",
"rev_options",
")",
":",
"return",
"self",
".",
"get_revision",
"(",
"dest",
")",
".",
"startswith",
"(",
"rev_options",
"[",
"0",
"]",
")"
] |
https://github.com/llSourcell/AI_Artist/blob/3038c06c2e389b9c919c881c9a169efe2fd7810e/lib/python2.7/site-packages/pip/vcs/git.py#L85-L92
|
|
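The sha-prefix comparison described above, illustrated standalone with made-up values; branch or tag names never match because get_revision() always returns a sha.

# Made-up revision values, for illustration only.
current_rev = '3038c06c2e389b9c919c881c9a169efe2fd7810e'
print(current_rev.startswith('3038c06c'))  # True: an exact sha (or sha prefix) matches
print(current_rev.startswith('master'))    # False: a branch name never compares equal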
bruderstein/PythonScript
|
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
|
PythonLib/min/pathlib.py
|
python
|
Path._make_child_relpath
|
(self, part)
|
return self._from_parsed_parts(self._drv, self._root, parts)
|
[] |
def _make_child_relpath(self, part):
# This is an optimization used for dir walking. `part` must be
# a single part relative to this path.
parts = self._parts + [part]
return self._from_parsed_parts(self._drv, self._root, parts)
|
[
"def",
"_make_child_relpath",
"(",
"self",
",",
"part",
")",
":",
"# This is an optimization used for dir walking. `part` must be",
"# a single part relative to this path.",
"parts",
"=",
"self",
".",
"_parts",
"+",
"[",
"part",
"]",
"return",
"self",
".",
"_from_parsed_parts",
"(",
"self",
".",
"_drv",
",",
"self",
".",
"_root",
",",
"parts",
")"
] |
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/min/pathlib.py#L964-L968
|
|||
projecthamster/hamster
|
19d160090de30e756bdc3122ff935bdaa86e2843
|
waflib/Tools/ifort.py
|
python
|
configure
|
(conf)
|
Detects the Intel Fortran compilers
|
Detects the Intel Fortran compilers
|
[
"Detects",
"the",
"Intel",
"Fortran",
"compilers"
] |
def configure(conf):
"""
Detects the Intel Fortran compilers
"""
if Utils.is_win32:
compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
v = conf.env
v.DEST_CPU = arch
v.PATH = path
v.INCLUDES = includes
v.LIBPATH = libdirs
v.MSVC_COMPILER = compiler
try:
v.MSVC_VERSION = float(version)
except ValueError:
v.MSVC_VERSION = float(version[:-3])
conf.find_ifort_win32()
conf.ifort_modifier_win32()
else:
conf.find_ifort()
conf.find_program('xiar', var='AR')
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.ifort_modifier_platform()
|
[
"def",
"configure",
"(",
"conf",
")",
":",
"if",
"Utils",
".",
"is_win32",
":",
"compiler",
",",
"version",
",",
"path",
",",
"includes",
",",
"libdirs",
",",
"arch",
"=",
"conf",
".",
"detect_ifort",
"(",
")",
"v",
"=",
"conf",
".",
"env",
"v",
".",
"DEST_CPU",
"=",
"arch",
"v",
".",
"PATH",
"=",
"path",
"v",
".",
"INCLUDES",
"=",
"includes",
"v",
".",
"LIBPATH",
"=",
"libdirs",
"v",
".",
"MSVC_COMPILER",
"=",
"compiler",
"try",
":",
"v",
".",
"MSVC_VERSION",
"=",
"float",
"(",
"version",
")",
"except",
"ValueError",
":",
"v",
".",
"MSVC_VERSION",
"=",
"float",
"(",
"version",
"[",
":",
"-",
"3",
"]",
")",
"conf",
".",
"find_ifort_win32",
"(",
")",
"conf",
".",
"ifort_modifier_win32",
"(",
")",
"else",
":",
"conf",
".",
"find_ifort",
"(",
")",
"conf",
".",
"find_program",
"(",
"'xiar'",
",",
"var",
"=",
"'AR'",
")",
"conf",
".",
"find_ar",
"(",
")",
"conf",
".",
"fc_flags",
"(",
")",
"conf",
".",
"fc_add_flags",
"(",
")",
"conf",
".",
"ifort_modifier_platform",
"(",
")"
] |
https://github.com/projecthamster/hamster/blob/19d160090de30e756bdc3122ff935bdaa86e2843/waflib/Tools/ifort.py#L74-L99
|
||
petercorke/robotics-toolbox-python
|
51aa8bbb3663a7c815f9880d538d61e7c85bc470
|
roboticstoolbox/robot/ETS.py
|
python
|
BaseETS.inv
|
(self)
|
return inv
|
r"""
Inverse of ETS
:return: [description]
:rtype: ETS instance
The inverse of a given ETS. It is computed as the inverse of the
individual ETs in the reverse order.
.. math::
        (\mathbf{E}_0, \mathbf{E}_1 \cdots \mathbf{E}_{n-1} )^{-1} = (\mathbf{E}_{n-1}^{-1}, \mathbf{E}_{n-2}^{-1} \cdots \mathbf{E}_0^{-1} )
Example:
.. runblock:: pycon
>>> from roboticstoolbox import ETS
>>> e = ETS.rz(j=2) * ETS.tx(1) * ETS.rx(j=3,flip=True) * ETS.tx(1)
>>> print(e)
>>> print(e.inv())
>>> q = [1,2,3,4]
>>> print(e.eval(q) * e.inv().eval(q))
.. note:: It is essential to use explicit joint indices to account for
the reversed order of the transforms.
|
r"""
Inverse of ETS
|
[
"r",
"Inverse",
"of",
"ETS"
] |
def inv(self):
r"""
Inverse of ETS
:return: [description]
:rtype: ETS instance
The inverse of a given ETS. It is computed as the inverse of the
individual ETs in the reverse order.
.. math::
            (\mathbf{E}_0, \mathbf{E}_1 \cdots \mathbf{E}_{n-1} )^{-1} = (\mathbf{E}_{n-1}^{-1}, \mathbf{E}_{n-2}^{-1} \cdots \mathbf{E}_0^{-1} )
Example:
.. runblock:: pycon
>>> from roboticstoolbox import ETS
>>> e = ETS.rz(j=2) * ETS.tx(1) * ETS.rx(j=3,flip=True) * ETS.tx(1)
>>> print(e)
>>> print(e.inv())
>>> q = [1,2,3,4]
>>> print(e.eval(q) * e.inv().eval(q))
.. note:: It is essential to use explicit joint indices to account for
the reversed order of the transforms.
""" # noqa
inv = self.__class__()
for ns in reversed(self.data):
# get the namespace from the list
# clone it, and invert the elements to create an inverse
nsi = copy.copy(ns)
if nsi.joint:
nsi.flip ^= True # toggle flip status
elif nsi.axis[0] == 'C':
nsi.T = self._inverse(nsi.T)
elif nsi.eta is not None:
nsi.T = self._inverse(nsi.T)
nsi.eta = -nsi.eta
et = self.__class__() # create a new ETS instance
et.data = [nsi] # set its data from the dict
inv *= et
return inv
|
[
"def",
"inv",
"(",
"self",
")",
":",
"# noqa",
"inv",
"=",
"self",
".",
"__class__",
"(",
")",
"for",
"ns",
"in",
"reversed",
"(",
"self",
".",
"data",
")",
":",
"# get the namespace from the list",
"# clone it, and invert the elements to create an inverse",
"nsi",
"=",
"copy",
".",
"copy",
"(",
"ns",
")",
"if",
"nsi",
".",
"joint",
":",
"nsi",
".",
"flip",
"^=",
"True",
"# toggle flip status",
"elif",
"nsi",
".",
"axis",
"[",
"0",
"]",
"==",
"'C'",
":",
"nsi",
".",
"T",
"=",
"self",
".",
"_inverse",
"(",
"nsi",
".",
"T",
")",
"elif",
"nsi",
".",
"eta",
"is",
"not",
"None",
":",
"nsi",
".",
"T",
"=",
"self",
".",
"_inverse",
"(",
"nsi",
".",
"T",
")",
"nsi",
".",
"eta",
"=",
"-",
"nsi",
".",
"eta",
"et",
"=",
"self",
".",
"__class__",
"(",
")",
"# create a new ETS instance",
"et",
".",
"data",
"=",
"[",
"nsi",
"]",
"# set its data from the dict",
"inv",
"*=",
"et",
"return",
"inv"
] |
https://github.com/petercorke/robotics-toolbox-python/blob/51aa8bbb3663a7c815f9880d538d61e7c85bc470/roboticstoolbox/robot/ETS.py#L885-L930
|
|
Komodo/KomodoEdit
|
61edab75dce2bdb03943b387b0608ea36f548e8e
|
contrib/pywin/wnd-0-1-20/controls/radiobox.py
|
python
|
RadioboxMethods.SetStyle
|
(self, *styles)
|
Sets the style for the checkbox.
Same as the SetStyle method for other controls, except
The styles 'radiobutton', 'autoradiobutton'
are mutually exclusive. You can not use the flags '-' and
'~' on them.
|
Sets the style for the checkbox.
Same as the SetStyle method for other controls, except
The styles 'radiobutton', 'autoradiobutton'
are mutually exclusive. You can not use the flags '-' and
'~' on them.
|
[
"Sets",
"the",
"style",
"for",
"the",
"checkbox",
".",
"Same",
"as",
"the",
"SetStyle",
"method",
"for",
"other",
"controls",
"except",
"The",
"styles",
"radiobutton",
"autoradiobutton",
"are",
"mutually",
"exclusive",
".",
"You",
"can",
"not",
"use",
"the",
"flags",
"-",
"and",
"~",
"on",
"them",
"."
] |
def SetStyle(self, *styles):
"""Sets the style for the checkbox.
Same as the SetStyle method for other controls, except
The styles 'radiobutton', 'autoradiobutton'
are mutually exclusive. You can not use the flags '-' and
'~' on them.
"""
out = []
st=('radiobutton', 'autoradiobutton')
for i in styles:
if i in st:
if i=='radiobutton': style = 4
elif i=='autoradiobutton': style = 9
self.SendMessage(self.Hwnd, self.Msg.BM_SETSTYLE, style, 1)
else:
out.append(i)
if out:
ControlMethods.SetStyle(self, *out)
|
[
"def",
"SetStyle",
"(",
"self",
",",
"*",
"styles",
")",
":",
"out",
"=",
"[",
"]",
"st",
"=",
"(",
"'radiobutton'",
",",
"'autoradiobutton'",
")",
"for",
"i",
"in",
"styles",
":",
"if",
"i",
"in",
"st",
":",
"if",
"i",
"==",
"'radiobutton'",
":",
"style",
"=",
"4",
"elif",
"i",
"==",
"'autoradiobutton'",
":",
"style",
"=",
"9",
"self",
".",
"SendMessage",
"(",
"self",
".",
"Hwnd",
",",
"self",
".",
"Msg",
".",
"BM_SETSTYLE",
",",
"style",
",",
"1",
")",
"else",
":",
"out",
".",
"append",
"(",
"i",
")",
"if",
"out",
":",
"ControlMethods",
".",
"SetStyle",
"(",
"self",
",",
"*",
"out",
")"
] |
https://github.com/Komodo/KomodoEdit/blob/61edab75dce2bdb03943b387b0608ea36f548e8e/contrib/pywin/wnd-0-1-20/controls/radiobox.py#L155-L172
|
||
saltstack/halite
|
1eacc7e3f51b670573aecb8980661e0c5dbfa0f4
|
halite/bottle.py
|
python
|
ResourceManager.add_path
|
(self, path, base=None, index=None, create=False)
|
return os.path.exists(path)
|
Add a new path to the list of search paths. Return False if the
path does not exist.
:param path: The new search path. Relative paths are turned into
an absolute and normalized form. If the path looks like a file
(not ending in `/`), the filename is stripped off.
:param base: Path used to absolutize relative search paths.
Defaults to :attr:`base` which defaults to ``os.getcwd()``.
:param index: Position within the list of search paths. Defaults
to last index (appends to the list).
The `base` parameter makes it easy to reference files installed
along with a python module or package::
res.add_path('./resources/', __file__)
|
Add a new path to the list of search paths. Return False if the
path does not exist.
|
[
"Add",
"a",
"new",
"path",
"to",
"the",
"list",
"of",
"search",
"paths",
".",
"Return",
"False",
"if",
"the",
"path",
"does",
"not",
"exist",
"."
] |
def add_path(self, path, base=None, index=None, create=False):
''' Add a new path to the list of search paths. Return False if the
path does not exist.
:param path: The new search path. Relative paths are turned into
an absolute and normalized form. If the path looks like a file
(not ending in `/`), the filename is stripped off.
:param base: Path used to absolutize relative search paths.
Defaults to :attr:`base` which defaults to ``os.getcwd()``.
:param index: Position within the list of search paths. Defaults
to last index (appends to the list).
The `base` parameter makes it easy to reference files installed
along with a python module or package::
res.add_path('./resources/', __file__)
'''
base = os.path.abspath(os.path.dirname(base or self.base))
path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
path += os.sep
if path in self.path:
self.path.remove(path)
if create and not os.path.isdir(path):
os.makedirs(path)
if index is None:
self.path.append(path)
else:
self.path.insert(index, path)
self.cache.clear()
return os.path.exists(path)
|
[
"def",
"add_path",
"(",
"self",
",",
"path",
",",
"base",
"=",
"None",
",",
"index",
"=",
"None",
",",
"create",
"=",
"False",
")",
":",
"base",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"base",
"or",
"self",
".",
"base",
")",
")",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"base",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
")",
")",
"path",
"+=",
"os",
".",
"sep",
"if",
"path",
"in",
"self",
".",
"path",
":",
"self",
".",
"path",
".",
"remove",
"(",
"path",
")",
"if",
"create",
"and",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"os",
".",
"makedirs",
"(",
"path",
")",
"if",
"index",
"is",
"None",
":",
"self",
".",
"path",
".",
"append",
"(",
"path",
")",
"else",
":",
"self",
".",
"path",
".",
"insert",
"(",
"index",
",",
"path",
")",
"self",
".",
"cache",
".",
"clear",
"(",
")",
"return",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")"
] |
https://github.com/saltstack/halite/blob/1eacc7e3f51b670573aecb8980661e0c5dbfa0f4/halite/bottle.py#L2175-L2204
|
|
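A minimal usage sketch of the ResourceManager.add_path API above, following the res.add_path('./resources/', __file__) example from its docstring; the ./resources/ directory is hypothetical.

from bottle import ResourceManager

res = ResourceManager()
# Register ./resources/ relative to this file; returns False if the directory
# does not exist yet (pass create=True to have it created).
exists = res.add_path('./resources/', __file__)
print(exists, res.path)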
jantman/misc-scripts
|
dba5680bafbc5c5d2d9d4abcc305c57df373cd26
|
get_addons.py
|
python
|
Addongetter.run
|
(self)
|
return True
|
run it
|
run it
|
[
"run",
"it"
] |
def run(self):
""" run it """
failed = 0
updated = 0
total = 0
# bug - doesn't handle versions like "7.07"
"""
res = self.do_elvui()
if res == 3:
logger.error("UPDATE FAILED: ElvUI{d}")
failed += 1
elif res == 1:
logger.info("UPDATED: ElvUI")
updated += 1
if res != 2:
total += 1
"""
addons = self.find_installed_addons()
logger.info("Found {c} installed addons: {a}".format(c=len(addons), a=" ".join(addons)))
for dirname in addons:
res = self.update_addon(dirname)
if res == 3:
logger.error("UPDATE FAILED: {d}".format(d=dirname))
failed += 1
elif res == 1:
logger.info("UPDATED: {d}".format(d=dirname))
updated += 1
if res != 2:
total += 1
# other, generic addons
if self.dry_run:
logger.warning("Checked {t} modules; WOULD HAVE updated {u}; {f} failed".format(t=total, u=updated, f=failed))
else:
logger.warning("Checked {t} modules; updated {u}; {f} failed".format(t=total, u=updated, f=failed))
return True
|
[
"def",
"run",
"(",
"self",
")",
":",
"failed",
"=",
"0",
"updated",
"=",
"0",
"total",
"=",
"0",
"# bug - doesn't handle versions like \"7.07\"",
"\"\"\"\n res = self.do_elvui()\n if res == 3:\n logger.error(\"UPDATE FAILED: ElvUI{d}\")\n failed += 1\n elif res == 1:\n logger.info(\"UPDATED: ElvUI\")\n updated += 1\n if res != 2:\n total += 1\n \"\"\"",
"addons",
"=",
"self",
".",
"find_installed_addons",
"(",
")",
"logger",
".",
"info",
"(",
"\"Found {c} installed addons: {a}\"",
".",
"format",
"(",
"c",
"=",
"len",
"(",
"addons",
")",
",",
"a",
"=",
"\" \"",
".",
"join",
"(",
"addons",
")",
")",
")",
"for",
"dirname",
"in",
"addons",
":",
"res",
"=",
"self",
".",
"update_addon",
"(",
"dirname",
")",
"if",
"res",
"==",
"3",
":",
"logger",
".",
"error",
"(",
"\"UPDATE FAILED: {d}\"",
".",
"format",
"(",
"d",
"=",
"dirname",
")",
")",
"failed",
"+=",
"1",
"elif",
"res",
"==",
"1",
":",
"logger",
".",
"info",
"(",
"\"UPDATED: {d}\"",
".",
"format",
"(",
"d",
"=",
"dirname",
")",
")",
"updated",
"+=",
"1",
"if",
"res",
"!=",
"2",
":",
"total",
"+=",
"1",
"# other, generic addons",
"if",
"self",
".",
"dry_run",
":",
"logger",
".",
"warning",
"(",
"\"Checked {t} modules; WOULD HAVE updated {u}; {f} failed\"",
".",
"format",
"(",
"t",
"=",
"total",
",",
"u",
"=",
"updated",
",",
"f",
"=",
"failed",
")",
")",
"else",
":",
"logger",
".",
"warning",
"(",
"\"Checked {t} modules; updated {u}; {f} failed\"",
".",
"format",
"(",
"t",
"=",
"total",
",",
"u",
"=",
"updated",
",",
"f",
"=",
"failed",
")",
")",
"return",
"True"
] |
https://github.com/jantman/misc-scripts/blob/dba5680bafbc5c5d2d9d4abcc305c57df373cd26/get_addons.py#L186-L223
|
|
online-ml/river
|
3732f700da72642afe54095d4b252b05c5018c7d
|
river/tree/splitter/ebst_splitter.py
|
python
|
EBSTSplitter.cond_proba
|
(self, att_val, target_val)
|
Not implemented in regression splitters.
|
Not implemented in regression splitters.
|
[
"Not",
"implemented",
"in",
"regression",
"splitters",
"."
] |
def cond_proba(self, att_val, target_val):
"""Not implemented in regression splitters."""
raise NotImplementedError
|
[
"def",
"cond_proba",
"(",
"self",
",",
"att_val",
",",
"target_val",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/online-ml/river/blob/3732f700da72642afe54095d4b252b05c5018c7d/river/tree/splitter/ebst_splitter.py#L55-L57
|
||
jesse-ai/jesse
|
28759547138fbc76dff12741204833e39c93b083
|
jesse/modes/import_candles_mode/drivers/binance_inverse_futures.py
|
python
|
BinanceInverseFutures.fetch
|
(self, symbol: str, start_timestamp: int)
|
return [{
'id': jh.generate_unique_id(),
'symbol': symbol,
'exchange': self.name,
'timestamp': int(d[0]),
'open': float(d[1]),
'close': float(d[4]),
'high': float(d[2]),
'low': float(d[3]),
'volume': float(d[5])
} for d in data]
|
note1: unlike Bitfinex, Binance does NOT skip candles with volume=0.
note2: like Bitfinex, start_time includes the candle and so does the end_time.
|
note1: unlike Bitfinex, Binance does NOT skip candles with volume=0.
note2: like Bitfinex, start_time includes the candle and so does the end_time.
|
[
"note1",
":",
"unlike",
"Bitfinex",
"Binance",
"does",
"NOT",
"skip",
"candles",
"with",
"volume",
"=",
"0",
".",
"note2",
":",
"like",
"Bitfinex",
"start_time",
"includes",
"the",
"candle",
"and",
"so",
"does",
"the",
"end_time",
"."
] |
def fetch(self, symbol: str, start_timestamp: int) -> Union[list, None]:
"""
note1: unlike Bitfinex, Binance does NOT skip candles with volume=0.
note2: like Bitfinex, start_time includes the candle and so does the end_time.
"""
end_timestamp = start_timestamp + (self.count - 1) * 60000
payload = {
'interval': '1m',
'symbol': encode_symbol(symbol),
'startTime': start_timestamp,
'endTime': end_timestamp,
'limit': self.count,
}
response = requests.get(self.endpoint, params=payload)
# Exchange In Maintenance
if response.status_code == 502:
raise exceptions.ExchangeInMaintenance('ERROR: 502 Bad Gateway. Please try again later')
# unsupported symbol
if response.status_code == 400:
raise ValueError(response.json()['msg'])
if response.status_code != 200:
return
data = response.json()
return [{
'id': jh.generate_unique_id(),
'symbol': symbol,
'exchange': self.name,
'timestamp': int(d[0]),
'open': float(d[1]),
'close': float(d[4]),
'high': float(d[2]),
'low': float(d[3]),
'volume': float(d[5])
} for d in data]
|
[
"def",
"fetch",
"(",
"self",
",",
"symbol",
":",
"str",
",",
"start_timestamp",
":",
"int",
")",
"->",
"Union",
"[",
"list",
",",
"None",
"]",
":",
"end_timestamp",
"=",
"start_timestamp",
"+",
"(",
"self",
".",
"count",
"-",
"1",
")",
"*",
"60000",
"payload",
"=",
"{",
"'interval'",
":",
"'1m'",
",",
"'symbol'",
":",
"encode_symbol",
"(",
"symbol",
")",
",",
"'startTime'",
":",
"start_timestamp",
",",
"'endTime'",
":",
"end_timestamp",
",",
"'limit'",
":",
"self",
".",
"count",
",",
"}",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"endpoint",
",",
"params",
"=",
"payload",
")",
"# Exchange In Maintenance",
"if",
"response",
".",
"status_code",
"==",
"502",
":",
"raise",
"exceptions",
".",
"ExchangeInMaintenance",
"(",
"'ERROR: 502 Bad Gateway. Please try again later'",
")",
"# unsupported symbol",
"if",
"response",
".",
"status_code",
"==",
"400",
":",
"raise",
"ValueError",
"(",
"response",
".",
"json",
"(",
")",
"[",
"'msg'",
"]",
")",
"if",
"response",
".",
"status_code",
"!=",
"200",
":",
"return",
"data",
"=",
"response",
".",
"json",
"(",
")",
"return",
"[",
"{",
"'id'",
":",
"jh",
".",
"generate_unique_id",
"(",
")",
",",
"'symbol'",
":",
"symbol",
",",
"'exchange'",
":",
"self",
".",
"name",
",",
"'timestamp'",
":",
"int",
"(",
"d",
"[",
"0",
"]",
")",
",",
"'open'",
":",
"float",
"(",
"d",
"[",
"1",
"]",
")",
",",
"'close'",
":",
"float",
"(",
"d",
"[",
"4",
"]",
")",
",",
"'high'",
":",
"float",
"(",
"d",
"[",
"2",
"]",
")",
",",
"'low'",
":",
"float",
"(",
"d",
"[",
"3",
"]",
")",
",",
"'volume'",
":",
"float",
"(",
"d",
"[",
"5",
"]",
")",
"}",
"for",
"d",
"in",
"data",
"]"
] |
https://github.com/jesse-ai/jesse/blob/28759547138fbc76dff12741204833e39c93b083/jesse/modes/import_candles_mode/drivers/binance_inverse_futures.py#L49-L88
|
|
apache/libcloud
|
90971e17bfd7b6bb97b2489986472c531cc8e140
|
libcloud/compute/drivers/cloudwatt.py
|
python
|
CloudwattNodeDriver.__init__
|
(
self,
key,
secret,
tenant_id,
secure=True,
tenant_name=None,
host=None,
port=None,
**kwargs,
)
|
@inherits: :class:`NodeDriver.__init__`
:param tenant_id: ID of tenant required for Cloudwatt auth
:type tenant_id: ``str``
|
@inherits: :class:`NodeDriver.__init__`
|
[
"@inherits",
":",
":",
"class",
":",
"NodeDriver",
".",
"__init__"
] |
def __init__(
self,
key,
secret,
tenant_id,
secure=True,
tenant_name=None,
host=None,
port=None,
**kwargs,
):
"""
@inherits: :class:`NodeDriver.__init__`
:param tenant_id: ID of tenant required for Cloudwatt auth
:type tenant_id: ``str``
"""
self.ex_tenant_id = tenant_id
self.extra = {}
super(CloudwattNodeDriver, self).__init__(
key=key, secret=secret, secure=secure, host=host, port=port, **kwargs
)
|
[
"def",
"__init__",
"(",
"self",
",",
"key",
",",
"secret",
",",
"tenant_id",
",",
"secure",
"=",
"True",
",",
"tenant_name",
"=",
"None",
",",
"host",
"=",
"None",
",",
"port",
"=",
"None",
",",
"*",
"*",
"kwargs",
",",
")",
":",
"self",
".",
"ex_tenant_id",
"=",
"tenant_id",
"self",
".",
"extra",
"=",
"{",
"}",
"super",
"(",
"CloudwattNodeDriver",
",",
"self",
")",
".",
"__init__",
"(",
"key",
"=",
"key",
",",
"secret",
"=",
"secret",
",",
"secure",
"=",
"secure",
",",
"host",
"=",
"host",
",",
"port",
"=",
"port",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/apache/libcloud/blob/90971e17bfd7b6bb97b2489986472c531cc8e140/libcloud/compute/drivers/cloudwatt.py#L130-L151
|
||
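A hedged construction sketch for the CloudwattNodeDriver above; the key, secret, and tenant_id values are placeholders, and no API call is made until the driver is actually used.

from libcloud.compute.drivers.cloudwatt import CloudwattNodeDriver

# Placeholder credentials; tenant_id is the extra argument this driver requires.
driver = CloudwattNodeDriver('my_email@example.com', 'my_password',
                             tenant_id='0123456789', secure=True)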
pcah/python-clean-architecture
|
20630d0b3b4c00f6503a26cc98c45df12bc31b3b
|
pca/data/dao/abstract.py
|
python
|
QueryChain.remove
|
(self)
|
return self._dao._resolve_remove(self)
|
Removes all objects specified by the query from the collection.
|
Removes all objects specified by the query from the collection.
|
[
"Removes",
"all",
"objects",
"specified",
"by",
"the",
"query",
"from",
"the",
"collection",
"."
] |
def remove(self) -> Ids:
"""
Removes all objects specified by the query from the collection.
"""
return self._dao._resolve_remove(self)
|
[
"def",
"remove",
"(",
"self",
")",
"->",
"Ids",
":",
"return",
"self",
".",
"_dao",
".",
"_resolve_remove",
"(",
"self",
")"
] |
https://github.com/pcah/python-clean-architecture/blob/20630d0b3b4c00f6503a26cc98c45df12bc31b3b/pca/data/dao/abstract.py#L134-L138
|
|
python-social-auth/social-core
|
1ea27e8989657bb35dd37b6ee2e038e1358fbc96
|
social_core/backends/google_openidconnect.py
|
python
|
GoogleOpenIdConnect.user_data
|
(self, access_token, *args, **kwargs)
|
return self.get_json(
'https://openidconnect.googleapis.com/v1/userinfo',
params={'access_token': access_token, 'alt': 'json'}
)
|
Return user data from Google API
|
Return user data from Google API
|
[
"Return",
"user",
"data",
"from",
"Google",
"API"
] |
def user_data(self, access_token, *args, **kwargs):
"""Return user data from Google API"""
return self.get_json(
'https://openidconnect.googleapis.com/v1/userinfo',
params={'access_token': access_token, 'alt': 'json'}
)
|
[
"def",
"user_data",
"(",
"self",
",",
"access_token",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"get_json",
"(",
"'https://openidconnect.googleapis.com/v1/userinfo'",
",",
"params",
"=",
"{",
"'access_token'",
":",
"access_token",
",",
"'alt'",
":",
"'json'",
"}",
")"
] |
https://github.com/python-social-auth/social-core/blob/1ea27e8989657bb35dd37b6ee2e038e1358fbc96/social_core/backends/google_openidconnect.py#L16-L21
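The method above is a thin wrapper over Google's userinfo endpoint; here is a hedged, standalone sketch of the same request using requests (the token value is a placeholder, not data from this record):

import requests

resp = requests.get(
    "https://openidconnect.googleapis.com/v1/userinfo",
    params={"access_token": "ya29.placeholder", "alt": "json"},
    timeout=10,
)
resp.raise_for_status()
claims = resp.json()                       # same payload user_data() returns
print(claims.get("sub"), claims.get("email"))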
|
|
tokenika/eosfactory
|
ee00f662872690738a702fc05aca1f1c0c8d4783
|
eosfactory/shell/wallet.py
|
python
|
Wallet.stop
|
(self)
|
Stop keosd, the EOSIO wallet manager.
|
Stop keosd, the EOSIO wallet manager.
|
[
"Stop",
"keosd",
"the",
"EOSIO",
"wallet",
"manager",
"."
] |
def stop(self):
'''Stop keosd, the EOSIO wallet manager.
'''
cleos.WalletStop()
|
[
"def",
"stop",
"(",
"self",
")",
":",
"cleos",
".",
"WalletStop",
"(",
")"
] |
https://github.com/tokenika/eosfactory/blob/ee00f662872690738a702fc05aca1f1c0c8d4783/eosfactory/shell/wallet.py#L313-L316
|
||
spotify/luigi
|
c3b66f4a5fa7eaa52f9a72eb6704b1049035c789
|
luigi/tools/range.py
|
python
|
RangeBase.moving_start
|
(self, now)
|
Returns a datetime from which to ensure contiguousness in the case when
start is None or unfeasibly far back.
|
Returns a datetime from which to ensure contiguousness in the case when
start is None or unfeasibly far back.
|
[
"Returns",
"a",
"datetime",
"from",
"which",
"to",
"ensure",
"contiguousness",
"in",
"the",
"case",
"when",
"start",
"is",
"None",
"or",
"unfeasibly",
"far",
"back",
"."
] |
def moving_start(self, now):
"""
Returns a datetime from which to ensure contiguousness in the case when
start is None or unfeasibly far back.
"""
raise NotImplementedError
|
[
"def",
"moving_start",
"(",
"self",
",",
"now",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/spotify/luigi/blob/c3b66f4a5fa7eaa52f9a72eb6704b1049035c789/luigi/tools/range.py#L143-L148
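Since the record above is an abstract hook, here is an illustrative (non-luigi) stand-in showing the kind of value a concrete implementation is expected to return: a fixed look-back window truncated to midnight. The class and attribute names are made up for the sketch.

import datetime

class MovingWindow:
    """Toy stand-in for a RangeBase subclass with a fixed 90-day look-back."""
    days_back = 90

    def moving_start(self, now):
        # Go back a fixed window and truncate to midnight so runs stay aligned.
        start = now - datetime.timedelta(days=self.days_back)
        return start.replace(hour=0, minute=0, second=0, microsecond=0)

print(MovingWindow().moving_start(datetime.datetime(2024, 1, 31, 15, 30)))
# 2023-11-02 00:00:00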
|
||
AppScale/gts
|
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
|
AppServer/lib/django-1.3/django/utils/formats.py
|
python
|
reset_format_cache
|
()
|
Clear any cached formats.
This method is provided primarily for testing purposes,
so that the effects of cached formats can be removed.
|
Clear any cached formats.
|
[
"Clear",
"any",
"cached",
"formats",
"."
] |
def reset_format_cache():
"""Clear any cached formats.
This method is provided primarily for testing purposes,
so that the effects of cached formats can be removed.
"""
global _format_cache, _format_modules_cache
_format_cache = {}
_format_modules_cache = {}
|
[
"def",
"reset_format_cache",
"(",
")",
":",
"global",
"_format_cache",
",",
"_format_modules_cache",
"_format_cache",
"=",
"{",
"}",
"_format_modules_cache",
"=",
"{",
"}"
] |
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.3/django/utils/formats.py#L17-L25
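A hedged, test-style sketch of the pattern this docstring describes, clearing the module-level caches so a localization lookup is not served from stale entries; it assumes a configured Django project:

from django.test import SimpleTestCase
from django.utils.formats import get_format, reset_format_cache

class FormatCacheTests(SimpleTestCase):
    def test_cache_cleared_between_locales(self):
        reset_format_cache()                 # drop anything cached so far
        sep = get_format("DECIMAL_SEPARATOR", lang="de", use_l10n=True)
        self.assertEqual(sep, ",")
        reset_format_cache()                 # leave a clean cache behind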
|
||
chengzhengxin/groupsoftmax-simpledet
|
3f63a00998c57fee25241cf43a2e8600893ea462
|
models/cascade_rcnn/builder.py
|
python
|
CascadeRcnn.get_train_symbol
|
(backbone, neck, rpn_head, roi_extractor, bbox_head, \
bbox_head_2nd, bbox_head_3rd)
|
return X.group(rpn_loss + bbox_loss + bbox_loss_2nd + bbox_loss_3rd)
|
[] |
def get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head, \
bbox_head_2nd, bbox_head_3rd):
gt_bbox = X.var("gt_bbox")
im_info = X.var("im_info")
rpn_feat = backbone.get_rpn_feature()
rcnn_feat = backbone.get_rcnn_feature()
rpn_feat = neck.get_rpn_feature(rpn_feat)
rcnn_feat = neck.get_rcnn_feature(rcnn_feat)
rpn_loss = rpn_head.get_loss(rpn_feat, gt_bbox, im_info)
# stage1
proposal, bbox_cls, bbox_target, bbox_weight = \
rpn_head.get_sampled_proposal(
rpn_feat,
gt_bbox,
im_info
)
roi_feat = roi_extractor.get_roi_feature(rcnn_feat, proposal)
bbox_loss = bbox_head.get_loss(
roi_feat,
bbox_cls,
bbox_target,
bbox_weight
)
bbox_pred = bbox_head._bbox_delta
# stage2
# though call get_sampled_proposal, bbox_head does not sample rois
proposal_2nd, bbox_cls_2nd, bbox_target_2nd, bbox_weight_2nd = \
bbox_head.get_sampled_proposal(
proposal,
bbox_pred,
gt_bbox,
im_info
)
roi_feat_2nd = roi_extractor.get_roi_feature(rcnn_feat, proposal_2nd)
bbox_loss_2nd = bbox_head_2nd.get_loss(
roi_feat_2nd,
bbox_cls_2nd,
bbox_target_2nd,
bbox_weight_2nd
)
bbox_pred_2nd = bbox_head_2nd._bbox_delta
# stage3
# though call get_sampled_proposal, bbox_head does not sample rois
proposal_3rd, bbox_cls_3rd, bbox_target_3rd, bbox_weight_3rd = \
bbox_head_2nd.get_sampled_proposal(
proposal_2nd,
bbox_pred_2nd,
gt_bbox,
im_info
)
roi_feat_3rd = roi_extractor.get_roi_feature(rcnn_feat, proposal_3rd)
bbox_loss_3rd = bbox_head_3rd.get_loss(
roi_feat_3rd,
bbox_cls_3rd,
bbox_target_3rd,
bbox_weight_3rd
)
return X.group(rpn_loss + bbox_loss + bbox_loss_2nd + bbox_loss_3rd)
|
[
"def",
"get_train_symbol",
"(",
"backbone",
",",
"neck",
",",
"rpn_head",
",",
"roi_extractor",
",",
"bbox_head",
",",
"bbox_head_2nd",
",",
"bbox_head_3rd",
")",
":",
"gt_bbox",
"=",
"X",
".",
"var",
"(",
"\"gt_bbox\"",
")",
"im_info",
"=",
"X",
".",
"var",
"(",
"\"im_info\"",
")",
"rpn_feat",
"=",
"backbone",
".",
"get_rpn_feature",
"(",
")",
"rcnn_feat",
"=",
"backbone",
".",
"get_rcnn_feature",
"(",
")",
"rpn_feat",
"=",
"neck",
".",
"get_rpn_feature",
"(",
"rpn_feat",
")",
"rcnn_feat",
"=",
"neck",
".",
"get_rcnn_feature",
"(",
"rcnn_feat",
")",
"rpn_loss",
"=",
"rpn_head",
".",
"get_loss",
"(",
"rpn_feat",
",",
"gt_bbox",
",",
"im_info",
")",
"# stage1",
"proposal",
",",
"bbox_cls",
",",
"bbox_target",
",",
"bbox_weight",
"=",
"rpn_head",
".",
"get_sampled_proposal",
"(",
"rpn_feat",
",",
"gt_bbox",
",",
"im_info",
")",
"roi_feat",
"=",
"roi_extractor",
".",
"get_roi_feature",
"(",
"rcnn_feat",
",",
"proposal",
")",
"bbox_loss",
"=",
"bbox_head",
".",
"get_loss",
"(",
"roi_feat",
",",
"bbox_cls",
",",
"bbox_target",
",",
"bbox_weight",
")",
"bbox_pred",
"=",
"bbox_head",
".",
"_bbox_delta",
"# stage2",
"# though call get_sampled_proposal, bbox_head does not sample rois",
"proposal_2nd",
",",
"bbox_cls_2nd",
",",
"bbox_target_2nd",
",",
"bbox_weight_2nd",
"=",
"bbox_head",
".",
"get_sampled_proposal",
"(",
"proposal",
",",
"bbox_pred",
",",
"gt_bbox",
",",
"im_info",
")",
"roi_feat_2nd",
"=",
"roi_extractor",
".",
"get_roi_feature",
"(",
"rcnn_feat",
",",
"proposal_2nd",
")",
"bbox_loss_2nd",
"=",
"bbox_head_2nd",
".",
"get_loss",
"(",
"roi_feat_2nd",
",",
"bbox_cls_2nd",
",",
"bbox_target_2nd",
",",
"bbox_weight_2nd",
")",
"bbox_pred_2nd",
"=",
"bbox_head_2nd",
".",
"_bbox_delta",
"# stage3",
"# though call get_sampled_proposal, bbox_head does not sample rois",
"proposal_3rd",
",",
"bbox_cls_3rd",
",",
"bbox_target_3rd",
",",
"bbox_weight_3rd",
"=",
"bbox_head_2nd",
".",
"get_sampled_proposal",
"(",
"proposal_2nd",
",",
"bbox_pred_2nd",
",",
"gt_bbox",
",",
"im_info",
")",
"roi_feat_3rd",
"=",
"roi_extractor",
".",
"get_roi_feature",
"(",
"rcnn_feat",
",",
"proposal_3rd",
")",
"bbox_loss_3rd",
"=",
"bbox_head_3rd",
".",
"get_loss",
"(",
"roi_feat_3rd",
",",
"bbox_cls_3rd",
",",
"bbox_target_3rd",
",",
"bbox_weight_3rd",
")",
"return",
"X",
".",
"group",
"(",
"rpn_loss",
"+",
"bbox_loss",
"+",
"bbox_loss_2nd",
"+",
"bbox_loss_3rd",
")"
] |
https://github.com/chengzhengxin/groupsoftmax-simpledet/blob/3f63a00998c57fee25241cf43a2e8600893ea462/models/cascade_rcnn/builder.py#L15-L78
|
|||
makerbot/ReplicatorG
|
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
|
skein_engines/skeinforge-35/fabmetheus_utilities/euclidean.py
|
python
|
getLeftPoint
|
(points)
|
return leftPointComplex
|
Get the leftmost complex point in the points.
|
Get the leftmost complex point in the points.
|
[
"Get",
"the",
"leftmost",
"complex",
"point",
"in",
"the",
"points",
"."
] |
def getLeftPoint(points):
'Get the leftmost complex point in the points.'
leftmost = 999999999.0
leftPointComplex = None
for pointComplex in points:
if pointComplex.real < leftmost:
leftmost = pointComplex.real
leftPointComplex = pointComplex
return leftPointComplex
|
[
"def",
"getLeftPoint",
"(",
"points",
")",
":",
"leftmost",
"=",
"999999999.0",
"leftPointComplex",
"=",
"None",
"for",
"pointComplex",
"in",
"points",
":",
"if",
"pointComplex",
".",
"real",
"<",
"leftmost",
":",
"leftmost",
"=",
"pointComplex",
".",
"real",
"leftPointComplex",
"=",
"pointComplex",
"return",
"leftPointComplex"
] |
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-35/fabmetheus_utilities/euclidean.py#L995-L1003
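A quick self-contained check of the helper above; the function body is copied from the record so the snippet runs on its own:

def getLeftPoint(points):
    'Get the leftmost complex point in the points.'
    leftmost = 999999999.0
    leftPointComplex = None
    for pointComplex in points:
        if pointComplex.real < leftmost:
            leftmost = pointComplex.real
            leftPointComplex = pointComplex
    return leftPointComplex

# Points are complex numbers: real part = x, imaginary part = y.
print(getLeftPoint([3 + 1j, -2.5 + 4j, 0 - 1j]))   # (-2.5+4j)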
|
|
IronLanguages/main
|
a949455434b1fda8c783289e897e78a9a0caabb5
|
External.LCA_RESTRICTED/Languages/CPython/27/Lib/imaplib.py
|
python
|
IMAP4.recent
|
(self)
|
return self._untagged_response(typ, dat, name)
|
Return most recent 'RECENT' responses if any exist,
else prompt server for an update using the 'NOOP' command.
(typ, [data]) = <instance>.recent()
'data' is None if no new messages,
else list of RECENT responses, most recent last.
|
Return most recent 'RECENT' responses if any exist,
else prompt server for an update using the 'NOOP' command.
|
[
"Return",
"most",
"recent",
"RECENT",
"responses",
"if",
"any",
"exist",
"else",
"prompt",
"server",
"for",
"an",
"update",
"using",
"the",
"NOOP",
"command",
"."
] |
def recent(self):
"""Return most recent 'RECENT' responses if any exist,
else prompt server for an update using the 'NOOP' command.
(typ, [data]) = <instance>.recent()
'data' is None if no new messages,
else list of RECENT responses, most recent last.
"""
name = 'RECENT'
typ, dat = self._untagged_response('OK', [None], name)
if dat[-1]:
return typ, dat
typ, dat = self.noop() # Prod server for response
return self._untagged_response(typ, dat, name)
|
[
"def",
"recent",
"(",
"self",
")",
":",
"name",
"=",
"'RECENT'",
"typ",
",",
"dat",
"=",
"self",
".",
"_untagged_response",
"(",
"'OK'",
",",
"[",
"None",
"]",
",",
"name",
")",
"if",
"dat",
"[",
"-",
"1",
"]",
":",
"return",
"typ",
",",
"dat",
"typ",
",",
"dat",
"=",
"self",
".",
"noop",
"(",
")",
"# Prod server for response",
"return",
"self",
".",
"_untagged_response",
"(",
"typ",
",",
"dat",
",",
"name",
")"
] |
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/imaplib.py#L273-L287
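A hedged Python 3 usage sketch for the method above (host and credentials are placeholders); it follows the docstring's contract that the last data item is None when there is nothing new:

import imaplib

with imaplib.IMAP4_SSL("imap.example.com") as conn:
    conn.login("user@example.com", "app-password")
    conn.select("INBOX")
    typ, data = conn.recent()          # (typ, [data])
    if data[-1] is None:
        print("no new messages")
    else:
        print("RECENT responses:", data)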
|
|
keras-team/keras
|
5caa668b6a415675064a730f5eb46ecc08e40f65
|
keras/engine/training_v1.py
|
python
|
_TrainingEndpoint.feed_output_shape
|
(self)
|
The output shape for the feedable target.
|
The output shape for the feedable target.
|
[
"The",
"output",
"shape",
"for",
"the",
"feedable",
"target",
"."
] |
def feed_output_shape(self):
"""The output shape for the feedable target."""
if not self.has_feedable_training_target():
return None
if ((isinstance(self.loss_fn, losses.LossFunctionWrapper) and
self.loss_fn.fn == losses.sparse_categorical_crossentropy)) or (
isinstance(self.loss_fn, losses.SparseCategoricalCrossentropy)):
if backend.image_data_format() == 'channels_first':
return (self.shape[0], 1) + self.shape[2:]
else:
return self.shape[:-1] + (1,)
elif (not isinstance(self.loss_fn, losses.Loss) or
(isinstance(self.loss_fn, losses.LossFunctionWrapper) and
(getattr(losses, self.loss_fn.fn.__name__, None) is None))):
# If the given loss is not an instance of the `Loss` class (custom
# class) or if the loss function that is wrapped is not in the
# `losses` module, then it is a user-defined loss and we make no
# assumptions about it.
return None
else:
return self.shape
|
[
"def",
"feed_output_shape",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"has_feedable_training_target",
"(",
")",
":",
"return",
"None",
"if",
"(",
"(",
"isinstance",
"(",
"self",
".",
"loss_fn",
",",
"losses",
".",
"LossFunctionWrapper",
")",
"and",
"self",
".",
"loss_fn",
".",
"fn",
"==",
"losses",
".",
"sparse_categorical_crossentropy",
")",
")",
"or",
"(",
"isinstance",
"(",
"self",
".",
"loss_fn",
",",
"losses",
".",
"SparseCategoricalCrossentropy",
")",
")",
":",
"if",
"backend",
".",
"image_data_format",
"(",
")",
"==",
"'channels_first'",
":",
"return",
"(",
"self",
".",
"shape",
"[",
"0",
"]",
",",
"1",
")",
"+",
"self",
".",
"shape",
"[",
"2",
":",
"]",
"else",
":",
"return",
"self",
".",
"shape",
"[",
":",
"-",
"1",
"]",
"+",
"(",
"1",
",",
")",
"elif",
"(",
"not",
"isinstance",
"(",
"self",
".",
"loss_fn",
",",
"losses",
".",
"Loss",
")",
"or",
"(",
"isinstance",
"(",
"self",
".",
"loss_fn",
",",
"losses",
".",
"LossFunctionWrapper",
")",
"and",
"(",
"getattr",
"(",
"losses",
",",
"self",
".",
"loss_fn",
".",
"fn",
".",
"__name__",
",",
"None",
")",
"is",
"None",
")",
")",
")",
":",
"# If the given loss is not an instance of the `Loss` class (custom",
"# class) or if the loss function that is wrapped is not in the",
"# `losses` module, then it is a user-defined loss and we make no",
"# assumptions about it.",
"return",
"None",
"else",
":",
"return",
"self",
".",
"shape"
] |
https://github.com/keras-team/keras/blob/5caa668b6a415675064a730f5eb46ecc08e40f65/keras/engine/training_v1.py#L3048-L3069
|
||
sensepost/Snoopy
|
57b354e5b41e0aee4eccf58fa91e15b198c304c2
|
snoopy/server/bin/sslstripSnoopy/sslstrip/ClientRequest.py
|
python
|
ClientRequest.sendExpiredCookies
|
(self, host, path, expireHeaders)
|
[] |
def sendExpiredCookies(self, host, path, expireHeaders):
self.setResponseCode(302, "Moved")
self.setHeader("Connection", "close")
self.setHeader("Location", "http://" + host + path)
for header in expireHeaders:
self.setHeader("Set-Cookie", header)
self.finish()
|
[
"def",
"sendExpiredCookies",
"(",
"self",
",",
"host",
",",
"path",
",",
"expireHeaders",
")",
":",
"self",
".",
"setResponseCode",
"(",
"302",
",",
"\"Moved\"",
")",
"self",
".",
"setHeader",
"(",
"\"Connection\"",
",",
"\"close\"",
")",
"self",
".",
"setHeader",
"(",
"\"Location\"",
",",
"\"http://\"",
"+",
"host",
"+",
"path",
")",
"for",
"header",
"in",
"expireHeaders",
":",
"self",
".",
"setHeader",
"(",
"\"Set-Cookie\"",
",",
"header",
")",
"self",
".",
"finish",
"(",
")"
] |
https://github.com/sensepost/Snoopy/blob/57b354e5b41e0aee4eccf58fa91e15b198c304c2/snoopy/server/bin/sslstripSnoopy/sslstrip/ClientRequest.py#L146-L154
|
||||
reddit-archive/reddit
|
753b17407e9a9dca09558526805922de24133d53
|
r2/r2/controllers/front.py
|
python
|
FormsController.GET_prefs
|
(self, location='', verified=False)
|
return PrefsPage(content=content, infotext=infotext).render()
|
Preference page
|
Preference page
|
[
"Preference",
"page"
] |
def GET_prefs(self, location='', verified=False):
"""Preference page"""
content = None
infotext = None
if not location or location == 'options':
content = PrefOptions(
done=request.GET.get('done'),
error_style_override=request.GET.get('error_style_override'),
generic_error=request.GET.get('generic_error'),
)
elif location == 'update':
if verified:
infotext = strings.email_verified
content = PrefUpdate()
elif location == 'apps':
content = PrefApps(my_apps=OAuth2Client._by_user_grouped(c.user),
developed_apps=OAuth2Client._by_developer(c.user))
elif location == 'feeds' and c.user.pref_private_feeds:
content = PrefFeeds()
elif location == 'deactivate':
content = PrefDeactivate()
elif location == 'delete':
return self.redirect('/prefs/deactivate', code=301)
elif location == 'security':
if c.user.name not in g.admins:
return self.redirect('/prefs/')
content = PrefSecurity()
else:
return self.abort404()
return PrefsPage(content=content, infotext=infotext).render()
|
[
"def",
"GET_prefs",
"(",
"self",
",",
"location",
"=",
"''",
",",
"verified",
"=",
"False",
")",
":",
"content",
"=",
"None",
"infotext",
"=",
"None",
"if",
"not",
"location",
"or",
"location",
"==",
"'options'",
":",
"content",
"=",
"PrefOptions",
"(",
"done",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'done'",
")",
",",
"error_style_override",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'error_style_override'",
")",
",",
"generic_error",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'generic_error'",
")",
",",
")",
"elif",
"location",
"==",
"'update'",
":",
"if",
"verified",
":",
"infotext",
"=",
"strings",
".",
"email_verified",
"content",
"=",
"PrefUpdate",
"(",
")",
"elif",
"location",
"==",
"'apps'",
":",
"content",
"=",
"PrefApps",
"(",
"my_apps",
"=",
"OAuth2Client",
".",
"_by_user_grouped",
"(",
"c",
".",
"user",
")",
",",
"developed_apps",
"=",
"OAuth2Client",
".",
"_by_developer",
"(",
"c",
".",
"user",
")",
")",
"elif",
"location",
"==",
"'feeds'",
"and",
"c",
".",
"user",
".",
"pref_private_feeds",
":",
"content",
"=",
"PrefFeeds",
"(",
")",
"elif",
"location",
"==",
"'deactivate'",
":",
"content",
"=",
"PrefDeactivate",
"(",
")",
"elif",
"location",
"==",
"'delete'",
":",
"return",
"self",
".",
"redirect",
"(",
"'/prefs/deactivate'",
",",
"code",
"=",
"301",
")",
"elif",
"location",
"==",
"'security'",
":",
"if",
"c",
".",
"user",
".",
"name",
"not",
"in",
"g",
".",
"admins",
":",
"return",
"self",
".",
"redirect",
"(",
"'/prefs/'",
")",
"content",
"=",
"PrefSecurity",
"(",
")",
"else",
":",
"return",
"self",
".",
"abort404",
"(",
")",
"return",
"PrefsPage",
"(",
"content",
"=",
"content",
",",
"infotext",
"=",
"infotext",
")",
".",
"render",
"(",
")"
] |
https://github.com/reddit-archive/reddit/blob/753b17407e9a9dca09558526805922de24133d53/r2/r2/controllers/front.py#L1691-L1721
|
|
apache/libcloud
|
90971e17bfd7b6bb97b2489986472c531cc8e140
|
libcloud/storage/drivers/local.py
|
python
|
LocalStorageDriver._get_objects
|
(self, container)
|
Recursively iterate through the file-system and return the object names
|
Recursively iterate through the file-system and return the object names
|
[
"Recursively",
"iterate",
"through",
"the",
"file",
"-",
"system",
"and",
"return",
"the",
"object",
"names"
] |
def _get_objects(self, container):
"""
Recursively iterate through the file-system and return the object names
"""
cpath = self.get_container_cdn_url(container, check=True)
for folder, subfolders, files in os.walk(cpath, topdown=True):
# Remove unwanted subfolders
for subf in IGNORE_FOLDERS:
if subf in subfolders:
subfolders.remove(subf)
for name in files:
full_path = os.path.join(folder, name)
object_name = relpath(full_path, start=cpath)
yield self._make_object(container, object_name)
|
[
"def",
"_get_objects",
"(",
"self",
",",
"container",
")",
":",
"cpath",
"=",
"self",
".",
"get_container_cdn_url",
"(",
"container",
",",
"check",
"=",
"True",
")",
"for",
"folder",
",",
"subfolders",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"cpath",
",",
"topdown",
"=",
"True",
")",
":",
"# Remove unwanted subfolders",
"for",
"subf",
"in",
"IGNORE_FOLDERS",
":",
"if",
"subf",
"in",
"subfolders",
":",
"subfolders",
".",
"remove",
"(",
"subf",
")",
"for",
"name",
"in",
"files",
":",
"full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"folder",
",",
"name",
")",
"object_name",
"=",
"relpath",
"(",
"full_path",
",",
"start",
"=",
"cpath",
")",
"yield",
"self",
".",
"_make_object",
"(",
"container",
",",
"object_name",
")"
] |
https://github.com/apache/libcloud/blob/90971e17bfd7b6bb97b2489986472c531cc8e140/libcloud/storage/drivers/local.py#L289-L305
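An illustrative end-to-end use of the local storage driver whose internal helper is shown above; the base directory is a placeholder, and the driver's optional file-locking dependency is assumed to be installed:

import os
from libcloud.storage.providers import get_driver
from libcloud.storage.types import ContainerAlreadyExistsError, Provider

base = "/tmp/libcloud-local-storage"        # placeholder base directory
os.makedirs(base, exist_ok=True)

LocalDriver = get_driver(Provider.LOCAL)
driver = LocalDriver(base)                  # key = path backing the "cloud"
try:
    container = driver.create_container("backups")
except ContainerAlreadyExistsError:
    container = driver.get_container("backups")
for obj in driver.list_container_objects(container):
    print(obj.name)                         # names yielded by _get_objects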
|
||
AltSchool/dynamic-rest
|
6d983287bbc4b42747da0e431e5acd96ea8c4b40
|
dynamic_rest/routers.py
|
python
|
DynamicRouter.get_canonical_serializer
|
(
resource_key,
model=None,
instance=None,
resource_name=None
)
|
return resource_map[resource_key]['viewset'].serializer_class
|
Return canonical serializer for a given resource name.
Arguments:
resource_key - Resource key, usually DB table for model-based
resources, otherwise the plural name.
model - (Optional) Model class to look up by.
instance - (Optional) Model object instance.
Returns: serializer class
|
Return canonical serializer for a given resource name.
|
[
"Return",
"canonical",
"serializer",
"for",
"a",
"given",
"resource",
"name",
"."
] |
def get_canonical_serializer(
resource_key,
model=None,
instance=None,
resource_name=None
):
"""
Return canonical serializer for a given resource name.
Arguments:
resource_key - Resource key, usually DB table for model-based
resources, otherwise the plural name.
model - (Optional) Model class to look up by.
instance - (Optional) Model object instance.
Returns: serializer class
"""
if model:
resource_key = get_model_table(model)
elif instance:
resource_key = instance._meta.db_table
elif resource_name:
resource_key = resource_name_map[resource_name]
if resource_key not in resource_map:
return None
return resource_map[resource_key]['viewset'].serializer_class
|
[
"def",
"get_canonical_serializer",
"(",
"resource_key",
",",
"model",
"=",
"None",
",",
"instance",
"=",
"None",
",",
"resource_name",
"=",
"None",
")",
":",
"if",
"model",
":",
"resource_key",
"=",
"get_model_table",
"(",
"model",
")",
"elif",
"instance",
":",
"resource_key",
"=",
"instance",
".",
"_meta",
".",
"db_table",
"elif",
"resource_name",
":",
"resource_key",
"=",
"resource_name_map",
"[",
"resource_name",
"]",
"if",
"resource_key",
"not",
"in",
"resource_map",
":",
"return",
"None",
"return",
"resource_map",
"[",
"resource_key",
"]",
"[",
"'viewset'",
"]",
".",
"serializer_class"
] |
https://github.com/AltSchool/dynamic-rest/blob/6d983287bbc4b42747da0e431e5acd96ea8c4b40/dynamic_rest/routers.py#L268-L295
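A hedged lookup sketch for the router helper above; `Blog` is a hypothetical model assumed to be registered with a DynamicRouter elsewhere in the project, not something taken from this record:

from dynamic_rest.routers import DynamicRouter
from myapp.models import Blog               # hypothetical app and model

serializer_cls = DynamicRouter.get_canonical_serializer(None, model=Blog)
if serializer_cls is None:
    print("Blog was never registered with a DynamicRouter")
else:
    print("canonical serializer:", serializer_cls.__name__)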
|
|
caiiiac/Machine-Learning-with-Python
|
1a26c4467da41ca4ebc3d5bd789ea942ef79422f
|
MachineLearning/venv/lib/python3.5/site-packages/pandas/core/frame.py
|
python
|
DataFrame.eval
|
(self, expr, inplace=None, **kwargs)
|
return _eval(expr, inplace=inplace, **kwargs)
|
Evaluate an expression in the context of the calling DataFrame
instance.
Parameters
----------
expr : string
The expression string to evaluate.
inplace : bool
If the expression contains an assignment, whether to return a new
DataFrame or mutate the existing.
WARNING: inplace=None currently falls back to True, but
in a future version, will default to False. Use inplace=True
explicitly rather than relying on the default.
.. versionadded:: 0.18.0
kwargs : dict
See the documentation for :func:`~pandas.eval` for complete details
on the keyword arguments accepted by
:meth:`~pandas.DataFrame.query`.
Returns
-------
ret : ndarray, scalar, or pandas object
See Also
--------
pandas.DataFrame.query
pandas.DataFrame.assign
pandas.eval
Notes
-----
For more details see the API documentation for :func:`~pandas.eval`.
For detailed examples see :ref:`enhancing performance with eval
<enhancingperf.eval>`.
Examples
--------
>>> from numpy.random import randn
>>> from pandas import DataFrame
>>> df = DataFrame(randn(10, 2), columns=list('ab'))
>>> df.eval('a + b')
>>> df.eval('c = a + b')
|
Evaluate an expression in the context of the calling DataFrame
instance.
|
[
"Evaluate",
"an",
"expression",
"in",
"the",
"context",
"of",
"the",
"calling",
"DataFrame",
"instance",
"."
] |
def eval(self, expr, inplace=None, **kwargs):
"""Evaluate an expression in the context of the calling DataFrame
instance.
Parameters
----------
expr : string
The expression string to evaluate.
inplace : bool
If the expression contains an assignment, whether to return a new
DataFrame or mutate the existing.
WARNING: inplace=None currently falls back to True, but
in a future version, will default to False. Use inplace=True
explicitly rather than relying on the default.
.. versionadded:: 0.18.0
kwargs : dict
See the documentation for :func:`~pandas.eval` for complete details
on the keyword arguments accepted by
:meth:`~pandas.DataFrame.query`.
Returns
-------
ret : ndarray, scalar, or pandas object
See Also
--------
pandas.DataFrame.query
pandas.DataFrame.assign
pandas.eval
Notes
-----
For more details see the API documentation for :func:`~pandas.eval`.
For detailed examples see :ref:`enhancing performance with eval
<enhancingperf.eval>`.
Examples
--------
>>> from numpy.random import randn
>>> from pandas import DataFrame
>>> df = DataFrame(randn(10, 2), columns=list('ab'))
>>> df.eval('a + b')
>>> df.eval('c = a + b')
"""
inplace = validate_bool_kwarg(inplace, 'inplace')
resolvers = kwargs.pop('resolvers', None)
kwargs['level'] = kwargs.pop('level', 0) + 1
if resolvers is None:
index_resolvers = self._get_index_resolvers()
resolvers = dict(self.iteritems()), index_resolvers
if 'target' not in kwargs:
kwargs['target'] = self
kwargs['resolvers'] = kwargs.get('resolvers', ()) + tuple(resolvers)
return _eval(expr, inplace=inplace, **kwargs)
|
[
"def",
"eval",
"(",
"self",
",",
"expr",
",",
"inplace",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"inplace",
"=",
"validate_bool_kwarg",
"(",
"inplace",
",",
"'inplace'",
")",
"resolvers",
"=",
"kwargs",
".",
"pop",
"(",
"'resolvers'",
",",
"None",
")",
"kwargs",
"[",
"'level'",
"]",
"=",
"kwargs",
".",
"pop",
"(",
"'level'",
",",
"0",
")",
"+",
"1",
"if",
"resolvers",
"is",
"None",
":",
"index_resolvers",
"=",
"self",
".",
"_get_index_resolvers",
"(",
")",
"resolvers",
"=",
"dict",
"(",
"self",
".",
"iteritems",
"(",
")",
")",
",",
"index_resolvers",
"if",
"'target'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'target'",
"]",
"=",
"self",
"kwargs",
"[",
"'resolvers'",
"]",
"=",
"kwargs",
".",
"get",
"(",
"'resolvers'",
",",
"(",
")",
")",
"+",
"tuple",
"(",
"resolvers",
")",
"return",
"_eval",
"(",
"expr",
",",
"inplace",
"=",
"inplace",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/pandas/core/frame.py#L2228-L2284
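A short, self-contained run of the API documented above, passing inplace=False explicitly as the docstring's warning suggests:

import numpy as np
import pandas as pd

df = pd.DataFrame(np.random.randn(10, 2), columns=list("ab"))
summed = df.eval("a + b")                     # Series of row-wise sums
with_c = df.eval("c = a + b", inplace=False)  # copy with an extra 'c' column
print(list(with_c.columns))                   # ['a', 'b', 'c']
print(np.allclose(with_c["c"], summed))       # True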
|