nwo (stringlengths 5–106) | sha (stringlengths 40–40) | path (stringlengths 4–174) | language (stringclasses: 1 value) | identifier (stringlengths 1–140) | parameters (stringlengths 0–87.7k) | argument_list (stringclasses: 1 value) | return_statement (stringlengths 0–426k) | docstring (stringlengths 0–64.3k) | docstring_summary (stringlengths 0–26.3k) | docstring_tokens (list) | function (stringlengths 18–4.83M) | function_tokens (list) | url (stringlengths 83–304)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
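Each row below is one Python function with its repository slug (`nwo`), commit `sha`, file `path`, `identifier`, signature, docstring fields, token lists, and a GitHub `url` pinning the exact blob and line range. As a minimal sketch of how such rows might be consumed, assuming the table is exported as JSON Lines with the column names above (the filename is hypothetical, not part of the dataset):

```python
# Minimal sketch, assuming a JSON Lines export of this table.
# "functions.jsonl" is a hypothetical filename, not part of the dataset.
import json

def iter_rows(path):
    """Yield one dict per dataset row from a JSON Lines file."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            line = line.strip()
            if line:  # skip blank lines
                yield json.loads(line)

if __name__ == "__main__":
    for row in iter_rows("functions.jsonl"):
        # 'nwo' is the owner/repo slug; 'url' pins the exact lines on GitHub.
        print(row["nwo"], row["identifier"], row["url"])
```

Streaming line by line keeps memory flat, which matters here since the `function` column can run to several megabytes per row.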
cloudera/hue
|
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
|
desktop/core/ext-py/Django-1.11.29/django/contrib/admin/sites.py
|
python
|
AdminSite.password_change_done
|
(self, request, extra_context=None)
|
return PasswordChangeDoneView.as_view(**defaults)(request)
|
Displays the "success" page after a password change.
|
Displays the "success" page after a password change.
|
[
"Displays",
"the",
"success",
"page",
"after",
"a",
"password",
"change",
"."
] |
def password_change_done(self, request, extra_context=None):
"""
Displays the "success" page after a password change.
"""
from django.contrib.auth.views import PasswordChangeDoneView
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
request.current_app = self.name
return PasswordChangeDoneView.as_view(**defaults)(request)
|
[
"def",
"password_change_done",
"(",
"self",
",",
"request",
",",
"extra_context",
"=",
"None",
")",
":",
"from",
"django",
".",
"contrib",
".",
"auth",
".",
"views",
"import",
"PasswordChangeDoneView",
"defaults",
"=",
"{",
"'extra_context'",
":",
"dict",
"(",
"self",
".",
"each_context",
"(",
"request",
")",
",",
"*",
"*",
"(",
"extra_context",
"or",
"{",
"}",
")",
")",
",",
"}",
"if",
"self",
".",
"password_change_done_template",
"is",
"not",
"None",
":",
"defaults",
"[",
"'template_name'",
"]",
"=",
"self",
".",
"password_change_done_template",
"request",
".",
"current_app",
"=",
"self",
".",
"name",
"return",
"PasswordChangeDoneView",
".",
"as_view",
"(",
"*",
"*",
"defaults",
")",
"(",
"request",
")"
] |
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/Django-1.11.29/django/contrib/admin/sites.py#L317-L328
|
|
buke/GreenOdoo
|
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
|
runtime/common/lib/python2.7/site-packages/libxml2.py
|
python
|
uCSIsBasicLatin
|
(code)
|
return ret
|
Check whether the character is part of BasicLatin UCS Block
|
Check whether the character is part of BasicLatin UCS Block
|
[
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"BasicLatin",
"UCS",
"Block"
] |
def uCSIsBasicLatin(code):
"""Check whether the character is part of BasicLatin UCS Block """
ret = libxml2mod.xmlUCSIsBasicLatin(code)
return ret
|
[
"def",
"uCSIsBasicLatin",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsBasicLatin",
"(",
"code",
")",
"return",
"ret"
] |
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/common/lib/python2.7/site-packages/libxml2.py#L2075-L2078
|
|
maxtepkeev/python-redmine
|
3e1b7e290082b18b639492c6ba601688c60f26d9
|
redminelib/resultsets.py
|
python
|
BaseResourceSet.__len__
|
(self)
|
return sum(1 for _ in self)
|
Allows len() to be called on a ResourceSet object.
|
Allows len() to be called on a ResourceSet object.
|
[
"Allows",
"len",
"()",
"to",
"be",
"called",
"on",
"a",
"ResourceSet",
"object",
"."
] |
def __len__(self):
"""
Allows len() to be called on a ResourceSet object.
"""
return sum(1 for _ in self)
|
[
"def",
"__len__",
"(",
"self",
")",
":",
"return",
"sum",
"(",
"1",
"for",
"_",
"in",
"self",
")"
] |
https://github.com/maxtepkeev/python-redmine/blob/3e1b7e290082b18b639492c6ba601688c60f26d9/redminelib/resultsets.py#L163-L167
|
|
francisck/DanderSpritz_docs
|
86bb7caca5a957147f120b18bb5c31f299914904
|
Python/Core/Lib/mailbox.py
|
python
|
_create_carefully
|
(path)
|
Create a file if it doesn't exist and open for reading and writing.
|
Create a file if it doesn't exist and open for reading and writing.
|
[
"Create",
"a",
"file",
"if",
"it",
"doesn",
"t",
"exist",
"and",
"open",
"for",
"reading",
"and",
"writing",
"."
] |
def _create_carefully(path):
"""Create a file if it doesn't exist and open for reading and writing."""
fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR, 438)
try:
return open(path, 'rb+')
finally:
os.close(fd)
|
[
"def",
"_create_carefully",
"(",
"path",
")",
":",
"fd",
"=",
"os",
".",
"open",
"(",
"path",
",",
"os",
".",
"O_CREAT",
"|",
"os",
".",
"O_EXCL",
"|",
"os",
".",
"O_RDWR",
",",
"438",
")",
"try",
":",
"return",
"open",
"(",
"path",
",",
"'rb+'",
")",
"finally",
":",
"os",
".",
"close",
"(",
"fd",
")"
] |
https://github.com/francisck/DanderSpritz_docs/blob/86bb7caca5a957147f120b18bb5c31f299914904/Python/Core/Lib/mailbox.py#L1970-L1976
|
||
aws-samples/aws-kube-codesuite
|
ab4e5ce45416b83bffb947ab8d234df5437f4fca
|
src/kubernetes/client/models/v1_network_policy.py
|
python
|
V1NetworkPolicy.__init__
|
(self, api_version=None, kind=None, metadata=None, spec=None)
|
V1NetworkPolicy - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
|
V1NetworkPolicy - a model defined in Swagger
|
[
"V1NetworkPolicy",
"-",
"a",
"model",
"defined",
"in",
"Swagger"
] |
def __init__(self, api_version=None, kind=None, metadata=None, spec=None):
"""
V1NetworkPolicy - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1NetworkPolicySpec'
}
self.attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec'
}
self._api_version = api_version
self._kind = kind
self._metadata = metadata
self._spec = spec
|
[
"def",
"__init__",
"(",
"self",
",",
"api_version",
"=",
"None",
",",
"kind",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"spec",
"=",
"None",
")",
":",
"self",
".",
"swagger_types",
"=",
"{",
"'api_version'",
":",
"'str'",
",",
"'kind'",
":",
"'str'",
",",
"'metadata'",
":",
"'V1ObjectMeta'",
",",
"'spec'",
":",
"'V1NetworkPolicySpec'",
"}",
"self",
".",
"attribute_map",
"=",
"{",
"'api_version'",
":",
"'apiVersion'",
",",
"'kind'",
":",
"'kind'",
",",
"'metadata'",
":",
"'metadata'",
",",
"'spec'",
":",
"'spec'",
"}",
"self",
".",
"_api_version",
"=",
"api_version",
"self",
".",
"_kind",
"=",
"kind",
"self",
".",
"_metadata",
"=",
"metadata",
"self",
".",
"_spec",
"=",
"spec"
] |
https://github.com/aws-samples/aws-kube-codesuite/blob/ab4e5ce45416b83bffb947ab8d234df5437f4fca/src/kubernetes/client/models/v1_network_policy.py#L24-L50
|
||
genforce/interfacegan
|
acec139909fb9aad41fbdbbbde651dfc0b7b3a17
|
models/base_generator.py
|
python
|
BaseGenerator.preprocess
|
(self, latent_codes)
|
Preprocesses the input latent code if needed.
Args:
latent_codes: The input latent codes for preprocessing.
Returns:
The preprocessed latent codes which can be used as final input for the
generator.
|
Preprocesses the input latent code if needed.
|
[
"Preprocesses",
"the",
"input",
"latent",
"code",
"if",
"needed",
"."
] |
def preprocess(self, latent_codes):
"""Preprocesses the input latent code if needed.
Args:
latent_codes: The input latent codes for preprocessing.
Returns:
The preprocessed latent codes which can be used as final input for the
generator.
"""
raise NotImplementedError(f'Should be implemented in derived class!')
|
[
"def",
"preprocess",
"(",
"self",
",",
"latent_codes",
")",
":",
"raise",
"NotImplementedError",
"(",
"f'Should be implemented in derived class!'",
")"
] |
https://github.com/genforce/interfacegan/blob/acec139909fb9aad41fbdbbbde651dfc0b7b3a17/models/base_generator.py#L146-L156
|
||
PaddlePaddle/PaddleHub
|
107ee7e1a49d15e9c94da3956475d88a53fc165f
|
modules/image/classification/efficientnetb4_imagenet/module.py
|
python
|
BlockDecoder._decode_block_string
|
(block_string: str)
|
return BlockArgs(
kernel_size=int(options['k']),
num_repeat=int(options['r']),
input_filters=int(options['i']),
output_filters=int(options['o']),
expand_ratio=int(options['e']),
id_skip=('noskip' not in block_string),
se_ratio=float(options['se']) if 'se' in options else None,
stride=[int(options['s'][0])])
|
Gets a block through a string notation of arguments.
|
Gets a block through a string notation of arguments.
|
[
"Gets",
"a",
"block",
"through",
"a",
"string",
"notation",
"of",
"arguments",
"."
] |
def _decode_block_string(block_string: str):
""" Gets a block through a string notation of arguments. """
assert isinstance(block_string, str)
ops = block_string.split('_')
options = {}
for op in ops:
splits = re.split(r'(\d.*)', op)
if len(splits) >= 2:
key, value = splits[:2]
options[key] = value
# Check stride
cond_1 = ('s' in options and len(options['s']) == 1)
cond_2 = ((len(options['s']) == 2) and (options['s'][0] == options['s'][1]))
assert (cond_1 or cond_2)
return BlockArgs(
kernel_size=int(options['k']),
num_repeat=int(options['r']),
input_filters=int(options['i']),
output_filters=int(options['o']),
expand_ratio=int(options['e']),
id_skip=('noskip' not in block_string),
se_ratio=float(options['se']) if 'se' in options else None,
stride=[int(options['s'][0])])
|
[
"def",
"_decode_block_string",
"(",
"block_string",
":",
"str",
")",
":",
"assert",
"isinstance",
"(",
"block_string",
",",
"str",
")",
"ops",
"=",
"block_string",
".",
"split",
"(",
"'_'",
")",
"options",
"=",
"{",
"}",
"for",
"op",
"in",
"ops",
":",
"splits",
"=",
"re",
".",
"split",
"(",
"r'(\\d.*)'",
",",
"op",
")",
"if",
"len",
"(",
"splits",
")",
">=",
"2",
":",
"key",
",",
"value",
"=",
"splits",
"[",
":",
"2",
"]",
"options",
"[",
"key",
"]",
"=",
"value",
"# Check stride",
"cond_1",
"=",
"(",
"'s'",
"in",
"options",
"and",
"len",
"(",
"options",
"[",
"'s'",
"]",
")",
"==",
"1",
")",
"cond_2",
"=",
"(",
"(",
"len",
"(",
"options",
"[",
"'s'",
"]",
")",
"==",
"2",
")",
"and",
"(",
"options",
"[",
"'s'",
"]",
"[",
"0",
"]",
"==",
"options",
"[",
"'s'",
"]",
"[",
"1",
"]",
")",
")",
"assert",
"(",
"cond_1",
"or",
"cond_2",
")",
"return",
"BlockArgs",
"(",
"kernel_size",
"=",
"int",
"(",
"options",
"[",
"'k'",
"]",
")",
",",
"num_repeat",
"=",
"int",
"(",
"options",
"[",
"'r'",
"]",
")",
",",
"input_filters",
"=",
"int",
"(",
"options",
"[",
"'i'",
"]",
")",
",",
"output_filters",
"=",
"int",
"(",
"options",
"[",
"'o'",
"]",
")",
",",
"expand_ratio",
"=",
"int",
"(",
"options",
"[",
"'e'",
"]",
")",
",",
"id_skip",
"=",
"(",
"'noskip'",
"not",
"in",
"block_string",
")",
",",
"se_ratio",
"=",
"float",
"(",
"options",
"[",
"'se'",
"]",
")",
"if",
"'se'",
"in",
"options",
"else",
"None",
",",
"stride",
"=",
"[",
"int",
"(",
"options",
"[",
"'s'",
"]",
"[",
"0",
"]",
")",
"]",
")"
] |
https://github.com/PaddlePaddle/PaddleHub/blob/107ee7e1a49d15e9c94da3956475d88a53fc165f/modules/image/classification/efficientnetb4_imagenet/module.py#L130-L155
|
|
sympy/sympy
|
d822fcba181155b85ff2b29fe525adbafb22b448
|
sympy/polys/distributedmodules.py
|
python
|
sdm_to_dict
|
(f)
|
return dict(f)
|
Make a dictionary from a distributed polynomial.
|
Make a dictionary from a distributed polynomial.
|
[
"Make",
"a",
"dictionary",
"from",
"a",
"distributed",
"polynomial",
"."
] |
def sdm_to_dict(f):
"""Make a dictionary from a distributed polynomial. """
return dict(f)
|
[
"def",
"sdm_to_dict",
"(",
"f",
")",
":",
"return",
"dict",
"(",
"f",
")"
] |
https://github.com/sympy/sympy/blob/d822fcba181155b85ff2b29fe525adbafb22b448/sympy/polys/distributedmodules.py#L159-L161
|
|
anki/vector-python-sdk
|
d61fdb07c6278deba750f987b20441fff2df865f
|
anki_vector/world.py
|
python
|
World.create_custom_fixed_object
|
(self,
pose: util.Pose,
x_size_mm: float,
y_size_mm: float,
z_size_mm: float,
relative_to_robot: bool = False,
use_robot_origin: bool = True)
|
return fixed_custom_object
|
Defines a cuboid of custom size and places it in the world. It cannot be observed.
See :class:`objects.CustomObjectMarkers`.
:param pose: The pose of the object we are creating.
:param x_size_mm: size of the object (in millimeters) in the x axis.
:param y_size_mm: size of the object (in millimeters) in the y axis.
:param z_size_mm: size of the object (in millimeters) in the z axis.
:param relative_to_robot: whether or not the pose given assumes the robot's pose as its origin.
:param use_robot_origin: whether or not to override the origin_id in the given pose to be
the origin_id of Vector.
.. testcode::
import anki_vector
from anki_vector.util import degrees, Pose
with anki_vector.Robot() as robot:
robot.world.create_custom_fixed_object(Pose(100, 0, 0, angle_z=degrees(0)),
x_size_mm=10, y_size_mm=100, z_size_mm=100,
relative_to_robot=True)
Returns:
FixedCustomObject instance with the specified dimensions and pose.
|
Defines a cuboid of custom size and places it in the world. It cannot be observed.
|
[
"Defines",
"a",
"cuboid",
"of",
"custom",
"size",
"and",
"places",
"it",
"in",
"the",
"world",
".",
"It",
"cannot",
"be",
"observed",
"."
] |
def create_custom_fixed_object(self,
pose: util.Pose,
x_size_mm: float,
y_size_mm: float,
z_size_mm: float,
relative_to_robot: bool = False,
use_robot_origin: bool = True) -> objects.FixedCustomObject:
"""Defines a cuboid of custom size and places it in the world. It cannot be observed.
See :class:`objects.CustomObjectMarkers`.
:param pose: The pose of the object we are creating.
:param x_size_mm: size of the object (in millimeters) in the x axis.
:param y_size_mm: size of the object (in millimeters) in the y axis.
:param z_size_mm: size of the object (in millimeters) in the z axis.
:param relative_to_robot: whether or not the pose given assumes the robot's pose as its origin.
:param use_robot_origin: whether or not to override the origin_id in the given pose to be
the origin_id of Vector.
.. testcode::
import anki_vector
from anki_vector.util import degrees, Pose
with anki_vector.Robot() as robot:
robot.world.create_custom_fixed_object(Pose(100, 0, 0, angle_z=degrees(0)),
x_size_mm=10, y_size_mm=100, z_size_mm=100,
relative_to_robot=True)
Returns:
FixedCustomObject instance with the specified dimensions and pose.
"""
# Override the origin of the pose to be the same as the robot's. This will make sure they are in
# the same space in the robot every time.
if use_robot_origin:
pose = util.Pose(x=pose.position.x, y=pose.position.y, z=pose.position.z,
q0=pose.rotation.q0, q1=pose.rotation.q1,
q2=pose.rotation.q2, q3=pose.rotation.q3,
origin_id=self._robot.pose.origin_id)
# In this case define the given pose to be with respect to the robot's pose as its origin.
if relative_to_robot:
pose = self._robot.pose.define_pose_relative_this(pose)
response = self._create_custom_fixed_object(pose, x_size_mm, y_size_mm, z_size_mm)
if isinstance(response, futures.Future):
response = response.result()
fixed_custom_object = self.fixed_custom_object_factory(
self._robot,
pose,
x_size_mm,
y_size_mm,
z_size_mm,
response.object_id)
if fixed_custom_object:
self._objects[fixed_custom_object.object_id] = fixed_custom_object
return fixed_custom_object
|
[
"def",
"create_custom_fixed_object",
"(",
"self",
",",
"pose",
":",
"util",
".",
"Pose",
",",
"x_size_mm",
":",
"float",
",",
"y_size_mm",
":",
"float",
",",
"z_size_mm",
":",
"float",
",",
"relative_to_robot",
":",
"bool",
"=",
"False",
",",
"use_robot_origin",
":",
"bool",
"=",
"True",
")",
"->",
"objects",
".",
"FixedCustomObject",
":",
"# Override the origin of the pose to be the same as the robot's. This will make sure they are in",
"# the same space in the robot every time.",
"if",
"use_robot_origin",
":",
"pose",
"=",
"util",
".",
"Pose",
"(",
"x",
"=",
"pose",
".",
"position",
".",
"x",
",",
"y",
"=",
"pose",
".",
"position",
".",
"y",
",",
"z",
"=",
"pose",
".",
"position",
".",
"z",
",",
"q0",
"=",
"pose",
".",
"rotation",
".",
"q0",
",",
"q1",
"=",
"pose",
".",
"rotation",
".",
"q1",
",",
"q2",
"=",
"pose",
".",
"rotation",
".",
"q2",
",",
"q3",
"=",
"pose",
".",
"rotation",
".",
"q3",
",",
"origin_id",
"=",
"self",
".",
"_robot",
".",
"pose",
".",
"origin_id",
")",
"# In this case define the given pose to be with respect to the robot's pose as its origin.",
"if",
"relative_to_robot",
":",
"pose",
"=",
"self",
".",
"_robot",
".",
"pose",
".",
"define_pose_relative_this",
"(",
"pose",
")",
"response",
"=",
"self",
".",
"_create_custom_fixed_object",
"(",
"pose",
",",
"x_size_mm",
",",
"y_size_mm",
",",
"z_size_mm",
")",
"if",
"isinstance",
"(",
"response",
",",
"futures",
".",
"Future",
")",
":",
"response",
"=",
"response",
".",
"result",
"(",
")",
"fixed_custom_object",
"=",
"self",
".",
"fixed_custom_object_factory",
"(",
"self",
".",
"_robot",
",",
"pose",
",",
"x_size_mm",
",",
"y_size_mm",
",",
"z_size_mm",
",",
"response",
".",
"object_id",
")",
"if",
"fixed_custom_object",
":",
"self",
".",
"_objects",
"[",
"fixed_custom_object",
".",
"object_id",
"]",
"=",
"fixed_custom_object",
"return",
"fixed_custom_object"
] |
https://github.com/anki/vector-python-sdk/blob/d61fdb07c6278deba750f987b20441fff2df865f/anki_vector/world.py#L731-L789
|
|
richzhang/PerceptualSimilarity
|
31bc1271ae6f13b7e281b9959ac24a5e8f2ed522
|
util/util.py
|
python
|
mkdir
|
(path)
|
[] |
def mkdir(path):
if not os.path.exists(path):
os.makedirs(path)
|
[
"def",
"mkdir",
"(",
"path",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"os",
".",
"makedirs",
"(",
"path",
")"
] |
https://github.com/richzhang/PerceptualSimilarity/blob/31bc1271ae6f13b7e281b9959ac24a5e8f2ed522/util/util.py#L34-L36
|
||||
nlloyd/SubliminalCollaborator
|
5c619e17ddbe8acb9eea8996ec038169ddcd50a1
|
libs/twisted/conch/ssh/userauth.py
|
python
|
SSHUserAuthServer.auth_publickey
|
(self, packet)
|
Public key authentication. Payload::
byte has signature
string algorithm name
string key blob
[string signature] (if has signature is True)
Create a SSHPublicKey credential and verify it using our portal.
|
Public key authentication. Payload::
byte has signature
string algorithm name
string key blob
[string signature] (if has signature is True)
|
[
"Public",
"key",
"authentication",
".",
"Payload",
"::",
"byte",
"has",
"signature",
"string",
"algorithm",
"name",
"string",
"key",
"blob",
"[",
"string",
"signature",
"]",
"(",
"if",
"has",
"signature",
"is",
"True",
")"
] |
def auth_publickey(self, packet):
"""
Public key authentication. Payload::
byte has signature
string algorithm name
string key blob
[string signature] (if has signature is True)
Create a SSHPublicKey credential and verify it using our portal.
"""
hasSig = ord(packet[0])
algName, blob, rest = getNS(packet[1:], 2)
pubKey = keys.Key.fromString(blob)
signature = hasSig and getNS(rest)[0] or None
if hasSig:
b = (NS(self.transport.sessionID) + chr(MSG_USERAUTH_REQUEST) +
NS(self.user) + NS(self.nextService) + NS('publickey') +
chr(hasSig) + NS(pubKey.sshType()) + NS(blob))
c = credentials.SSHPrivateKey(self.user, algName, blob, b,
signature)
return self.portal.login(c, None, interfaces.IConchUser)
else:
c = credentials.SSHPrivateKey(self.user, algName, blob, None, None)
return self.portal.login(c, None,
interfaces.IConchUser).addErrback(self._ebCheckKey,
packet[1:])
|
[
"def",
"auth_publickey",
"(",
"self",
",",
"packet",
")",
":",
"hasSig",
"=",
"ord",
"(",
"packet",
"[",
"0",
"]",
")",
"algName",
",",
"blob",
",",
"rest",
"=",
"getNS",
"(",
"packet",
"[",
"1",
":",
"]",
",",
"2",
")",
"pubKey",
"=",
"keys",
".",
"Key",
".",
"fromString",
"(",
"blob",
")",
"signature",
"=",
"hasSig",
"and",
"getNS",
"(",
"rest",
")",
"[",
"0",
"]",
"or",
"None",
"if",
"hasSig",
":",
"b",
"=",
"(",
"NS",
"(",
"self",
".",
"transport",
".",
"sessionID",
")",
"+",
"chr",
"(",
"MSG_USERAUTH_REQUEST",
")",
"+",
"NS",
"(",
"self",
".",
"user",
")",
"+",
"NS",
"(",
"self",
".",
"nextService",
")",
"+",
"NS",
"(",
"'publickey'",
")",
"+",
"chr",
"(",
"hasSig",
")",
"+",
"NS",
"(",
"pubKey",
".",
"sshType",
"(",
")",
")",
"+",
"NS",
"(",
"blob",
")",
")",
"c",
"=",
"credentials",
".",
"SSHPrivateKey",
"(",
"self",
".",
"user",
",",
"algName",
",",
"blob",
",",
"b",
",",
"signature",
")",
"return",
"self",
".",
"portal",
".",
"login",
"(",
"c",
",",
"None",
",",
"interfaces",
".",
"IConchUser",
")",
"else",
":",
"c",
"=",
"credentials",
".",
"SSHPrivateKey",
"(",
"self",
".",
"user",
",",
"algName",
",",
"blob",
",",
"None",
",",
"None",
")",
"return",
"self",
".",
"portal",
".",
"login",
"(",
"c",
",",
"None",
",",
"interfaces",
".",
"IConchUser",
")",
".",
"addErrback",
"(",
"self",
".",
"_ebCheckKey",
",",
"packet",
"[",
"1",
":",
"]",
")"
] |
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/conch/ssh/userauth.py#L253-L278
|
||
ganeti/ganeti
|
d340a9ddd12f501bef57da421b5f9b969a4ba905
|
lib/objects.py
|
python
|
Cluster.SimpleFillDiskState
|
(disk_state)
|
return FillDict(constants.DS_DEFAULTS, disk_state)
|
Fill a disk_state sub dict with cluster defaults.
|
Fill a disk_state sub dict with cluster defaults.
|
[
"Fill",
"an",
"disk_state",
"sub",
"dict",
"with",
"cluster",
"defaults",
"."
] |
def SimpleFillDiskState(disk_state):
"""Fill an disk_state sub dict with cluster defaults.
"""
return FillDict(constants.DS_DEFAULTS, disk_state)
|
[
"def",
"SimpleFillDiskState",
"(",
"disk_state",
")",
":",
"return",
"FillDict",
"(",
"constants",
".",
"DS_DEFAULTS",
",",
"disk_state",
")"
] |
https://github.com/ganeti/ganeti/blob/d340a9ddd12f501bef57da421b5f9b969a4ba905/lib/objects.py#L2084-L2088
|
|
rizar/attention-lvcsr
|
1ae52cafdd8419874846f9544a299eef9c758f3b
|
libs/Theano/theano/gof/destroyhandler.py
|
python
|
DestroyHandler.validate
|
(self, fgraph)
|
return True
|
Return None.
Raise InconsistencyError when
a) orderings() raises an error
b) orderings cannot be topologically sorted.
|
Return None.
|
[
"Return",
"None",
"."
] |
def validate(self, fgraph):
"""
Return None.
Raise InconsistencyError when
a) orderings() raises an error
b) orderings cannot be topologically sorted.
"""
if self.destroyers:
ords = self.orderings(fgraph)
if _contains_cycle(fgraph, ords):
raise InconsistencyError("Dependency graph contains cycles")
else:
# James's Conjecture:
# If there are no destructive ops, then there can be no cycles.
# FB: This isn't always True. It can happen that an
# optimization introduces a node that depends on itself. This
# is very rare and should not happen in general. It will be
# caught later. The error will be far from the source. But
# assuming this conjecture should speed up compilation most of
# the time. The user should not create such a dependency unless
# they mess too much with the internals.
pass
return True
|
[
"def",
"validate",
"(",
"self",
",",
"fgraph",
")",
":",
"if",
"self",
".",
"destroyers",
":",
"ords",
"=",
"self",
".",
"orderings",
"(",
"fgraph",
")",
"if",
"_contains_cycle",
"(",
"fgraph",
",",
"ords",
")",
":",
"raise",
"InconsistencyError",
"(",
"\"Dependency graph contains cycles\"",
")",
"else",
":",
"# James's Conjecture:",
"# If there are no destructive ops, then there can be no cycles.",
"# FB: This isn't always True. It can happend that",
"# optimization introduce node that depend on itself. This",
"# is very rare and should not happen in general. It will be",
"# caught later. The error will be far from the source. But",
"# doing this conjecture should speed up compilation most of",
"# the time. The user should create such dependency except",
"# if he mess too much with the internal.",
"pass",
"return",
"True"
] |
https://github.com/rizar/attention-lvcsr/blob/1ae52cafdd8419874846f9544a299eef9c758f3b/libs/Theano/theano/gof/destroyhandler.py#L894-L920
|
|
Source-Python-Dev-Team/Source.Python
|
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
|
addons/source-python/packages/source-python/listeners/__init__.py
|
python
|
ListenerManagerDecorator._unload_instance
|
(self)
|
Unregister the listener.
|
Unregister the listener.
|
[
"Unregister",
"the",
"listener",
"."
] |
def _unload_instance(self):
"""Unregister the listener."""
# Was the callback registered?
if self.callback is None:
return
# Log the unregistering
listeners_logger.log_debug(
'{0}._unload_instance - Unregistering <{1}>'.format(
self.name, self.callback))
# Unregister the listener
self.manager.unregister_listener(self.callback)
|
[
"def",
"_unload_instance",
"(",
"self",
")",
":",
"# Was the callback registered?",
"if",
"self",
".",
"callback",
"is",
"None",
":",
"return",
"# Log the unregistering",
"listeners_logger",
".",
"log_debug",
"(",
"'{0}._unload_instance - Unregistering <{1}>'",
".",
"format",
"(",
"self",
".",
"name",
",",
"self",
".",
"callback",
")",
")",
"# Unregister the listener",
"self",
".",
"manager",
".",
"unregister_listener",
"(",
"self",
".",
"callback",
")"
] |
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/packages/source-python/listeners/__init__.py#L248-L260
|
||
numba/numba
|
bf480b9e0da858a65508c2b17759a72ee6a44c51
|
numba/cuda/simulator/reduction.py
|
python
|
Reduce
|
(func)
|
return reduce_wrapper
|
[] |
def Reduce(func):
def reduce_wrapper(seq, res=None, init=0):
r = pyreduce(func, seq, init)
if res is not None:
res[0] = r
return None
else:
return r
return reduce_wrapper
|
[
"def",
"Reduce",
"(",
"func",
")",
":",
"def",
"reduce_wrapper",
"(",
"seq",
",",
"res",
"=",
"None",
",",
"init",
"=",
"0",
")",
":",
"r",
"=",
"pyreduce",
"(",
"func",
",",
"seq",
",",
"init",
")",
"if",
"res",
"is",
"not",
"None",
":",
"res",
"[",
"0",
"]",
"=",
"r",
"return",
"None",
"else",
":",
"return",
"r",
"return",
"reduce_wrapper"
] |
https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/cuda/simulator/reduction.py#L4-L12
|
|||
Kozea/Radicale
|
8fa4345b6ffb32cd44154d64bba2caf28d54f214
|
radicale/server.py
|
python
|
ParallelHTTPServer.finish_request
|
( # type:ignore[override]
self, request: socket.socket, client_address_and_socket:
Tuple[ADDRESS_TYPE, socket.socket])
|
[] |
def finish_request( # type:ignore[override]
self, request: socket.socket, client_address_and_socket:
Tuple[ADDRESS_TYPE, socket.socket]) -> None:
# HACK: Unpack `client_address_and_socket` and call super class
# `finish_request` with original `client_address`
client_address, worker_socket = client_address_and_socket
try:
return self.finish_request_locked(request, client_address)
finally:
worker_socket.close()
|
[
"def",
"finish_request",
"(",
"# type:ignore[override]",
"self",
",",
"request",
":",
"socket",
".",
"socket",
",",
"client_address_and_socket",
":",
"Tuple",
"[",
"ADDRESS_TYPE",
",",
"socket",
".",
"socket",
"]",
")",
"->",
"None",
":",
"# HACK: Unpack `client_address_and_socket` and call super class",
"# `finish_request` with original `client_address`",
"client_address",
",",
"worker_socket",
"=",
"client_address_and_socket",
"try",
":",
"return",
"self",
".",
"finish_request_locked",
"(",
"request",
",",
"client_address",
")",
"finally",
":",
"worker_socket",
".",
"close",
"(",
")"
] |
https://github.com/Kozea/Radicale/blob/8fa4345b6ffb32cd44154d64bba2caf28d54f214/radicale/server.py#L123-L132
|
||||
TencentCloud/tencentcloud-sdk-python
|
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
|
tencentcloud/youmall/v20180228/models.py
|
python
|
DescribeNetworkInfoRequest.__init__
|
(self)
|
r"""
:param Time: Request timestamp
:type Time: int
:param CompanyId: 优mall group ID, obtained via the "Get Customer Store List by Specified Identity" API
:type CompanyId: str
:param ShopId: 优mall store ID, obtained via the "Get Customer Store List by Specified Identity" API; if omitted, current data for all of the group's stores is pulled
:type ShopId: int
|
r"""
:param Time: Request timestamp
:type Time: int
:param CompanyId: 优mall group ID, obtained via the "Get Customer Store List by Specified Identity" API
:type CompanyId: str
:param ShopId: 优mall store ID, obtained via the "Get Customer Store List by Specified Identity" API; if omitted, current data for all of the group's stores is pulled
:type ShopId: int
|
[
"r",
":",
"param",
"Time",
":",
"请求时间戳",
":",
"type",
"Time",
":",
"int",
":",
"param",
"CompanyId",
":",
"优mall集团id,通过",
"指定身份标识获取客户门店列表",
"接口获取",
":",
"type",
"CompanyId",
":",
"str",
":",
"param",
"ShopId",
":",
"优mall店铺id,通过",
"指定身份标识获取客户门店列表",
"接口获取,不填则拉取集团全部店铺当前",
":",
"type",
"ShopId",
":",
"int"
] |
def __init__(self):
r"""
:param Time: Request timestamp
:type Time: int
:param CompanyId: 优mall group ID, obtained via the "Get Customer Store List by Specified Identity" API
:type CompanyId: str
:param ShopId: 优mall store ID, obtained via the "Get Customer Store List by Specified Identity" API; if omitted, current data for all of the group's stores is pulled
:type ShopId: int
"""
self.Time = None
self.CompanyId = None
self.ShopId = None
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"Time",
"=",
"None",
"self",
".",
"CompanyId",
"=",
"None",
"self",
".",
"ShopId",
"=",
"None"
] |
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/youmall/v20180228/models.py#L732-L743
|
||
kuri65536/python-for-android
|
26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891
|
python-build/python-libs/gdata/src/gdata/Crypto/PublicKey/DSA.py
|
python
|
generate
|
(bits, randfunc, progress_func=None)
|
return obj
|
generate(bits:int, randfunc:callable, progress_func:callable)
Generate a DSA key of length 'bits', using 'randfunc' to get
random data and 'progress_func', if present, to display
the progress of the key generation.
|
generate(bits:int, randfunc:callable, progress_func:callable)
|
[
"generate",
"(",
"bits",
":",
"int",
"randfunc",
":",
"callable",
"progress_func",
":",
"callable",
")"
] |
def generate(bits, randfunc, progress_func=None):
"""generate(bits:int, randfunc:callable, progress_func:callable)
Generate a DSA key of length 'bits', using 'randfunc' to get
random data and 'progress_func', if present, to display
the progress of the key generation.
"""
if bits<160:
raise error, 'Key length <160 bits'
obj=DSAobj()
# Generate string S and prime q
if progress_func:
progress_func('p,q\n')
while (1):
S, obj.q = generateQ(randfunc)
n=(bits-1)/160
C, N, V = 0, 2, {}
b=(obj.q >> 5) & 15
powb=pow(bignum(2), b)
powL1=pow(bignum(2), bits-1)
while C<4096:
for k in range(0, n+1):
V[k]=bytes_to_long(SHA.new(S+str(N)+str(k)).digest())
W=V[n] % powb
for k in range(n-1, -1, -1):
W=(W<<160L)+V[k]
X=W+powL1
p=X-(X%(2*obj.q)-1)
if powL1<=p and isPrime(p):
break
C, N = C+1, N+n+1
if C<4096:
break
if progress_func:
progress_func('4096 multiples failed\n')
obj.p = p
power=(p-1)/obj.q
if progress_func:
progress_func('h,g\n')
while (1):
h=bytes_to_long(randfunc(bits)) % (p-1)
g=pow(h, power, p)
if 1<h<p-1 and g>1:
break
obj.g=g
if progress_func:
progress_func('x,y\n')
while (1):
x=bytes_to_long(randfunc(20))
if 0 < x < obj.q:
break
obj.x, obj.y = x, pow(g, x, p)
return obj
|
[
"def",
"generate",
"(",
"bits",
",",
"randfunc",
",",
"progress_func",
"=",
"None",
")",
":",
"if",
"bits",
"<",
"160",
":",
"raise",
"error",
",",
"'Key length <160 bits'",
"obj",
"=",
"DSAobj",
"(",
")",
"# Generate string S and prime q",
"if",
"progress_func",
":",
"progress_func",
"(",
"'p,q\\n'",
")",
"while",
"(",
"1",
")",
":",
"S",
",",
"obj",
".",
"q",
"=",
"generateQ",
"(",
"randfunc",
")",
"n",
"=",
"(",
"bits",
"-",
"1",
")",
"/",
"160",
"C",
",",
"N",
",",
"V",
"=",
"0",
",",
"2",
",",
"{",
"}",
"b",
"=",
"(",
"obj",
".",
"q",
">>",
"5",
")",
"&",
"15",
"powb",
"=",
"pow",
"(",
"bignum",
"(",
"2",
")",
",",
"b",
")",
"powL1",
"=",
"pow",
"(",
"bignum",
"(",
"2",
")",
",",
"bits",
"-",
"1",
")",
"while",
"C",
"<",
"4096",
":",
"for",
"k",
"in",
"range",
"(",
"0",
",",
"n",
"+",
"1",
")",
":",
"V",
"[",
"k",
"]",
"=",
"bytes_to_long",
"(",
"SHA",
".",
"new",
"(",
"S",
"+",
"str",
"(",
"N",
")",
"+",
"str",
"(",
"k",
")",
")",
".",
"digest",
"(",
")",
")",
"W",
"=",
"V",
"[",
"n",
"]",
"%",
"powb",
"for",
"k",
"in",
"range",
"(",
"n",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"W",
"=",
"(",
"W",
"<<",
"160L",
")",
"+",
"V",
"[",
"k",
"]",
"X",
"=",
"W",
"+",
"powL1",
"p",
"=",
"X",
"-",
"(",
"X",
"%",
"(",
"2",
"*",
"obj",
".",
"q",
")",
"-",
"1",
")",
"if",
"powL1",
"<=",
"p",
"and",
"isPrime",
"(",
"p",
")",
":",
"break",
"C",
",",
"N",
"=",
"C",
"+",
"1",
",",
"N",
"+",
"n",
"+",
"1",
"if",
"C",
"<",
"4096",
":",
"break",
"if",
"progress_func",
":",
"progress_func",
"(",
"'4096 multiples failed\\n'",
")",
"obj",
".",
"p",
"=",
"p",
"power",
"=",
"(",
"p",
"-",
"1",
")",
"/",
"obj",
".",
"q",
"if",
"progress_func",
":",
"progress_func",
"(",
"'h,g\\n'",
")",
"while",
"(",
"1",
")",
":",
"h",
"=",
"bytes_to_long",
"(",
"randfunc",
"(",
"bits",
")",
")",
"%",
"(",
"p",
"-",
"1",
")",
"g",
"=",
"pow",
"(",
"h",
",",
"power",
",",
"p",
")",
"if",
"1",
"<",
"h",
"<",
"p",
"-",
"1",
"and",
"g",
">",
"1",
":",
"break",
"obj",
".",
"g",
"=",
"g",
"if",
"progress_func",
":",
"progress_func",
"(",
"'x,y\\n'",
")",
"while",
"(",
"1",
")",
":",
"x",
"=",
"bytes_to_long",
"(",
"randfunc",
"(",
"20",
")",
")",
"if",
"0",
"<",
"x",
"<",
"obj",
".",
"q",
":",
"break",
"obj",
".",
"x",
",",
"obj",
".",
"y",
"=",
"x",
",",
"pow",
"(",
"g",
",",
"x",
",",
"p",
")",
"return",
"obj"
] |
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-build/python-libs/gdata/src/gdata/Crypto/PublicKey/DSA.py#L47-L101
|
|
spyder-ide/spyder
|
55da47c032dfcf519600f67f8b30eab467f965e7
|
spyder/plugins/variableexplorer/widgets/objectexplorer/tree_model.py
|
python
|
TreeModel.inspectedIndex
|
(self)
|
The model index that points to the inspectedItem.
|
The model index that points to the inspectedItem.
|
[
"The",
"model",
"index",
"that",
"point",
"to",
"the",
"inspectedItem",
"."
] |
def inspectedIndex(self):
"""The model index that point to the inspectedItem."""
if self.inspectedNodeIsVisible:
return self.createIndex(0, 0, self._inspected_item)
else:
return self.rootIndex()
|
[
"def",
"inspectedIndex",
"(",
"self",
")",
":",
"if",
"self",
".",
"inspectedNodeIsVisible",
":",
"return",
"self",
".",
"createIndex",
"(",
"0",
",",
"0",
",",
"self",
".",
"_inspected_item",
")",
"else",
":",
"return",
"self",
".",
"rootIndex",
"(",
")"
] |
https://github.com/spyder-ide/spyder/blob/55da47c032dfcf519600f67f8b30eab467f965e7/spyder/plugins/variableexplorer/widgets/objectexplorer/tree_model.py#L124-L129
|
||
LinuxCNC/simple-gcode-generators
|
d06c2750fe03c41becd6bd5863ace54b52920155
|
bezel/bezel.py
|
python
|
Application.__init__
|
(self, master=None)
|
[] |
def __init__(self, master=None):
Frame.__init__(self, master)
self.grid()
self.createWidgets()
self.DoIt()
|
[
"def",
"__init__",
"(",
"self",
",",
"master",
"=",
"None",
")",
":",
"Frame",
".",
"__init__",
"(",
"self",
",",
"master",
")",
"self",
".",
"grid",
"(",
")",
"self",
".",
"createWidgets",
"(",
")",
"self",
".",
"DoIt",
"(",
")"
] |
https://github.com/LinuxCNC/simple-gcode-generators/blob/d06c2750fe03c41becd6bd5863ace54b52920155/bezel/bezel.py#L42-L46
|
||||
uzh-rpg/rpg_trajectory_evaluation
|
995e584d19712b26539fca6dbf694224eea0f5ba
|
src/rpg_trajectory_evaluation/transformations.py
|
python
|
vector_norm
|
(data, axis=None, out=None)
|
Return length, i.e. euclidean norm, of ndarray along axis.
>>> v = numpy.random.random(3)
>>> n = vector_norm(v)
>>> numpy.allclose(n, numpy.linalg.norm(v))
True
>>> v = numpy.random.rand(6, 5, 3)
>>> n = vector_norm(v, axis=-1)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=2)))
True
>>> n = vector_norm(v, axis=1)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
True
>>> v = numpy.random.rand(5, 4, 3)
>>> n = numpy.empty((5, 3), dtype=numpy.float64)
>>> vector_norm(v, axis=1, out=n)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
True
>>> vector_norm([])
0.0
>>> vector_norm([1.0])
1.0
|
Return length, i.e. euclidean norm, of ndarray along axis.
|
[
"Return",
"length",
"i",
".",
"e",
".",
"eucledian",
"norm",
"of",
"ndarray",
"along",
"axis",
"."
] |
def vector_norm(data, axis=None, out=None):
"""Return length, i.e. eucledian norm, of ndarray along axis.
>>> v = numpy.random.random(3)
>>> n = vector_norm(v)
>>> numpy.allclose(n, numpy.linalg.norm(v))
True
>>> v = numpy.random.rand(6, 5, 3)
>>> n = vector_norm(v, axis=-1)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=2)))
True
>>> n = vector_norm(v, axis=1)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
True
>>> v = numpy.random.rand(5, 4, 3)
>>> n = numpy.empty((5, 3), dtype=numpy.float64)
>>> vector_norm(v, axis=1, out=n)
>>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
True
>>> vector_norm([])
0.0
>>> vector_norm([1.0])
1.0
"""
data = numpy.array(data, dtype=numpy.float64, copy=True)
if out is None:
if data.ndim == 1:
return math.sqrt(numpy.dot(data, data))
data *= data
out = numpy.atleast_1d(numpy.sum(data, axis=axis))
numpy.sqrt(out, out)
return out
else:
data *= data
numpy.sum(data, axis=axis, out=out)
numpy.sqrt(out, out)
|
[
"def",
"vector_norm",
"(",
"data",
",",
"axis",
"=",
"None",
",",
"out",
"=",
"None",
")",
":",
"data",
"=",
"numpy",
".",
"array",
"(",
"data",
",",
"dtype",
"=",
"numpy",
".",
"float64",
",",
"copy",
"=",
"True",
")",
"if",
"out",
"is",
"None",
":",
"if",
"data",
".",
"ndim",
"==",
"1",
":",
"return",
"math",
".",
"sqrt",
"(",
"numpy",
".",
"dot",
"(",
"data",
",",
"data",
")",
")",
"data",
"*=",
"data",
"out",
"=",
"numpy",
".",
"atleast_1d",
"(",
"numpy",
".",
"sum",
"(",
"data",
",",
"axis",
"=",
"axis",
")",
")",
"numpy",
".",
"sqrt",
"(",
"out",
",",
"out",
")",
"return",
"out",
"else",
":",
"data",
"*=",
"data",
"numpy",
".",
"sum",
"(",
"data",
",",
"axis",
"=",
"axis",
",",
"out",
"=",
"out",
")",
"numpy",
".",
"sqrt",
"(",
"out",
",",
"out",
")"
] |
https://github.com/uzh-rpg/rpg_trajectory_evaluation/blob/995e584d19712b26539fca6dbf694224eea0f5ba/src/rpg_trajectory_evaluation/transformations.py#L1803-L1839
|
||
quantmind/pulsar
|
fee44e871954aa6ca36d00bb5a3739abfdb89b26
|
pulsar/apps/http/client.py
|
python
|
HttpClient.put
|
(self, url, **kwargs)
|
return self.request('PUT', url, **kwargs)
|
Sends a PUT request and returns a :class:`.HttpResponse` object.
:param url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
|
Sends a PUT request and returns a :class:`.HttpResponse` object.
|
[
"Sends",
"a",
"PUT",
"request",
"and",
"returns",
"a",
":",
"class",
":",
".",
"HttpResponse",
"object",
"."
] |
def put(self, url, **kwargs):
"""Sends a PUT request and returns a :class:`.HttpResponse` object.
:param url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('PUT', url, **kwargs)
|
[
"def",
"put",
"(",
"self",
",",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"request",
"(",
"'PUT'",
",",
"url",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/quantmind/pulsar/blob/fee44e871954aa6ca36d00bb5a3739abfdb89b26/pulsar/apps/http/client.py#L840-L846
|
|
scikit-learn/scikit-learn
|
1d1aadd0711b87d2a11c80aad15df6f8cf156712
|
doc/tutorial/machine_learning_map/pyparsing.py
|
python
|
replaceWith
|
(replStr)
|
return lambda s,l,t: [replStr]
|
Helper method for common parse actions that simply return a literal value. Especially
useful when used with C{L{transformString<ParserElement.transformString>}()}.
Example::
num = Word(nums).setParseAction(lambda toks: int(toks[0]))
na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
term = na | num
OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
|
Helper method for common parse actions that simply return a literal value. Especially
useful when used with C{L{transformString<ParserElement.transformString>}()}.
|
[
"Helper",
"method",
"for",
"common",
"parse",
"actions",
"that",
"simply",
"return",
"a",
"literal",
"value",
".",
"Especially",
"useful",
"when",
"used",
"with",
"C",
"{",
"L",
"{",
"transformString<ParserElement",
".",
"transformString",
">",
"}",
"()",
"}",
"."
] |
def replaceWith(replStr):
"""
Helper method for common parse actions that simply return a literal value. Especially
useful when used with C{L{transformString<ParserElement.transformString>}()}.
Example::
num = Word(nums).setParseAction(lambda toks: int(toks[0]))
na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
term = na | num
OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
"""
return lambda s,l,t: [replStr]
|
[
"def",
"replaceWith",
"(",
"replStr",
")",
":",
"return",
"lambda",
"s",
",",
"l",
",",
"t",
":",
"[",
"replStr",
"]"
] |
https://github.com/scikit-learn/scikit-learn/blob/1d1aadd0711b87d2a11c80aad15df6f8cf156712/doc/tutorial/machine_learning_map/pyparsing.py#L4770-L4782
|
|
dreamnettech/dreampower
|
0253fe6b403552deb1a5fe3363678e4aea7437fa
|
src/main.py
|
python
|
select_processing
|
()
|
return process
|
Select the processing to use following args parameters.
:return: <Process> a process to run
|
Select the processing to use following args parameters.
|
[
"Select",
"the",
"processing",
"to",
"use",
"following",
"args",
"parameters",
"."
] |
def select_processing():
"""
Select the processing to use following args parameters.
:return: <Process> a process to run
"""
if Conf.args['image_size'] and Conf.args['image_size'] >= 256:
Conf.set_image_size(Conf.args['image_size'])
if os.path.isdir(Conf.args['input']):
process = processing_image_folder()
elif Conf.args['n_runs'] != 1:
process = multiple_image_processing()
else:
process = simple_image_processing()
Conf.log.debug("Process to execute : {}".format(process))
return process
|
[
"def",
"select_processing",
"(",
")",
":",
"if",
"Conf",
".",
"args",
"[",
"'image_size'",
"]",
"and",
"Conf",
".",
"args",
"[",
"'image_size'",
"]",
">=",
"256",
":",
"Conf",
".",
"set_image_size",
"(",
"Conf",
".",
"args",
"[",
"'image_size'",
"]",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"Conf",
".",
"args",
"[",
"'input'",
"]",
")",
":",
"process",
"=",
"processing_image_folder",
"(",
")",
"elif",
"Conf",
".",
"args",
"[",
"'n_runs'",
"]",
"!=",
"1",
":",
"process",
"=",
"multiple_image_processing",
"(",
")",
"else",
":",
"process",
"=",
"simple_image_processing",
"(",
")",
"Conf",
".",
"log",
".",
"debug",
"(",
"\"Process to execute : {}\"",
".",
"format",
"(",
"process",
")",
")",
"return",
"process"
] |
https://github.com/dreamnettech/dreampower/blob/0253fe6b403552deb1a5fe3363678e4aea7437fa/src/main.py#L33-L49
|
|
JiYou/openstack
|
8607dd488bde0905044b303eb6e52bdea6806923
|
packages/source/swift/swift/common/swob.py
|
python
|
Request.get_response
|
(self, application)
|
return Response(status=status, headers=dict(headers),
app_iter=app_iter, request=self)
|
Calls the application with this request's environment. Returns a
Response object that wraps up the application's result.
:param application: the WSGI application to call
|
Calls the application with this request's environment. Returns a
Response object that wraps up the application's result.
|
[
"Calls",
"the",
"application",
"with",
"this",
"request",
"s",
"environment",
".",
"Returns",
"a",
"Response",
"object",
"that",
"wraps",
"up",
"the",
"application",
"s",
"result",
"."
] |
def get_response(self, application):
"""
Calls the application with this request's environment. Returns a
Response object that wraps up the application's result.
:param application: the WSGI application to call
"""
status, headers, app_iter = self.call_application(application)
return Response(status=status, headers=dict(headers),
app_iter=app_iter, request=self)
|
[
"def",
"get_response",
"(",
"self",
",",
"application",
")",
":",
"status",
",",
"headers",
",",
"app_iter",
"=",
"self",
".",
"call_application",
"(",
"application",
")",
"return",
"Response",
"(",
"status",
"=",
"status",
",",
"headers",
"=",
"dict",
"(",
"headers",
")",
",",
"app_iter",
"=",
"app_iter",
",",
"request",
"=",
"self",
")"
] |
https://github.com/JiYou/openstack/blob/8607dd488bde0905044b303eb6e52bdea6806923/packages/source/swift/swift/common/swob.py#L855-L864
|
|
nipy/nipy
|
d16d268938dcd5c15748ca051532c21f57cf8a22
|
nipy/externals/transforms3d/taitbryan.py
|
python
|
quat2euler
|
(q)
|
return mat2euler(nq.quat2mat(q))
|
Return Euler angles corresponding to quaternion `q`
Parameters
----------
q : 4 element sequence
w, x, y, z of quaternion
Returns
-------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Notes
-----
It's possible to reduce the amount of calculation a little, by
combining parts of the ``quat2mat`` and ``mat2euler`` functions, but
the reduction in computation is small, and the code repetition is
large.
|
Return Euler angles corresponding to quaternion `q`
|
[
"Return",
"Euler",
"angles",
"corresponding",
"to",
"quaternion",
"q"
] |
def quat2euler(q):
''' Return Euler angles corresponding to quaternion `q`
Parameters
----------
q : 4 element sequence
w, x, y, z of quaternion
Returns
-------
z : scalar
Rotation angle in radians around z-axis (performed first)
y : scalar
Rotation angle in radians around y-axis
x : scalar
Rotation angle in radians around x-axis (performed last)
Notes
-----
It's possible to reduce the amount of calculation a little, by
combining parts of the ``quat2mat`` and ``mat2euler`` functions, but
the reduction in computation is small, and the code repetition is
large.
'''
# delayed import to avoid cyclic dependencies
from . import quaternions as nq
return mat2euler(nq.quat2mat(q))
|
[
"def",
"quat2euler",
"(",
"q",
")",
":",
"# delayed import to avoid cyclic dependencies",
"from",
".",
"import",
"quaternions",
"as",
"nq",
"return",
"mat2euler",
"(",
"nq",
".",
"quat2mat",
"(",
"q",
")",
")"
] |
https://github.com/nipy/nipy/blob/d16d268938dcd5c15748ca051532c21f57cf8a22/nipy/externals/transforms3d/taitbryan.py#L302-L328
|
|
pypa/pipenv
|
b21baade71a86ab3ee1429f71fbc14d4f95fb75d
|
pipenv/vendor/requirementslib/models/setup_info.py
|
python
|
SetupReader._find_extras_require
|
(
cls, call: ast.Call, body: "Iterable[Any]"
)
|
return extras_require
|
[] |
def _find_extras_require(
cls, call: ast.Call, body: "Iterable[Any]"
) -> "Dict[str, List[str]]":
extras_require: "Dict[str, List[str]]" = {}
value = cls._find_in_call(call, "extras_require")
if value is None:
# Trying to find in kwargs
kwargs = cls._find_call_kwargs(call)
if kwargs is None:
return extras_require
if not isinstance(kwargs, ast.Name):
raise Unparsable()
variable = cls._find_variable_in_body(body, kwargs.id)
if not isinstance(variable, (ast.Dict, ast.Call)):
raise Unparsable()
if isinstance(variable, ast.Call):
if not isinstance(variable.func, ast.Name):
raise Unparsable()
if variable.func.id != "dict":
raise Unparsable()
value = cls._find_in_call(variable, "extras_require")
else:
value = cls._find_in_dict(variable, "extras_require")
if value is None:
return extras_require
if isinstance(value, ast.Dict):
for key, val in zip(value.keys, value.values):
if isinstance(val, ast.Name):
val = cls._find_variable_in_body(body, val.id)
if isinstance(val, ast.List):
extras_require[key.s] = [e.s for e in val.elts]
else:
raise Unparsable()
elif isinstance(value, ast.Name):
variable = cls._find_variable_in_body(body, value.id)
if variable is None or not isinstance(variable, ast.Dict):
raise Unparsable()
for key, val in zip(variable.keys, variable.values):
if isinstance(val, ast.Name):
val = cls._find_variable_in_body(body, val.id)
if isinstance(val, ast.List):
extras_require[key.s] = [e.s for e in val.elts]
else:
raise Unparsable()
else:
raise Unparsable()
return extras_require
|
[
"def",
"_find_extras_require",
"(",
"cls",
",",
"call",
":",
"ast",
".",
"Call",
",",
"body",
":",
"\"Iterable[Any]\"",
")",
"->",
"\"Dict[str, List[str]]\"",
":",
"extras_require",
":",
"\"Dict[str, List[str]]\"",
"=",
"{",
"}",
"value",
"=",
"cls",
".",
"_find_in_call",
"(",
"call",
",",
"\"extras_require\"",
")",
"if",
"value",
"is",
"None",
":",
"# Trying to find in kwargs",
"kwargs",
"=",
"cls",
".",
"_find_call_kwargs",
"(",
"call",
")",
"if",
"kwargs",
"is",
"None",
":",
"return",
"extras_require",
"if",
"not",
"isinstance",
"(",
"kwargs",
",",
"ast",
".",
"Name",
")",
":",
"raise",
"Unparsable",
"(",
")",
"variable",
"=",
"cls",
".",
"_find_variable_in_body",
"(",
"body",
",",
"kwargs",
".",
"id",
")",
"if",
"not",
"isinstance",
"(",
"variable",
",",
"(",
"ast",
".",
"Dict",
",",
"ast",
".",
"Call",
")",
")",
":",
"raise",
"Unparsable",
"(",
")",
"if",
"isinstance",
"(",
"variable",
",",
"ast",
".",
"Call",
")",
":",
"if",
"not",
"isinstance",
"(",
"variable",
".",
"func",
",",
"ast",
".",
"Name",
")",
":",
"raise",
"Unparsable",
"(",
")",
"if",
"variable",
".",
"func",
".",
"id",
"!=",
"\"dict\"",
":",
"raise",
"Unparsable",
"(",
")",
"value",
"=",
"cls",
".",
"_find_in_call",
"(",
"variable",
",",
"\"extras_require\"",
")",
"else",
":",
"value",
"=",
"cls",
".",
"_find_in_dict",
"(",
"variable",
",",
"\"extras_require\"",
")",
"if",
"value",
"is",
"None",
":",
"return",
"extras_require",
"if",
"isinstance",
"(",
"value",
",",
"ast",
".",
"Dict",
")",
":",
"for",
"key",
",",
"val",
"in",
"zip",
"(",
"value",
".",
"keys",
",",
"value",
".",
"values",
")",
":",
"if",
"isinstance",
"(",
"val",
",",
"ast",
".",
"Name",
")",
":",
"val",
"=",
"cls",
".",
"_find_variable_in_body",
"(",
"body",
",",
"val",
".",
"id",
")",
"if",
"isinstance",
"(",
"val",
",",
"ast",
".",
"List",
")",
":",
"extras_require",
"[",
"key",
".",
"s",
"]",
"=",
"[",
"e",
".",
"s",
"for",
"e",
"in",
"val",
".",
"elts",
"]",
"else",
":",
"raise",
"Unparsable",
"(",
")",
"elif",
"isinstance",
"(",
"value",
",",
"ast",
".",
"Name",
")",
":",
"variable",
"=",
"cls",
".",
"_find_variable_in_body",
"(",
"body",
",",
"value",
".",
"id",
")",
"if",
"variable",
"is",
"None",
"or",
"not",
"isinstance",
"(",
"variable",
",",
"ast",
".",
"Dict",
")",
":",
"raise",
"Unparsable",
"(",
")",
"for",
"key",
",",
"val",
"in",
"zip",
"(",
"variable",
".",
"keys",
",",
"variable",
".",
"values",
")",
":",
"if",
"isinstance",
"(",
"val",
",",
"ast",
".",
"Name",
")",
":",
"val",
"=",
"cls",
".",
"_find_variable_in_body",
"(",
"body",
",",
"val",
".",
"id",
")",
"if",
"isinstance",
"(",
"val",
",",
"ast",
".",
"List",
")",
":",
"extras_require",
"[",
"key",
".",
"s",
"]",
"=",
"[",
"e",
".",
"s",
"for",
"e",
"in",
"val",
".",
"elts",
"]",
"else",
":",
"raise",
"Unparsable",
"(",
")",
"else",
":",
"raise",
"Unparsable",
"(",
")",
"return",
"extras_require"
] |
https://github.com/pypa/pipenv/blob/b21baade71a86ab3ee1429f71fbc14d4f95fb75d/pipenv/vendor/requirementslib/models/setup_info.py#L370-L429
|
|||
pwnieexpress/pwn_plug_sources
|
1a23324f5dc2c3de20f9c810269b6a29b2758cad
|
src/sslstrip/sslstrip/URLMonitor.py
|
python
|
URLMonitor.getInstance
|
()
|
return URLMonitor._instance
|
[] |
def getInstance():
if URLMonitor._instance == None:
URLMonitor._instance = URLMonitor()
return URLMonitor._instance
|
[
"def",
"getInstance",
"(",
")",
":",
"if",
"URLMonitor",
".",
"_instance",
"==",
"None",
":",
"URLMonitor",
".",
"_instance",
"=",
"URLMonitor",
"(",
")",
"return",
"URLMonitor",
".",
"_instance"
] |
https://github.com/pwnieexpress/pwn_plug_sources/blob/1a23324f5dc2c3de20f9c810269b6a29b2758cad/src/sslstrip/sslstrip/URLMonitor.py#L81-L85
|
|||
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/sympy/physics/quantum/identitysearch.py
|
python
|
is_reducible
|
(circuit, nqubits, begin, end)
|
return False
|
Determines if a circuit is reducible by checking
if its subcircuits are scalar values.
Parameters
==========
circuit : Gate tuple
A tuple of Gates representing a circuit. The circuit to check
if a gate identity is contained in a subcircuit.
nqubits : int
The number of qubits the circuit operates on.
begin : int
The leftmost gate in the circuit to include in a subcircuit.
end : int
The rightmost gate in the circuit to include in a subcircuit.
Examples
========
Check if the circuit can be reduced:
>>> from sympy.physics.quantum.identitysearch import (
... GateIdentity, is_reducible)
>>> from sympy.physics.quantum.gate import X, Y, Z
>>> x = X(0); y = Y(0); z = Z(0)
>>> is_reducible((x, y, z), 1, 0, 3)
True
Check if an interval in the circuit can be reduced:
>>> is_reducible((x, y, z), 1, 1, 3)
False
>>> is_reducible((x, y, y), 1, 1, 3)
True
|
Determines if a circuit is reducible by checking
if its subcircuits are scalar values.
|
[
"Determines",
"if",
"a",
"circuit",
"is",
"reducible",
"by",
"checking",
"if",
"its",
"subcircuits",
"are",
"scalar",
"values",
"."
] |
def is_reducible(circuit, nqubits, begin, end):
"""Determines if a circuit is reducible by checking
if its subcircuits are scalar values.
Parameters
==========
circuit : Gate tuple
A tuple of Gates representing a circuit. The circuit to check
if a gate identity is contained in a subcircuit.
nqubits : int
The number of qubits the circuit operates on.
begin : int
The leftmost gate in the circuit to include in a subcircuit.
end : int
The rightmost gate in the circuit to include in a subcircuit.
Examples
========
Check if the circuit can be reduced:
>>> from sympy.physics.quantum.identitysearch import (
... GateIdentity, is_reducible)
>>> from sympy.physics.quantum.gate import X, Y, Z
>>> x = X(0); y = Y(0); z = Z(0)
>>> is_reducible((x, y, z), 1, 0, 3)
True
Check if an interval in the circuit can be reduced:
>>> is_reducible((x, y, z), 1, 1, 3)
False
>>> is_reducible((x, y, y), 1, 1, 3)
True
"""
current_circuit = ()
# Start from the gate at "end" and go down to almost the gate at "begin"
for ndx in reversed(range(begin, end)):
next_gate = circuit[ndx]
current_circuit = (next_gate,) + current_circuit
# If a circuit as a matrix is equivalent to a scalar value
if (is_scalar_matrix(current_circuit, nqubits, False)):
return True
return False
|
[
"def",
"is_reducible",
"(",
"circuit",
",",
"nqubits",
",",
"begin",
",",
"end",
")",
":",
"current_circuit",
"=",
"(",
")",
"# Start from the gate at \"end\" and go down to almost the gate at \"begin\"",
"for",
"ndx",
"in",
"reversed",
"(",
"range",
"(",
"begin",
",",
"end",
")",
")",
":",
"next_gate",
"=",
"circuit",
"[",
"ndx",
"]",
"current_circuit",
"=",
"(",
"next_gate",
",",
")",
"+",
"current_circuit",
"# If a circuit as a matrix is equivalent to a scalar value",
"if",
"(",
"is_scalar_matrix",
"(",
"current_circuit",
",",
"nqubits",
",",
"False",
")",
")",
":",
"return",
"True",
"return",
"False"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/physics/quantum/identitysearch.py#L704-L752
|
|
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_flaskbb/lib/python2.7/site-packages/whoosh/analysis/filters.py
|
python
|
LowercaseFilter.__call__
|
(self, tokens)
|
[] |
def __call__(self, tokens):
for t in tokens:
t.text = t.text.lower()
yield t
|
[
"def",
"__call__",
"(",
"self",
",",
"tokens",
")",
":",
"for",
"t",
"in",
"tokens",
":",
"t",
".",
"text",
"=",
"t",
".",
"text",
".",
"lower",
"(",
")",
"yield",
"t"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/whoosh/analysis/filters.py#L224-L227
|
||||
NVIDIA/apex
|
b88c507edb0d067d5570f7a8efe03a90664a3d16
|
apex/reparameterization/__init__.py
|
python
|
remove_weight_norm
|
(module, name='', remove_all=False)
|
return remove_reparameterization(module, reparameterization=WeightNorm,
name=name, remove_all=remove_all)
|
Removes the weight normalization reparameterization of a parameter from a module.
If no parameter is supplied then all weight norm parameterizations are removed.
Args:
module (nn.Module): containing module
name (str, optional): name of weight parameter
Example:
>>> m = apply_weight_norm(nn.Linear(20, 40))
>>> remove_weight_norm(m)
|
Removes the weight normalization reparameterization of a parameter from a module.
If no parameter is supplied then all weight norm parameterizations are removed.
Args:
module (nn.Module): containing module
name (str, optional): name of weight parameter
Example:
>>> m = apply_weight_norm(nn.Linear(20, 40))
>>> remove_weight_norm(m)
|
[
"Removes",
"the",
"weight",
"normalization",
"reparameterization",
"of",
"a",
"parameter",
"from",
"a",
"module",
".",
"If",
"no",
"parameter",
"is",
"supplied",
"then",
"all",
"weight",
"norm",
"parameterizations",
"are",
"removed",
".",
"Args",
":",
"module",
"(",
"nn",
".",
"Module",
")",
":",
"containing",
"module",
"name",
"(",
"str",
"optional",
")",
":",
"name",
"of",
"weight",
"parameter",
"Example",
":",
">>>",
"m",
"=",
"apply_weight_norm",
"(",
"nn",
".",
"Linear",
"(",
"20",
"40",
"))",
">>>",
"remove_weight_norm",
"(",
"m",
")"
] |
def remove_weight_norm(module, name='', remove_all=False):
"""
Removes the weight normalization reparameterization of a parameter from a module.
If no parameter is supplied then all weight norm parameterizations are removed.
Args:
module (nn.Module): containing module
name (str, optional): name of weight parameter
Example:
>>> m = apply_weight_norm(nn.Linear(20, 40))
>>> remove_weight_norm(m)
"""
return remove_reparameterization(module, reparameterization=WeightNorm,
name=name, remove_all=remove_all)
|
[
"def",
"remove_weight_norm",
"(",
"module",
",",
"name",
"=",
"''",
",",
"remove_all",
"=",
"False",
")",
":",
"return",
"remove_reparameterization",
"(",
"module",
",",
"reparameterization",
"=",
"WeightNorm",
",",
"name",
"=",
"name",
",",
"remove_all",
"=",
"remove_all",
")"
] |
https://github.com/NVIDIA/apex/blob/b88c507edb0d067d5570f7a8efe03a90664a3d16/apex/reparameterization/__init__.py#L50-L62
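Expanding the docstring's own example into a runnable sketch (assumes apex and torch are installed):

# Hedged sketch based on the docstring example.
import torch.nn as nn
from apex.reparameterization import apply_weight_norm, remove_weight_norm

m = apply_weight_norm(nn.Linear(20, 40))
remove_weight_norm(m)  # strips every weight-norm reparameterization from m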
|
|
bilylee/SiamFC-TensorFlow
|
f572dca95f2b3b2861f54de467259753428e468c
|
embeddings/convolutional_alexnet.py
|
python
|
convolutional_alexnet_arg_scope
|
(embed_config,
trainable=True,
is_training=False)
|
Defines the default arg scope.
Args:
embed_config: A dictionary which contains configurations for the embedding function.
trainable: If the weights in the embedding function are trainable.
is_training: If the embedding function is built for training.
Returns:
An `arg_scope` to use for the convolutional_alexnet models.
|
Defines the default arg scope.
|
[
"Defines",
"the",
"default",
"arg",
"scope",
"."
] |
def convolutional_alexnet_arg_scope(embed_config,
trainable=True,
is_training=False):
"""Defines the default arg scope.
Args:
embed_config: A dictionary which contains configurations for the embedding function.
    trainable: If the weights in the embedding function are trainable.
is_training: If the embedding function is built for training.
Returns:
An `arg_scope` to use for the convolutional_alexnet models.
"""
# Only consider the model to be in training mode if it's trainable.
# This is vital for batch_norm since moving_mean and moving_variance
# will get updated even if not trainable.
is_model_training = trainable and is_training
if get(embed_config, 'use_bn', True):
batch_norm_scale = get(embed_config, 'bn_scale', True)
batch_norm_decay = 1 - get(embed_config, 'bn_momentum', 3e-4)
batch_norm_epsilon = get(embed_config, 'bn_epsilon', 1e-6)
batch_norm_params = {
"scale": batch_norm_scale,
# Decay for the moving averages.
"decay": batch_norm_decay,
# Epsilon to prevent 0s in variance.
"epsilon": batch_norm_epsilon,
"trainable": trainable,
"is_training": is_model_training,
# Collection containing the moving mean and moving variance.
"variables_collections": {
"beta": None,
"gamma": None,
"moving_mean": ["moving_vars"],
"moving_variance": ["moving_vars"],
},
'updates_collections': None, # Ensure that updates are done within a frame
}
normalizer_fn = slim.batch_norm
else:
batch_norm_params = {}
normalizer_fn = None
weight_decay = get(embed_config, 'weight_decay', 5e-4)
if trainable:
weights_regularizer = slim.l2_regularizer(weight_decay)
else:
weights_regularizer = None
init_method = get(embed_config, 'init_method', 'kaiming_normal')
if is_model_training:
logging.info('embedding init method -- {}'.format(init_method))
if init_method == 'kaiming_normal':
# The same setting as siamese-fc
initializer = slim.variance_scaling_initializer(factor=2.0, mode='FAN_OUT', uniform=False)
else:
initializer = slim.xavier_initializer()
with slim.arg_scope(
[slim.conv2d],
weights_regularizer=weights_regularizer,
weights_initializer=initializer,
padding='VALID',
trainable=trainable,
activation_fn=tf.nn.relu,
normalizer_fn=normalizer_fn,
normalizer_params=batch_norm_params):
with slim.arg_scope([slim.batch_norm], **batch_norm_params):
with slim.arg_scope([slim.batch_norm], is_training=is_model_training) as arg_sc:
return arg_sc
|
[
"def",
"convolutional_alexnet_arg_scope",
"(",
"embed_config",
",",
"trainable",
"=",
"True",
",",
"is_training",
"=",
"False",
")",
":",
"# Only consider the model to be in training mode if it's trainable.",
"# This is vital for batch_norm since moving_mean and moving_variance",
"# will get updated even if not trainable.",
"is_model_training",
"=",
"trainable",
"and",
"is_training",
"if",
"get",
"(",
"embed_config",
",",
"'use_bn'",
",",
"True",
")",
":",
"batch_norm_scale",
"=",
"get",
"(",
"embed_config",
",",
"'bn_scale'",
",",
"True",
")",
"batch_norm_decay",
"=",
"1",
"-",
"get",
"(",
"embed_config",
",",
"'bn_momentum'",
",",
"3e-4",
")",
"batch_norm_epsilon",
"=",
"get",
"(",
"embed_config",
",",
"'bn_epsilon'",
",",
"1e-6",
")",
"batch_norm_params",
"=",
"{",
"\"scale\"",
":",
"batch_norm_scale",
",",
"# Decay for the moving averages.",
"\"decay\"",
":",
"batch_norm_decay",
",",
"# Epsilon to prevent 0s in variance.",
"\"epsilon\"",
":",
"batch_norm_epsilon",
",",
"\"trainable\"",
":",
"trainable",
",",
"\"is_training\"",
":",
"is_model_training",
",",
"# Collection containing the moving mean and moving variance.",
"\"variables_collections\"",
":",
"{",
"\"beta\"",
":",
"None",
",",
"\"gamma\"",
":",
"None",
",",
"\"moving_mean\"",
":",
"[",
"\"moving_vars\"",
"]",
",",
"\"moving_variance\"",
":",
"[",
"\"moving_vars\"",
"]",
",",
"}",
",",
"'updates_collections'",
":",
"None",
",",
"# Ensure that updates are done within a frame",
"}",
"normalizer_fn",
"=",
"slim",
".",
"batch_norm",
"else",
":",
"batch_norm_params",
"=",
"{",
"}",
"normalizer_fn",
"=",
"None",
"weight_decay",
"=",
"get",
"(",
"embed_config",
",",
"'weight_decay'",
",",
"5e-4",
")",
"if",
"trainable",
":",
"weights_regularizer",
"=",
"slim",
".",
"l2_regularizer",
"(",
"weight_decay",
")",
"else",
":",
"weights_regularizer",
"=",
"None",
"init_method",
"=",
"get",
"(",
"embed_config",
",",
"'init_method'",
",",
"'kaiming_normal'",
")",
"if",
"is_model_training",
":",
"logging",
".",
"info",
"(",
"'embedding init method -- {}'",
".",
"format",
"(",
"init_method",
")",
")",
"if",
"init_method",
"==",
"'kaiming_normal'",
":",
"# The same setting as siamese-fc",
"initializer",
"=",
"slim",
".",
"variance_scaling_initializer",
"(",
"factor",
"=",
"2.0",
",",
"mode",
"=",
"'FAN_OUT'",
",",
"uniform",
"=",
"False",
")",
"else",
":",
"initializer",
"=",
"slim",
".",
"xavier_initializer",
"(",
")",
"with",
"slim",
".",
"arg_scope",
"(",
"[",
"slim",
".",
"conv2d",
"]",
",",
"weights_regularizer",
"=",
"weights_regularizer",
",",
"weights_initializer",
"=",
"initializer",
",",
"padding",
"=",
"'VALID'",
",",
"trainable",
"=",
"trainable",
",",
"activation_fn",
"=",
"tf",
".",
"nn",
".",
"relu",
",",
"normalizer_fn",
"=",
"normalizer_fn",
",",
"normalizer_params",
"=",
"batch_norm_params",
")",
":",
"with",
"slim",
".",
"arg_scope",
"(",
"[",
"slim",
".",
"batch_norm",
"]",
",",
"*",
"*",
"batch_norm_params",
")",
":",
"with",
"slim",
".",
"arg_scope",
"(",
"[",
"slim",
".",
"batch_norm",
"]",
",",
"is_training",
"=",
"is_model_training",
")",
"as",
"arg_sc",
":",
"return",
"arg_sc"
] |
https://github.com/bilylee/SiamFC-TensorFlow/blob/f572dca95f2b3b2861f54de467259753428e468c/embeddings/convolutional_alexnet.py#L27-L97
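A hedged sketch of how the returned arg_scope is typically consumed in TF1/slim code; the embed_config dict below is a minimal assumption, real configs carry more keys.

# Hedged sketch, assuming TensorFlow 1.x with tf.contrib.slim.
import tensorflow as tf
slim = tf.contrib.slim

embed_config = {'use_bn': True, 'weight_decay': 5e-4}
arg_sc = convolutional_alexnet_arg_scope(embed_config, trainable=True, is_training=True)
with slim.arg_scope(arg_sc):
    net = slim.conv2d(tf.zeros([1, 255, 255, 3]), 96, [11, 11], stride=2)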
|
||
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/squeezebox/media_player.py
|
python
|
SqueezeBoxEntity.async_media_play
|
(self)
|
Send play command to media player.
|
Send play command to media player.
|
[
"Send",
"play",
"command",
"to",
"media",
"player",
"."
] |
async def async_media_play(self):
"""Send play command to media player."""
await self._player.async_play()
|
[
"async",
"def",
"async_media_play",
"(",
"self",
")",
":",
"await",
"self",
".",
"_player",
".",
"async_play",
"(",
")"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/squeezebox/media_player.py#L427-L429
|
||
Delta-ML/delta
|
31dfebc8f20b7cb282b62f291ff25a87e403cc86
|
delta/utils/cmvn.py
|
python
|
create_cmvn_statis
|
(feature_size, add_delta_deltas=True)
|
return sums, square, count
|
init sums, squares and count of feature statistic
|
init sums, squares and count of feature statistic
|
[
"init",
"sums",
"squares",
"and",
"cout",
"of",
"feature",
"statistic"
] |
def create_cmvn_statis(feature_size, add_delta_deltas=True):
''' init sums, squares and count of feature statistic '''
sums = np.zeros([1, feature_size, 3 if add_delta_deltas else 1],
dtype=np.float64)
square = np.zeros([1, feature_size, 3 if add_delta_deltas else 1],
dtype=np.float64)
count = 0.0
return sums, square, count
|
[
"def",
"create_cmvn_statis",
"(",
"feature_size",
",",
"add_delta_deltas",
"=",
"True",
")",
":",
"sums",
"=",
"np",
".",
"zeros",
"(",
"[",
"1",
",",
"feature_size",
",",
"3",
"if",
"add_delta_deltas",
"else",
"1",
"]",
",",
"dtype",
"=",
"np",
".",
"float64",
")",
"square",
"=",
"np",
".",
"zeros",
"(",
"[",
"1",
",",
"feature_size",
",",
"3",
"if",
"add_delta_deltas",
"else",
"1",
"]",
",",
"dtype",
"=",
"np",
".",
"float64",
")",
"count",
"=",
"0.0",
"return",
"sums",
",",
"square",
",",
"count"
] |
https://github.com/Delta-ML/delta/blob/31dfebc8f20b7cb282b62f291ff25a87e403cc86/delta/utils/cmvn.py#L23-L30
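A hedged sketch of how the returned accumulators are typically filled to estimate per-bin CMVN statistics; the update loop is an assumption about usage, not code from the module.

# Hedged sketch: accumulate batch statistics, then derive mean/variance.
import numpy as np

feature_size = 40
sums, square, count = create_cmvn_statis(feature_size, add_delta_deltas=False)
feats = np.random.randn(100, feature_size, 1)  # [frames, bins, channels]
sums += feats.sum(axis=0, keepdims=True)
square += np.square(feats).sum(axis=0, keepdims=True)
count += feats.shape[0]
mean = sums / count
var = square / count - np.square(mean)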
|
|
biolab/orange3
|
41685e1c7b1d1babe680113685a2d44bcc9fec0b
|
Orange/widgets/data/owdatasampler.py
|
python
|
OWDataSampler.__init__
|
(self)
|
[] |
def __init__(self):
super().__init__()
if self.compatibility_mode:
self.Information.compatibility_mode()
self.data = None
self.indices = None
self.sampled_instances = self.remaining_instances = None
self.sampling_box = gui.vBox(self.controlArea, "Sampling Type")
sampling = gui.radioButtons(self.sampling_box, self, "sampling_type",
callback=self.sampling_type_changed)
def set_sampling_type(i):
def set_sampling_type_i():
self.sampling_type = i
self.sampling_type_changed()
return set_sampling_type_i
gui.appendRadioButton(sampling, "Fixed proportion of data:")
self.sampleSizePercentageSlider = gui.hSlider(
gui.indentedBox(sampling), self,
"sampleSizePercentage",
minValue=0, maxValue=100, ticks=10, labelFormat="%d %%",
callback=set_sampling_type(self.FixedProportion))
gui.appendRadioButton(sampling, "Fixed sample size")
ibox = gui.indentedBox(sampling)
self.sampleSizeSpin = gui.spin(
ibox, self, "sampleSizeNumber", label="Instances: ",
minv=1, maxv=self._MAX_SAMPLE_SIZE,
callback=set_sampling_type(self.FixedSize),
controlWidth=90)
gui.checkBox(
ibox, self, "replacement", "Sample with replacement",
callback=set_sampling_type(self.FixedSize))
gui.appendRadioButton(sampling, "Cross validation")
form = QFormLayout(
formAlignment=Qt.AlignLeft | Qt.AlignTop,
labelAlignment=Qt.AlignLeft,
fieldGrowthPolicy=QFormLayout.AllNonFixedFieldsGrow)
ibox = gui.indentedBox(sampling, orientation=form)
form.addRow("Number of subsets:",
gui.spin(
ibox, self, "number_of_folds", 2, 100,
addToLayout=False,
callback=self.number_of_folds_changed))
self.selected_fold_spin = gui.spin(
ibox, self, "selectedFold", 1, self.number_of_folds,
addToLayout=False, callback=self.fold_changed)
form.addRow("Unused subset:" if not self.compatibility_mode
else "Selected subset:", self.selected_fold_spin)
gui.appendRadioButton(sampling, "Bootstrap")
self.sql_box = gui.vBox(self.controlArea, "Sampling Type")
sampling = gui.radioButtons(self.sql_box, self, "sampling_type",
callback=self.sampling_type_changed)
gui.appendRadioButton(sampling, "Time:")
ibox = gui.indentedBox(sampling)
spin = gui.spin(ibox, self, "sampleSizeSqlTime", minv=1, maxv=3600,
callback=set_sampling_type(self.SqlTime))
spin.setSuffix(" sec")
gui.appendRadioButton(sampling, "Percentage")
ibox = gui.indentedBox(sampling)
spin = gui.spin(ibox, self, "sampleSizeSqlPercentage", spinType=float,
minv=0.0001, maxv=100, step=0.1, decimals=4,
callback=set_sampling_type(self.SqlProportion))
spin.setSuffix(" %")
self.sql_box.setVisible(False)
self.options_box = gui.vBox(self.controlArea, "Options", addSpaceBefore=False)
self.cb_seed = gui.checkBox(
self.options_box, self, "use_seed",
"Replicable (deterministic) sampling",
callback=self.settings_changed)
self.cb_stratify = gui.checkBox(
self.options_box, self, "stratify",
"Stratify sample (when possible)", callback=self.settings_changed)
self.cb_sql_dl = gui.checkBox(
self.options_box, self, "sql_dl", "Download data to local memory",
callback=self.settings_changed)
self.cb_sql_dl.setVisible(False)
gui.button(self.buttonsArea, self, "Sample Data",
callback=self.commit)
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
")",
"if",
"self",
".",
"compatibility_mode",
":",
"self",
".",
"Information",
".",
"compatibility_mode",
"(",
")",
"self",
".",
"data",
"=",
"None",
"self",
".",
"indices",
"=",
"None",
"self",
".",
"sampled_instances",
"=",
"self",
".",
"remaining_instances",
"=",
"None",
"self",
".",
"sampling_box",
"=",
"gui",
".",
"vBox",
"(",
"self",
".",
"controlArea",
",",
"\"Sampling Type\"",
")",
"sampling",
"=",
"gui",
".",
"radioButtons",
"(",
"self",
".",
"sampling_box",
",",
"self",
",",
"\"sampling_type\"",
",",
"callback",
"=",
"self",
".",
"sampling_type_changed",
")",
"def",
"set_sampling_type",
"(",
"i",
")",
":",
"def",
"set_sampling_type_i",
"(",
")",
":",
"self",
".",
"sampling_type",
"=",
"i",
"self",
".",
"sampling_type_changed",
"(",
")",
"return",
"set_sampling_type_i",
"gui",
".",
"appendRadioButton",
"(",
"sampling",
",",
"\"Fixed proportion of data:\"",
")",
"self",
".",
"sampleSizePercentageSlider",
"=",
"gui",
".",
"hSlider",
"(",
"gui",
".",
"indentedBox",
"(",
"sampling",
")",
",",
"self",
",",
"\"sampleSizePercentage\"",
",",
"minValue",
"=",
"0",
",",
"maxValue",
"=",
"100",
",",
"ticks",
"=",
"10",
",",
"labelFormat",
"=",
"\"%d %%\"",
",",
"callback",
"=",
"set_sampling_type",
"(",
"self",
".",
"FixedProportion",
")",
")",
"gui",
".",
"appendRadioButton",
"(",
"sampling",
",",
"\"Fixed sample size\"",
")",
"ibox",
"=",
"gui",
".",
"indentedBox",
"(",
"sampling",
")",
"self",
".",
"sampleSizeSpin",
"=",
"gui",
".",
"spin",
"(",
"ibox",
",",
"self",
",",
"\"sampleSizeNumber\"",
",",
"label",
"=",
"\"Instances: \"",
",",
"minv",
"=",
"1",
",",
"maxv",
"=",
"self",
".",
"_MAX_SAMPLE_SIZE",
",",
"callback",
"=",
"set_sampling_type",
"(",
"self",
".",
"FixedSize",
")",
",",
"controlWidth",
"=",
"90",
")",
"gui",
".",
"checkBox",
"(",
"ibox",
",",
"self",
",",
"\"replacement\"",
",",
"\"Sample with replacement\"",
",",
"callback",
"=",
"set_sampling_type",
"(",
"self",
".",
"FixedSize",
")",
")",
"gui",
".",
"appendRadioButton",
"(",
"sampling",
",",
"\"Cross validation\"",
")",
"form",
"=",
"QFormLayout",
"(",
"formAlignment",
"=",
"Qt",
".",
"AlignLeft",
"|",
"Qt",
".",
"AlignTop",
",",
"labelAlignment",
"=",
"Qt",
".",
"AlignLeft",
",",
"fieldGrowthPolicy",
"=",
"QFormLayout",
".",
"AllNonFixedFieldsGrow",
")",
"ibox",
"=",
"gui",
".",
"indentedBox",
"(",
"sampling",
",",
"orientation",
"=",
"form",
")",
"form",
".",
"addRow",
"(",
"\"Number of subsets:\"",
",",
"gui",
".",
"spin",
"(",
"ibox",
",",
"self",
",",
"\"number_of_folds\"",
",",
"2",
",",
"100",
",",
"addToLayout",
"=",
"False",
",",
"callback",
"=",
"self",
".",
"number_of_folds_changed",
")",
")",
"self",
".",
"selected_fold_spin",
"=",
"gui",
".",
"spin",
"(",
"ibox",
",",
"self",
",",
"\"selectedFold\"",
",",
"1",
",",
"self",
".",
"number_of_folds",
",",
"addToLayout",
"=",
"False",
",",
"callback",
"=",
"self",
".",
"fold_changed",
")",
"form",
".",
"addRow",
"(",
"\"Unused subset:\"",
"if",
"not",
"self",
".",
"compatibility_mode",
"else",
"\"Selected subset:\"",
",",
"self",
".",
"selected_fold_spin",
")",
"gui",
".",
"appendRadioButton",
"(",
"sampling",
",",
"\"Bootstrap\"",
")",
"self",
".",
"sql_box",
"=",
"gui",
".",
"vBox",
"(",
"self",
".",
"controlArea",
",",
"\"Sampling Type\"",
")",
"sampling",
"=",
"gui",
".",
"radioButtons",
"(",
"self",
".",
"sql_box",
",",
"self",
",",
"\"sampling_type\"",
",",
"callback",
"=",
"self",
".",
"sampling_type_changed",
")",
"gui",
".",
"appendRadioButton",
"(",
"sampling",
",",
"\"Time:\"",
")",
"ibox",
"=",
"gui",
".",
"indentedBox",
"(",
"sampling",
")",
"spin",
"=",
"gui",
".",
"spin",
"(",
"ibox",
",",
"self",
",",
"\"sampleSizeSqlTime\"",
",",
"minv",
"=",
"1",
",",
"maxv",
"=",
"3600",
",",
"callback",
"=",
"set_sampling_type",
"(",
"self",
".",
"SqlTime",
")",
")",
"spin",
".",
"setSuffix",
"(",
"\" sec\"",
")",
"gui",
".",
"appendRadioButton",
"(",
"sampling",
",",
"\"Percentage\"",
")",
"ibox",
"=",
"gui",
".",
"indentedBox",
"(",
"sampling",
")",
"spin",
"=",
"gui",
".",
"spin",
"(",
"ibox",
",",
"self",
",",
"\"sampleSizeSqlPercentage\"",
",",
"spinType",
"=",
"float",
",",
"minv",
"=",
"0.0001",
",",
"maxv",
"=",
"100",
",",
"step",
"=",
"0.1",
",",
"decimals",
"=",
"4",
",",
"callback",
"=",
"set_sampling_type",
"(",
"self",
".",
"SqlProportion",
")",
")",
"spin",
".",
"setSuffix",
"(",
"\" %\"",
")",
"self",
".",
"sql_box",
".",
"setVisible",
"(",
"False",
")",
"self",
".",
"options_box",
"=",
"gui",
".",
"vBox",
"(",
"self",
".",
"controlArea",
",",
"\"Options\"",
",",
"addSpaceBefore",
"=",
"False",
")",
"self",
".",
"cb_seed",
"=",
"gui",
".",
"checkBox",
"(",
"self",
".",
"options_box",
",",
"self",
",",
"\"use_seed\"",
",",
"\"Replicable (deterministic) sampling\"",
",",
"callback",
"=",
"self",
".",
"settings_changed",
")",
"self",
".",
"cb_stratify",
"=",
"gui",
".",
"checkBox",
"(",
"self",
".",
"options_box",
",",
"self",
",",
"\"stratify\"",
",",
"\"Stratify sample (when possible)\"",
",",
"callback",
"=",
"self",
".",
"settings_changed",
")",
"self",
".",
"cb_sql_dl",
"=",
"gui",
".",
"checkBox",
"(",
"self",
".",
"options_box",
",",
"self",
",",
"\"sql_dl\"",
",",
"\"Download data to local memory\"",
",",
"callback",
"=",
"self",
".",
"settings_changed",
")",
"self",
".",
"cb_sql_dl",
".",
"setVisible",
"(",
"False",
")",
"gui",
".",
"button",
"(",
"self",
".",
"buttonsArea",
",",
"self",
",",
"\"Sample Data\"",
",",
"callback",
"=",
"self",
".",
"commit",
")"
] |
https://github.com/biolab/orange3/blob/41685e1c7b1d1babe680113685a2d44bcc9fec0b/Orange/widgets/data/owdatasampler.py#L79-L165
|
||||
skylander86/lambda-text-extractor
|
6da52d077a2fc571e38bfe29c33ae68f6443cd5a
|
lib-linux_x64/pptx/oxml/ns.py
|
python
|
NamespacePrefixedTag.nspfx
|
(self)
|
return self._pfx
|
Return the string namespace prefix for the tag, e.g. 'f' is returned
for tag 'f:foobar'.
|
Return the string namespace prefix for the tag, e.g. 'f' is returned
for tag 'f:foobar'.
|
[
"Return",
"the",
"string",
"namespace",
"prefix",
"for",
"the",
"tag",
"e",
".",
"g",
".",
"f",
"is",
"returned",
"for",
"tag",
"f",
":",
"foobar",
"."
] |
def nspfx(self):
"""
Return the string namespace prefix for the tag, e.g. 'f' is returned
for tag 'f:foobar'.
"""
return self._pfx
|
[
"def",
"nspfx",
"(",
"self",
")",
":",
"return",
"self",
".",
"_pfx"
] |
https://github.com/skylander86/lambda-text-extractor/blob/6da52d077a2fc571e38bfe29c33ae68f6443cd5a/lib-linux_x64/pptx/oxml/ns.py#L83-L88
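A tiny hedged sketch: NamespacePrefixedTag subclasses str, so it is built from a 'prefix:tagroot' string and nspfx (a property) yields the prefix.

# Hedged sketch, assuming the pptx.oxml.ns public API.
from pptx.oxml.ns import NamespacePrefixedTag

tag = NamespacePrefixedTag('a:tbl')
print(tag.nspfx)  # expected: 'a'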
|
|
sunnyxiaohu/R-C3D.pytorch
|
e8731af7b95f1dc934f6604f9c09e3c4ead74db5
|
lib/tf_model_zoo/models/differential_privacy/dp_sgd/dp_optimizer/utils.py
|
python
|
BuildNetwork
|
(inputs, network_parameters)
|
return outputs, projection, training_parameters
|
Build a network using the given parameters.
Args:
inputs: a Tensor of floats containing the input data.
network_parameters: NetworkParameters object
that describes the parameters for the network.
Returns:
outputs, projection, training_parameters: where outputs (a tensor) is the
output of the network, projection is the projection matrix (or None when no
projection is applied), and training_parameters (a dictionary that maps the
name of each variable to a dictionary of parameters) is the parameters
used during training.
|
Build a network using the given parameters.
|
[
"Build",
"a",
"network",
"using",
"the",
"given",
"parameters",
"."
] |
def BuildNetwork(inputs, network_parameters):
"""Build a network using the given parameters.
Args:
inputs: a Tensor of floats containing the input data.
network_parameters: NetworkParameters object
that describes the parameters for the network.
Returns:
    outputs, projection, training_parameters: where outputs (a tensor) is the
    output of the network, projection is the projection matrix (or None when
    no projection is applied), and training_parameters (a dictionary that maps
    the name of each variable to a dictionary of parameters) is the parameters
    used during training.
"""
training_parameters = {}
num_inputs = network_parameters.input_size
outputs = inputs
projection = None
# First apply convolutions, if needed
for conv_param in network_parameters.conv_parameters:
outputs = tf.reshape(
outputs,
[-1, conv_param.in_size, conv_param.in_size,
conv_param.in_channels])
conv_weights_name = "%s_conv_weight" % (conv_param.name)
conv_bias_name = "%s_conv_bias" % (conv_param.name)
conv_std_dev = 1.0 / (conv_param.patch_size
* math.sqrt(conv_param.in_channels))
conv_weights = tf.Variable(
tf.truncated_normal([conv_param.patch_size,
conv_param.patch_size,
conv_param.in_channels,
conv_param.out_channels],
stddev=conv_std_dev),
trainable=conv_param.trainable,
name=conv_weights_name)
conv_bias = tf.Variable(
tf.truncated_normal([conv_param.out_channels],
stddev=conv_param.bias_stddev),
trainable=conv_param.trainable,
name=conv_bias_name)
training_parameters[conv_weights_name] = {}
training_parameters[conv_bias_name] = {}
conv = tf.nn.conv2d(outputs, conv_weights,
strides=[1, conv_param.stride,
conv_param.stride, 1],
padding="SAME")
relud = tf.nn.relu(conv + conv_bias)
mpd = tf.nn.max_pool(relud, ksize=[1,
conv_param.max_pool_size,
conv_param.max_pool_size, 1],
strides=[1, conv_param.max_pool_stride,
conv_param.max_pool_stride, 1],
padding="SAME")
outputs = mpd
num_inputs = conv_param.num_outputs
# this should equal
# in_size * in_size * out_channels / (stride * max_pool_stride)
# once all the convs are done, reshape to make it flat
outputs = tf.reshape(outputs, [-1, num_inputs])
# Now project, if needed
if network_parameters.projection_type is not "NONE":
projection = tf.Variable(tf.truncated_normal(
[num_inputs, network_parameters.projection_dimensions],
stddev=1.0 / math.sqrt(num_inputs)), trainable=False, name="projection")
num_inputs = network_parameters.projection_dimensions
outputs = tf.matmul(outputs, projection)
# Now apply any other layers
for layer_parameters in network_parameters.layer_parameters:
num_units = layer_parameters.num_units
hidden_weights_name = "%s_weight" % (layer_parameters.name)
hidden_weights = tf.Variable(
tf.truncated_normal([num_inputs, num_units],
stddev=1.0 / math.sqrt(num_inputs)),
name=hidden_weights_name, trainable=layer_parameters.trainable)
training_parameters[hidden_weights_name] = {}
if layer_parameters.gradient_l2norm_bound:
training_parameters[hidden_weights_name]["gradient_l2norm_bound"] = (
layer_parameters.gradient_l2norm_bound)
if layer_parameters.weight_decay:
training_parameters[hidden_weights_name]["weight_decay"] = (
layer_parameters.weight_decay)
outputs = tf.matmul(outputs, hidden_weights)
if layer_parameters.with_bias:
hidden_biases_name = "%s_bias" % (layer_parameters.name)
hidden_biases = tf.Variable(tf.zeros([num_units]),
name=hidden_biases_name)
training_parameters[hidden_biases_name] = {}
if layer_parameters.bias_gradient_l2norm_bound:
training_parameters[hidden_biases_name][
"bias_gradient_l2norm_bound"] = (
layer_parameters.bias_gradient_l2norm_bound)
outputs += hidden_biases
if layer_parameters.relu:
outputs = tf.nn.relu(outputs)
# num_inputs for the next layer is num_units in the current layer.
num_inputs = num_units
return outputs, projection, training_parameters
|
[
"def",
"BuildNetwork",
"(",
"inputs",
",",
"network_parameters",
")",
":",
"training_parameters",
"=",
"{",
"}",
"num_inputs",
"=",
"network_parameters",
".",
"input_size",
"outputs",
"=",
"inputs",
"projection",
"=",
"None",
"# First apply convolutions, if needed",
"for",
"conv_param",
"in",
"network_parameters",
".",
"conv_parameters",
":",
"outputs",
"=",
"tf",
".",
"reshape",
"(",
"outputs",
",",
"[",
"-",
"1",
",",
"conv_param",
".",
"in_size",
",",
"conv_param",
".",
"in_size",
",",
"conv_param",
".",
"in_channels",
"]",
")",
"conv_weights_name",
"=",
"\"%s_conv_weight\"",
"%",
"(",
"conv_param",
".",
"name",
")",
"conv_bias_name",
"=",
"\"%s_conv_bias\"",
"%",
"(",
"conv_param",
".",
"name",
")",
"conv_std_dev",
"=",
"1.0",
"/",
"(",
"conv_param",
".",
"patch_size",
"*",
"math",
".",
"sqrt",
"(",
"conv_param",
".",
"in_channels",
")",
")",
"conv_weights",
"=",
"tf",
".",
"Variable",
"(",
"tf",
".",
"truncated_normal",
"(",
"[",
"conv_param",
".",
"patch_size",
",",
"conv_param",
".",
"patch_size",
",",
"conv_param",
".",
"in_channels",
",",
"conv_param",
".",
"out_channels",
"]",
",",
"stddev",
"=",
"conv_std_dev",
")",
",",
"trainable",
"=",
"conv_param",
".",
"trainable",
",",
"name",
"=",
"conv_weights_name",
")",
"conv_bias",
"=",
"tf",
".",
"Variable",
"(",
"tf",
".",
"truncated_normal",
"(",
"[",
"conv_param",
".",
"out_channels",
"]",
",",
"stddev",
"=",
"conv_param",
".",
"bias_stddev",
")",
",",
"trainable",
"=",
"conv_param",
".",
"trainable",
",",
"name",
"=",
"conv_bias_name",
")",
"training_parameters",
"[",
"conv_weights_name",
"]",
"=",
"{",
"}",
"training_parameters",
"[",
"conv_bias_name",
"]",
"=",
"{",
"}",
"conv",
"=",
"tf",
".",
"nn",
".",
"conv2d",
"(",
"outputs",
",",
"conv_weights",
",",
"strides",
"=",
"[",
"1",
",",
"conv_param",
".",
"stride",
",",
"conv_param",
".",
"stride",
",",
"1",
"]",
",",
"padding",
"=",
"\"SAME\"",
")",
"relud",
"=",
"tf",
".",
"nn",
".",
"relu",
"(",
"conv",
"+",
"conv_bias",
")",
"mpd",
"=",
"tf",
".",
"nn",
".",
"max_pool",
"(",
"relud",
",",
"ksize",
"=",
"[",
"1",
",",
"conv_param",
".",
"max_pool_size",
",",
"conv_param",
".",
"max_pool_size",
",",
"1",
"]",
",",
"strides",
"=",
"[",
"1",
",",
"conv_param",
".",
"max_pool_stride",
",",
"conv_param",
".",
"max_pool_stride",
",",
"1",
"]",
",",
"padding",
"=",
"\"SAME\"",
")",
"outputs",
"=",
"mpd",
"num_inputs",
"=",
"conv_param",
".",
"num_outputs",
"# this should equal",
"# in_size * in_size * out_channels / (stride * max_pool_stride)",
"# once all the convs are done, reshape to make it flat",
"outputs",
"=",
"tf",
".",
"reshape",
"(",
"outputs",
",",
"[",
"-",
"1",
",",
"num_inputs",
"]",
")",
"# Now project, if needed",
"if",
"network_parameters",
".",
"projection_type",
"is",
"not",
"\"NONE\"",
":",
"projection",
"=",
"tf",
".",
"Variable",
"(",
"tf",
".",
"truncated_normal",
"(",
"[",
"num_inputs",
",",
"network_parameters",
".",
"projection_dimensions",
"]",
",",
"stddev",
"=",
"1.0",
"/",
"math",
".",
"sqrt",
"(",
"num_inputs",
")",
")",
",",
"trainable",
"=",
"False",
",",
"name",
"=",
"\"projection\"",
")",
"num_inputs",
"=",
"network_parameters",
".",
"projection_dimensions",
"outputs",
"=",
"tf",
".",
"matmul",
"(",
"outputs",
",",
"projection",
")",
"# Now apply any other layers",
"for",
"layer_parameters",
"in",
"network_parameters",
".",
"layer_parameters",
":",
"num_units",
"=",
"layer_parameters",
".",
"num_units",
"hidden_weights_name",
"=",
"\"%s_weight\"",
"%",
"(",
"layer_parameters",
".",
"name",
")",
"hidden_weights",
"=",
"tf",
".",
"Variable",
"(",
"tf",
".",
"truncated_normal",
"(",
"[",
"num_inputs",
",",
"num_units",
"]",
",",
"stddev",
"=",
"1.0",
"/",
"math",
".",
"sqrt",
"(",
"num_inputs",
")",
")",
",",
"name",
"=",
"hidden_weights_name",
",",
"trainable",
"=",
"layer_parameters",
".",
"trainable",
")",
"training_parameters",
"[",
"hidden_weights_name",
"]",
"=",
"{",
"}",
"if",
"layer_parameters",
".",
"gradient_l2norm_bound",
":",
"training_parameters",
"[",
"hidden_weights_name",
"]",
"[",
"\"gradient_l2norm_bound\"",
"]",
"=",
"(",
"layer_parameters",
".",
"gradient_l2norm_bound",
")",
"if",
"layer_parameters",
".",
"weight_decay",
":",
"training_parameters",
"[",
"hidden_weights_name",
"]",
"[",
"\"weight_decay\"",
"]",
"=",
"(",
"layer_parameters",
".",
"weight_decay",
")",
"outputs",
"=",
"tf",
".",
"matmul",
"(",
"outputs",
",",
"hidden_weights",
")",
"if",
"layer_parameters",
".",
"with_bias",
":",
"hidden_biases_name",
"=",
"\"%s_bias\"",
"%",
"(",
"layer_parameters",
".",
"name",
")",
"hidden_biases",
"=",
"tf",
".",
"Variable",
"(",
"tf",
".",
"zeros",
"(",
"[",
"num_units",
"]",
")",
",",
"name",
"=",
"hidden_biases_name",
")",
"training_parameters",
"[",
"hidden_biases_name",
"]",
"=",
"{",
"}",
"if",
"layer_parameters",
".",
"bias_gradient_l2norm_bound",
":",
"training_parameters",
"[",
"hidden_biases_name",
"]",
"[",
"\"bias_gradient_l2norm_bound\"",
"]",
"=",
"(",
"layer_parameters",
".",
"bias_gradient_l2norm_bound",
")",
"outputs",
"+=",
"hidden_biases",
"if",
"layer_parameters",
".",
"relu",
":",
"outputs",
"=",
"tf",
".",
"nn",
".",
"relu",
"(",
"outputs",
")",
"# num_inputs for the next layer is num_units in the current layer.",
"num_inputs",
"=",
"num_units",
"return",
"outputs",
",",
"projection",
",",
"training_parameters"
] |
https://github.com/sunnyxiaohu/R-C3D.pytorch/blob/e8731af7b95f1dc934f6604f9c09e3c4ead74db5/lib/tf_model_zoo/models/differential_privacy/dp_sgd/dp_optimizer/utils.py#L88-L193
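One hedged caveat about the code above: the projection check spells the comparison as is not "NONE", which tests object identity rather than equality (a SyntaxWarning on Python 3.8+). Equality is the robust spelling, as this standalone demo shows.

# Hedged demo of identity vs. equality on equal-but-distinct strings.
projection_type = "".join(["NO", "NE"])  # equals "NONE", but a distinct object
print(projection_type == "NONE")   # True  -- equality compares contents
print(projection_type is "NONE")   # False -- identity compares objects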
|
|
smart-on-fhir/client-py
|
6047277daa31f10931e44ed19e92128298cdb64b
|
fhirclient/models/device.py
|
python
|
Device.__init__
|
(self, jsondict=None, strict=True)
|
Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
|
Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
|
[
"Initialize",
"all",
"valid",
"properties",
".",
":",
"raises",
":",
"FHIRValidationError",
"on",
"validation",
"errors",
"unless",
"strict",
"is",
"False",
":",
"param",
"dict",
"jsondict",
":",
"A",
"JSON",
"dictionary",
"to",
"use",
"for",
"initialization",
":",
"param",
"bool",
"strict",
":",
"If",
"True",
"(",
"the",
"default",
")",
"invalid",
"variables",
"will",
"raise",
"a",
"TypeError"
] |
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.contact = None
""" Details for human/organization for support.
List of `ContactPoint` items (represented as `dict` in JSON). """
self.definition = None
""" The reference to the definition for the device.
Type `FHIRReference` (represented as `dict` in JSON). """
self.deviceName = None
""" The name of the device as given by the manufacturer.
List of `DeviceDeviceName` items (represented as `dict` in JSON). """
self.distinctIdentifier = None
""" The distinct identification string.
Type `str`. """
self.expirationDate = None
""" Date and time of expiry of this device (if applicable).
Type `FHIRDate` (represented as `str` in JSON). """
self.identifier = None
""" Instance identifier.
List of `Identifier` items (represented as `dict` in JSON). """
self.location = None
""" Where the device is found.
Type `FHIRReference` (represented as `dict` in JSON). """
self.lotNumber = None
""" Lot number of manufacture.
Type `str`. """
self.manufactureDate = None
""" Date when the device was made.
Type `FHIRDate` (represented as `str` in JSON). """
self.manufacturer = None
""" Name of device manufacturer.
Type `str`. """
self.modelNumber = None
""" The model number for the device.
Type `str`. """
self.note = None
""" Device notes and comments.
List of `Annotation` items (represented as `dict` in JSON). """
self.owner = None
""" Organization responsible for device.
Type `FHIRReference` (represented as `dict` in JSON). """
self.parent = None
""" The parent device.
Type `FHIRReference` (represented as `dict` in JSON). """
self.partNumber = None
""" The part number of the device.
Type `str`. """
self.patient = None
""" Patient to whom Device is affixed.
Type `FHIRReference` (represented as `dict` in JSON). """
self.property = None
""" The actual configuration settings of a device as it actually
operates, e.g., regulation status, time properties.
List of `DeviceProperty` items (represented as `dict` in JSON). """
self.safety = None
""" Safety Characteristics of Device.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.serialNumber = None
""" Serial number assigned by the manufacturer.
Type `str`. """
self.specialization = None
""" The capabilities supported on a device, the standards to which the
device conforms for a particular purpose, and used for the
communication.
List of `DeviceSpecialization` items (represented as `dict` in JSON). """
self.status = None
""" active | inactive | entered-in-error | unknown.
Type `str`. """
self.statusReason = None
""" online | paused | standby | offline | not-ready | transduc-discon |
hw-discon | off.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.type = None
""" The kind or type of device.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.udiCarrier = None
""" Unique Device Identifier (UDI) Barcode string.
List of `DeviceUdiCarrier` items (represented as `dict` in JSON). """
self.url = None
""" Network address to contact device.
Type `str`. """
self.version = None
""" The actual design of the device or software version running on the
device.
List of `DeviceVersion` items (represented as `dict` in JSON). """
super(Device, self).__init__(jsondict=jsondict, strict=strict)
|
[
"def",
"__init__",
"(",
"self",
",",
"jsondict",
"=",
"None",
",",
"strict",
"=",
"True",
")",
":",
"self",
".",
"contact",
"=",
"None",
"\"\"\" Details for human/organization for support.\n List of `ContactPoint` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"definition",
"=",
"None",
"\"\"\" The reference to the definition for the device.\n Type `FHIRReference` (represented as `dict` in JSON). \"\"\"",
"self",
".",
"deviceName",
"=",
"None",
"\"\"\" The name of the device as given by the manufacturer.\n List of `DeviceDeviceName` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"distinctIdentifier",
"=",
"None",
"\"\"\" The distinct identification string.\n Type `str`. \"\"\"",
"self",
".",
"expirationDate",
"=",
"None",
"\"\"\" Date and time of expiry of this device (if applicable).\n Type `FHIRDate` (represented as `str` in JSON). \"\"\"",
"self",
".",
"identifier",
"=",
"None",
"\"\"\" Instance identifier.\n List of `Identifier` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"location",
"=",
"None",
"\"\"\" Where the device is found.\n Type `FHIRReference` (represented as `dict` in JSON). \"\"\"",
"self",
".",
"lotNumber",
"=",
"None",
"\"\"\" Lot number of manufacture.\n Type `str`. \"\"\"",
"self",
".",
"manufactureDate",
"=",
"None",
"\"\"\" Date when the device was made.\n Type `FHIRDate` (represented as `str` in JSON). \"\"\"",
"self",
".",
"manufacturer",
"=",
"None",
"\"\"\" Name of device manufacturer.\n Type `str`. \"\"\"",
"self",
".",
"modelNumber",
"=",
"None",
"\"\"\" The model number for the device.\n Type `str`. \"\"\"",
"self",
".",
"note",
"=",
"None",
"\"\"\" Device notes and comments.\n List of `Annotation` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"owner",
"=",
"None",
"\"\"\" Organization responsible for device.\n Type `FHIRReference` (represented as `dict` in JSON). \"\"\"",
"self",
".",
"parent",
"=",
"None",
"\"\"\" The parent device.\n Type `FHIRReference` (represented as `dict` in JSON). \"\"\"",
"self",
".",
"partNumber",
"=",
"None",
"\"\"\" The part number of the device.\n Type `str`. \"\"\"",
"self",
".",
"patient",
"=",
"None",
"\"\"\" Patient to whom Device is affixed.\n Type `FHIRReference` (represented as `dict` in JSON). \"\"\"",
"self",
".",
"property",
"=",
"None",
"\"\"\" The actual configuration settings of a device as it actually\n operates, e.g., regulation status, time properties.\n List of `DeviceProperty` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"safety",
"=",
"None",
"\"\"\" Safety Characteristics of Device.\n List of `CodeableConcept` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"serialNumber",
"=",
"None",
"\"\"\" Serial number assigned by the manufacturer.\n Type `str`. \"\"\"",
"self",
".",
"specialization",
"=",
"None",
"\"\"\" The capabilities supported on a device, the standards to which the\n device conforms for a particular purpose, and used for the\n communication.\n List of `DeviceSpecialization` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"status",
"=",
"None",
"\"\"\" active | inactive | entered-in-error | unknown.\n Type `str`. \"\"\"",
"self",
".",
"statusReason",
"=",
"None",
"\"\"\" online | paused | standby | offline | not-ready | transduc-discon |\n hw-discon | off.\n List of `CodeableConcept` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"type",
"=",
"None",
"\"\"\" The kind or type of device.\n Type `CodeableConcept` (represented as `dict` in JSON). \"\"\"",
"self",
".",
"udiCarrier",
"=",
"None",
"\"\"\" Unique Device Identifier (UDI) Barcode string.\n List of `DeviceUdiCarrier` items (represented as `dict` in JSON). \"\"\"",
"self",
".",
"url",
"=",
"None",
"\"\"\" Network address to contact device.\n Type `str`. \"\"\"",
"self",
".",
"version",
"=",
"None",
"\"\"\" The actual design of the device or software version running on the\n device.\n List of `DeviceVersion` items (represented as `dict` in JSON). \"\"\"",
"super",
"(",
"Device",
",",
"self",
")",
".",
"__init__",
"(",
"jsondict",
"=",
"jsondict",
",",
"strict",
"=",
"strict",
")"
] |
https://github.com/smart-on-fhir/client-py/blob/6047277daa31f10931e44ed19e92128298cdb64b/fhirclient/models/device.py#L20-L137
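A hedged construction sketch for the Device record above; the property values are illustrative, and with strict=True (the default) invalid properties raise FHIRValidationError.

# Hedged sketch, assuming the fhirclient package layout shown in the record.
from fhirclient.models.device import Device

d = Device({'resourceType': 'Device', 'status': 'active', 'manufacturer': 'Acme'})
print(d.status, d.manufacturer)  # expected: active Acme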
|
||
openedx/ecommerce
|
db6c774e239e5aa65e5a6151995073d364e8c896
|
ecommerce/extensions/catalogue/migrations/0013_coupon_product_class.py
|
python
|
create_product_class
|
(apps, schema_editor)
|
Create a Coupon product class.
|
Create a Coupon product class.
|
[
"Create",
"a",
"Coupon",
"product",
"class",
"."
] |
def create_product_class(apps, schema_editor):
"""Create a Coupon product class."""
Category = apps.get_model("catalogue", "Category")
ProductAttribute = apps.get_model("catalogue", "ProductAttribute")
ProductClass = apps.get_model("catalogue", "ProductClass")
for klass in (Category, ProductAttribute, ProductClass):
klass.skip_history_when_saving = True
coupon = ProductClass(
track_stock=False,
requires_shipping=False,
name=COUPON_PRODUCT_CLASS_NAME,
slug=slugify(COUPON_PRODUCT_CLASS_NAME),
)
coupon.save()
pa = ProductAttribute(
product_class=coupon,
name='Coupon vouchers',
code='coupon_vouchers',
type='entity',
required=False
)
pa.save()
# Create a category for coupons.
c = Category(
description='All Coupons',
slug='coupons',
depth=1,
path='0002',
image='',
name='Coupons'
)
c.save()
|
[
"def",
"create_product_class",
"(",
"apps",
",",
"schema_editor",
")",
":",
"Category",
"=",
"apps",
".",
"get_model",
"(",
"\"catalogue\"",
",",
"\"Category\"",
")",
"ProductAttribute",
"=",
"apps",
".",
"get_model",
"(",
"\"catalogue\"",
",",
"\"ProductAttribute\"",
")",
"ProductClass",
"=",
"apps",
".",
"get_model",
"(",
"\"catalogue\"",
",",
"\"ProductClass\"",
")",
"for",
"klass",
"in",
"(",
"Category",
",",
"ProductAttribute",
",",
"ProductClass",
")",
":",
"klass",
".",
"skip_history_when_saving",
"=",
"True",
"coupon",
"=",
"ProductClass",
"(",
"track_stock",
"=",
"False",
",",
"requires_shipping",
"=",
"False",
",",
"name",
"=",
"COUPON_PRODUCT_CLASS_NAME",
",",
"slug",
"=",
"slugify",
"(",
"COUPON_PRODUCT_CLASS_NAME",
")",
",",
")",
"coupon",
".",
"save",
"(",
")",
"pa",
"=",
"ProductAttribute",
"(",
"product_class",
"=",
"coupon",
",",
"name",
"=",
"'Coupon vouchers'",
",",
"code",
"=",
"'coupon_vouchers'",
",",
"type",
"=",
"'entity'",
",",
"required",
"=",
"False",
")",
"pa",
".",
"save",
"(",
")",
"# Create a category for coupons.",
"c",
"=",
"Category",
"(",
"description",
"=",
"'All Coupons'",
",",
"slug",
"=",
"'coupons'",
",",
"depth",
"=",
"1",
",",
"path",
"=",
"'0002'",
",",
"image",
"=",
"''",
",",
"name",
"=",
"'Coupons'",
")",
"c",
".",
"save",
"(",
")"
] |
https://github.com/openedx/ecommerce/blob/db6c774e239e5aa65e5a6151995073d364e8c896/ecommerce/extensions/catalogue/migrations/0013_coupon_product_class.py#L10-L45
|
||
erdewit/ib_insync
|
9674fe974c07ca3afaf70673ae296f1d19f028dc
|
ib_insync/wrapper.py
|
python
|
Wrapper.setEventsDone
|
(self)
|
Set all subscription-type events as done.
|
Set all subscription-type events as done.
|
[
"Set",
"all",
"subscribtion",
"-",
"type",
"events",
"as",
"done",
"."
] |
def setEventsDone(self):
"""Set all subscribtion-type events as done."""
events = [ticker.updateEvent for ticker in self.tickers.values()]
events += [sub.updateEvent for sub in self.reqId2Subscriber.values()]
for trade in self.trades.values():
events += [
trade.statusEvent, trade.modifyEvent, trade.fillEvent,
trade.filledEvent, trade.commissionReportEvent,
trade.cancelEvent, trade.cancelledEvent]
for event in events:
event.set_done()
|
[
"def",
"setEventsDone",
"(",
"self",
")",
":",
"events",
"=",
"[",
"ticker",
".",
"updateEvent",
"for",
"ticker",
"in",
"self",
".",
"tickers",
".",
"values",
"(",
")",
"]",
"events",
"+=",
"[",
"sub",
".",
"updateEvent",
"for",
"sub",
"in",
"self",
".",
"reqId2Subscriber",
".",
"values",
"(",
")",
"]",
"for",
"trade",
"in",
"self",
".",
"trades",
".",
"values",
"(",
")",
":",
"events",
"+=",
"[",
"trade",
".",
"statusEvent",
",",
"trade",
".",
"modifyEvent",
",",
"trade",
".",
"fillEvent",
",",
"trade",
".",
"filledEvent",
",",
"trade",
".",
"commissionReportEvent",
",",
"trade",
".",
"cancelEvent",
",",
"trade",
".",
"cancelledEvent",
"]",
"for",
"event",
"in",
"events",
":",
"event",
".",
"set_done",
"(",
")"
] |
https://github.com/erdewit/ib_insync/blob/9674fe974c07ca3afaf70673ae296f1d19f028dc/ib_insync/wrapper.py#L114-L124
|
||
volatilityfoundation/volatility3
|
168b0d0b053ab97a7cb096ef2048795cc54d885f
|
volatility3/framework/symbols/wrappers.py
|
python
|
Flags.__init__
|
(self, choices: Mapping[str, int])
|
[] |
def __init__(self, choices: Mapping[str, int]) -> None:
self._choices = interfaces.objects.ReadOnlyMapping(choices)
|
[
"def",
"__init__",
"(",
"self",
",",
"choices",
":",
"Mapping",
"[",
"str",
",",
"int",
"]",
")",
"->",
"None",
":",
"self",
".",
"_choices",
"=",
"interfaces",
".",
"objects",
".",
"ReadOnlyMapping",
"(",
"choices",
")"
] |
https://github.com/volatilityfoundation/volatility3/blob/168b0d0b053ab97a7cb096ef2048795cc54d885f/volatility3/framework/symbols/wrappers.py#L14-L15
|
||||
takluyver/pynsist
|
17cd683735de1b2f2b75875befe56196dd652054
|
nsist/copymodules.py
|
python
|
copy_zipmodule
|
(loader, modname, target)
|
Copy a module or package out of a zip file to the target directory.
|
Copy a module or package out of a zip file to the target directory.
|
[
"Copy",
"a",
"module",
"or",
"package",
"out",
"of",
"a",
"zip",
"file",
"to",
"the",
"target",
"directory",
"."
] |
def copy_zipmodule(loader, modname, target):
"""Copy a module or package out of a zip file to the target directory."""
file = loader.get_filename(modname)
assert file.startswith(loader.archive)
path_in_zip = file[len(loader.archive+'/'):]
zf = zipfile.ZipFile(loader.archive)
# If the packages are in a subdirectory, extracting them recreates the
# directory structure from the zip file. So extract to a temp dir first,
# and then copy the modules to target.
tempdir = tempfile.mkdtemp()
if loader.is_package(modname):
# Extract everything in a folder
pkgdir, basename = os.path.split(path_in_zip)
assert basename.startswith('__init__')
pkgfiles = [f for f in zf.namelist() if f.startswith(pkgdir)]
zf.extractall(tempdir, pkgfiles)
shutil.copytree(pjoin(tempdir, pkgdir), pjoin(target, modname))
else:
# Extract a single file
zf.extract(path_in_zip, tempdir)
shutil.copy2(pjoin(tempdir, path_in_zip), target)
shutil.rmtree(tempdir)
|
[
"def",
"copy_zipmodule",
"(",
"loader",
",",
"modname",
",",
"target",
")",
":",
"file",
"=",
"loader",
".",
"get_filename",
"(",
"modname",
")",
"assert",
"file",
".",
"startswith",
"(",
"loader",
".",
"archive",
")",
"path_in_zip",
"=",
"file",
"[",
"len",
"(",
"loader",
".",
"archive",
"+",
"'/'",
")",
":",
"]",
"zf",
"=",
"zipfile",
".",
"ZipFile",
"(",
"loader",
".",
"archive",
")",
"# If the packages are in a subdirectory, extracting them recreates the",
"# directory structure from the zip file. So extract to a temp dir first,",
"# and then copy the modules to target.",
"tempdir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"if",
"loader",
".",
"is_package",
"(",
"modname",
")",
":",
"# Extract everything in a folder",
"pkgdir",
",",
"basename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path_in_zip",
")",
"assert",
"basename",
".",
"startswith",
"(",
"'__init__'",
")",
"pkgfiles",
"=",
"[",
"f",
"for",
"f",
"in",
"zf",
".",
"namelist",
"(",
")",
"if",
"f",
".",
"startswith",
"(",
"pkgdir",
")",
"]",
"zf",
".",
"extractall",
"(",
"tempdir",
",",
"pkgfiles",
")",
"shutil",
".",
"copytree",
"(",
"pjoin",
"(",
"tempdir",
",",
"pkgdir",
")",
",",
"pjoin",
"(",
"target",
",",
"modname",
")",
")",
"else",
":",
"# Extract a single file",
"zf",
".",
"extract",
"(",
"path_in_zip",
",",
"tempdir",
")",
"shutil",
".",
"copy2",
"(",
"pjoin",
"(",
"tempdir",
",",
"path_in_zip",
")",
",",
"target",
")",
"shutil",
".",
"rmtree",
"(",
"tempdir",
")"
] |
https://github.com/takluyver/pynsist/blob/17cd683735de1b2f2b75875befe56196dd652054/nsist/copymodules.py#L48-L71
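A hedged driver sketch: the loader argument matches zipimport's loader interface (archive, get_filename, is_package), so a zipimporter can feed copy_zipmodule; the archive path below is hypothetical.

# Hedged sketch, assuming a zip archive containing package 'mypkg'.
import zipimport

loader = zipimport.zipimporter('wheels/mypkg.zip')  # hypothetical path
copy_zipmodule(loader, 'mypkg', 'build/pkgs')  # copies mypkg out of the zip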
|
||
Calysto/calysto_scheme
|
15bf81987870bcae1264e5a0a06feb9a8ee12b8b
|
calysto_scheme/scheme.py
|
python
|
get_exception_info
|
(exception)
|
[] |
def get_exception_info(exception):
column = list_ref(exception, 5)
line = list_ref(exception, 4)
source = list_ref(exception, 3)
if (False if (((source) is (symbol_none)) is False) else True):
return symbol_none
else:
return format("line ~a, column ~a of ~a", line, column, source)
|
[
"def",
"get_exception_info",
"(",
"exception",
")",
":",
"column",
"=",
"list_ref",
"(",
"exception",
",",
"5",
")",
"line",
"=",
"list_ref",
"(",
"exception",
",",
"4",
")",
"source",
"=",
"list_ref",
"(",
"exception",
",",
"3",
")",
"if",
"(",
"False",
"if",
"(",
"(",
"(",
"source",
")",
"is",
"(",
"symbol_none",
")",
")",
"is",
"False",
")",
"else",
"True",
")",
":",
"return",
"symbol_none",
"else",
":",
"return",
"format",
"(",
"\"line ~a, column ~a of ~a\"",
",",
"line",
",",
"column",
",",
"source",
")"
] |
https://github.com/Calysto/calysto_scheme/blob/15bf81987870bcae1264e5a0a06feb9a8ee12b8b/calysto_scheme/scheme.py#L8062-L8069
|
||||
Rapptz/RoboDanny
|
1fb95d76d1b7685e2e2ff950e11cddfc96efbfec
|
cogs/splatoon.py
|
python
|
Splatoon.gear
|
(self, ctx, *, query: GearQuery)
|
Searches for Splatoon 2 gear that matches your query.
The query can be a main ability, a brand, or a name.
For advanced queries to reduce results you can pass some filters:
`--brand` with the brand name.
`--ability` with the main ability.
`--frequent` with the buffed main ability probability.
`--type` with the type of clothing (head, hat, shoes, or clothes)
For example, a query like `ink resist --brand splash mob` will give all
gear with Ink Resistance Up and Splash Mob as the brand.
**Note**: you must pass a query before passing a filter.
|
Searches for Splatoon 2 gear that matches your query.
|
[
"Searches",
"for",
"Splatoon",
"2",
"gear",
"that",
"matches",
"your",
"query",
"."
] |
async def gear(self, ctx, *, query: GearQuery):
"""Searches for Splatoon 2 gear that matches your query.
The query can be a main ability, a brand, or a name.
For advanced queries to reduce results you can pass some filters:
`--brand` with the brand name.
`--ability` with the main ability.
    `--frequent` with the buffed main ability probability.
`--type` with the type of clothing (head, hat, shoes, or clothes)
For example, a query like `ink resist --brand splash mob` will give all
gear with Ink Resistance Up and Splash Mob as the brand.
**Note**: you must pass a query before passing a filter.
"""
pages = RoboPages(GearPageSource(query), ctx=ctx, show_skip_pages=True)
await pages.start()
|
[
"async",
"def",
"gear",
"(",
"self",
",",
"ctx",
",",
"*",
",",
"query",
":",
"GearQuery",
")",
":",
"pages",
"=",
"RoboPages",
"(",
"GearPageSource",
"(",
"query",
")",
",",
"ctx",
"=",
"ctx",
",",
"show_skip_pages",
"=",
"True",
")",
"await",
"pages",
".",
"start",
"(",
")"
] |
https://github.com/Rapptz/RoboDanny/blob/1fb95d76d1b7685e2e2ff950e11cddfc96efbfec/cogs/splatoon.py#L1152-L1171
|
||
JaniceWuo/MovieRecommend
|
4c86db64ca45598917d304f535413df3bc9fea65
|
movierecommend/venv1/Lib/site-packages/django/contrib/admin/checks.py
|
python
|
ModelAdminChecks._check_inlines_item
|
(self, obj, model, inline, label)
|
Check one inline model admin.
|
Check one inline model admin.
|
[
"Check",
"one",
"inline",
"model",
"admin",
"."
] |
def _check_inlines_item(self, obj, model, inline, label):
""" Check one inline model admin. """
inline_label = '.'.join([inline.__module__, inline.__name__])
from django.contrib.admin.options import InlineModelAdmin
if not issubclass(inline, InlineModelAdmin):
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % inline_label,
obj=obj.__class__,
id='admin.E104',
)
]
elif not inline.model:
return [
checks.Error(
"'%s' must have a 'model' attribute." % inline_label,
obj=obj.__class__,
id='admin.E105',
)
]
elif not issubclass(inline.model, models.Model):
return must_be('a Model', option='%s.model' % inline_label, obj=obj, id='admin.E106')
else:
return inline(model, obj.admin_site).check()
|
[
"def",
"_check_inlines_item",
"(",
"self",
",",
"obj",
",",
"model",
",",
"inline",
",",
"label",
")",
":",
"inline_label",
"=",
"'.'",
".",
"join",
"(",
"[",
"inline",
".",
"__module__",
",",
"inline",
".",
"__name__",
"]",
")",
"from",
"django",
".",
"contrib",
".",
"admin",
".",
"options",
"import",
"InlineModelAdmin",
"if",
"not",
"issubclass",
"(",
"inline",
",",
"InlineModelAdmin",
")",
":",
"return",
"[",
"checks",
".",
"Error",
"(",
"\"'%s' must inherit from 'InlineModelAdmin'.\"",
"%",
"inline_label",
",",
"obj",
"=",
"obj",
".",
"__class__",
",",
"id",
"=",
"'admin.E104'",
",",
")",
"]",
"elif",
"not",
"inline",
".",
"model",
":",
"return",
"[",
"checks",
".",
"Error",
"(",
"\"'%s' must have a 'model' attribute.\"",
"%",
"inline_label",
",",
"obj",
"=",
"obj",
".",
"__class__",
",",
"id",
"=",
"'admin.E105'",
",",
")",
"]",
"elif",
"not",
"issubclass",
"(",
"inline",
".",
"model",
",",
"models",
".",
"Model",
")",
":",
"return",
"must_be",
"(",
"'a Model'",
",",
"option",
"=",
"'%s.model'",
"%",
"inline_label",
",",
"obj",
"=",
"obj",
",",
"id",
"=",
"'admin.E106'",
")",
"else",
":",
"return",
"inline",
"(",
"model",
",",
"obj",
".",
"admin_site",
")",
".",
"check",
"(",
")"
] |
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/django/contrib/admin/checks.py#L564-L589
|
||
reddit/baseplate.py
|
f29bd1ce0f1ec4962f65ecd5a2b016b1cd4fd5ac
|
baseplate/lib/crypto.py
|
python
|
make_signature
|
(secret: VersionedSecret, message: str, max_age: datetime.timedelta)
|
return base64.urlsafe_b64encode(header + digest)
|
Return a signature for the given message.
To ensure that key rotation works automatically, always fetch the secret
token from the secret store immediately before use and do not cache / save
the token anywhere. The ``current`` version of the secret will be used to
sign the token.
:param secret: The secret signing key from the secret store.
:param message: The message to sign.
:param max_age: The amount of time in the future the signature will be valid for.
:return: An encoded signature.
|
Return a signature for the given message.
|
[
"Return",
"a",
"signature",
"for",
"the",
"given",
"message",
"."
] |
def make_signature(secret: VersionedSecret, message: str, max_age: datetime.timedelta) -> bytes:
"""Return a signature for the given message.
To ensure that key rotation works automatically, always fetch the secret
token from the secret store immediately before use and do not cache / save
the token anywhere. The ``current`` version of the secret will be used to
sign the token.
:param secret: The secret signing key from the secret store.
:param message: The message to sign.
:param max_age: The amount of time in the future the signature will be valid for.
:return: An encoded signature.
"""
version = 1
expiration = int(time.time() + max_age.total_seconds())
header = _HEADER_FORMAT.pack(version, expiration)
digest = _compute_digest(secret.current, header, message)
return base64.urlsafe_b64encode(header + digest)
|
[
"def",
"make_signature",
"(",
"secret",
":",
"VersionedSecret",
",",
"message",
":",
"str",
",",
"max_age",
":",
"datetime",
".",
"timedelta",
")",
"->",
"bytes",
":",
"version",
"=",
"1",
"expiration",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
"+",
"max_age",
".",
"total_seconds",
"(",
")",
")",
"header",
"=",
"_HEADER_FORMAT",
".",
"pack",
"(",
"version",
",",
"expiration",
")",
"digest",
"=",
"_compute_digest",
"(",
"secret",
".",
"current",
",",
"header",
",",
"message",
")",
"return",
"base64",
".",
"urlsafe_b64encode",
"(",
"header",
"+",
"digest",
")"
] |
https://github.com/reddit/baseplate.py/blob/f29bd1ce0f1ec4962f65ecd5a2b016b1cd4fd5ac/baseplate/lib/crypto.py#L101-L119
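A hedged signing sketch; the secret below is a stand-in built with from_simple_secret, whereas production code fetches the VersionedSecret from the secret store immediately before use, as the docstring requires.

# Hedged sketch, assuming baseplate.lib.secrets.VersionedSecret.
import datetime
from baseplate.lib.crypto import make_signature
from baseplate.lib.secrets import VersionedSecret

secret = VersionedSecret.from_simple_secret(b"hunter2")  # stand-in secret
sig = make_signature(secret, "my message", datetime.timedelta(minutes=5))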
|
|
brian-team/brian2
|
c212a57cb992b766786b5769ebb830ff12d8a8ad
|
brian2/spatialneuron/morphology.py
|
python
|
Cylinder.start_diameter
|
(self)
|
return self._diameter
|
The diameter at the start of each compartment in this section.
|
The diameter at the start of each compartment in this section.
|
[
"The",
"diameter",
"at",
"the",
"start",
"of",
"each",
"compartment",
"in",
"this",
"section",
"."
] |
def start_diameter(self):
"""
The diameter at the start of each compartment in this section.
"""
return self._diameter
|
[
"def",
"start_diameter",
"(",
"self",
")",
":",
"return",
"self",
".",
"_diameter"
] |
https://github.com/brian-team/brian2/blob/c212a57cb992b766786b5769ebb830ff12d8a8ad/brian2/spatialneuron/morphology.py#L2176-L2180
|
|
cloudera/impyla
|
0c736af4cad2bade9b8e313badc08ec50e81c948
|
impala/_thrift_gen/hive_metastore/ThriftHiveMetastore.py
|
python
|
Client.drop_table
|
(self, dbname, name, deleteData)
|
Parameters:
- dbname
- name
- deleteData
|
Parameters:
- dbname
- name
- deleteData
|
[
"Parameters",
":",
"-",
"dbname",
"-",
"name",
"-",
"deleteData"
] |
def drop_table(self, dbname, name, deleteData):
"""
Parameters:
- dbname
- name
- deleteData
"""
self.send_drop_table(dbname, name, deleteData)
self.recv_drop_table()
|
[
"def",
"drop_table",
"(",
"self",
",",
"dbname",
",",
"name",
",",
"deleteData",
")",
":",
"self",
".",
"send_drop_table",
"(",
"dbname",
",",
"name",
",",
"deleteData",
")",
"self",
".",
"recv_drop_table",
"(",
")"
] |
https://github.com/cloudera/impyla/blob/0c736af4cad2bade9b8e313badc08ec50e81c948/impala/_thrift_gen/hive_metastore/ThriftHiveMetastore.py#L2007-L2015
|
||
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/core/expr.py
|
python
|
Expr.__gt__
|
(self, other)
|
return C.StrictGreaterThan(self, other)
|
[] |
def __gt__(self, other):
dif = self - other
if dif.is_number and dif.is_real is False:
raise TypeError("Invalid comparison of complex %s" % dif)
if dif.is_positive is not None and \
dif.is_positive is not dif.is_nonpositive:
return sympify(dif.is_positive)
return C.StrictGreaterThan(self, other)
|
[
"def",
"__gt__",
"(",
"self",
",",
"other",
")",
":",
"dif",
"=",
"self",
"-",
"other",
"if",
"dif",
".",
"is_number",
"and",
"dif",
".",
"is_real",
"is",
"False",
":",
"raise",
"TypeError",
"(",
"\"Invalid comparison of complex %s\"",
"%",
"dif",
")",
"if",
"dif",
".",
"is_positive",
"is",
"not",
"None",
"and",
"dif",
".",
"is_positive",
"is",
"not",
"dif",
".",
"is_nonpositive",
":",
"return",
"sympify",
"(",
"dif",
".",
"is_positive",
")",
"return",
"C",
".",
"StrictGreaterThan",
"(",
"self",
",",
"other",
")"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/core/expr.py#L237-L244
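A hedged demo of the two branches above: a numeric difference evaluates to a boolean, while anything symbolic stays an unevaluated StrictGreaterThan.

# Hedged sketch using sympy's public API.
from sympy import S, Symbol

x = Symbol('x', real=True)
print(S(3) > 2)  # True -- the difference is a positive number
print(x > 1)     # x > 1 -- unevaluated StrictGreaterThan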
|
|||
redhat-imaging/imagefactory
|
176f6e045e1df049d50f33a924653128d5ab8b27
|
imgfac/rest/bottle.py
|
python
|
Bottle.close
|
(self)
|
Close the application and all installed plugins.
|
Close the application and all installed plugins.
|
[
"Close",
"the",
"application",
"and",
"all",
"installed",
"plugins",
"."
] |
def close(self):
''' Close the application and all installed plugins. '''
for plugin in self.plugins:
if hasattr(plugin, 'close'): plugin.close()
self.stopped = True
|
[
"def",
"close",
"(",
"self",
")",
":",
"for",
"plugin",
"in",
"self",
".",
"plugins",
":",
"if",
"hasattr",
"(",
"plugin",
",",
"'close'",
")",
":",
"plugin",
".",
"close",
"(",
")",
"self",
".",
"stopped",
"=",
"True"
] |
https://github.com/redhat-imaging/imagefactory/blob/176f6e045e1df049d50f33a924653128d5ab8b27/imgfac/rest/bottle.py#L747-L751
|
||
pimoroni/bme680-python
|
7afe2ef78259d89c83cacc9cbf2d55abff8c8e68
|
library/bme680/__init__.py
|
python
|
BME680._set_regs
|
(self, register, value)
|
Set one or more registers.
|
Set one or more registers.
|
[
"Set",
"one",
"or",
"more",
"registers",
"."
] |
def _set_regs(self, register, value):
"""Set one or more registers."""
if isinstance(value, int):
self._i2c.write_byte_data(self.i2c_addr, register, value)
else:
self._i2c.write_i2c_block_data(self.i2c_addr, register, value)
|
[
"def",
"_set_regs",
"(",
"self",
",",
"register",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
":",
"self",
".",
"_i2c",
".",
"write_byte_data",
"(",
"self",
".",
"i2c_addr",
",",
"register",
",",
"value",
")",
"else",
":",
"self",
".",
"_i2c",
".",
"write_i2c_block_data",
"(",
"self",
".",
"i2c_addr",
",",
"register",
",",
"value",
")"
] |
https://github.com/pimoroni/bme680-python/blob/7afe2ef78259d89c83cacc9cbf2d55abff8c8e68/library/bme680/__init__.py#L342-L347
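A minimal sketch of how the int/list branch selects between the two smbus calls (register addresses and payloads are illustrative, not taken from the BME680 datasheet; assumes a working I2C bus at the default address):

sensor = BME680()
sensor._set_regs(0x74, 0x10)          # int value  -> write_byte_data (single byte)
sensor._set_regs(0x50, [0x1F, 0x2E])  # list value -> write_i2c_block_data (block write)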
|
||
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
ansible/roles/lib_openshift_3.2/library/oadm_router.py
|
python
|
Yedit.separator
|
(self)
|
return self._separator
|
getter method for separator
|
getter method for separator
|
[
"getter",
"method",
"for",
"yaml_dict"
] |
def separator(self):
''' getter method for separator '''
return self._separator
|
[
"def",
"separator",
"(",
"self",
")",
":",
"return",
"self",
".",
"_separator"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/ansible/roles/lib_openshift_3.2/library/oadm_router.py#L506-L508
|
|
harpribot/deep-summarization
|
9b3bb1daae11a1db2386dbe4a71848714e6127f8
|
helpers/checkpoint.py
|
python
|
Checkpointer.get_last_checkpoint
|
(self)
|
return self.last_ckpt
|
Assumes that the last checkpoint has a higher checkpoint id. Checkpoint will be saved in this exact format
model_<checkpoint_id>.ckpt Eg - model_100.ckpt
:return:
|
Assumes that the last checkpoint has a higher checkpoint id. Checkpoint will be saved in this exact format
model_<checkpoint_id>.ckpt Eg - model_100.ckpt
|
[
"Assumes",
"that",
"the",
"last",
"checpoint",
"has",
"a",
"higher",
"checkpoint",
"id",
".",
"Checkpoint",
"will",
"be",
"saved",
"in",
"this",
"exact",
"format",
"model_<checkpint_id",
">",
".",
"ckpt",
"Eg",
"-",
"model_100",
".",
"ckpt"
] |
def get_last_checkpoint(self):
"""
Assumes that the last checkpoint has a higher checkpoint id. Checkpoint will be saved in this exact format
model_<checkpoint_id>.ckpt Eg - model_100.ckpt
:return:
"""
'''
'''
self.present_checkpoints = glob.glob(self.get_checkpoint_location() + '/*.ckpt')
if len(self.present_checkpoints) != 0:
present_ids = [self.__get_id(ckpt) for ckpt in self.present_checkpoints]
# sort the ID's and return the model for the last ID
present_ids.sort()
self.last_id = present_ids[-1]
self.last_ckpt = self.get_checkpoint_location() + '/model_' +\
str(self.last_id) + '.ckpt'
return self.last_ckpt
|
[
"def",
"get_last_checkpoint",
"(",
"self",
")",
":",
"'''\n\n '''",
"self",
".",
"present_checkpoints",
"=",
"glob",
".",
"glob",
"(",
"self",
".",
"get_checkpoint_location",
"(",
")",
"+",
"'/*.ckpt'",
")",
"if",
"len",
"(",
"self",
".",
"present_checkpoints",
")",
"!=",
"0",
":",
"present_ids",
"=",
"[",
"self",
".",
"__get_id",
"(",
"ckpt",
")",
"for",
"ckpt",
"in",
"self",
".",
"present_checkpoints",
"]",
"# sort the ID's and return the model for the last ID",
"present_ids",
".",
"sort",
"(",
")",
"self",
".",
"last_id",
"=",
"present_ids",
"[",
"-",
"1",
"]",
"self",
".",
"last_ckpt",
"=",
"self",
".",
"get_checkpoint_location",
"(",
")",
"+",
"'/model_'",
"+",
"str",
"(",
"self",
".",
"last_id",
")",
"+",
"'.ckpt'",
"return",
"self",
".",
"last_ckpt"
] |
https://github.com/harpribot/deep-summarization/blob/9b3bb1daae11a1db2386dbe4a71848714e6127f8/helpers/checkpoint.py#L67-L86
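A hedged sketch of the naming convention this method depends on (directory contents are hypothetical):

# files in the checkpoint directory:
#   model_2.ckpt   model_10.ckpt   model_100.ckpt
# __get_id must return the integers [2, 10, 100]; the numeric sort then makes
# get_last_checkpoint() return '<dir>/model_100.ckpt'. A lexicographic sort of
# the raw filenames would instead rank 'model_2.ckpt' last and pick the wrong file.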
|
|
ReactiveX/RxPY
|
52e72c2e691f0a8ae0f479cb3a22753e6c4f8242
|
rx/core/operators/window.py
|
python
|
_window_toggle
|
(openings: Observable,
closing_mapper: Callable[[Any], Observable]
)
|
return window_toggle
|
Projects each element of an observable sequence into zero or
more windows.
Args:
source: Source observable to project into windows.
Returns:
An observable sequence of windows.
|
Projects each element of an observable sequence into zero or
more windows.
|
[
"Projects",
"each",
"element",
"of",
"an",
"observable",
"sequence",
"into",
"zero",
"or",
"more",
"windows",
"."
] |
def _window_toggle(openings: Observable,
closing_mapper: Callable[[Any], Observable]
) -> Callable[[Observable], Observable]:
"""Projects each element of an observable sequence into zero or
more windows.
Args:
source: Source observable to project into windows.
Returns:
An observable sequence of windows.
"""
def window_toggle(source: Observable) -> Observable:
def mapper(args):
_, window = args
return window
return openings.pipe(
ops.group_join(
source,
closing_mapper,
lambda _: empty(),
),
ops.map(mapper),
)
return window_toggle
|
[
"def",
"_window_toggle",
"(",
"openings",
":",
"Observable",
",",
"closing_mapper",
":",
"Callable",
"[",
"[",
"Any",
"]",
",",
"Observable",
"]",
")",
"->",
"Callable",
"[",
"[",
"Observable",
"]",
",",
"Observable",
"]",
":",
"def",
"window_toggle",
"(",
"source",
":",
"Observable",
")",
"->",
"Observable",
":",
"def",
"mapper",
"(",
"args",
")",
":",
"_",
",",
"window",
"=",
"args",
"return",
"window",
"return",
"openings",
".",
"pipe",
"(",
"ops",
".",
"group_join",
"(",
"source",
",",
"closing_mapper",
",",
"lambda",
"_",
":",
"empty",
"(",
")",
",",
")",
",",
"ops",
".",
"map",
"(",
"mapper",
")",
",",
")",
"return",
"window_toggle"
] |
https://github.com/ReactiveX/RxPY/blob/52e72c2e691f0a8ae0f479cb3a22753e6c4f8242/rx/core/operators/window.py#L15-L41
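A hedged usage sketch through the public operator (assumes ops.window_toggle in RxPY 3 delegates to this helper; the one-second close timer is illustrative):

import rx
from rx import operators as ops
from rx.subject import Subject

source = Subject()
openings = Subject()

source.pipe(
    ops.window_toggle(openings, lambda _: rx.timer(1.0)),  # open on each signal, close 1s later
).subscribe(lambda window: window.pipe(ops.to_list()).subscribe(print))

openings.on_next(None)  # opens a window
source.on_next(1)       # lands inside the open window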
|
|
biolab/orange2
|
db40a9449cb45b507d63dcd5739b223f9cffb8e6
|
Orange/OrangeCanvas/gui/utils.py
|
python
|
gradient_darker
|
(grad, factor)
|
return new_grad
|
Return a copy of the QGradient darkened by factor.
.. note:: Only QLinearGradient and QRadialGradient are supported.
|
Return a copy of the QGradient darkened by factor.
|
[
"Return",
"a",
"copy",
"of",
"the",
"QGradient",
"darkened",
"by",
"factor",
"."
] |
def gradient_darker(grad, factor):
"""Return a copy of the QGradient darkened by factor.
.. note:: Only QLinearGradient and QRadialGradient are supported.
"""
if type(grad) is QGradient:
if grad.type() == QGradient.LinearGradient:
grad = sip.cast(grad, QLinearGradient)
elif grad.type() == QGradient.RadialGradient:
grad = sip.cast(grad, QRadialGradient)
if isinstance(grad, QLinearGradient):
new_grad = QLinearGradient(grad.start(), grad.finalStop())
elif isinstance(grad, QRadialGradient):
new_grad = QRadialGradient(grad.center(), grad.radius(),
grad.focalPoint())
else:
raise TypeError
new_grad.setCoordinateMode(grad.coordinateMode())
for pos, color in grad.stops():
new_grad.setColorAt(pos, color.darker(factor))
return new_grad
|
[
"def",
"gradient_darker",
"(",
"grad",
",",
"factor",
")",
":",
"if",
"type",
"(",
"grad",
")",
"is",
"QGradient",
":",
"if",
"grad",
".",
"type",
"(",
")",
"==",
"QGradient",
".",
"LinearGradient",
":",
"grad",
"=",
"sip",
".",
"cast",
"(",
"grad",
",",
"QLinearGradient",
")",
"elif",
"grad",
".",
"type",
"(",
")",
"==",
"QGradient",
".",
"RadialGradient",
":",
"grad",
"=",
"sip",
".",
"cast",
"(",
"grad",
",",
"QRadialGradient",
")",
"if",
"isinstance",
"(",
"grad",
",",
"QLinearGradient",
")",
":",
"new_grad",
"=",
"QLinearGradient",
"(",
"grad",
".",
"start",
"(",
")",
",",
"grad",
".",
"finalStop",
"(",
")",
")",
"elif",
"isinstance",
"(",
"grad",
",",
"QRadialGradient",
")",
":",
"new_grad",
"=",
"QRadialGradient",
"(",
"grad",
".",
"center",
"(",
")",
",",
"grad",
".",
"radius",
"(",
")",
",",
"grad",
".",
"focalPoint",
"(",
")",
")",
"else",
":",
"raise",
"TypeError",
"new_grad",
".",
"setCoordinateMode",
"(",
"grad",
".",
"coordinateMode",
"(",
")",
")",
"for",
"pos",
",",
"color",
"in",
"grad",
".",
"stops",
"(",
")",
":",
"new_grad",
".",
"setColorAt",
"(",
"pos",
",",
"color",
".",
"darker",
"(",
"factor",
")",
")",
"return",
"new_grad"
] |
https://github.com/biolab/orange2/blob/db40a9449cb45b507d63dcd5739b223f9cffb8e6/Orange/OrangeCanvas/gui/utils.py#L141-L166
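A minimal sketch, assuming the PyQt4 environment this module imports from (stop colors and factor are illustrative):

grad = QLinearGradient(0, 0, 0, 100)
grad.setColorAt(0.0, QColor(255, 255, 255))
grad.setColorAt(1.0, QColor(200, 200, 200))
darker = gradient_darker(grad, 150)  # QColor.darker(150) divides each RGB channel by 1.5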
|
|
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_adm_ca_server_cert.py
|
python
|
CAServerCert.get
|
(self)
|
return None
|
get the current cert file
If a file exists by the same name in the specified location then the cert exists
|
get the current cert file
|
[
"get",
"the",
"current",
"cert",
"file"
] |
def get(self):
'''get the current cert file
If a file exists by the same name in the specified location then the cert exists
'''
cert = self.config.config_options['cert']['value']
if cert and os.path.exists(cert):
return open(cert).read()
return None
|
[
"def",
"get",
"(",
"self",
")",
":",
"cert",
"=",
"self",
".",
"config",
".",
"config_options",
"[",
"'cert'",
"]",
"[",
"'value'",
"]",
"if",
"cert",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"cert",
")",
":",
"return",
"open",
"(",
"cert",
")",
".",
"read",
"(",
")",
"return",
"None"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_adm_ca_server_cert.py#L1510-L1519
|
|
gramps-project/gramps
|
04d4651a43eb210192f40a9f8c2bad8ee8fa3753
|
gramps/gen/db/generic.py
|
python
|
DbGeneric.get_event_gramps_ids
|
(self)
|
return self._get_gramps_ids(EVENT_KEY)
|
Return a list of Gramps IDs, one ID for each Event in the
database.
|
Return a list of Gramps IDs, one ID for each Event in the
database.
|
[
"Return",
"a",
"list",
"of",
"Gramps",
"IDs",
"one",
"ID",
"for",
"each",
"Event",
"in",
"the",
"database",
"."
] |
def get_event_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Event in the
database.
"""
return self._get_gramps_ids(EVENT_KEY)
|
[
"def",
"get_event_gramps_ids",
"(",
"self",
")",
":",
"return",
"self",
".",
"_get_gramps_ids",
"(",
"EVENT_KEY",
")"
] |
https://github.com/gramps-project/gramps/blob/04d4651a43eb210192f40a9f8c2bad8ee8fa3753/gramps/gen/db/generic.py#L1219-L1224
|
|
googlearchive/simian
|
fb9c43946ff7ba29be417068d6447cfc0adfe9ef
|
src/simian/mac/models/munki.py
|
python
|
PackageInfo._SetDescription
|
(self, desc)
|
Sets the description to the plist, preserving any avg duration text.
|
Sets the description to the plist, preserving any avg duration text.
|
[
"Sets",
"the",
"description",
"to",
"the",
"plist",
"preserving",
"any",
"avg",
"duration",
"text",
"."
] |
def _SetDescription(self, desc):
"""Sets the description to the plist, preserving any avg duration text."""
if self.AVG_DURATION_REGEX.search(desc):
# If the new description has the avg duration text, just keep it all.
self.plist['description'] = desc
else:
# Otherwise append the old avg duration text to the new description.
match = self.AVG_DURATION_REGEX.search(self.plist.get('description', ''))
if match:
self.plist['description'] = '%s\n\n%s' % (desc, match.group(0))
else:
self.plist['description'] = desc
# Update the plist property with the new description.
self.plist = self.plist.GetXml()
|
[
"def",
"_SetDescription",
"(",
"self",
",",
"desc",
")",
":",
"if",
"self",
".",
"AVG_DURATION_REGEX",
".",
"search",
"(",
"desc",
")",
":",
"# If the new description has the avg duration text, just keep it all.",
"self",
".",
"plist",
"[",
"'description'",
"]",
"=",
"desc",
"else",
":",
"# Otherwise append the old avg duration text to the new description.",
"match",
"=",
"self",
".",
"AVG_DURATION_REGEX",
".",
"search",
"(",
"self",
".",
"plist",
".",
"get",
"(",
"'description'",
",",
"''",
")",
")",
"if",
"match",
":",
"self",
".",
"plist",
"[",
"'description'",
"]",
"=",
"'%s\\n\\n%s'",
"%",
"(",
"desc",
",",
"match",
".",
"group",
"(",
"0",
")",
")",
"else",
":",
"self",
".",
"plist",
"[",
"'description'",
"]",
"=",
"desc",
"# Update the plist property with the new description.",
"self",
".",
"plist",
"=",
"self",
".",
"plist",
".",
"GetXml",
"(",
")"
] |
https://github.com/googlearchive/simian/blob/fb9c43946ff7ba29be417068d6447cfc0adfe9ef/src/simian/mac/models/munki.py#L314-L328
|
||
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/whoosh/src/whoosh/automata/fst.py
|
python
|
IntersectionNode.edge
|
(self, key)
|
[] |
def edge(self, key):
a = self.a
b = self.b
if key in a and key in b:
return IntersectionNode(a.edge(key), b.edge(key))
|
[
"def",
"edge",
"(",
"self",
",",
"key",
")",
":",
"a",
"=",
"self",
".",
"a",
"b",
"=",
"self",
".",
"b",
"if",
"key",
"in",
"a",
"and",
"key",
"in",
"b",
":",
"return",
"IntersectionNode",
"(",
"a",
".",
"edge",
"(",
"key",
")",
",",
"b",
".",
"edge",
"(",
"key",
")",
")"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/whoosh/src/whoosh/automata/fst.py#L461-L465
|
||||
autotest/autotest
|
4614ae5f550cc888267b9a419e4b90deb54f8fae
|
client/job.py
|
python
|
base_client_job.install_pkg
|
(self, name, pkg_type, install_dir)
|
This method is a simple wrapper around the actual package
installation method in the Packager class. This is used
internally by the profilers, deps and tests code.
:param name: name of the package (ex: sleeptest, dbench etc.)
:param pkg_type: Type of the package (ex: test, dep etc.)
:param install_dir: The directory into which the source is actually
untarred. (ex: client/profilers/<name> for profilers)
|
This method is a simple wrapper around the actual package
installation method in the Packager class. This is used
internally by the profilers, deps and tests code.
|
[
"This",
"method",
"is",
"a",
"simple",
"wrapper",
"around",
"the",
"actual",
"package",
"installation",
"method",
"in",
"the",
"Packager",
"class",
".",
"This",
"is",
"used",
"internally",
"by",
"the",
"profilers",
"deps",
"and",
"tests",
"code",
"."
] |
def install_pkg(self, name, pkg_type, install_dir):
'''
This method is a simple wrapper around the actual package
installation method in the Packager class. This is used
internally by the profilers, deps and tests code.
:param name: name of the package (ex: sleeptest, dbench etc.)
:param pkg_type: Type of the package (ex: test, dep etc.)
:param install_dir: The directory into which the source is actually
untarred. (ex: client/profilers/<name> for profilers)
'''
if self.pkgmgr.repositories:
self.pkgmgr.install_pkg(name, pkg_type, self.pkgdir, install_dir)
|
[
"def",
"install_pkg",
"(",
"self",
",",
"name",
",",
"pkg_type",
",",
"install_dir",
")",
":",
"if",
"self",
".",
"pkgmgr",
".",
"repositories",
":",
"self",
".",
"pkgmgr",
".",
"install_pkg",
"(",
"name",
",",
"pkg_type",
",",
"self",
".",
"pkgdir",
",",
"install_dir",
")"
] |
https://github.com/autotest/autotest/blob/4614ae5f550cc888267b9a419e4b90deb54f8fae/client/job.py#L429-L441
|
||
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/crypto/mq/sr.py
|
python
|
SR_gf2n.shift_rows_matrix
|
(self)
|
return shift_rows
|
Return the ``ShiftRows`` matrix.
EXAMPLES::
sage: sr = mq.SR(1, 2, 2, 4)
sage: s = sr.random_state_array()
sage: r1 = sr.shift_rows(s)
sage: r2 = sr.state_array( sr.shift_rows_matrix() * sr.vector(s) )
sage: r1 == r2
True
|
Return the ``ShiftRows`` matrix.
|
[
"Return",
"the",
"ShiftRows",
"matrix",
"."
] |
def shift_rows_matrix(self):
"""
Return the ``ShiftRows`` matrix.
EXAMPLES::
sage: sr = mq.SR(1, 2, 2, 4)
sage: s = sr.random_state_array()
sage: r1 = sr.shift_rows(s)
sage: r2 = sr.state_array( sr.shift_rows_matrix() * sr.vector(s) )
sage: r1 == r2
True
"""
e = self.e
r = self.r
c = self.c
k = self.base_ring()
bs = r*c*e
shift_rows = Matrix(k, bs, bs)
I = MatrixSpace(k, e, e)(1)
for x in range(0, c):
for y in range(0, r):
_r = ((x*r)+y) * e
_c = (((x*r)+((r+1)*y)) * e) % bs
self._insert_matrix_into_matrix(shift_rows, I, _r, _c)
return shift_rows
|
[
"def",
"shift_rows_matrix",
"(",
"self",
")",
":",
"e",
"=",
"self",
".",
"e",
"r",
"=",
"self",
".",
"r",
"c",
"=",
"self",
".",
"c",
"k",
"=",
"self",
".",
"base_ring",
"(",
")",
"bs",
"=",
"r",
"*",
"c",
"*",
"e",
"shift_rows",
"=",
"Matrix",
"(",
"k",
",",
"bs",
",",
"bs",
")",
"I",
"=",
"MatrixSpace",
"(",
"k",
",",
"e",
",",
"e",
")",
"(",
"1",
")",
"for",
"x",
"in",
"range",
"(",
"0",
",",
"c",
")",
":",
"for",
"y",
"in",
"range",
"(",
"0",
",",
"r",
")",
":",
"_r",
"=",
"(",
"(",
"x",
"*",
"r",
")",
"+",
"y",
")",
"*",
"e",
"_c",
"=",
"(",
"(",
"(",
"x",
"*",
"r",
")",
"+",
"(",
"(",
"r",
"+",
"1",
")",
"*",
"y",
")",
")",
"*",
"e",
")",
"%",
"bs",
"self",
".",
"_insert_matrix_into_matrix",
"(",
"shift_rows",
",",
"I",
",",
"_r",
",",
"_c",
")",
"return",
"shift_rows"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/crypto/mq/sr.py#L2261-L2287
|
|
Robot-Will/Stino
|
a94831cd1bf40a59587a7b6cc2e9b5c4306b1bf2
|
libs/serial/serialposix.py
|
python
|
Serial.set_output_flow_control
|
(self, enable=True)
|
\
Manually control flow of outgoing data - when hardware or software flow
control is enabled.
WARNING: this function is not portable to different platforms!
|
\
Manually control flow of outgoing data - when hardware or software flow
control is enabled.
WARNING: this function is not portable to different platforms!
|
[
"\\",
"Manually",
"control",
"flow",
"of",
"outgoing",
"data",
"-",
"when",
"hardware",
"or",
"software",
"flow",
"control",
"is",
"enabled",
".",
"WARNING",
":",
"this",
"function",
"is",
"not",
"portable",
"to",
"different",
"platforms!"
] |
def set_output_flow_control(self, enable=True):
"""\
Manually control flow of outgoing data - when hardware or software flow
control is enabled.
WARNING: this function is not portable to different platforms!
"""
if not self.is_open:
raise portNotOpenError
if enable:
termios.tcflow(self.fd, termios.TCOON)
else:
termios.tcflow(self.fd, termios.TCOOFF)
|
[
"def",
"set_output_flow_control",
"(",
"self",
",",
"enable",
"=",
"True",
")",
":",
"if",
"not",
"self",
".",
"is_open",
":",
"raise",
"portNotOpenError",
"if",
"enable",
":",
"termios",
".",
"tcflow",
"(",
"self",
".",
"fd",
",",
"termios",
".",
"TCOON",
")",
"else",
":",
"termios",
".",
"tcflow",
"(",
"self",
".",
"fd",
",",
"termios",
".",
"TCOOFF",
")"
] |
https://github.com/Robot-Will/Stino/blob/a94831cd1bf40a59587a7b6cc2e9b5c4306b1bf2/libs/serial/serialposix.py#L679-L690
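A hedged sketch (the device path is hypothetical; the port must already be open, otherwise portNotOpenError is raised):

import serial

ser = serial.Serial('/dev/ttyUSB0', 115200)
ser.set_output_flow_control(False)  # suspend transmission (termios TCOOFF)
ser.set_output_flow_control(True)   # resume transmission (termios TCOON)
ser.close()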
|
||
guildai/guildai
|
1665985a3d4d788efc1a3180ca51cc417f71ca78
|
guild/external/setuptools/_vendor/pyparsing.py
|
python
|
tokenMap
|
(func, *args)
|
return pa
|
Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
args are passed, they are forwarded to the given function as additional arguments after
the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
parsed data to an integer using base 16.
Example (compare the last to the example in L{ParserElement.transformString})::
hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
hex_ints.runTests('''
00 11 22 aa FF 0a 0d 1a
''')
upperword = Word(alphas).setParseAction(tokenMap(str.upper))
OneOrMore(upperword).runTests('''
my kingdom for a horse
''')
wd = Word(alphas).setParseAction(tokenMap(str.title))
OneOrMore(wd).setParseAction(' '.join).runTests('''
now is the winter of our discontent made glorious summer by this sun of york
''')
prints::
00 11 22 aa FF 0a 0d 1a
[0, 17, 34, 170, 255, 10, 13, 26]
my kingdom for a horse
['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
now is the winter of our discontent made glorious summer by this sun of york
['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
|
Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
args are passed, they are forwarded to the given function as additional arguments after
the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
parsed data to an integer using base 16.
|
[
"Helper",
"to",
"define",
"a",
"parse",
"action",
"by",
"mapping",
"a",
"function",
"to",
"all",
"elements",
"of",
"a",
"ParseResults",
"list",
".",
"If",
"any",
"additional",
"args",
"are",
"passed",
"they",
"are",
"forwarded",
"to",
"the",
"given",
"function",
"as",
"additional",
"arguments",
"after",
"the",
"token",
"as",
"in",
"C",
"{",
"hex_integer",
"=",
"Word",
"(",
"hexnums",
")",
".",
"setParseAction",
"(",
"tokenMap",
"(",
"int",
"16",
"))",
"}",
"which",
"will",
"convert",
"the",
"parsed",
"data",
"to",
"an",
"integer",
"using",
"base",
"16",
"."
] |
def tokenMap(func, *args):
"""
Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
args are passed, they are forwarded to the given function as additional arguments after
the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
parsed data to an integer using base 16.
Example (compare the last to the example in L{ParserElement.transformString})::
hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
hex_ints.runTests('''
00 11 22 aa FF 0a 0d 1a
''')
upperword = Word(alphas).setParseAction(tokenMap(str.upper))
OneOrMore(upperword).runTests('''
my kingdom for a horse
''')
wd = Word(alphas).setParseAction(tokenMap(str.title))
OneOrMore(wd).setParseAction(' '.join).runTests('''
now is the winter of our discontent made glorious summer by this sun of york
''')
prints::
00 11 22 aa FF 0a 0d 1a
[0, 17, 34, 170, 255, 10, 13, 26]
my kingdom for a horse
['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
now is the winter of our discontent made glorious summer by this sun of york
['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
"""
def pa(s,l,t):
return [func(tokn, *args) for tokn in t]
try:
func_name = getattr(func, '__name__',
getattr(func, '__class__').__name__)
except Exception:
func_name = str(func)
pa.__name__ = func_name
return pa
|
[
"def",
"tokenMap",
"(",
"func",
",",
"*",
"args",
")",
":",
"def",
"pa",
"(",
"s",
",",
"l",
",",
"t",
")",
":",
"return",
"[",
"func",
"(",
"tokn",
",",
"*",
"args",
")",
"for",
"tokn",
"in",
"t",
"]",
"try",
":",
"func_name",
"=",
"getattr",
"(",
"func",
",",
"'__name__'",
",",
"getattr",
"(",
"func",
",",
"'__class__'",
")",
".",
"__name__",
")",
"except",
"Exception",
":",
"func_name",
"=",
"str",
"(",
"func",
")",
"pa",
".",
"__name__",
"=",
"func_name",
"return",
"pa"
] |
https://github.com/guildai/guildai/blob/1665985a3d4d788efc1a3180ca51cc417f71ca78/guild/external/setuptools/_vendor/pyparsing.py#L4825-L4867
|
|
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/bosch_shc/cover.py
|
python
|
ShutterControlCover.close_cover
|
(self, **kwargs)
|
Close cover.
|
Close cover.
|
[
"Close",
"cover",
"."
] |
def close_cover(self, **kwargs):
"""Close cover."""
self._device.level = 0.0
|
[
"def",
"close_cover",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_device",
".",
"level",
"=",
"0.0"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/bosch_shc/cover.py#L86-L88
|
||
mlflow/mlflow
|
364aca7daf0fcee3ec407ae0b1b16d9cb3085081
|
mlflow/entities/run_info.py
|
python
|
RunInfo._copy_with_overrides
|
(self, status=None, end_time=None, lifecycle_stage=None)
|
return RunInfo.from_proto(proto)
|
A copy of the RunInfo with certain attributes modified.
|
A copy of the RunInfo with certain attributes modified.
|
[
"A",
"copy",
"of",
"the",
"RunInfo",
"with",
"certain",
"attributes",
"modified",
"."
] |
def _copy_with_overrides(self, status=None, end_time=None, lifecycle_stage=None):
"""A copy of the RunInfo with certain attributes modified."""
proto = self.to_proto()
if status:
proto.status = status
if end_time:
proto.end_time = end_time
if lifecycle_stage:
proto.lifecycle_stage = lifecycle_stage
return RunInfo.from_proto(proto)
|
[
"def",
"_copy_with_overrides",
"(",
"self",
",",
"status",
"=",
"None",
",",
"end_time",
"=",
"None",
",",
"lifecycle_stage",
"=",
"None",
")",
":",
"proto",
"=",
"self",
".",
"to_proto",
"(",
")",
"if",
"status",
":",
"proto",
".",
"status",
"=",
"status",
"if",
"end_time",
":",
"proto",
".",
"end_time",
"=",
"end_time",
"if",
"lifecycle_stage",
":",
"proto",
".",
"lifecycle_stage",
"=",
"lifecycle_stage",
"return",
"RunInfo",
".",
"from_proto",
"(",
"proto",
")"
] |
https://github.com/mlflow/mlflow/blob/364aca7daf0fcee3ec407ae0b1b16d9cb3085081/mlflow/entities/run_info.py#L84-L93
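A hedged sketch of the copy-via-proto round trip (field values are illustrative; RunStatus is the enum mlflow.entities exposes for the proto status field):

from mlflow.entities import RunStatus

finished = run_info._copy_with_overrides(
    status=RunStatus.FINISHED,  # proto enum value
    end_time=1612137600000,     # epoch millis, illustrative
)
# run_info itself is unchanged; only the returned copy carries the overrides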
|
|
andresriancho/w3af
|
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
|
w3af/plugins/attack/db/sqlmap/thirdparty/xdot/xdot.py
|
python
|
Edge.__init__
|
(self, src, dst, points, shapes)
|
[] |
def __init__(self, src, dst, points, shapes):
Element.__init__(self, shapes)
self.src = src
self.dst = dst
self.points = points
|
[
"def",
"__init__",
"(",
"self",
",",
"src",
",",
"dst",
",",
"points",
",",
"shapes",
")",
":",
"Element",
".",
"__init__",
"(",
"self",
",",
"shapes",
")",
"self",
".",
"src",
"=",
"src",
"self",
".",
"dst",
"=",
"dst",
"self",
".",
"points",
"=",
"points"
] |
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/attack/db/sqlmap/thirdparty/xdot/xdot.py#L419-L423
|
||||
francisck/DanderSpritz_docs
|
86bb7caca5a957147f120b18bb5c31f299914904
|
Python/Core/Lib/logging/__init__.py
|
python
|
_removeHandlerRef
|
(wr)
|
return
|
Remove a handler reference from the internal cleanup list.
|
Remove a handler reference from the internal cleanup list.
|
[
"Remove",
"a",
"handler",
"reference",
"from",
"the",
"internal",
"cleanup",
"list",
"."
] |
def _removeHandlerRef(wr):
"""
Remove a handler reference from the internal cleanup list.
"""
if _acquireLock is not None:
_acquireLock()
try:
if wr in _handlerList:
_handlerList.remove(wr)
finally:
_releaseLock()
return
|
[
"def",
"_removeHandlerRef",
"(",
"wr",
")",
":",
"if",
"_acquireLock",
"is",
"not",
"None",
":",
"_acquireLock",
"(",
")",
"try",
":",
"if",
"wr",
"in",
"_handlerList",
":",
"_handlerList",
".",
"remove",
"(",
"wr",
")",
"finally",
":",
"_releaseLock",
"(",
")",
"return"
] |
https://github.com/francisck/DanderSpritz_docs/blob/86bb7caca5a957147f120b18bb5c31f299914904/Python/Core/Lib/logging/__init__.py#L526-L538
|
|
jiangxinyang227/bert-for-task
|
3e7aed9e3c757ebc22aabfd4f3fb7b4cd81b010a
|
albert_task/ner_task/bilstm_crf.py
|
python
|
BiLSTMCRF.get_pred
|
(self, new_logits, trans_params=None)
|
Get the predicted values
:param new_logits:
:param trans_params:
:return:
|
Get the predicted values
:param new_logits:
:param trans_params:
:return:
|
[
"得到预测值",
":",
"param",
"new_logits",
":",
":",
"param",
"trans_params",
":",
":",
"return",
":"
] |
def get_pred(self, new_logits, trans_params=None):
"""
Get the predicted values
:param new_logits:
:param trans_params:
:return:
"""
with tf.name_scope("maskedOutput"):
viterbi_sequence, viterbi_score = tf.contrib.crf.crf_decode(new_logits, trans_params,
self.sequence_lens)
return viterbi_sequence
|
[
"def",
"get_pred",
"(",
"self",
",",
"new_logits",
",",
"trans_params",
"=",
"None",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"\"maskedOutput\"",
")",
":",
"viterbi_sequence",
",",
"viterbi_score",
"=",
"tf",
".",
"contrib",
".",
"crf",
".",
"crf_decode",
"(",
"new_logits",
",",
"trans_params",
",",
"self",
".",
"sequence_lens",
")",
"return",
"viterbi_sequence"
] |
https://github.com/jiangxinyang227/bert-for-task/blob/3e7aed9e3c757ebc22aabfd4f3fb7b4cd81b010a/albert_task/ner_task/bilstm_crf.py#L123-L133
|
||
pypa/pipenv
|
b21baade71a86ab3ee1429f71fbc14d4f95fb75d
|
pipenv/project.py
|
python
|
Project._parse_pipfile
|
(self, contents)
|
[] |
def _parse_pipfile(self, contents):
# type: (str) -> Union[tomlkit.toml_document.TOMLDocument, TPipfile]
try:
return tomlkit.parse(contents)
except Exception:
# We lose comments here, but it's for the best.
# Fallback to toml parser, for large files.
return toml.loads(contents)
|
[
"def",
"_parse_pipfile",
"(",
"self",
",",
"contents",
")",
":",
"# type: (str) -> Union[tomlkit.toml_document.TOMLDocument, TPipfile]",
"try",
":",
"return",
"tomlkit",
".",
"parse",
"(",
"contents",
")",
"except",
"Exception",
":",
"# We lose comments here, but it's for the best.)",
"# Fallback to toml parser, for large files.",
"return",
"toml",
".",
"loads",
"(",
"contents",
")"
] |
https://github.com/pypa/pipenv/blob/b21baade71a86ab3ee1429f71fbc14d4f95fb75d/pipenv/project.py#L544-L551
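A hedged sketch of why tomlkit is tried first (contents are illustrative): tomlkit returns a TOMLDocument that round-trips comments and formatting, while the toml fallback taken on parse failure returns a plain dict and drops them:

import tomlkit
import toml

contents = '[packages]\nrequests = "*"  # loose pin\n'
doc = tomlkit.parse(contents)   # comment survives in tomlkit.dumps(doc)
plain = toml.loads(contents)    # plain dict; the comment is gone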
|
||||
pyansys/pymapdl
|
c07291fc062b359abf0e92b95a92d753a95ef3d7
|
ansys/mapdl/core/_commands/solution/dynamic_options.py
|
python
|
DynamicOptions.betad
|
(self, value="", **kwargs)
|
return self.run(command, **kwargs)
|
Defines the stiffness matrix multiplier for damping.
APDL Command: BETAD
Parameters
----------
value
Stiffness matrix multiplier for damping.
Notes
-----
This command defines the stiffness matrix multiplier β used to form the
viscous damping matrix [C] = β [K] where [K] is the stiffness matrix.
Values of β may also be input as a material property (use the BETD
label on the MP command). If BETD is included, the BETD value is added
to the BETAD value as appropriate (see Damping Matrices in the
Mechanical APDL Theory Reference). Damping is not used in the static
(ANTYPE,STATIC) or buckling (ANTYPE,BUCKLE) analyses.
This command is also valid in PREP7.
|
Defines the stiffness matrix multiplier for damping.
|
[
"Defines",
"the",
"stiffness",
"matrix",
"multiplier",
"for",
"damping",
"."
] |
def betad(self, value="", **kwargs):
"""Defines the stiffness matrix multiplier for damping.
APDL Command: BETAD
Parameters
----------
value
Stiffness matrix multiplier for damping.
Notes
-----
This command defines the stiffness matrix multiplier β used to form the
viscous damping matrix [C] = β [K] where [K] is the stiffness matrix.
Values of β may also be input as a material property (use the BETD
label on the MP command). If BETD is included, the BETD value is added
to the BETAD value as appropriate (see Damping Matrices in the
Mechanical APDL Theory Reference). Damping is not used in the static
(ANTYPE,STATIC) or buckling (ANTYPE,BUCKLE) analyses.
This command is also valid in PREP7.
"""
command = f"BETAD,{value}"
return self.run(command, **kwargs)
|
[
"def",
"betad",
"(",
"self",
",",
"value",
"=",
"\"\"",
",",
"*",
"*",
"kwargs",
")",
":",
"command",
"=",
"f\"BETAD,{value}\"",
"return",
"self",
".",
"run",
"(",
"command",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/pyansys/pymapdl/blob/c07291fc062b359abf0e92b95a92d753a95ef3d7/ansys/mapdl/core/_commands/solution/dynamic_options.py#L28-L52
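A hedged usage sketch (requires a licensed local MAPDL instance; the damping value is illustrative):

from ansys.mapdl.core import launch_mapdl

mapdl = launch_mapdl()
mapdl.prep7()        # BETAD is also valid in PREP7, per the notes above
mapdl.betad(0.0002)  # issues "BETAD,0.0002"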
|
|
TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
|
5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e
|
tensorflow_dl_models/research/object_detection/core/preprocessor.py
|
python
|
random_pixel_value_scale
|
(image, minval=0.9, maxval=1.1, seed=None)
|
return image
|
Scales each value in the pixels of the image.
This function scales each pixel independent of the other ones.
For each value in image tensor, draws a random number between
minval and maxval and multiplies the values with them.
Args:
image: rank 3 float32 tensor contains 1 image -> [height, width, channels]
with pixel values varying between [0, 1].
minval: lower ratio of scaling pixel values.
maxval: upper ratio of scaling pixel values.
seed: random seed.
Returns:
image: image which is the same shape as input image.
|
Scales each value in the pixels of the image.
|
[
"Scales",
"each",
"value",
"in",
"the",
"pixels",
"of",
"the",
"image",
"."
] |
def random_pixel_value_scale(image, minval=0.9, maxval=1.1, seed=None):
"""Scales each value in the pixels of the image.
This function scales each pixel independent of the other ones.
For each value in image tensor, draws a random number between
minval and maxval and multiplies the values with them.
Args:
image: rank 3 float32 tensor contains 1 image -> [height, width, channels]
with pixel values varying between [0, 1].
minval: lower ratio of scaling pixel values.
maxval: upper ratio of scaling pixel values.
seed: random seed.
Returns:
image: image which is the same shape as input image.
"""
with tf.name_scope('RandomPixelValueScale', values=[image]):
color_coef = tf.random_uniform(
tf.shape(image),
minval=minval,
maxval=maxval,
dtype=tf.float32,
seed=seed)
image = tf.multiply(image, color_coef)
image = tf.clip_by_value(image, 0.0, 1.0)
return image
|
[
"def",
"random_pixel_value_scale",
"(",
"image",
",",
"minval",
"=",
"0.9",
",",
"maxval",
"=",
"1.1",
",",
"seed",
"=",
"None",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"'RandomPixelValueScale'",
",",
"values",
"=",
"[",
"image",
"]",
")",
":",
"color_coef",
"=",
"tf",
".",
"random_uniform",
"(",
"tf",
".",
"shape",
"(",
"image",
")",
",",
"minval",
"=",
"minval",
",",
"maxval",
"=",
"maxval",
",",
"dtype",
"=",
"tf",
".",
"float32",
",",
"seed",
"=",
"seed",
")",
"image",
"=",
"tf",
".",
"multiply",
"(",
"image",
",",
"color_coef",
")",
"image",
"=",
"tf",
".",
"clip_by_value",
"(",
"image",
",",
"0.0",
",",
"1.0",
")",
"return",
"image"
] |
https://github.com/TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials/blob/5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e/tensorflow_dl_models/research/object_detection/core/preprocessor.py#L565-L592
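A hedged TF 1.x-style sketch (the function uses tf.random_uniform, so graph mode is assumed; the placeholder shape is illustrative):

import tensorflow as tf

image = tf.placeholder(tf.float32, shape=[None, None, 3])  # values in [0, 1]
scaled = random_pixel_value_scale(image, minval=0.9, maxval=1.1, seed=7)
# each pixel value is independently multiplied by a factor drawn from
# [0.9, 1.1) and the result is clipped back into [0, 1]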
|
|
shapely/shapely
|
9258e6dd4dcca61699d69c2a5853a486b132ed86
|
shapely/geometry/base.py
|
python
|
BaseGeometry.distance
|
(self, other)
|
return float(shapely.distance(self, other))
|
Unitless distance to other geometry (float)
|
Unitless distance to other geometry (float)
|
[
"Unitless",
"distance",
"to",
"other",
"geometry",
"(",
"float",
")"
] |
def distance(self, other):
"""Unitless distance to other geometry (float)"""
return float(shapely.distance(self, other))
|
[
"def",
"distance",
"(",
"self",
",",
"other",
")",
":",
"return",
"float",
"(",
"shapely",
".",
"distance",
"(",
"self",
",",
"other",
")",
")"
] |
https://github.com/shapely/shapely/blob/9258e6dd4dcca61699d69c2a5853a486b132ed86/shapely/geometry/base.py#L236-L238
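Usage is straightforward; for planar geometries this reduces to Euclidean distance:

from shapely.geometry import Point

Point(0, 0).distance(Point(3, 4))  # 5.0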
|
|
dmlc/dgl
|
8d14a739bc9e446d6c92ef83eafe5782398118de
|
python/dgl/nn/pytorch/conv/gmmconv.py
|
python
|
GMMConv.set_allow_zero_in_degree
|
(self, set_value)
|
r"""
Description
-----------
Set allow_zero_in_degree flag.
Parameters
----------
set_value : bool
The value to be set to the flag.
|
r"""
|
[
"r"
] |
def set_allow_zero_in_degree(self, set_value):
r"""
Description
-----------
Set allow_zero_in_degree flag.
Parameters
----------
set_value : bool
The value to be set to the flag.
"""
self._allow_zero_in_degree = set_value
|
[
"def",
"set_allow_zero_in_degree",
"(",
"self",
",",
"set_value",
")",
":",
"self",
".",
"_allow_zero_in_degree",
"=",
"set_value"
] |
https://github.com/dmlc/dgl/blob/8d14a739bc9e446d6c92ef83eafe5782398118de/python/dgl/nn/pytorch/conv/gmmconv.py#L174-L186
|
||
jodal/pyspotify
|
770aee08de274951b63e60bdd8835188b58e7862
|
spotify/connection.py
|
python
|
Connection.allow_network
|
(self)
|
return self._allow_network
|
Whether or not network access is allowed at all.
Defaults to :class:`True`. Setting this to :class:`False` turns on
offline mode.
|
Whether or not network access is allowed at all.
|
[
"Whether",
"or",
"not",
"network",
"access",
"is",
"allowed",
"at",
"all",
"."
] |
def allow_network(self):
"""Whether or not network access is allowed at all.
Defaults to :class:`True`. Setting this to :class:`False` turns on
offline mode.
"""
return self._allow_network
|
[
"def",
"allow_network",
"(",
"self",
")",
":",
"return",
"self",
".",
"_allow_network"
] |
https://github.com/jodal/pyspotify/blob/770aee08de274951b63e60bdd8835188b58e7862/spotify/connection.py#L76-L82
|
|
zaxlct/imooc-django
|
daf1ced745d3d21989e8191b658c293a511b37fd
|
extra_apps/xadmin/views/list.py
|
python
|
ResultItem.tagattrs
|
(self)
|
return mark_safe(
'%s%s' % ((self.tag_attrs and ' '.join(self.tag_attrs) or ''),
(self.classes and (' class="%s"' % ' '.join(self.classes)) or '')))
|
[] |
def tagattrs(self):
return mark_safe(
'%s%s' % ((self.tag_attrs and ' '.join(self.tag_attrs) or ''),
(self.classes and (' class="%s"' % ' '.join(self.classes)) or '')))
|
[
"def",
"tagattrs",
"(",
"self",
")",
":",
"return",
"mark_safe",
"(",
"'%s%s'",
"%",
"(",
"(",
"self",
".",
"tag_attrs",
"and",
"' '",
".",
"join",
"(",
"self",
".",
"tag_attrs",
")",
"or",
"''",
")",
",",
"(",
"self",
".",
"classes",
"and",
"(",
"' class=\"%s\"'",
"%",
"' '",
".",
"join",
"(",
"self",
".",
"classes",
")",
")",
"or",
"''",
")",
")",
")"
] |
https://github.com/zaxlct/imooc-django/blob/daf1ced745d3d21989e8191b658c293a511b37fd/extra_apps/xadmin/views/list.py#L78-L81
|
|||
memray/seq2seq-keyphrase
|
9145c63ebdc4c3bc431f8091dc52547a46804012
|
emolga/models/encdec.py
|
python
|
Encoder.build_encoder
|
(self, source, context=None, return_embed=False, return_sequence=False)
|
return X_out
|
Build the Encoder Computational Graph
For the default configurations (with attention)
with_context=False, return_sequence=True, return_embed=True
Input:
source : source text, a list of indexes [nb_sample * max_len]
context: None
Return:
For Attention model:
return_sequence=True: to return the embedding at each time, not just the end state
return_embed=True:
X_out: a list of vectors [nb_sample, max_len, 2*enc_hidden_dim], encoding of each time state (concatenate both forward and backward RNN)
X: embedding of text X [nb_sample, max_len, enc_embedd_dim]
X_mask: mask, an array showing which elements in X are not 0 [nb_sample, max_len]
X_tail: encoding of the ending of X; does not seem to make sense for the bidirectional model (head+tail) [nb_sample, 2*enc_hidden_dim]
there's a bug in X_tail, but luckily we don't use it often
nb_sample: number of samples, defined by batch size
max_len: max length of sentence (should be same after padding)
|
Build the Encoder Computational Graph
|
[
"Build",
"the",
"Encoder",
"Computational",
"Graph"
] |
def build_encoder(self, source, context=None, return_embed=False, return_sequence=False):
"""
Build the Encoder Computational Graph
For the default configurations (with attention)
with_context=False, return_sequence=True, return_embed=True
Input:
source : source text, a list of indexes [nb_sample * max_len]
context: None
Return:
For Attention model:
return_sequence=True: to return the embedding at each time, not just the end state
return_embed=True:
X_out: a list of vectors [nb_sample, max_len, 2*enc_hidden_dim], encoding of each time state (concatenate both forward and backward RNN)
X: embedding of text X [nb_sample, max_len, enc_embedd_dim]
X_mask: mask, an array showing which elements in X are not 0 [nb_sample, max_len]
X_tail: encoding of the ending of X; does not seem to make sense for the bidirectional model (head+tail) [nb_sample, 2*enc_hidden_dim]
there's a bug in X_tail, but luckily we don't use it often
nb_sample: number of samples, defined by batch size
max_len: max length of sentence (should be same after padding)
"""
# Initial state
Init_h = None
if self.use_context:
Init_h = self.Initializer(context)
# word embedding
if not self.config['bidirectional']:
X, X_mask = self.Embed(source, True)
X_out = self.RNN(X, X_mask, C=context, init_h=Init_h, return_sequence=return_sequence)
if return_sequence:
X_tail = X_out[:, -1]
else:
X_tail = X_out
else:
# reverse the source for backwardRNN
source2 = source[:, ::-1]
# map text to embedding
X, X_mask = self.Embed(source, True)
X2, X2_mask = self.Embed(source2, True)
# get the encoding at each time t. [Bug?] run forwardRNN on the reverse text?
X_out1 = self.backwardRNN(X, X_mask, C=context, init_h=Init_h, return_sequence=return_sequence)
X_out2 = self.forwardRNN(X2, X2_mask, C=context, init_h=Init_h, return_sequence=return_sequence)
# concatenate vectors of both forward and backward
if not return_sequence:
# [Bug]I think the X_out of backwardRNN is time 0, but for forwardRNN is ending time
X_out = T.concatenate([X_out1, X_out2], axis=1)
X_tail = X_out
else:
# reverse the encoding of forwardRNN(actually backwardRNN), so the X_out is backward
X_out = T.concatenate([X_out1, X_out2[:, ::-1, :]], axis=2)
# [Bug] X_out1[-1] is time 0, but X_out2[-1] is ending time
X_tail = T.concatenate([X_out1[:, -1], X_out2[:, -1]], axis=1)
X_mask = T.cast(X_mask, dtype='float32')
if return_embed:
return X_out, X, X_mask, X_tail
return X_out
|
[
"def",
"build_encoder",
"(",
"self",
",",
"source",
",",
"context",
"=",
"None",
",",
"return_embed",
"=",
"False",
",",
"return_sequence",
"=",
"False",
")",
":",
"# Initial state",
"Init_h",
"=",
"None",
"if",
"self",
".",
"use_context",
":",
"Init_h",
"=",
"self",
".",
"Initializer",
"(",
"context",
")",
"# word embedding",
"if",
"not",
"self",
".",
"config",
"[",
"'bidirectional'",
"]",
":",
"X",
",",
"X_mask",
"=",
"self",
".",
"Embed",
"(",
"source",
",",
"True",
")",
"X_out",
"=",
"self",
".",
"RNN",
"(",
"X",
",",
"X_mask",
",",
"C",
"=",
"context",
",",
"init_h",
"=",
"Init_h",
",",
"return_sequence",
"=",
"return_sequence",
")",
"if",
"return_sequence",
":",
"X_tail",
"=",
"X_out",
"[",
":",
",",
"-",
"1",
"]",
"else",
":",
"X_tail",
"=",
"X_out",
"else",
":",
"# reverse the source for backwardRNN",
"source2",
"=",
"source",
"[",
":",
",",
":",
":",
"-",
"1",
"]",
"# map text to embedding",
"X",
",",
"X_mask",
"=",
"self",
".",
"Embed",
"(",
"source",
",",
"True",
")",
"X2",
",",
"X2_mask",
"=",
"self",
".",
"Embed",
"(",
"source2",
",",
"True",
")",
"# get the encoding at each time t. [Bug?] run forwardRNN on the reverse text?",
"X_out1",
"=",
"self",
".",
"backwardRNN",
"(",
"X",
",",
"X_mask",
",",
"C",
"=",
"context",
",",
"init_h",
"=",
"Init_h",
",",
"return_sequence",
"=",
"return_sequence",
")",
"X_out2",
"=",
"self",
".",
"forwardRNN",
"(",
"X2",
",",
"X2_mask",
",",
"C",
"=",
"context",
",",
"init_h",
"=",
"Init_h",
",",
"return_sequence",
"=",
"return_sequence",
")",
"# concatenate vectors of both forward and backward",
"if",
"not",
"return_sequence",
":",
"# [Bug]I think the X_out of backwardRNN is time 0, but for forwardRNN is ending time",
"X_out",
"=",
"T",
".",
"concatenate",
"(",
"[",
"X_out1",
",",
"X_out2",
"]",
",",
"axis",
"=",
"1",
")",
"X_tail",
"=",
"X_out",
"else",
":",
"# reverse the encoding of forwardRNN(actually backwardRNN), so the X_out is backward",
"X_out",
"=",
"T",
".",
"concatenate",
"(",
"[",
"X_out1",
",",
"X_out2",
"[",
":",
",",
":",
":",
"-",
"1",
",",
":",
"]",
"]",
",",
"axis",
"=",
"2",
")",
"# [Bug] X_out1[-1] is time 0, but X_out2[-1] is ending time",
"X_tail",
"=",
"T",
".",
"concatenate",
"(",
"[",
"X_out1",
"[",
":",
",",
"-",
"1",
"]",
",",
"X_out2",
"[",
":",
",",
"-",
"1",
"]",
"]",
",",
"axis",
"=",
"1",
")",
"X_mask",
"=",
"T",
".",
"cast",
"(",
"X_mask",
",",
"dtype",
"=",
"'float32'",
")",
"if",
"return_embed",
":",
"return",
"X_out",
",",
"X",
",",
"X_mask",
",",
"X_tail",
"return",
"X_out"
] |
https://github.com/memray/seq2seq-keyphrase/blob/9145c63ebdc4c3bc431f8091dc52547a46804012/emolga/models/encdec.py#L257-L317
|
|
CoinAlpha/hummingbot
|
36f6149c1644c07cd36795b915f38b8f49b798e7
|
hummingbot/connector/derivative/bybit_perpetual/bybit_perpetual_api_order_book_data_source.py
|
python
|
BybitPerpetualAPIOrderBookDataSource._create_websocket_connection
|
(self, url: str)
|
Initialize WebSocket client for UserStreamDataSource
|
Initialize WebSocket client for UserStreamDataSource
|
[
"Initialize",
"WebSocket",
"client",
"for",
"UserStreamDataSource"
] |
async def _create_websocket_connection(self, url: str) -> BybitPerpetualWebSocketAdaptor:
"""
Initialize WebSocket client for UserStreamDataSource
"""
try:
session = await self._get_session()
ws = await session.ws_connect(url)
return BybitPerpetualWebSocketAdaptor(websocket=ws)
except asyncio.CancelledError:
raise
except Exception as ex:
self.logger().network(f"Unexpected error occurred during {CONSTANTS.EXCHANGE_NAME} WebSocket Connection "
f"({ex})")
raise
|
[
"async",
"def",
"_create_websocket_connection",
"(",
"self",
",",
"url",
":",
"str",
")",
"->",
"BybitPerpetualWebSocketAdaptor",
":",
"try",
":",
"session",
"=",
"await",
"self",
".",
"_get_session",
"(",
")",
"ws",
"=",
"await",
"session",
".",
"ws_connect",
"(",
"url",
")",
"return",
"BybitPerpetualWebSocketAdaptor",
"(",
"websocket",
"=",
"ws",
")",
"except",
"asyncio",
".",
"CancelledError",
":",
"raise",
"except",
"Exception",
"as",
"ex",
":",
"self",
".",
"logger",
"(",
")",
".",
"network",
"(",
"f\"Unexpected error occurred during {CONSTANTS.EXCHANGE_NAME} WebSocket Connection \"",
"f\"({ex})\"",
")",
"raise"
] |
https://github.com/CoinAlpha/hummingbot/blob/36f6149c1644c07cd36795b915f38b8f49b798e7/hummingbot/connector/derivative/bybit_perpetual/bybit_perpetual_api_order_book_data_source.py#L76-L89
|
||
IronLanguages/main
|
a949455434b1fda8c783289e897e78a9a0caabb5
|
External.LCA_RESTRICTED/Languages/CPython/27/Lib/idlelib/tabbedpages.py
|
python
|
TabSet._arrange_tabs
|
(self)
|
Arrange the tabs in rows, in the order in which they were added.
If n_rows >= 1, this will be the number of rows used. Otherwise the
number of rows will be calculated according to the number of tabs and
max_tabs_per_row. In this case, the number of rows may change when
adding/removing tabs.
|
Arrange the tabs in rows, in the order in which they were added.
|
[
"Arrange",
"the",
"tabs",
"in",
"rows",
"in",
"the",
"order",
"in",
"which",
"they",
"were",
"added",
"."
] |
def _arrange_tabs(self):
"""
Arrange the tabs in rows, in the order in which they were added.
If n_rows >= 1, this will be the number of rows used. Otherwise the
number of rows will be calculated according to the number of tabs and
max_tabs_per_row. In this case, the number of rows may change when
adding/removing tabs.
"""
# remove all tabs and rows
for tab_name in self._tabs.keys():
self._tabs.pop(tab_name).destroy()
self._reset_tab_rows()
if not self._tab_names:
return
if self.n_rows is not None and self.n_rows > 0:
n_rows = self.n_rows
else:
# calculate the required number of rows
n_rows = (len(self._tab_names) - 1) // self.max_tabs_per_row + 1
# not expanding the tabs with more than one row is very ugly
expand_tabs = self.expand_tabs or n_rows > 1
i = 0 # index in self._tab_names
for row_index in xrange(n_rows):
# calculate required number of tabs in this row
n_tabs = (len(self._tab_names) - i - 1) // (n_rows - row_index) + 1
tab_names = self._tab_names[i:i + n_tabs]
i += n_tabs
self._add_tab_row(tab_names, expand_tabs)
# re-select selected tab so it is properly displayed
selected = self._selected_tab
self.set_selected_tab(None)
if selected in self._tab_names:
self.set_selected_tab(selected)
|
[
"def",
"_arrange_tabs",
"(",
"self",
")",
":",
"# remove all tabs and rows",
"for",
"tab_name",
"in",
"self",
".",
"_tabs",
".",
"keys",
"(",
")",
":",
"self",
".",
"_tabs",
".",
"pop",
"(",
"tab_name",
")",
".",
"destroy",
"(",
")",
"self",
".",
"_reset_tab_rows",
"(",
")",
"if",
"not",
"self",
".",
"_tab_names",
":",
"return",
"if",
"self",
".",
"n_rows",
"is",
"not",
"None",
"and",
"self",
".",
"n_rows",
">",
"0",
":",
"n_rows",
"=",
"self",
".",
"n_rows",
"else",
":",
"# calculate the required number of rows",
"n_rows",
"=",
"(",
"len",
"(",
"self",
".",
"_tab_names",
")",
"-",
"1",
")",
"//",
"self",
".",
"max_tabs_per_row",
"+",
"1",
"# not expanding the tabs with more than one row is very ugly",
"expand_tabs",
"=",
"self",
".",
"expand_tabs",
"or",
"n_rows",
">",
"1",
"i",
"=",
"0",
"# index in self._tab_names",
"for",
"row_index",
"in",
"xrange",
"(",
"n_rows",
")",
":",
"# calculate required number of tabs in this row",
"n_tabs",
"=",
"(",
"len",
"(",
"self",
".",
"_tab_names",
")",
"-",
"i",
"-",
"1",
")",
"//",
"(",
"n_rows",
"-",
"row_index",
")",
"+",
"1",
"tab_names",
"=",
"self",
".",
"_tab_names",
"[",
"i",
":",
"i",
"+",
"n_tabs",
"]",
"i",
"+=",
"n_tabs",
"self",
".",
"_add_tab_row",
"(",
"tab_names",
",",
"expand_tabs",
")",
"# re-select selected tab so it is properly displayed",
"selected",
"=",
"self",
".",
"_selected_tab",
"self",
".",
"set_selected_tab",
"(",
"None",
")",
"if",
"selected",
"in",
"self",
".",
"_tab_names",
":",
"self",
".",
"set_selected_tab",
"(",
"selected",
")"
] |
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/idlelib/tabbedpages.py#L135-L173
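A worked example of the row arithmetic above, for the case where n_rows is unset: with 7 tabs and max_tabs_per_row = 3, n_rows = (7 - 1) // 3 + 1 = 3; the per-row loop then assigns (7 - 0 - 1) // 3 + 1 = 3 tabs to row 0, (7 - 3 - 1) // 2 + 1 = 2 tabs to row 1, and the remaining 2 to row 2, spreading the tabs as evenly as possible.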
|
||
scaleway/postal-address
|
968f19aadf254695d7383ff9f6012f1bfaf1f7a4
|
postal_address/address.py
|
python
|
Address.__setitem__
|
(self, key, value)
|
Set a field's value.
Only base fields are allowed to be set explicitly.
|
Set a field's value.
|
[
"Set",
"a",
"field",
"s",
"value",
"."
] |
def __setitem__(self, key, value):
""" Set a field's value.
Only base fields are allowed to be set explicitly.
"""
if not isinstance(key, basestring):
raise TypeError
if not (isinstance(value, basestring) or value is None):
raise TypeError
if key not in self.BASE_FIELD_IDS:
raise KeyError
self._fields[key] = value
|
[
"def",
"__setitem__",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"key",
",",
"basestring",
")",
":",
"raise",
"TypeError",
"if",
"not",
"(",
"isinstance",
"(",
"value",
",",
"basestring",
")",
"or",
"value",
"is",
"None",
")",
":",
"raise",
"TypeError",
"if",
"key",
"not",
"in",
"self",
".",
"BASE_FIELD_IDS",
":",
"raise",
"KeyError",
"self",
".",
"_fields",
"[",
"key",
"]",
"=",
"value"
] |
https://github.com/scaleway/postal-address/blob/968f19aadf254695d7383ff9f6012f1bfaf1f7a4/postal_address/address.py#L197-L208
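A hedged sketch of the three guards (the field name is illustrative; BASE_FIELD_IDS defines the allowed keys):

addr['city_name'] = 'Paris'  # OK, assuming 'city_name' is a base field
addr['city_name'] = 42       # TypeError: value must be a string or None
addr[42] = 'Paris'           # TypeError: key must be a string
addr['no_such_field'] = 'x'  # KeyError: not in BASE_FIELD_IDS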
|
||
samuelclay/NewsBlur
|
2c45209df01a1566ea105e04d499367f32ac9ad2
|
utils/feed_functions.py
|
python
|
Counter.__or__
|
(self, other)
|
return result
|
Union is the maximum of value in either of the input counters.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
|
Union is the maximum of value in either of the input counters.
|
[
"Union",
"is",
"the",
"maximum",
"of",
"value",
"in",
"either",
"of",
"the",
"input",
"counters",
"."
] |
def __or__(self, other):
'''Union is the maximum of value in either of the input counters.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
_max = max
result = Counter()
for elem in set(self) | set(other):
newcount = _max(self[elem], other[elem])
if newcount > 0:
result[elem] = newcount
return result
|
[
"def",
"__or__",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"Counter",
")",
":",
"return",
"NotImplemented",
"_max",
"=",
"max",
"result",
"=",
"Counter",
"(",
")",
"for",
"elem",
"in",
"set",
"(",
"self",
")",
"|",
"set",
"(",
"other",
")",
":",
"newcount",
"=",
"_max",
"(",
"self",
"[",
"elem",
"]",
",",
"other",
"[",
"elem",
"]",
")",
"if",
"newcount",
">",
"0",
":",
"result",
"[",
"elem",
"]",
"=",
"newcount",
"return",
"result"
] |
https://github.com/samuelclay/NewsBlur/blob/2c45209df01a1566ea105e04d499367f32ac9ad2/utils/feed_functions.py#L377-L392
|
|
huawei-noah/vega
|
d9f13deede7f2b584e4b1d32ffdb833856129989
|
vega/networks/pytorch/customs/modnas/arch_space/torch/mobilenetv3.py
|
python
|
mobilenetv3_large
|
(cfgs=None, **kwargs)
|
return MobileNetV3(cfgs, mode='large', **kwargs)
|
Construct a MobileNetV3-Large model.
|
Construct a MobileNetV3-Large model.
|
[
"Construct",
"a",
"MobileNetV3",
"-",
"Large",
"model",
"."
] |
def mobilenetv3_large(cfgs=None, **kwargs):
"""Construct a MobileNetV3-Large model."""
cfgs = [
# k, t, c, SE, NL, s
[3, 0, 16, 0, 1, 2],
[3, 16, 16, 0, 0, 1],
[3, 64, 24, 0, 0, 2],
[3, 72, 24, 0, 0, 1],
[5, 72, 40, 1, 0, 2],
[5, 120, 40, 1, 0, 1],
[5, 120, 40, 1, 0, 1],
[3, 240, 80, 0, 1, 2],
[3, 200, 80, 0, 1, 1],
[3, 184, 80, 0, 1, 1],
[3, 184, 80, 0, 1, 1],
[3, 480, 112, 1, 1, 1],
[3, 672, 112, 1, 1, 1],
[5, 672, 160, 1, 1, 2],
[5, 960, 160, 1, 1, 1],
[5, 960, 160, 1, 1, 1]
] if cfgs is None else cfgs
return MobileNetV3(cfgs, mode='large', **kwargs)
|
[
"def",
"mobilenetv3_large",
"(",
"cfgs",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"cfgs",
"=",
"[",
"# k, t, c, SE, NL, s",
"[",
"3",
",",
"0",
",",
"16",
",",
"0",
",",
"1",
",",
"2",
"]",
",",
"[",
"3",
",",
"16",
",",
"16",
",",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"3",
",",
"64",
",",
"24",
",",
"0",
",",
"0",
",",
"2",
"]",
",",
"[",
"3",
",",
"72",
",",
"24",
",",
"0",
",",
"0",
",",
"1",
"]",
",",
"[",
"5",
",",
"72",
",",
"40",
",",
"1",
",",
"0",
",",
"2",
"]",
",",
"[",
"5",
",",
"120",
",",
"40",
",",
"1",
",",
"0",
",",
"1",
"]",
",",
"[",
"5",
",",
"120",
",",
"40",
",",
"1",
",",
"0",
",",
"1",
"]",
",",
"[",
"3",
",",
"240",
",",
"80",
",",
"0",
",",
"1",
",",
"2",
"]",
",",
"[",
"3",
",",
"200",
",",
"80",
",",
"0",
",",
"1",
",",
"1",
"]",
",",
"[",
"3",
",",
"184",
",",
"80",
",",
"0",
",",
"1",
",",
"1",
"]",
",",
"[",
"3",
",",
"184",
",",
"80",
",",
"0",
",",
"1",
",",
"1",
"]",
",",
"[",
"3",
",",
"480",
",",
"112",
",",
"1",
",",
"1",
",",
"1",
"]",
",",
"[",
"3",
",",
"672",
",",
"112",
",",
"1",
",",
"1",
",",
"1",
"]",
",",
"[",
"5",
",",
"672",
",",
"160",
",",
"1",
",",
"1",
",",
"2",
"]",
",",
"[",
"5",
",",
"960",
",",
"160",
",",
"1",
",",
"1",
",",
"1",
"]",
",",
"[",
"5",
",",
"960",
",",
"160",
",",
"1",
",",
"1",
",",
"1",
"]",
"]",
"if",
"cfgs",
"is",
"None",
"else",
"cfgs",
"return",
"MobileNetV3",
"(",
"cfgs",
",",
"mode",
"=",
"'large'",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/huawei-noah/vega/blob/d9f13deede7f2b584e4b1d32ffdb833856129989/vega/networks/pytorch/customs/modnas/arch_space/torch/mobilenetv3.py#L271-L292
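A hedged smoke test (assumes MobileNetV3 is a torch.nn.Module taking standard 224x224 input; extra kwargs are simply forwarded to the constructor):

import torch

model = mobilenetv3_large()                  # uses the default 16-stage cfgs above
logits = model(torch.randn(1, 3, 224, 224))  # one dummy RGB image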
|
|
pyfa-org/Pyfa
|
feaa52c36beeda21ab380fc74d8f871b81d49729
|
gui/builtinViews/fittingView.py
|
python
|
FittingView.click
|
(self, event)
|
Handle click event on modules.
This is only useful for the State column. If multiple items are selected,
and we have clicked the State column, iterate through the selections and
change State
|
Handle click event on modules.
|
[
"Handle",
"click",
"event",
"on",
"modules",
"."
] |
def click(self, event):
"""
Handle click event on modules.
This is only useful for the State column. If multiple items are selected,
and we have clicked the State column, iterate through the selections and
change State
"""
clickedRow, _, col = self.HitTestSubItem(event.Position)
# only do State column and ignore invalid rows
if clickedRow != -1 and clickedRow not in self.blanks and col == self.getColIndex(State):
selectedRows = []
currentRow = self.GetFirstSelected()
while currentRow != -1 and clickedRow not in self.blanks:
selectedRows.append(currentRow)
currentRow = self.GetNextSelected(currentRow)
if clickedRow not in selectedRows:
try:
selectedMods = [self.mods[clickedRow]]
except IndexError:
return
else:
selectedMods = self.getSelectedMods()
click = "ctrl" if event.GetModifiers() == wx.MOD_CONTROL or event.middleIsDown else "right" if event.GetButton() == 3 else "left"
try:
mainMod = self.mods[clickedRow]
except IndexError:
return
if mainMod.isEmpty:
return
fitID = self.mainFrame.getActiveFit()
fit = Fit.getInstance().getFit(fitID)
if mainMod not in fit.modules:
return
mainPosition = fit.modules.index(mainMod)
if event.GetModifiers() == wx.MOD_ALT:
positions = getSimilarModPositions(fit.modules, mainMod)
else:
positions = []
for position, mod in enumerate(fit.modules):
if mod in selectedMods:
positions.append(position)
self.mainFrame.command.Submit(cmd.GuiChangeLocalModuleStatesCommand(
fitID=fitID,
mainPosition=mainPosition,
positions=positions,
click=click))
# update state tooltip
tooltip = self.activeColumns[col].getToolTip(self.mods[clickedRow])
if tooltip:
self.SetToolTip(tooltip)
else:
event.Skip()
|
[
"def",
"click",
"(",
"self",
",",
"event",
")",
":",
"clickedRow",
",",
"_",
",",
"col",
"=",
"self",
".",
"HitTestSubItem",
"(",
"event",
".",
"Position",
")",
"# only do State column and ignore invalid rows",
"if",
"clickedRow",
"!=",
"-",
"1",
"and",
"clickedRow",
"not",
"in",
"self",
".",
"blanks",
"and",
"col",
"==",
"self",
".",
"getColIndex",
"(",
"State",
")",
":",
"selectedRows",
"=",
"[",
"]",
"currentRow",
"=",
"self",
".",
"GetFirstSelected",
"(",
")",
"while",
"currentRow",
"!=",
"-",
"1",
"and",
"clickedRow",
"not",
"in",
"self",
".",
"blanks",
":",
"selectedRows",
".",
"append",
"(",
"currentRow",
")",
"currentRow",
"=",
"self",
".",
"GetNextSelected",
"(",
"currentRow",
")",
"if",
"clickedRow",
"not",
"in",
"selectedRows",
":",
"try",
":",
"selectedMods",
"=",
"[",
"self",
".",
"mods",
"[",
"clickedRow",
"]",
"]",
"except",
"IndexError",
":",
"return",
"else",
":",
"selectedMods",
"=",
"self",
".",
"getSelectedMods",
"(",
")",
"click",
"=",
"\"ctrl\"",
"if",
"event",
".",
"GetModifiers",
"(",
")",
"==",
"wx",
".",
"MOD_CONTROL",
"or",
"event",
".",
"middleIsDown",
"else",
"\"right\"",
"if",
"event",
".",
"GetButton",
"(",
")",
"==",
"3",
"else",
"\"left\"",
"try",
":",
"mainMod",
"=",
"self",
".",
"mods",
"[",
"clickedRow",
"]",
"except",
"IndexError",
":",
"return",
"if",
"mainMod",
".",
"isEmpty",
":",
"return",
"fitID",
"=",
"self",
".",
"mainFrame",
".",
"getActiveFit",
"(",
")",
"fit",
"=",
"Fit",
".",
"getInstance",
"(",
")",
".",
"getFit",
"(",
"fitID",
")",
"if",
"mainMod",
"not",
"in",
"fit",
".",
"modules",
":",
"return",
"mainPosition",
"=",
"fit",
".",
"modules",
".",
"index",
"(",
"mainMod",
")",
"if",
"event",
".",
"GetModifiers",
"(",
")",
"==",
"wx",
".",
"MOD_ALT",
":",
"positions",
"=",
"getSimilarModPositions",
"(",
"fit",
".",
"modules",
",",
"mainMod",
")",
"else",
":",
"positions",
"=",
"[",
"]",
"for",
"position",
",",
"mod",
"in",
"enumerate",
"(",
"fit",
".",
"modules",
")",
":",
"if",
"mod",
"in",
"selectedMods",
":",
"positions",
".",
"append",
"(",
"position",
")",
"self",
".",
"mainFrame",
".",
"command",
".",
"Submit",
"(",
"cmd",
".",
"GuiChangeLocalModuleStatesCommand",
"(",
"fitID",
"=",
"fitID",
",",
"mainPosition",
"=",
"mainPosition",
",",
"positions",
"=",
"positions",
",",
"click",
"=",
"click",
")",
")",
"# update state tooltip",
"tooltip",
"=",
"self",
".",
"activeColumns",
"[",
"col",
"]",
".",
"getToolTip",
"(",
"self",
".",
"mods",
"[",
"clickedRow",
"]",
")",
"if",
"tooltip",
":",
"self",
".",
"SetToolTip",
"(",
"tooltip",
")",
"else",
":",
"event",
".",
"Skip",
"(",
")"
] |
https://github.com/pyfa-org/Pyfa/blob/feaa52c36beeda21ab380fc74d8f871b81d49729/gui/builtinViews/fittingView.py#L669-L729
|
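A minimal standalone sketch of the modifier-to-action precedence used by the handler above (ctrl/middle first, then right button, else left); classify_click is a hypothetical helper, not part of Pyfa:

def classify_click(ctrl_down, middle_down, button):
    # Mirrors the source: "ctrl" if MOD_CONTROL or middle button is down,
    # else "right" for button 3, else "left".
    if ctrl_down or middle_down:
        return "ctrl"
    return "right" if button == 3 else "left"

assert classify_click(True, False, 1) == "ctrl"
assert classify_click(False, False, 3) == "right"
assert classify_click(False, False, 1) == "left"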
||
kubeflow-kale/kale
|
bda9d296822e56ba8fe76b0072e656005da04905
|
backend/kale/rpc/rok.py
|
python
|
check_rok_availability
|
(request)
|
Check if Rok is available.
|
Check if Rok is available.
|
[
"Check",
"if",
"Rok",
"is",
"available",
"."
] |
def check_rok_availability(request):
"""Check if Rok is available."""
log = request.log if hasattr(request, "log") else logger
try:
rok = rokutils.get_client()
except ImportError:
log.exception("Failed to import RokClient")
raise RPCNotFoundError(details="Rok Gateway Client module not found",
trans_id=request.trans_id)
except Exception:
log.exception("Failed to initialize RokClient")
raise RPCServiceUnavailableError(details=("Failed to initialize"
" RokClient"),
trans_id=request.trans_id)
try:
rok.account_info()
except Exception:
log.exception("Failed to retrieve account information")
raise RPCServiceUnavailableError(details="Failed to access Rok",
trans_id=request.trans_id)
name = podutils.get_pod_name()
namespace = podutils.get_namespace()
try:
suggestions = rok.version_register_suggest(rokutils.DEFAULT_BUCKET,
name, "jupyter",
"params:lab",
{"namespace": namespace},
ignore_env=True)
except Exception as e:
log.exception("Failed to list lab suggestions")
message = "%s: %s" % (e.__class__.__name__, e)
raise RPCServiceUnavailableError(message=message,
details=("Rok cannot list notebooks"
" in this namespace"),
trans_id=request.trans_id)
if not any(s["value"] == name for s in suggestions):
log.error("Could not find notebook '%s' in list of suggestions", name)
raise RPCNotFoundError(details=("Could not find this notebook in"
" notebooks listed by Rok"),
trans_id=request.trans_id)
|
[
"def",
"check_rok_availability",
"(",
"request",
")",
":",
"log",
"=",
"request",
".",
"log",
"if",
"hasattr",
"(",
"request",
",",
"\"log\"",
")",
"else",
"logger",
"try",
":",
"rok",
"=",
"rokutils",
".",
"get_client",
"(",
")",
"except",
"ImportError",
":",
"log",
".",
"exception",
"(",
"\"Failed to import RokClient\"",
")",
"raise",
"RPCNotFoundError",
"(",
"details",
"=",
"\"Rok Gateway Client module not found\"",
",",
"trans_id",
"=",
"request",
".",
"trans_id",
")",
"except",
"Exception",
":",
"log",
".",
"exception",
"(",
"\"Failed to initialize RokClient\"",
")",
"raise",
"RPCServiceUnavailableError",
"(",
"details",
"=",
"(",
"\"Failed to initialize\"",
"\" RokClient\"",
")",
",",
"trans_id",
"=",
"request",
".",
"trans_id",
")",
"try",
":",
"rok",
".",
"account_info",
"(",
")",
"except",
"Exception",
":",
"log",
".",
"exception",
"(",
"\"Failed to retrieve account information\"",
")",
"raise",
"RPCServiceUnavailableError",
"(",
"details",
"=",
"\"Failed to access Rok\"",
",",
"trans_id",
"=",
"request",
".",
"trans_id",
")",
"name",
"=",
"podutils",
".",
"get_pod_name",
"(",
")",
"namespace",
"=",
"podutils",
".",
"get_namespace",
"(",
")",
"try",
":",
"suggestions",
"=",
"rok",
".",
"version_register_suggest",
"(",
"rokutils",
".",
"DEFAULT_BUCKET",
",",
"name",
",",
"\"jupyter\"",
",",
"\"params:lab\"",
",",
"{",
"\"namespace\"",
":",
"namespace",
"}",
",",
"ignore_env",
"=",
"True",
")",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"exception",
"(",
"\"Failed to list lab suggestions\"",
")",
"message",
"=",
"\"%s: %s\"",
"%",
"(",
"e",
".",
"__class__",
".",
"__name__",
",",
"e",
")",
"raise",
"RPCServiceUnavailableError",
"(",
"message",
"=",
"message",
",",
"details",
"=",
"(",
"\"Rok cannot list notebooks\"",
"\" in this namespace\"",
")",
",",
"trans_id",
"=",
"request",
".",
"trans_id",
")",
"if",
"not",
"any",
"(",
"s",
"[",
"\"value\"",
"]",
"==",
"name",
"for",
"s",
"in",
"suggestions",
")",
":",
"log",
".",
"error",
"(",
"\"Could not find notebook '%s' in list of suggestions\"",
",",
"name",
")",
"raise",
"RPCNotFoundError",
"(",
"details",
"=",
"(",
"\"Could not find this notebook in\"",
"\" notebooks listed by Rok\"",
")",
",",
"trans_id",
"=",
"request",
".",
"trans_id",
")"
] |
https://github.com/kubeflow-kale/kale/blob/bda9d296822e56ba8fe76b0072e656005da04905/backend/kale/rpc/rok.py#L40-L82
|
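The function above follows a common mapping from failure class to RPC error type. A simplified sketch of that pattern, with stand-in error classes rather than the real kale.rpc types:

class RPCNotFoundError(Exception):
    """Stand-in: a required component is missing."""

class RPCServiceUnavailableError(Exception):
    """Stand-in: a component exists but cannot be used."""

def init_client(factory):
    # Import problems mean the component is absent -> "not found";
    # anything else at init time -> "service unavailable".
    try:
        return factory()
    except ImportError as err:
        raise RPCNotFoundError("client module not found") from err
    except Exception as err:
        raise RPCServiceUnavailableError("failed to initialize client") from err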
||
shellphish/ictf-framework
|
c0384f12060cf47442a52f516c6e78bd722f208a
|
database/support/mysql-connector-python-2.1.3/lib/mysql/connector/fabric/connection.py
|
python
|
Fabric.get_sharding_information
|
(self, tables=None, database=None)
|
Get and cache the sharding information for given tables
This method fetches sharding information from MySQL Fabric
and caches the result. The tables argument must be a sequence
of sequences containing the name of the table and the database. If no
database is given, the value of the database argument will
be used.
Examples:
tables = [('salary',), ('employees',)]
get_sharding_information(tables, database='employees')
tables = [('salary', 'employees'), ('employees', 'employees')]
get_sharding_information(tables)
Raises InterfaceError on errors; ValueError when something is wrong
with the tables argument.
|
Get and cache the sharding information for given tables
|
[
"Get",
"and",
"cache",
"the",
"sharding",
"information",
"for",
"given",
"tables"
] |
def get_sharding_information(self, tables=None, database=None):
"""Get and cache the sharding information for given tables
This method fetches sharding information from MySQL Fabric
and caches the result. The tables argument must be a sequence
of sequences containing the name of the table and the database. If no
database is given, the value of the database argument will
be used.
Examples:
tables = [('salary',), ('employees',)]
get_sharding_information(tables, database='employees')
tables = [('salary', 'employees'), ('employees', 'employees')]
get_sharding_information(tables)
Raises InterfaceError on errors; ValueError when something is wrong
with the tables argument.
"""
if not isinstance(tables, (list, tuple)):
raise ValueError("tables should be a sequence")
patterns = []
for table in tables:
if not isinstance(table, (list, tuple)) and not database:
raise ValueError("No database specified for table {0}".format(
table))
if isinstance(table, (list, tuple)):
dbase = table[1]
tbl = table[0]
else:
dbase = database
tbl = table
patterns.append("{0}.{1}".format(dbase, tbl))
inst = self.get_instance()
try:
fset = inst.execute(
'dump', 'sharding_information', self._version_token,
','.join(patterns)
)
except (Fault, socket.error) as exc:
msg = "Looking up sharding information failed : {error}".format(
error=str(exc))
raise InterfaceError(msg)
for row in fset.rows():
self._cache.sharding_cache_table(
FabricShard(row.schema_name, row.table_name, row.column_name,
row.lower_bound, row.shard_id, row.type_name,
row.group_id, row.global_group)
)
|
[
"def",
"get_sharding_information",
"(",
"self",
",",
"tables",
"=",
"None",
",",
"database",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"tables",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"raise",
"ValueError",
"(",
"\"tables should be a sequence\"",
")",
"patterns",
"=",
"[",
"]",
"for",
"table",
"in",
"tables",
":",
"if",
"not",
"isinstance",
"(",
"table",
",",
"(",
"list",
",",
"tuple",
")",
")",
"and",
"not",
"database",
":",
"raise",
"ValueError",
"(",
"\"No database specified for table {0}\"",
".",
"format",
"(",
"table",
")",
")",
"if",
"isinstance",
"(",
"table",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"dbase",
"=",
"table",
"[",
"1",
"]",
"tbl",
"=",
"table",
"[",
"0",
"]",
"else",
":",
"dbase",
"=",
"database",
"tbl",
"=",
"table",
"patterns",
".",
"append",
"(",
"\"{0}.{1}\"",
".",
"format",
"(",
"dbase",
",",
"tbl",
")",
")",
"inst",
"=",
"self",
".",
"get_instance",
"(",
")",
"try",
":",
"fset",
"=",
"inst",
".",
"execute",
"(",
"'dump'",
",",
"'sharding_information'",
",",
"self",
".",
"_version_token",
",",
"','",
".",
"join",
"(",
"patterns",
")",
")",
"except",
"(",
"Fault",
",",
"socket",
".",
"error",
")",
"as",
"exc",
":",
"msg",
"=",
"\"Looking up sharding information failed : {error}\"",
".",
"format",
"(",
"error",
"=",
"str",
"(",
"exc",
")",
")",
"raise",
"InterfaceError",
"(",
"msg",
")",
"for",
"row",
"in",
"fset",
".",
"rows",
"(",
")",
":",
"self",
".",
"_cache",
".",
"sharding_cache_table",
"(",
"FabricShard",
"(",
"row",
".",
"schema_name",
",",
"row",
".",
"table_name",
",",
"row",
".",
"column_name",
",",
"row",
".",
"lower_bound",
",",
"row",
".",
"shard_id",
",",
"row",
".",
"type_name",
",",
"row",
".",
"group_id",
",",
"row",
".",
"global_group",
")",
")"
] |
https://github.com/shellphish/ictf-framework/blob/c0384f12060cf47442a52f516c6e78bd722f208a/database/support/mysql-connector-python-2.1.3/lib/mysql/connector/fabric/connection.py#L864-L917
|
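How the tables argument is normalized into "database.table" patterns can be shown in isolation; build_patterns is a hypothetical extraction of the loop above (note the real code reads the table name from index 0 and the database from index 1):

def build_patterns(tables, database=None):
    patterns = []
    for table in tables:
        if isinstance(table, (list, tuple)):
            tbl, dbase = table[0], table[1]
        elif database:
            tbl, dbase = table, database
        else:
            raise ValueError("No database specified for table {0}".format(table))
        patterns.append("{0}.{1}".format(dbase, tbl))
    return patterns

print(build_patterns([('salary', 'employees'), ('employees', 'employees')]))
# ['employees.salary', 'employees.employees']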
||
JaniceWuo/MovieRecommend
|
4c86db64ca45598917d304f535413df3bc9fea65
|
movierecommend/venv1/Lib/site-packages/django/contrib/gis/db/backends/mysql/operations.py
|
python
|
MySQLOperations.get_db_converters
|
(self, expression)
|
return converters
|
[] |
def get_db_converters(self, expression):
converters = super(MySQLOperations, self).get_db_converters(expression)
if isinstance(expression.output_field, GeometryField) and self.uses_invalid_empty_geometry_collection:
converters.append(self.convert_invalid_empty_geometry_collection)
return converters
|
[
"def",
"get_db_converters",
"(",
"self",
",",
"expression",
")",
":",
"converters",
"=",
"super",
"(",
"MySQLOperations",
",",
"self",
")",
".",
"get_db_converters",
"(",
"expression",
")",
"if",
"isinstance",
"(",
"expression",
".",
"output_field",
",",
"GeometryField",
")",
"and",
"self",
".",
"uses_invalid_empty_geometry_collection",
":",
"converters",
".",
"append",
"(",
"self",
".",
"convert_invalid_empty_geometry_collection",
")",
"return",
"converters"
] |
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/django/contrib/gis/db/backends/mysql/operations.py#L100-L104
|
|||
AppScale/gts
|
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
|
AppServer/lib/django-1.3/django/middleware/common.py
|
python
|
CommonMiddleware.process_response
|
(self, request, response)
|
return response
|
Send broken link emails and calculate the Etag, if needed.
|
Send broken link emails and calculate the Etag, if needed.
|
[
"Send",
"broken",
"link",
"emails",
"and",
"calculate",
"the",
"Etag",
"if",
"needed",
"."
] |
def process_response(self, request, response):
"Send broken link emails and calculate the Etag, if needed."
if response.status_code == 404:
if settings.SEND_BROKEN_LINK_EMAILS and not settings.DEBUG:
# If the referrer was from an internal link or a non-search-engine site,
# send a note to the managers.
domain = request.get_host()
referer = request.META.get('HTTP_REFERER', None)
is_internal = _is_internal_request(domain, referer)
path = request.get_full_path()
if referer and not _is_ignorable_404(path) and (is_internal or '?' not in referer):
ua = request.META.get('HTTP_USER_AGENT', '<none>')
ip = request.META.get('REMOTE_ADDR', '<none>')
mail_managers("Broken %slink on %s" % ((is_internal and 'INTERNAL ' or ''), domain),
"Referrer: %s\nRequested URL: %s\nUser agent: %s\nIP address: %s\n" \
% (referer, request.get_full_path(), ua, ip),
fail_silently=True)
return response
# Use ETags, if requested.
if settings.USE_ETAGS:
if response.has_header('ETag'):
etag = response['ETag']
else:
etag = '"%s"' % md5_constructor(response.content).hexdigest()
if response.status_code >= 200 and response.status_code < 300 and request.META.get('HTTP_IF_NONE_MATCH') == etag:
cookies = response.cookies
response = http.HttpResponseNotModified()
response.cookies = cookies
else:
response['ETag'] = etag
return response
|
[
"def",
"process_response",
"(",
"self",
",",
"request",
",",
"response",
")",
":",
"if",
"response",
".",
"status_code",
"==",
"404",
":",
"if",
"settings",
".",
"SEND_BROKEN_LINK_EMAILS",
"and",
"not",
"settings",
".",
"DEBUG",
":",
"# If the referrer was from an internal link or a non-search-engine site,",
"# send a note to the managers.",
"domain",
"=",
"request",
".",
"get_host",
"(",
")",
"referer",
"=",
"request",
".",
"META",
".",
"get",
"(",
"'HTTP_REFERER'",
",",
"None",
")",
"is_internal",
"=",
"_is_internal_request",
"(",
"domain",
",",
"referer",
")",
"path",
"=",
"request",
".",
"get_full_path",
"(",
")",
"if",
"referer",
"and",
"not",
"_is_ignorable_404",
"(",
"path",
")",
"and",
"(",
"is_internal",
"or",
"'?'",
"not",
"in",
"referer",
")",
":",
"ua",
"=",
"request",
".",
"META",
".",
"get",
"(",
"'HTTP_USER_AGENT'",
",",
"'<none>'",
")",
"ip",
"=",
"request",
".",
"META",
".",
"get",
"(",
"'REMOTE_ADDR'",
",",
"'<none>'",
")",
"mail_managers",
"(",
"\"Broken %slink on %s\"",
"%",
"(",
"(",
"is_internal",
"and",
"'INTERNAL '",
"or",
"''",
")",
",",
"domain",
")",
",",
"\"Referrer: %s\\nRequested URL: %s\\nUser agent: %s\\nIP address: %s\\n\"",
"%",
"(",
"referer",
",",
"request",
".",
"get_full_path",
"(",
")",
",",
"ua",
",",
"ip",
")",
",",
"fail_silently",
"=",
"True",
")",
"return",
"response",
"# Use ETags, if requested.",
"if",
"settings",
".",
"USE_ETAGS",
":",
"if",
"response",
".",
"has_header",
"(",
"'ETag'",
")",
":",
"etag",
"=",
"response",
"[",
"'ETag'",
"]",
"else",
":",
"etag",
"=",
"'\"%s\"'",
"%",
"md5_constructor",
"(",
"response",
".",
"content",
")",
".",
"hexdigest",
"(",
")",
"if",
"response",
".",
"status_code",
">=",
"200",
"and",
"response",
".",
"status_code",
"<",
"300",
"and",
"request",
".",
"META",
".",
"get",
"(",
"'HTTP_IF_NONE_MATCH'",
")",
"==",
"etag",
":",
"cookies",
"=",
"response",
".",
"cookies",
"response",
"=",
"http",
".",
"HttpResponseNotModified",
"(",
")",
"response",
".",
"cookies",
"=",
"cookies",
"else",
":",
"response",
"[",
"'ETag'",
"]",
"=",
"etag",
"return",
"response"
] |
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.3/django/middleware/common.py#L92-L124
|
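A minimal sketch of the conditional-response logic above: hash the body into a quoted MD5 ETag and answer 304 Not Modified when it matches If-None-Match on a 2xx response.

import hashlib

def etag_for(body: bytes) -> str:
    # Quoted hex digest, matching the '"%s"' format used in the middleware.
    return '"%s"' % hashlib.md5(body).hexdigest()

def should_send_304(status_code, body, if_none_match):
    return 200 <= status_code < 300 and if_none_match == etag_for(body)

assert should_send_304(200, b"hello", '"5d41402abc4b2a76b9719d911017c592"')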
|
carbonblack/cbapi-python
|
24d677ffd99aee911c2c76ecb5528e4e9320c7cc
|
src/cbapi/psc/alerts_query.py
|
python
|
CBAnalyticsAlertSearchQuery.set_device_locations
|
(self, locations)
|
return self
|
Restricts the alerts that this query is performed on to the specified
device locations.
:param locations list: List of device locations to look for. Valid values are "ONSITE", "OFFSITE",
and "UNKNOWN".
:return: This instance.
|
Restricts the alerts that this query is performed on to the specified
device locations.
|
[
"Restricts",
"the",
"alerts",
"that",
"this",
"query",
"is",
"performed",
"on",
"to",
"the",
"specified",
"device",
"locations",
"."
] |
def set_device_locations(self, locations):
"""
Restricts the alerts that this query is performed on to the specified
device locations.
:param locations list: List of device locations to look for. Valid values are "ONSITE", "OFFSITE",
and "UNKNOWN".
:return: This instance.
"""
if not all((location in CBAnalyticsAlertSearchQuery.VALID_LOCATIONS)
for location in locations):
raise ApiError("One or more invalid device locations")
self._update_criteria("device_location", locations)
return self
|
[
"def",
"set_device_locations",
"(",
"self",
",",
"locations",
")",
":",
"if",
"not",
"all",
"(",
"(",
"location",
"in",
"CBAnalyticsAlertSearchQuery",
".",
"VALID_LOCATIONS",
")",
"for",
"location",
"in",
"locations",
")",
":",
"raise",
"ApiError",
"(",
"\"One or more invalid device locations\"",
")",
"self",
".",
"_update_criteria",
"(",
"\"device_location\"",
",",
"locations",
")",
"return",
"self"
] |
https://github.com/carbonblack/cbapi-python/blob/24d677ffd99aee911c2c76ecb5528e4e9320c7cc/src/cbapi/psc/alerts_query.py#L567-L580
|
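The membership check above in isolation; VALID_LOCATIONS here is an assumed stand-in for the class attribute referenced in the source:

VALID_LOCATIONS = ("ONSITE", "OFFSITE", "UNKNOWN")

def validate_locations(locations):
    # all() short-circuits on the first value outside the whitelist.
    if not all(loc in VALID_LOCATIONS for loc in locations):
        raise ValueError("One or more invalid device locations")

validate_locations(["ONSITE", "UNKNOWN"])  # passes silently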
|
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/jupyter_client/threaded.py
|
python
|
ThreadedZMQSocketChannel._handle_recv
|
(self, msg)
|
Callback for stream.on_recv.
Unpacks message, and calls handlers with it.
|
Callback for stream.on_recv.
|
[
"Callback",
"for",
"stream",
".",
"on_recv",
"."
] |
def _handle_recv(self, msg):
"""Callback for stream.on_recv.
Unpacks message, and calls handlers with it.
"""
ident,smsg = self.session.feed_identities(msg)
msg = self.session.deserialize(smsg)
# let client inspect messages
if self._inspect:
self._inspect(msg)
self.call_handlers(msg)
|
[
"def",
"_handle_recv",
"(",
"self",
",",
"msg",
")",
":",
"ident",
",",
"smsg",
"=",
"self",
".",
"session",
".",
"feed_identities",
"(",
"msg",
")",
"msg",
"=",
"self",
".",
"session",
".",
"deserialize",
"(",
"smsg",
")",
"# let client inspect messages",
"if",
"self",
".",
"_inspect",
":",
"self",
".",
"_inspect",
"(",
"msg",
")",
"self",
".",
"call_handlers",
"(",
"msg",
")"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/jupyter_client/threaded.py#L87-L97
|
||
pymedusa/Medusa
|
1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38
|
ext/tvdbapiv2/models/episode.py
|
python
|
Episode.director
|
(self, director)
|
Sets the director of this Episode.
:param director: The director of this Episode.
:type: text_type
|
Sets the director of this Episode.
|
[
"Sets",
"the",
"director",
"of",
"this",
"Episode",
"."
] |
def director(self, director):
"""
Sets the director of this Episode.
:param director: The director of this Episode.
:type: text_type
"""
self._director = director
|
[
"def",
"director",
"(",
"self",
",",
"director",
")",
":",
"self",
".",
"_director",
"=",
"director"
] |
https://github.com/pymedusa/Medusa/blob/1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38/ext/tvdbapiv2/models/episode.py#L280-L288
|
||
ganeti/ganeti
|
d340a9ddd12f501bef57da421b5f9b969a4ba905
|
lib/hypervisor/hv_lxc.py
|
python
|
LXCHypervisor._LoadInstanceStash
|
(self, instance_name)
|
Load information stashed in file which was created by
L{_SaveInstanceStash}.
|
Load information stashed in file which was created by
L{_SaveInstanceStash}.
|
[
"Load",
"information",
"stashed",
"in",
"file",
"which",
"was",
"created",
"by",
"L",
"{",
"_SaveInstanceStash",
"}",
"."
] |
def _LoadInstanceStash(self, instance_name):
"""Load information stashed in file which was created by
L{_SaveInstanceStash}.
"""
stash_file = self._InstanceStashFilePath(instance_name)
try:
return serializer.Load(utils.ReadFile(stash_file))
except (EnvironmentError, ValueError) as err:
raise HypervisorError("Failed to load instance stash file %s : %s" %
(stash_file, err))
|
[
"def",
"_LoadInstanceStash",
"(",
"self",
",",
"instance_name",
")",
":",
"stash_file",
"=",
"self",
".",
"_InstanceStashFilePath",
"(",
"instance_name",
")",
"try",
":",
"return",
"serializer",
".",
"Load",
"(",
"utils",
".",
"ReadFile",
"(",
"stash_file",
")",
")",
"except",
"(",
"EnvironmentError",
",",
"ValueError",
")",
"as",
"err",
":",
"raise",
"HypervisorError",
"(",
"\"Failed to load instance stash file %s : %s\"",
"%",
"(",
"stash_file",
",",
"err",
")",
")"
] |
https://github.com/ganeti/ganeti/blob/d340a9ddd12f501bef57da421b5f9b969a4ba905/lib/hypervisor/hv_lxc.py#L232-L242
|
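The same load-and-wrap pattern with only the standard library, using json in place of ganeti's serializer; HypervisorError is redefined here for the sketch:

import json

class HypervisorError(Exception):
    pass

def load_stash(path):
    # Both I/O failures and malformed content surface as one domain error.
    try:
        with open(path) as fh:
            return json.load(fh)
    except (EnvironmentError, ValueError) as err:
        raise HypervisorError("Failed to load instance stash file %s : %s"
                              % (path, err))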
||
PrefectHQ/prefect
|
67bdc94e2211726d99561f6f52614bec8970e981
|
src/prefect/core/parameter.py
|
python
|
Parameter.serialize
|
(self)
|
return prefect.serialization.task.ParameterSchema().dump(self)
|
Creates a serialized representation of this parameter
Returns:
- dict representing this parameter
|
Creates a serialized representation of this parameter
|
[
"Creates",
"a",
"serialized",
"representation",
"of",
"this",
"parameter"
] |
def serialize(self) -> Dict[str, Any]:
"""
Creates a serialized representation of this parameter
Returns:
- dict representing this parameter
"""
return prefect.serialization.task.ParameterSchema().dump(self)
|
[
"def",
"serialize",
"(",
"self",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"return",
"prefect",
".",
"serialization",
".",
"task",
".",
"ParameterSchema",
"(",
")",
".",
"dump",
"(",
"self",
")"
] |
https://github.com/PrefectHQ/prefect/blob/67bdc94e2211726d99561f6f52614bec8970e981/src/prefect/core/parameter.py#L109-L116
|
|
Autodesk/molecular-design-toolkit
|
5f45a47fea21d3603899a6366cb163024f0e2ec4
|
moldesign/molecules/atomcollections.py
|
python
|
AtomGroup.center_of_mass
|
(self)
|
return com
|
units.Vector[length]: The (x,y,z) coordinates of this object's center of mass
|
units.Vector[length]: The (x,y,z) coordinates of this object's center of mass
|
[
"units",
".",
"Vector",
"[",
"length",
"]",
":",
"The",
"(",
"x",
"y",
"z",
")",
"coordinates",
"of",
"this",
"object",
"s",
"center",
"of",
"mass"
] |
def center_of_mass(self):
""" units.Vector[length]: The (x,y,z) coordinates of this object's center of mass """
if self.num_atoms == 0: # nicer exception than divide-by-zero
raise ValueError('"%s" has no atoms' % str(self))
total_mass = 0.0 * u.default.mass
com = np.zeros(3) * u.default.length * u.default.mass
for atom in self.atoms:
total_mass += atom.mass
com += atom.position * atom.mass
com = com / total_mass
return com
|
[
"def",
"center_of_mass",
"(",
"self",
")",
":",
"if",
"self",
".",
"num_atoms",
"==",
"0",
":",
"# nicer exception than divide-by-zero",
"raise",
"ValueError",
"(",
"'\"%s\" has no atoms'",
"%",
"str",
"(",
"self",
")",
")",
"total_mass",
"=",
"0.0",
"*",
"u",
".",
"default",
".",
"mass",
"com",
"=",
"np",
".",
"zeros",
"(",
"3",
")",
"*",
"u",
".",
"default",
".",
"length",
"*",
"u",
".",
"default",
".",
"mass",
"for",
"atom",
"in",
"self",
".",
"atoms",
":",
"total_mass",
"+=",
"atom",
".",
"mass",
"com",
"+=",
"atom",
".",
"position",
"*",
"atom",
".",
"mass",
"com",
"=",
"com",
"/",
"total_mass",
"return",
"com"
] |
https://github.com/Autodesk/molecular-design-toolkit/blob/5f45a47fea21d3603899a6366cb163024f0e2ec4/moldesign/molecules/atomcollections.py#L180-L191
|
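The same weighted average without the units machinery, i.e. com = sum(m_i * r_i) / sum(m_i); the masses and positions below are made-up values:

import numpy as np

masses = np.array([1.0, 3.0])
positions = np.array([[0.0, 0.0, 0.0],
                      [4.0, 0.0, 0.0]])
# Broadcast masses over the coordinate axis, sum, then normalize.
com = (masses[:, None] * positions).sum(axis=0) / masses.sum()
print(com)  # [3. 0. 0.]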
|
Vespa314/bilibili-api
|
42b6b90aa7c141f5cfb0fdc754435518106f6966
|
bilibili-video/GetAssDanmaku.py
|
python
|
GetVideoInfo
|
(aid,appkey,page = 1,AppSecret=None,fav = None)
|
return video
|
[] |
def GetVideoInfo(aid,appkey,page = 1,AppSecret=None,fav = None):
paras = {'id': GetString(aid),'page': GetString(page)}
if fav != None:
paras['fav'] = fav
url = 'http://api.bilibili.cn/view?'+GetSign(paras,appkey,AppSecret)
jsoninfo = JsonInfo(url)
video = Video(aid,jsoninfo.Getvalue('title'))
video.guankan = jsoninfo.Getvalue('play')
video.commentNumber = jsoninfo.Getvalue('review')
video.danmu = jsoninfo.Getvalue('video_review')
video.shoucang = jsoninfo.Getvalue('favorites')
video.description = jsoninfo.Getvalue('description')
video.tag = []
taglist = jsoninfo.Getvalue('tag')
if taglist != None:
for tag in taglist.split(','):
video.tag.append(tag)
video.cover = jsoninfo.Getvalue('pic')
video.author = User(jsoninfo.Getvalue('mid'),jsoninfo.Getvalue('author'))
video.page = jsoninfo.Getvalue('pages')
video.date = jsoninfo.Getvalue('created_at')
video.credit = jsoninfo.Getvalue('credit')
video.coin = jsoninfo.Getvalue('coins')
video.spid = jsoninfo.Getvalue('spid')
video.cid = jsoninfo.Getvalue('cid')
video.offsite = jsoninfo.Getvalue('offsite')
video.partname = jsoninfo.Getvalue('partname')
video.src = jsoninfo.Getvalue('src')
video.tid = jsoninfo.Getvalue('tid')
video.typename = jsoninfo.Getvalue('typename')
video.instant_server = jsoninfo.Getvalue('instant_server')
return video
|
[
"def",
"GetVideoInfo",
"(",
"aid",
",",
"appkey",
",",
"page",
"=",
"1",
",",
"AppSecret",
"=",
"None",
",",
"fav",
"=",
"None",
")",
":",
"paras",
"=",
"{",
"'id'",
":",
"GetString",
"(",
"aid",
")",
",",
"'page'",
":",
"GetString",
"(",
"page",
")",
"}",
"if",
"fav",
"!=",
"None",
":",
"paras",
"[",
"'fav'",
"]",
"=",
"fav",
"url",
"=",
"'http://api.bilibili.cn/view?'",
"+",
"GetSign",
"(",
"paras",
",",
"appkey",
",",
"AppSecret",
")",
"jsoninfo",
"=",
"JsonInfo",
"(",
"url",
")",
"video",
"=",
"Video",
"(",
"aid",
",",
"jsoninfo",
".",
"Getvalue",
"(",
"'title'",
")",
")",
"video",
".",
"guankan",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'play'",
")",
"video",
".",
"commentNumber",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'review'",
")",
"video",
".",
"danmu",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'video_review'",
")",
"video",
".",
"shoucang",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'favorites'",
")",
"video",
".",
"description",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'description'",
")",
"video",
".",
"tag",
"=",
"[",
"]",
"taglist",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'tag'",
")",
"if",
"taglist",
"!=",
"None",
":",
"for",
"tag",
"in",
"taglist",
".",
"split",
"(",
"','",
")",
":",
"video",
".",
"tag",
".",
"append",
"(",
"tag",
")",
"video",
".",
"cover",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'pic'",
")",
"video",
".",
"author",
"=",
"User",
"(",
"jsoninfo",
".",
"Getvalue",
"(",
"'mid'",
")",
",",
"jsoninfo",
".",
"Getvalue",
"(",
"'author'",
")",
")",
"video",
".",
"page",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'pages'",
")",
"video",
".",
"date",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'created_at'",
")",
"video",
".",
"credit",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'credit'",
")",
"video",
".",
"coin",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'coins'",
")",
"video",
".",
"spid",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'spid'",
")",
"video",
".",
"cid",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'cid'",
")",
"video",
".",
"offsite",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'offsite'",
")",
"video",
".",
"partname",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'partname'",
")",
"video",
".",
"src",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'src'",
")",
"video",
".",
"tid",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'tid'",
")",
"video",
".",
"typename",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'typename'",
")",
"video",
".",
"instant_server",
"=",
"jsoninfo",
".",
"Getvalue",
"(",
"'instant_server'",
")",
"return",
"video"
] |
https://github.com/Vespa314/bilibili-api/blob/42b6b90aa7c141f5cfb0fdc754435518106f6966/bilibili-video/GetAssDanmaku.py#L316-L347
|
|||
Cadene/tensorflow-model-zoo.torch
|
990b10ffc22d4c8eacb2a502f20415b4f70c74c2
|
models/research/learned_optimizer/optimizer/hierarchical_rnn.py
|
python
|
HierarchicalRNN._compute_scaled_and_ms_grads
|
(self, grad, state)
|
return grads_scaled, mean_squared_gradients, grads_accum
|
Computes the scaled gradient and the mean squared gradients.
Gradients are also accumulated across different timescales if appropriate.
Args:
grad: The gradient tensor for this layer.
state: The optimizer state for this layer.
Returns:
The scaled gradients, mean squared gradients, and accumulated gradients.
|
Computes the scaled gradient and the mean squared gradients.
|
[
"Computes",
"the",
"scaled",
"gradient",
"and",
"the",
"mean",
"squared",
"gradients",
"."
] |
def _compute_scaled_and_ms_grads(self, grad, state):
"""Computes the scaled gradient and the mean squared gradients.
Gradients are also accumulated across different timescales if appropriate.
Args:
grad: The gradient tensor for this layer.
state: The optimizer state for this layer.
Returns:
The scaled gradients, mean squared gradients, and accumulated gradients.
"""
input_decays = [state["inp_decay"]]
scale_decays = [state["scl_decay"]]
if self.use_multiple_scale_decays and self.num_gradient_scales > 1:
for i in range(self.num_gradient_scales - 1):
scale_decays.append(tf.sqrt(scale_decays[i]))
for i in range(self.num_gradient_scales - 1):
# Each accumulator on twice the timescale of the one before.
input_decays.append(tf.sqrt(input_decays[i]))
grads_accum = []
grads_scaled = []
mean_squared_gradients = []
# populate the scaled gradients and associated mean_squared values
if self.num_gradient_scales > 0:
for i, decay in enumerate(input_decays):
if self.num_gradient_scales == 1:
# We don't accumulate if no scales, just take the current gradient.
grad_accum = grad
else:
# The state vars are 1-indexed.
old_accum = state["grad_accum{}".format(i + 1)]
grad_accum = grad * (1. - decay) + old_accum * decay
grads_accum.append(grad_accum)
sd = scale_decays[i if self.use_multiple_scale_decays else 0]
grad_scaled, ms = utils.rms_scaling(grad_accum, sd,
state["ms{}".format(i + 1)],
update_ms=True)
grads_scaled.append(grad_scaled)
mean_squared_gradients.append(ms)
return grads_scaled, mean_squared_gradients, grads_accum
|
[
"def",
"_compute_scaled_and_ms_grads",
"(",
"self",
",",
"grad",
",",
"state",
")",
":",
"input_decays",
"=",
"[",
"state",
"[",
"\"inp_decay\"",
"]",
"]",
"scale_decays",
"=",
"[",
"state",
"[",
"\"scl_decay\"",
"]",
"]",
"if",
"self",
".",
"use_multiple_scale_decays",
"and",
"self",
".",
"num_gradient_scales",
">",
"1",
":",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"num_gradient_scales",
"-",
"1",
")",
":",
"scale_decays",
".",
"append",
"(",
"tf",
".",
"sqrt",
"(",
"scale_decays",
"[",
"i",
"]",
")",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"num_gradient_scales",
"-",
"1",
")",
":",
"# Each accumulator on twice the timescale of the one before.",
"input_decays",
".",
"append",
"(",
"tf",
".",
"sqrt",
"(",
"input_decays",
"[",
"i",
"]",
")",
")",
"grads_accum",
"=",
"[",
"]",
"grads_scaled",
"=",
"[",
"]",
"mean_squared_gradients",
"=",
"[",
"]",
"# populate the scaled gradients and associated mean_squared values",
"if",
"self",
".",
"num_gradient_scales",
">",
"0",
":",
"for",
"i",
",",
"decay",
"in",
"enumerate",
"(",
"input_decays",
")",
":",
"if",
"self",
".",
"num_gradient_scales",
"==",
"1",
":",
"# We don't accumulate if no scales, just take the current gradient.",
"grad_accum",
"=",
"grad",
"else",
":",
"# The state vars are 1-indexed.",
"old_accum",
"=",
"state",
"[",
"\"grad_accum{}\"",
".",
"format",
"(",
"i",
"+",
"1",
")",
"]",
"grad_accum",
"=",
"grad",
"*",
"(",
"1.",
"-",
"decay",
")",
"+",
"old_accum",
"*",
"decay",
"grads_accum",
".",
"append",
"(",
"grad_accum",
")",
"sd",
"=",
"scale_decays",
"[",
"i",
"if",
"self",
".",
"use_multiple_scale_decays",
"else",
"0",
"]",
"grad_scaled",
",",
"ms",
"=",
"utils",
".",
"rms_scaling",
"(",
"grad_accum",
",",
"sd",
",",
"state",
"[",
"\"ms{}\"",
".",
"format",
"(",
"i",
"+",
"1",
")",
"]",
",",
"update_ms",
"=",
"True",
")",
"grads_scaled",
".",
"append",
"(",
"grad_scaled",
")",
"mean_squared_gradients",
".",
"append",
"(",
"ms",
")",
"return",
"grads_scaled",
",",
"mean_squared_gradients",
",",
"grads_accum"
] |
https://github.com/Cadene/tensorflow-model-zoo.torch/blob/990b10ffc22d4c8eacb2a502f20415b4f70c74c2/models/research/learned_optimizer/optimizer/hierarchical_rnn.py#L437-L482
|
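A NumPy sketch of the accumulation rule above. Each accumulator is an exponential moving average grad * (1 - decay) + old * decay, and taking sqrt(decay) pushes the decay toward 1, roughly doubling the effective timescale 1 / (1 - decay) for the next accumulator:

import numpy as np

def update_accumulators(grad, old_accums, base_decay):
    decay, out = base_decay, []
    for old in old_accums:
        out.append(grad * (1.0 - decay) + old * decay)
        decay = np.sqrt(decay)  # next accumulator: ~2x the timescale
    return out

print(update_accumulators(1.0, [0.0, 0.0], 0.81))  # ~[0.19, 0.10]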
|
python-visualization/folium
|
67aab11039cd990d73fdf14566380286835ff84b
|
folium/utilities.py
|
python
|
parse_options
|
(**kwargs)
|
return {camelize(key): value
for key, value in kwargs.items()
if value is not None}
|
Return a dict with lower-camelcase keys and non-None values.
|
Return a dict with lower-camelcase keys and non-None values.
|
[
"Return",
"a",
"dict",
"with",
"lower",
"-",
"camelcase",
"keys",
"and",
"non",
"-",
"None",
"values",
".."
] |
def parse_options(**kwargs):
"""Return a dict with lower-camelcase keys and non-None values.."""
return {camelize(key): value
for key, value in kwargs.items()
if value is not None}
|
[
"def",
"parse_options",
"(",
"*",
"*",
"kwargs",
")",
":",
"return",
"{",
"camelize",
"(",
"key",
")",
":",
"value",
"for",
"key",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
"if",
"value",
"is",
"not",
"None",
"}"
] |
https://github.com/python-visualization/folium/blob/67aab11039cd990d73fdf14566380286835ff84b/folium/utilities.py#L471-L475
|
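A self-contained equivalent; this camelize is a simplified stand-in for folium.utilities.camelize and handles only snake_case input:

def camelize(key):
    # 'fill_color' -> 'fillColor'
    first, *rest = key.split('_')
    return first + ''.join(part.title() for part in rest)

def parse_options(**kwargs):
    return {camelize(k): v for k, v in kwargs.items() if v is not None}

print(parse_options(fill_color='red', weight=None))  # {'fillColor': 'red'}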
|
GadgetReactor/pyHS100
|
e03c192c8ca5a22116fd7605151d8bb10d0255e1
|
pyHS100/smartstrip.py
|
python
|
SmartStrip.set_alias
|
(self, alias: str, *, index: int = -1)
|
Sets the alias for a plug
:param index: plug index
:param alias: new alias
:raises SmartDeviceException: on error
:raises SmartStripException: index out of bounds
|
Sets the alias for a plug
|
[
"Sets",
"the",
"alias",
"for",
"a",
"plug"
] |
def set_alias(self, alias: str, *, index: int = -1):
"""Sets the alias for a plug
:param index: plug index
:param alias: new alias
:raises SmartDeviceException: on error
:raises SmartStripException: index out of bounds
"""
# Renaming the whole strip
if index < 0:
return super().set_alias(alias)
self.raise_for_index(index)
self.plugs[index].set_alias(alias)
|
[
"def",
"set_alias",
"(",
"self",
",",
"alias",
":",
"str",
",",
"*",
",",
"index",
":",
"int",
"=",
"-",
"1",
")",
":",
"# Renaming the whole strip",
"if",
"index",
"<",
"0",
":",
"return",
"super",
"(",
")",
".",
"set_alias",
"(",
"alias",
")",
"self",
".",
"raise_for_index",
"(",
"index",
")",
"self",
".",
"plugs",
"[",
"index",
"]",
".",
"set_alias",
"(",
"alias",
")"
] |
https://github.com/GadgetReactor/pyHS100/blob/e03c192c8ca5a22116fd7605151d8bb10d0255e1/pyHS100/smartstrip.py#L315-L328
|
||
ZENGXH/DMM_Net
|
a6308688cbcf411db9072aa68efbe485dde02a9b
|
dmm/dataloader/collate.py
|
python
|
cocotrain_collate
|
(batch)
|
return [batch_imgs, batch_tar, transposed[2], transposed[3]]
|
r"""Puts each data field into a tensor with outer dimension batch size
batch: [ (imgs, targets, seq_name, starting_frame), # for batch 1
(....),
]
imgs: list of Tensor
imgs of different batches may have different lengths
|
r"""Puts each data field into a tensor with outer dimension batch size
batch: [ (imgs, targets, seq_name, starting_frame), # for batch 1
(....),
]
imgs: list of Tensor
imgs of different batches may have different lengths
|
[
"r",
"Puts",
"each",
"data",
"field",
"into",
"a",
"tensor",
"with",
"outer",
"dimension",
"batch",
"size",
"batch",
":",
"[",
"(",
"imgs",
"targets",
"seq_name",
"starting_frame",
")",
"#",
"for",
"batch",
"1",
"(",
"....",
")",
"]",
"imgs",
":",
"list",
"of",
"Tensor",
"imgs",
"of",
"different",
"batch",
"may",
"has",
"different",
"length"
] |
def cocotrain_collate(batch):
r"""Puts each data field into a tensor with outer dimension batch size
batch: [ (imgs, targets, seq_name, starting_frame), # for batch 1
(....),
]
imgs: list of Tensor
imgs of different batches may have different lengths
"""
#elem_type = type(batch[0])
transposed = list(zip(*batch))
imgs = transposed[0] # expect: list of list of Tensor
targets = transposed[1]
#for im in imgs:
# logging.info('len {}'.format(len(im)))
assert(type(imgs) == tuple), \
'type: {}, len {} BatchContent {}; shape {}'.format(type(imgs), len(imgs), len(transposed), len(imgs[0]))
imgs = list(imgs)
assert(type(imgs[0]) == list), type(imgs[0])
B = len(batch)
CHECKEQ(B, len(imgs))
#if not args.pad_video:
# return default_collate(batch)
# max_len = [len(vid) for vid in imgs]
# max_len = np.array(max_len).max()
# create empty frames
# input_shape = imgs[0][0].shape
# empty_frame = imgs[0][0].new_zeros(input_shape)
targets = list(targets)
targets = [[ torch.from_numpy(tar_cur_frame) for tar_cur_frame in target_cur_vid[0]] for target_cur_vid in targets]
# empty_target = targets[0][0].new_zeros(targets[0][0].shape)
for b in range(B):
#while len(imgs[b]) < max_len:
# imgs[b].append(empty_frame)
# targets[b].append(empty_target)
imgs[b] = torch.stack(imgs[b]) # Len, D, H, W
targets[b] = torch.stack(targets[b])
batch_imgs = torch.stack(imgs) # B, Len, D, H, W
batch_tar = torch.stack(targets)
CHECK5D(batch_imgs)
return [batch_imgs, batch_tar, transposed[2], transposed[3]]
|
[
"def",
"cocotrain_collate",
"(",
"batch",
")",
":",
"#elem_type = type(batch[0])",
"transposed",
"=",
"list",
"(",
"zip",
"(",
"*",
"batch",
")",
")",
"imgs",
"=",
"transposed",
"[",
"0",
"]",
"# expect: list of list of Tensor ",
"targets",
"=",
"transposed",
"[",
"1",
"]",
"#for im in imgs:",
"# logging.info('len {}'.format(len(im)))",
"assert",
"(",
"type",
"(",
"imgs",
")",
"==",
"tuple",
")",
",",
"'type: {}, len {} BatchContent {}; shape {}'",
".",
"format",
"(",
"type",
"(",
"imgs",
")",
",",
"len",
"(",
"imgs",
")",
",",
"len",
"(",
"transposed",
")",
",",
"len",
"(",
"imgs",
"[",
"0",
"]",
")",
")",
"imgs",
"=",
"list",
"(",
"imgs",
")",
"assert",
"(",
"type",
"(",
"imgs",
"[",
"0",
"]",
")",
"==",
"list",
")",
",",
"type",
"(",
"imgs",
"[",
"0",
"]",
")",
"B",
"=",
"len",
"(",
"batch",
")",
"CHECKEQ",
"(",
"B",
",",
"len",
"(",
"imgs",
")",
")",
"#if not args.pad_video:",
"# return default_collate(batch)",
"# max_len = [len(vid) for vid in imgs]",
"# max_len = np.array(max_len).max() ",
"# create empty frames ",
"# input_shape = imgs[0][0].shape ",
"# empty_frame = imgs[0][0].new_zeros(input_shape)",
"targets",
"=",
"list",
"(",
"targets",
")",
"targets",
"=",
"[",
"[",
"torch",
".",
"from_numpy",
"(",
"tar_cur_frame",
")",
"for",
"tar_cur_frame",
"in",
"target_cur_vid",
"[",
"0",
"]",
"]",
"for",
"target_cur_vid",
"in",
"targets",
"]",
"# empty_target = targets[0][0].new_zeros(targets[0][0].shape)",
"for",
"b",
"in",
"range",
"(",
"B",
")",
":",
"#while len(imgs[b]) < max_len:",
"# imgs[b].append(empty_frame)",
"# targets[b].append(empty_target)",
"imgs",
"[",
"b",
"]",
"=",
"torch",
".",
"stack",
"(",
"imgs",
"[",
"b",
"]",
")",
"# Len, D, H, W ",
"targets",
"[",
"b",
"]",
"=",
"torch",
".",
"stack",
"(",
"targets",
"[",
"b",
"]",
")",
"batch_imgs",
"=",
"torch",
".",
"stack",
"(",
"imgs",
")",
"# B, Len, D, H, W",
"batch_tar",
"=",
"torch",
".",
"stack",
"(",
"targets",
")",
"CHECK5D",
"(",
"batch_imgs",
")",
"return",
"[",
"batch_imgs",
",",
"batch_tar",
",",
"transposed",
"[",
"2",
"]",
",",
"transposed",
"[",
"3",
"]",
"]"
] |
https://github.com/ZENGXH/DMM_Net/blob/a6308688cbcf411db9072aa68efbe485dde02a9b/dmm/dataloader/collate.py#L80-L124
|
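A minimal sketch of the stacking step: with the padding branch commented out above, every sample must already hold the same number of frames for torch.stack to succeed.

import torch

def stack_batch(list_of_frame_lists):
    vids = [torch.stack(frames) for frames in list_of_frame_lists]  # each: Len, D, H, W
    return torch.stack(vids)                                        # B, Len, D, H, W

batch = [[torch.zeros(3, 4, 4) for _ in range(2)] for _ in range(5)]
print(stack_batch(batch).shape)  # torch.Size([5, 2, 3, 4, 4])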
|
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/light/__init__.py
|
python
|
LightEntity.color_mode
|
(self)
|
return self._attr_color_mode
|
Return the color mode of the light.
|
Return the color mode of the light.
|
[
"Return",
"the",
"color",
"mode",
"of",
"the",
"light",
"."
] |
def color_mode(self) -> str | None:
"""Return the color mode of the light."""
return self._attr_color_mode
|
[
"def",
"color_mode",
"(",
"self",
")",
"->",
"str",
"|",
"None",
":",
"return",
"self",
".",
"_attr_color_mode"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/light/__init__.py#L715-L717
|
|
CedricGuillemet/Imogen
|
ee417b42747ed5b46cb11b02ef0c3630000085b3
|
bin/Lib/ipaddress.py
|
python
|
_collapse_addresses_internal
|
(addresses)
|
Loops through the addresses, collapsing concurrent netblocks.
Example:
ip1 = IPv4Network('192.0.2.0/26')
ip2 = IPv4Network('192.0.2.64/26')
ip3 = IPv4Network('192.0.2.128/26')
ip4 = IPv4Network('192.0.2.192/26')
_collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
[IPv4Network('192.0.2.0/24')]
This shouldn't be called directly; it is called via
collapse_addresses([]).
Args:
addresses: A list of IPv4Network's or IPv6Network's
Returns:
A list of IPv4Network's or IPv6Network's depending on what we were
passed.
|
Loops through the addresses, collapsing concurrent netblocks.
|
[
"Loops",
"through",
"the",
"addresses",
"collapsing",
"concurrent",
"netblocks",
"."
] |
def _collapse_addresses_internal(addresses):
"""Loops through the addresses, collapsing concurrent netblocks.
Example:
ip1 = IPv4Network('192.0.2.0/26')
ip2 = IPv4Network('192.0.2.64/26')
ip3 = IPv4Network('192.0.2.128/26')
ip4 = IPv4Network('192.0.2.192/26')
_collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
[IPv4Network('192.0.2.0/24')]
This shouldn't be called directly; it is called via
collapse_addresses([]).
Args:
addresses: A list of IPv4Network's or IPv6Network's
Returns:
A list of IPv4Network's or IPv6Network's depending on what we were
passed.
"""
# First merge
to_merge = list(addresses)
subnets = {}
while to_merge:
net = to_merge.pop()
supernet = net.supernet()
existing = subnets.get(supernet)
if existing is None:
subnets[supernet] = net
elif existing != net:
# Merge consecutive subnets
del subnets[supernet]
to_merge.append(supernet)
# Then iterate over resulting networks, skipping subsumed subnets
last = None
for net in sorted(subnets.values()):
if last is not None:
# Since they are sorted, last.network_address <= net.network_address
# is a given.
if last.broadcast_address >= net.broadcast_address:
continue
yield net
last = net
|
[
"def",
"_collapse_addresses_internal",
"(",
"addresses",
")",
":",
"# First merge",
"to_merge",
"=",
"list",
"(",
"addresses",
")",
"subnets",
"=",
"{",
"}",
"while",
"to_merge",
":",
"net",
"=",
"to_merge",
".",
"pop",
"(",
")",
"supernet",
"=",
"net",
".",
"supernet",
"(",
")",
"existing",
"=",
"subnets",
".",
"get",
"(",
"supernet",
")",
"if",
"existing",
"is",
"None",
":",
"subnets",
"[",
"supernet",
"]",
"=",
"net",
"elif",
"existing",
"!=",
"net",
":",
"# Merge consecutive subnets",
"del",
"subnets",
"[",
"supernet",
"]",
"to_merge",
".",
"append",
"(",
"supernet",
")",
"# Then iterate over resulting networks, skipping subsumed subnets",
"last",
"=",
"None",
"for",
"net",
"in",
"sorted",
"(",
"subnets",
".",
"values",
"(",
")",
")",
":",
"if",
"last",
"is",
"not",
"None",
":",
"# Since they are sorted, last.network_address <= net.network_address",
"# is a given.",
"if",
"last",
".",
"broadcast_address",
">=",
"net",
".",
"broadcast_address",
":",
"continue",
"yield",
"net",
"last",
"=",
"net"
] |
https://github.com/CedricGuillemet/Imogen/blob/ee417b42747ed5b46cb11b02ef0c3630000085b3/bin/Lib/ipaddress.py#L257-L303
|
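The documented entry point for this generator is collapse_addresses; the docstring example runs as-is through it:

import ipaddress

nets = [ipaddress.ip_network('192.0.2.%d/26' % off) for off in (0, 64, 128, 192)]
print(list(ipaddress.collapse_addresses(nets)))
# [IPv4Network('192.0.2.0/24')]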
||
scipy/scipy
|
e0a749f01e79046642ccfdc419edbf9e7ca141ad
|
scipy/ndimage/_filters.py
|
python
|
uniform_filter
|
(input, size=3, output=None, mode="reflect",
cval=0.0, origin=0)
|
return output
|
Multidimensional uniform filter.
Parameters
----------
%(input)s
size : int or sequence of ints, optional
The sizes of the uniform filter are given for each axis as a
sequence, or as a single number, in which case the size is
equal for all axes.
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
uniform_filter : ndarray
Filtered array. Has the same shape as `input`.
Notes
-----
The multidimensional filter is implemented as a sequence of
1-D uniform filters. The intermediate arrays are stored
in the same data type as the output. Therefore, for output types
with a limited precision, the results may be imprecise because
intermediate results may be stored with insufficient precision.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.uniform_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
|
Multidimensional uniform filter.
|
[
"Multidimensional",
"uniform",
"filter",
"."
] |
def uniform_filter(input, size=3, output=None, mode="reflect",
cval=0.0, origin=0):
"""Multidimensional uniform filter.
Parameters
----------
%(input)s
size : int or sequence of ints, optional
The sizes of the uniform filter are given for each axis as a
sequence, or as a single number, in which case the size is
equal for all axes.
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
uniform_filter : ndarray
Filtered array. Has the same shape as `input`.
Notes
-----
The multidimensional filter is implemented as a sequence of
1-D uniform filters. The intermediate arrays are stored
in the same data type as the output. Therefore, for output types
with a limited precision, the results may be imprecise because
intermediate results may be stored with insufficient precision.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.uniform_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
input = numpy.asarray(input)
output = _ni_support._get_output(output, input,
complex_output=input.dtype.kind == 'c')
sizes = _ni_support._normalize_sequence(size, input.ndim)
origins = _ni_support._normalize_sequence(origin, input.ndim)
modes = _ni_support._normalize_sequence(mode, input.ndim)
axes = list(range(input.ndim))
axes = [(axes[ii], sizes[ii], origins[ii], modes[ii])
for ii in range(len(axes)) if sizes[ii] > 1]
if len(axes) > 0:
for axis, size, origin, mode in axes:
uniform_filter1d(input, int(size), axis, output, mode,
cval, origin)
input = output
else:
output[...] = input[...]
return output
|
[
"def",
"uniform_filter",
"(",
"input",
",",
"size",
"=",
"3",
",",
"output",
"=",
"None",
",",
"mode",
"=",
"\"reflect\"",
",",
"cval",
"=",
"0.0",
",",
"origin",
"=",
"0",
")",
":",
"input",
"=",
"numpy",
".",
"asarray",
"(",
"input",
")",
"output",
"=",
"_ni_support",
".",
"_get_output",
"(",
"output",
",",
"input",
",",
"complex_output",
"=",
"input",
".",
"dtype",
".",
"kind",
"==",
"'c'",
")",
"sizes",
"=",
"_ni_support",
".",
"_normalize_sequence",
"(",
"size",
",",
"input",
".",
"ndim",
")",
"origins",
"=",
"_ni_support",
".",
"_normalize_sequence",
"(",
"origin",
",",
"input",
".",
"ndim",
")",
"modes",
"=",
"_ni_support",
".",
"_normalize_sequence",
"(",
"mode",
",",
"input",
".",
"ndim",
")",
"axes",
"=",
"list",
"(",
"range",
"(",
"input",
".",
"ndim",
")",
")",
"axes",
"=",
"[",
"(",
"axes",
"[",
"ii",
"]",
",",
"sizes",
"[",
"ii",
"]",
",",
"origins",
"[",
"ii",
"]",
",",
"modes",
"[",
"ii",
"]",
")",
"for",
"ii",
"in",
"range",
"(",
"len",
"(",
"axes",
")",
")",
"if",
"sizes",
"[",
"ii",
"]",
">",
"1",
"]",
"if",
"len",
"(",
"axes",
")",
">",
"0",
":",
"for",
"axis",
",",
"size",
",",
"origin",
",",
"mode",
"in",
"axes",
":",
"uniform_filter1d",
"(",
"input",
",",
"int",
"(",
"size",
")",
",",
"axis",
",",
"output",
",",
"mode",
",",
"cval",
",",
"origin",
")",
"input",
"=",
"output",
"else",
":",
"output",
"[",
"...",
"]",
"=",
"input",
"[",
"...",
"]",
"return",
"output"
] |
https://github.com/scipy/scipy/blob/e0a749f01e79046642ccfdc419edbf9e7ca141ad/scipy/ndimage/_filters.py#L913-L972
|
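The separability described in the Notes can be checked directly: the N-D filter matches applying uniform_filter1d along each axis in turn (same size, default reflect mode).

import numpy as np
from scipy import ndimage

a = np.random.rand(32, 32)
direct = ndimage.uniform_filter(a, size=5)
sequential = ndimage.uniform_filter1d(ndimage.uniform_filter1d(a, 5, axis=0), 5, axis=1)
print(np.allclose(direct, sequential))  # True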