nwo
stringlengths 5
106
| sha
stringlengths 40
40
| path
stringlengths 4
174
| language
stringclasses 1
value | identifier
stringlengths 1
140
| parameters
stringlengths 0
87.7k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
426k
| docstring
stringlengths 0
64.3k
| docstring_summary
stringlengths 0
26.3k
| docstring_tokens
list | function
stringlengths 18
4.83M
| function_tokens
list | url
stringlengths 83
304
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
whoosh-community/whoosh
|
5421f1ab3bb802114105b3181b7ce4f44ad7d0bb
|
src/whoosh/matching/mcore.py
|
python
|
Matcher.id
|
(self)
|
Returns the ID of the current posting.
|
Returns the ID of the current posting.
|
[
"Returns",
"the",
"ID",
"of",
"the",
"current",
"posting",
"."
] |
def id(self):
    """Return the ID of the posting this matcher is currently positioned at.

    Abstract: concrete Matcher subclasses must override this method.
    """
    raise NotImplementedError
|
[
"def",
"id",
"(",
"self",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/whoosh-community/whoosh/blob/5421f1ab3bb802114105b3181b7ce4f44ad7d0bb/src/whoosh/matching/mcore.py#L195-L199
|
||
RaphielGang/Telegram-Paperplane
|
d9e6c466902dd573ddf8c805e9dc484f972a62f1
|
userbot/modules/chat.py
|
python
|
kickme
|
(leave)
|
Basically it's .kickme command
|
Basically it's .kickme command
|
[
"Basically",
"it",
"s",
".",
"kickme",
"command"
] |
async def kickme(leave):
    """Leave the current chat — implements the ``.kickme`` command."""
    chat_id = leave.chat_id
    await leave.edit("`Nope, no, no, I go away`")
    await bot(LeaveChannelRequest(chat_id))
|
[
"async",
"def",
"kickme",
"(",
"leave",
")",
":",
"await",
"leave",
".",
"edit",
"(",
"\"`Nope, no, no, I go away`\"",
")",
"await",
"bot",
"(",
"LeaveChannelRequest",
"(",
"leave",
".",
"chat_id",
")",
")"
] |
https://github.com/RaphielGang/Telegram-Paperplane/blob/d9e6c466902dd573ddf8c805e9dc484f972a62f1/userbot/modules/chat.py#L70-L73
|
||
mrlesmithjr/Ansible
|
d44f0dc0d942bdf3bf7334b307e6048f0ee16e36
|
roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/baseparser.py
|
python
|
ConfigOptionParser.get_environ_vars
|
(self)
|
Returns a generator with all environmental vars with prefix PIP_
|
Returns a generator with all environmental vars with prefix PIP_
|
[
"Returns",
"a",
"generator",
"with",
"all",
"environmental",
"vars",
"with",
"prefix",
"PIP_"
] |
def get_environ_vars(self):
    """Yield ``(name, value)`` pairs for environment variables with prefix PIP_.

    The prefix is stripped from each name and the remainder lowercased.
    """
    prefixed = (
        (name, value)
        for name, value in os.environ.items()
        if _environ_prefix_re.search(name)
    )
    for name, value in prefixed:
        yield (_environ_prefix_re.sub("", name).lower(), value)
|
[
"def",
"get_environ_vars",
"(",
"self",
")",
":",
"for",
"key",
",",
"val",
"in",
"os",
".",
"environ",
".",
"items",
"(",
")",
":",
"if",
"_environ_prefix_re",
".",
"search",
"(",
"key",
")",
":",
"yield",
"(",
"_environ_prefix_re",
".",
"sub",
"(",
"\"\"",
",",
"key",
")",
".",
"lower",
"(",
")",
",",
"val",
")"
] |
https://github.com/mrlesmithjr/Ansible/blob/d44f0dc0d942bdf3bf7334b307e6048f0ee16e36/roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/baseparser.py#L270-L274
|
||
kovidgoyal/calibre
|
2b41671370f2a9eb1109b9ae901ccf915f1bd0c8
|
src/calibre/db/cache.py
|
python
|
Cache.field_ids_for
|
(self, name, book_id)
|
Return the ids (as a tuple) for the values that the field ``name`` has on the book
identified by ``book_id``. If there are no values, or no such book, or
no such field, an empty tuple is returned.
|
Return the ids (as a tuple) for the values that the field ``name`` has on the book
identified by ``book_id``. If there are no values, or no such book, or
no such field, an empty tuple is returned.
|
[
"Return",
"the",
"ids",
"(",
"as",
"a",
"tuple",
")",
"for",
"the",
"values",
"that",
"the",
"field",
"name",
"has",
"on",
"the",
"book",
"identified",
"by",
"book_id",
".",
"If",
"there",
"are",
"no",
"values",
"or",
"no",
"such",
"book",
"or",
"no",
"such",
"field",
"an",
"empty",
"tuple",
"is",
"returned",
"."
] |
def field_ids_for(self, name, book_id):
    """Return the ids (as a tuple) for the values that the field ``name``
    has on the book identified by ``book_id``.

    An empty tuple is returned when the book, the field, or any values
    for it do not exist.
    """
    try:
        field = self.fields[name]
        return field.ids_for_book(book_id)
    except (KeyError, IndexError):
        return ()
|
[
"def",
"field_ids_for",
"(",
"self",
",",
"name",
",",
"book_id",
")",
":",
"try",
":",
"return",
"self",
".",
"fields",
"[",
"name",
"]",
".",
"ids_for_book",
"(",
"book_id",
")",
"except",
"(",
"KeyError",
",",
"IndexError",
")",
":",
"return",
"(",
")"
] |
https://github.com/kovidgoyal/calibre/blob/2b41671370f2a9eb1109b9ae901ccf915f1bd0c8/src/calibre/db/cache.py#L503-L512
|
||
TheAlgorithms/Python
|
9af2eef9b3761bf51580dedfb6fa7136ca0c5c2c
|
project_euler/problem_035/sol1.py
|
python
|
is_prime
|
(n: int)
|
return seive[n]
|
For 2 <= n <= 1000000, return True if n is prime.
>>> is_prime(87)
False
>>> is_prime(23)
True
>>> is_prime(25363)
False
|
For 2 <= n <= 1000000, return True if n is prime.
>>> is_prime(87)
False
>>> is_prime(23)
True
>>> is_prime(25363)
False
|
[
"For",
"2",
"<",
"=",
"n",
"<",
"=",
"1000000",
"return",
"True",
"if",
"n",
"is",
"prime",
".",
">>>",
"is_prime",
"(",
"87",
")",
"False",
">>>",
"is_prime",
"(",
"23",
")",
"True",
">>>",
"is_prime",
"(",
"25363",
")",
"False"
] |
def is_prime(n: int) -> bool:
    """
    For 2 <= n <= 1000000, return True if n is prime.
    >>> is_prime(87)
    False
    >>> is_prime(23)
    True
    >>> is_prime(25363)
    False
    """
    # Constant-time lookup into the module-level `seive` table (sic —
    # a precomputed sieve of Eratosthenes). Assumes the table covers
    # indices up to 1000000; n outside 2..1000000 is out of contract —
    # TODO confirm table bounds at the definition of `seive`.
    return seive[n]
|
[
"def",
"is_prime",
"(",
"n",
":",
"int",
")",
"->",
"bool",
":",
"return",
"seive",
"[",
"n",
"]"
] |
https://github.com/TheAlgorithms/Python/blob/9af2eef9b3761bf51580dedfb6fa7136ca0c5c2c/project_euler/problem_035/sol1.py#L29-L39
|
|
eirannejad/pyRevit
|
49c0b7eb54eb343458ce1365425e6552d0c47d44
|
site-packages/websocket/_core.py
|
python
|
WebSocket.recv
|
(self)
|
Receive string data(byte array) from the server.
return value: string(byte array) value.
|
Receive string data(byte array) from the server.
|
[
"Receive",
"string",
"data",
"(",
"byte",
"array",
")",
"from",
"the",
"server",
"."
] |
def recv(self):
    """
    Receive one message from the server.

    Returns a unicode string for text frames on Python 3 (decoded as
    UTF-8), the raw byte payload for binary frames (and Python 2 text
    frames), and '' for any other opcode.
    """
    with self.readlock:
        opcode, frame_data = self.recv_data()
    is_text = opcode == ABNF.OPCODE_TEXT
    if six.PY3 and is_text:
        return frame_data.decode("utf-8")
    if is_text or opcode == ABNF.OPCODE_BINARY:
        return frame_data
    return ''
|
[
"def",
"recv",
"(",
"self",
")",
":",
"with",
"self",
".",
"readlock",
":",
"opcode",
",",
"data",
"=",
"self",
".",
"recv_data",
"(",
")",
"if",
"six",
".",
"PY3",
"and",
"opcode",
"==",
"ABNF",
".",
"OPCODE_TEXT",
":",
"return",
"data",
".",
"decode",
"(",
"\"utf-8\"",
")",
"elif",
"opcode",
"==",
"ABNF",
".",
"OPCODE_TEXT",
"or",
"opcode",
"==",
"ABNF",
".",
"OPCODE_BINARY",
":",
"return",
"data",
"else",
":",
"return",
"''"
] |
https://github.com/eirannejad/pyRevit/blob/49c0b7eb54eb343458ce1365425e6552d0c47d44/site-packages/websocket/_core.py#L289-L302
|
||
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_flaskbb/lib/python2.7/site-packages/sqlalchemy/orm/attributes.py
|
python
|
ScalarAttributeImpl.fire_replace_event
|
(self, state, dict_, value, previous, initiator)
|
return value
|
[] |
def fire_replace_event(self, state, dict_, value, previous, initiator):
    """Invoke every registered ``set`` listener for a value replacement,
    threading the (possibly transformed) value through each listener in
    order, and return the final value."""
    for listener in self.dispatch.set:
        value = listener(
            state, value, previous, initiator or self._replace_token)
    return value
|
[
"def",
"fire_replace_event",
"(",
"self",
",",
"state",
",",
"dict_",
",",
"value",
",",
"previous",
",",
"initiator",
")",
":",
"for",
"fn",
"in",
"self",
".",
"dispatch",
".",
"set",
":",
"value",
"=",
"fn",
"(",
"state",
",",
"value",
",",
"previous",
",",
"initiator",
"or",
"self",
".",
"_replace_token",
")",
"return",
"value"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/sqlalchemy/orm/attributes.py#L712-L716
|
|||
scrapinghub/portia
|
606467d278eab2236afcb3d260cb03bf6fb906a0
|
slybot/slybot/plugins/scrapely_annotations/processors.py
|
python
|
ItemField.name
|
(self)
|
return getattr(self.extractor, u'name', self._field)
|
Field unique name.
|
Field unique name.
|
[
"Field",
"unique",
"name",
"."
] |
def name(self):
    """Unique name of the field.

    Prefers the extractor's ``name`` attribute and falls back to the raw
    field id when the extractor does not define one.
    """
    try:
        return self.extractor.name
    except AttributeError:
        return self._field
|
[
"def",
"name",
"(",
"self",
")",
":",
"return",
"getattr",
"(",
"self",
".",
"extractor",
",",
"u'name'",
",",
"self",
".",
"_field",
")"
] |
https://github.com/scrapinghub/portia/blob/606467d278eab2236afcb3d260cb03bf6fb906a0/slybot/slybot/plugins/scrapely_annotations/processors.py#L430-L432
|
|
CvvT/dumpDex
|
92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1
|
python/idc.py
|
python
|
NameEx
|
(fromaddr, ea)
|
Get visible name of program byte
This function returns name of byte as it is displayed on the screen.
If a name contains illegal characters, IDA replaces them by the
substitution character during displaying. See IDA.CFG for the
definition of the substitution character.
@param fromaddr: the referring address. May be BADADDR.
Allows to retrieve local label addresses in functions.
If a local name is not found, then a global name is
returned.
@param ea: linear address
@return: "" - byte has no name
|
Get visible name of program byte
|
[
"Get",
"visible",
"name",
"of",
"program",
"byte"
] |
def NameEx(fromaddr, ea):
    """
    Return the visible (on-screen) name of the program byte at ``ea``.

    The name is returned as IDA displays it; illegal characters are
    replaced by the substitution character (see IDA.CFG).
    @param fromaddr: the referring address. May be BADADDR.
                     Allows retrieval of local label addresses inside
                     functions; falls back to a global name when no
                     local name is found.
    @param ea: linear address
    @return: the name, or "" when the byte has no name
    """
    return idaapi.get_name(fromaddr, ea) or ""
|
[
"def",
"NameEx",
"(",
"fromaddr",
",",
"ea",
")",
":",
"name",
"=",
"idaapi",
".",
"get_name",
"(",
"fromaddr",
",",
"ea",
")",
"if",
"not",
"name",
":",
"return",
"\"\"",
"else",
":",
"return",
"name"
] |
https://github.com/CvvT/dumpDex/blob/92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1/python/idc.py#L2124-L2146
|
||
projectatomic/atomic
|
d5f3f19c4f18b24d5ccf47a10d39dbc99af4697a
|
Atomic/trust.py
|
python
|
Trust.get_gpg_id
|
(self, keys)
|
return keylist
|
Return GPG identity, either bracketed <email> or ID string
comma separated if more than one key
see gpg2 parsing documentation:
http://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
:param keys: list of gpg key file paths (keyPath) and/or inline key payload (keyData)
:return: comma-separated string of key ids or empty string
|
Return GPG identity, either bracketed <email> or ID string
comma separated if more than one key
see gpg2 parsing documentation:
http://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
:param keys: list of gpg key file paths (keyPath) and/or inline key payload (keyData)
:return: comma-separated string of key ids or empty string
|
[
"Return",
"GPG",
"identity",
"either",
"bracketed",
"<email",
">",
"or",
"ID",
"string",
"comma",
"separated",
"if",
"more",
"than",
"one",
"key",
"see",
"gpg2",
"parsing",
"documentation",
":",
"http",
":",
"//",
"git",
".",
"gnupg",
".",
"org",
"/",
"cgi",
"-",
"bin",
"/",
"gitweb",
".",
"cgi?p",
"=",
"gnupg",
".",
"git",
";",
"a",
"=",
"blob_plain",
";",
"f",
"=",
"doc",
"/",
"DETAILS",
":",
"param",
"keys",
":",
"list",
"of",
"gpg",
"key",
"file",
"paths",
"(",
"keyPath",
")",
"and",
"/",
"or",
"inline",
"key",
"payload",
"(",
"keyData",
")",
":",
"return",
":",
"comma",
"-",
"separated",
"string",
"of",
"key",
"ids",
"or",
"empty",
"string"
] |
def get_gpg_id(self, keys):
    """
    Return GPG identity, either bracketed <email> or ID string
    comma separated if more than one key
    see gpg2 parsing documentation:
    http://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS
    :param keys: list of gpg key file paths (keyPath) and/or inline key payload (keyData)
    :return: comma-separated string of key ids or empty string
    """
    if not keys:
        return ""
    (keylist, tmpkey) = None, None
    for key in keys:
        # Inline (base64 keyData) payloads are written to a temp file so
        # gpg2 can read them; filesystem paths are used as-is.
        if not os.path.exists(key):
            with tempfile.NamedTemporaryFile(delete=False, dir="/run/") as tmpkey:
                tmpkey.write(b64decode(key))
            key = tmpkey.name
        cmd = ["gpg2", "--with-colons", key]
        try:
            # Suppress gpg2 stderr unless running with --debug.
            stderr = None if self.args.debug else util.DEVNULL
            results = util.check_output(cmd, stderr=stderr).decode('utf-8')
        except util.FileNotFound:
            # gpg2 binary not available: treat as no output for this key.
            results = ""
        # Remove the temp file if one was created.
        # NOTE(review): tmpkey is never reset between iterations, so a
        # path-based key that follows an inline key re-checks the
        # already-removed temp file (harmless — guarded by exists());
        # confirm this is intended.
        if tmpkey:
            if os.path.exists(tmpkey.name):
                os.remove(tmpkey.name)
        lines = results.split('\n')
        for line in lines:
            # In gpg2 --with-colons output, field 10 (index 9) of
            # 'pub'/'uid' records is the user-id string.
            if line.startswith("uid:") or line.startswith("pub:"):
                uid = line.split(':')[9]
                if not uid:  # Newer gpg2 versions output dedicated 'uid'
                    continue  # lines and leave it blank on 'pub' lines.
                # bracketed email
                parsed_uid = uid.partition('<')[-1].rpartition('>')[0]
                if not parsed_uid:
                    parsed_uid = uid
                if keylist:
                    keylist = ",".join([keylist, parsed_uid])
                else:
                    keylist = parsed_uid
    # NOTE(review): returns None (not "") when no uid lines parsed,
    # despite the docstring saying "empty string" — callers appear to
    # rely only on falsiness; confirm before tightening.
    return keylist
|
[
"def",
"get_gpg_id",
"(",
"self",
",",
"keys",
")",
":",
"if",
"not",
"keys",
":",
"return",
"\"\"",
"(",
"keylist",
",",
"tmpkey",
")",
"=",
"None",
",",
"None",
"for",
"key",
"in",
"keys",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"key",
")",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"delete",
"=",
"False",
",",
"dir",
"=",
"\"/run/\"",
")",
"as",
"tmpkey",
":",
"tmpkey",
".",
"write",
"(",
"b64decode",
"(",
"key",
")",
")",
"key",
"=",
"tmpkey",
".",
"name",
"cmd",
"=",
"[",
"\"gpg2\"",
",",
"\"--with-colons\"",
",",
"key",
"]",
"try",
":",
"stderr",
"=",
"None",
"if",
"self",
".",
"args",
".",
"debug",
"else",
"util",
".",
"DEVNULL",
"results",
"=",
"util",
".",
"check_output",
"(",
"cmd",
",",
"stderr",
"=",
"stderr",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
"util",
".",
"FileNotFound",
":",
"results",
"=",
"\"\"",
"if",
"tmpkey",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"tmpkey",
".",
"name",
")",
":",
"os",
".",
"remove",
"(",
"tmpkey",
".",
"name",
")",
"lines",
"=",
"results",
".",
"split",
"(",
"'\\n'",
")",
"for",
"line",
"in",
"lines",
":",
"if",
"line",
".",
"startswith",
"(",
"\"uid:\"",
")",
"or",
"line",
".",
"startswith",
"(",
"\"pub:\"",
")",
":",
"uid",
"=",
"line",
".",
"split",
"(",
"':'",
")",
"[",
"9",
"]",
"if",
"not",
"uid",
":",
"# Newer gpg2 versions output dedicated 'uid'",
"continue",
"# lines and leave it blank on 'pub' lines.",
"# bracketed email",
"parsed_uid",
"=",
"uid",
".",
"partition",
"(",
"'<'",
")",
"[",
"-",
"1",
"]",
".",
"rpartition",
"(",
"'>'",
")",
"[",
"0",
"]",
"if",
"not",
"parsed_uid",
":",
"parsed_uid",
"=",
"uid",
"if",
"keylist",
":",
"keylist",
"=",
"\",\"",
".",
"join",
"(",
"[",
"keylist",
",",
"parsed_uid",
"]",
")",
"else",
":",
"keylist",
"=",
"parsed_uid",
"return",
"keylist"
] |
https://github.com/projectatomic/atomic/blob/d5f3f19c4f18b24d5ccf47a10d39dbc99af4697a/Atomic/trust.py#L487-L527
|
|
TencentCloud/tencentcloud-sdk-python
|
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
|
tencentcloud/bmvpc/v20180625/bmvpc_client.py
|
python
|
BmvpcClient.ModifyVpnConnectionAttribute
|
(self, request)
|
本接口(ModifyVpnConnectionAttribute)用于修改VPN通道。
:param request: Request instance for ModifyVpnConnectionAttribute.
:type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnConnectionAttributeRequest`
:rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnConnectionAttributeResponse`
|
本接口(ModifyVpnConnectionAttribute)用于修改VPN通道。
|
[
"本接口(ModifyVpnConnectionAttribute)用于修改VPN通道。"
] |
def ModifyVpnConnectionAttribute(self, request):
    """Modify a VPN tunnel (API action: ModifyVpnConnectionAttribute).

    :param request: Request instance for ModifyVpnConnectionAttribute.
    :type request: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnConnectionAttributeRequest`
    :rtype: :class:`tencentcloud.bmvpc.v20180625.models.ModifyVpnConnectionAttributeResponse`
    :raises TencentCloudSDKException: on an API-level error response or
        any local failure (serialization, transport, JSON decoding).
    """
    try:
        params = request._serialize()
        body = self.call("ModifyVpnConnectionAttribute", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            # Success: deserialize the payload into the typed model.
            model = models.ModifyVpnConnectionAttributeResponse()
            model._deserialize(response["Response"])
            return model
        else:
            # API-level error: surface code/message/request-id.
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # BUG FIX: Python 3 exceptions have no ``.message`` attribute,
            # so the old ``e.message`` raised AttributeError and masked
            # the real error. Report the original exception type and text.
            raise TencentCloudSDKException(type(e).__name__, str(e))
|
[
"def",
"ModifyVpnConnectionAttribute",
"(",
"self",
",",
"request",
")",
":",
"try",
":",
"params",
"=",
"request",
".",
"_serialize",
"(",
")",
"body",
"=",
"self",
".",
"call",
"(",
"\"ModifyVpnConnectionAttribute\"",
",",
"params",
")",
"response",
"=",
"json",
".",
"loads",
"(",
"body",
")",
"if",
"\"Error\"",
"not",
"in",
"response",
"[",
"\"Response\"",
"]",
":",
"model",
"=",
"models",
".",
"ModifyVpnConnectionAttributeResponse",
"(",
")",
"model",
".",
"_deserialize",
"(",
"response",
"[",
"\"Response\"",
"]",
")",
"return",
"model",
"else",
":",
"code",
"=",
"response",
"[",
"\"Response\"",
"]",
"[",
"\"Error\"",
"]",
"[",
"\"Code\"",
"]",
"message",
"=",
"response",
"[",
"\"Response\"",
"]",
"[",
"\"Error\"",
"]",
"[",
"\"Message\"",
"]",
"reqid",
"=",
"response",
"[",
"\"Response\"",
"]",
"[",
"\"RequestId\"",
"]",
"raise",
"TencentCloudSDKException",
"(",
"code",
",",
"message",
",",
"reqid",
")",
"except",
"Exception",
"as",
"e",
":",
"if",
"isinstance",
"(",
"e",
",",
"TencentCloudSDKException",
")",
":",
"raise",
"else",
":",
"raise",
"TencentCloudSDKException",
"(",
"e",
".",
"message",
",",
"e",
".",
"message",
")"
] |
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/bmvpc/v20180625/bmvpc_client.py#L1524-L1549
|
||
prody/ProDy
|
b24bbf58aa8fffe463c8548ae50e3955910e5b7f
|
prody/__init__.py
|
python
|
confProDy
|
(*args, **kwargs)
|
Configure ProDy.
|
Configure ProDy.
|
[
"Configure",
"ProDy",
"."
] |
def confProDy(*args, **kwargs):
    """Configure ProDy.

    Positional arguments are option names to *read*: a single name
    returns its bare value, several names return a list of values.
    Keyword arguments *set* options: each value is coerced to the type
    of the option's default, validated against its acceptable values
    (when defined), stored in SETTINGS (persisted to disk), and passed
    to the option's setter callback when one is registered.

    :raises KeyError: for an unknown option name (read or write)
    :raises TypeError: when a value cannot be coerced to the option type
    :raises ValueError: when a value is not among the acceptable values
    """
    if args:
        # Read mode: look up each requested option in the settings.
        values = []
        for option in args:
            try:
                values.append(SETTINGS[option])
            except KeyError:
                raise KeyError('{0:s} is not a valid configuration option'
                               .format(repr(option)))
        # A single query returns the bare value, not a 1-element list.
        if len(values) == 1:
            return values[0]
        else:
            return values
    for option, value in kwargs.items():
        try:
            default, acceptable, setter = CONFIGURATION[option]
        except KeyError:
            raise KeyError('{0:s} is not a valid configuration option'
                           .format(repr(option)))
        else:
            # Coerce to the type of the option's default value.
            try:
                value = type(default)(value)
            except ValueError:
                raise TypeError('{0:s} must be a {1:s}'
                                .format(option, type(default).__name__))
            if acceptable is not None and value not in acceptable:
                raise ValueError('{0:s} must be one of {1:s}'.format(option,
                                 joinRepr(acceptable, sort=True,
                                          last=', or ')))
            SETTINGS[option] = value
            LOGGER.info('ProDy is configured: {0:s}={1:s}'
                        .format(option, repr(value)))
            # Persist after each option so earlier settings survive a
            # later failure.
            SETTINGS.save()
            if setter is not None:
                setter(value)
|
[
"def",
"confProDy",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"args",
":",
"values",
"=",
"[",
"]",
"for",
"option",
"in",
"args",
":",
"try",
":",
"values",
".",
"append",
"(",
"SETTINGS",
"[",
"option",
"]",
")",
"except",
"KeyError",
":",
"raise",
"KeyError",
"(",
"'{0:s} is not a valid configuration option'",
".",
"format",
"(",
"repr",
"(",
"option",
")",
")",
")",
"if",
"len",
"(",
"values",
")",
"==",
"1",
":",
"return",
"values",
"[",
"0",
"]",
"else",
":",
"return",
"values",
"for",
"option",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"try",
":",
"default",
",",
"acceptable",
",",
"setter",
"=",
"CONFIGURATION",
"[",
"option",
"]",
"except",
"KeyError",
":",
"raise",
"KeyError",
"(",
"'{0:s} is not a valid configuration option'",
".",
"format",
"(",
"repr",
"(",
"option",
")",
")",
")",
"else",
":",
"try",
":",
"value",
"=",
"type",
"(",
"default",
")",
"(",
"value",
")",
"except",
"ValueError",
":",
"raise",
"TypeError",
"(",
"'{0:s} must be a {1:s}'",
".",
"format",
"(",
"option",
",",
"type",
"(",
"default",
")",
".",
"__name__",
")",
")",
"if",
"acceptable",
"is",
"not",
"None",
"and",
"value",
"not",
"in",
"acceptable",
":",
"raise",
"ValueError",
"(",
"'{0:s} must be one of {1:s}'",
".",
"format",
"(",
"option",
",",
"joinRepr",
"(",
"acceptable",
",",
"sort",
"=",
"True",
",",
"last",
"=",
"', or '",
")",
")",
")",
"SETTINGS",
"[",
"option",
"]",
"=",
"value",
"LOGGER",
".",
"info",
"(",
"'ProDy is configured: {0:s}={1:s}'",
".",
"format",
"(",
"option",
",",
"repr",
"(",
"value",
")",
")",
")",
"SETTINGS",
".",
"save",
"(",
")",
"if",
"setter",
"is",
"not",
"None",
":",
"setter",
"(",
"value",
")"
] |
https://github.com/prody/ProDy/blob/b24bbf58aa8fffe463c8548ae50e3955910e5b7f/prody/__init__.py#L164-L202
|
||
mgear-dev/mgear
|
06ddc26c5adb5eab07ca470c7fafa77404c8a1de
|
scripts/mgear/maya/pickWalk.py
|
python
|
walkRight
|
(node, add=False, multi=False)
|
Walk right
Arguments:
node (dagNode or list of dagNode): the starting object for the
pickwalk
add (bool, optional): If True add to selection
multi (bool, optional): If true, selects all the siblings
|
Walk right
|
[
"Walk",
"right"
] |
def walkRight(node, add=False, multi=False):
    """Pick-walk to the right of ``node``.

    Arguments:
        node (dagNode or list of dagNode): starting object(s) for the
            pickwalk.
        add (bool, optional): when True, add to the current selection
            instead of replacing it.
        multi (bool, optional): when True, select all the siblings.
    """
    _walk(node, "right", add, multi)
|
[
"def",
"walkRight",
"(",
"node",
",",
"add",
"=",
"False",
",",
"multi",
"=",
"False",
")",
":",
"_walk",
"(",
"node",
",",
"\"right\"",
",",
"add",
",",
"multi",
")"
] |
https://github.com/mgear-dev/mgear/blob/06ddc26c5adb5eab07ca470c7fafa77404c8a1de/scripts/mgear/maya/pickWalk.py#L464-L474
|
||
AutodeskRoboticsLab/Mimic
|
85447f0d346be66988303a6a054473d92f1ed6f4
|
mimic/scripts/extern/pyqtgraph_0_11_0/pyqtgraph/dockarea/DockArea.py
|
python
|
DockArea.saveState
|
(self)
|
return state
|
Return a serialized (storable) representation of the state of
all Docks in this DockArea.
|
Return a serialized (storable) representation of the state of
all Docks in this DockArea.
|
[
"Return",
"a",
"serialized",
"(",
"storable",
")",
"representation",
"of",
"the",
"state",
"of",
"all",
"Docks",
"in",
"this",
"DockArea",
"."
] |
def saveState(self):
    """Return a serializable dict describing all Docks in this DockArea.

    The dict has a 'main' entry (state of the top container, or None
    when there is none) and a 'float' list with one
    ``(state, (x, y, w, h))`` pair per floating temporary area.
    """
    top = self.topContainer
    main = None if top is None else self.childState(top)
    floats = []
    for area in self.tempAreas:
        rect = area.win.geometry()
        geometry = (rect.x(), rect.y(), rect.width(), rect.height())
        floats.append((area.saveState(), geometry))
    return {'main': main, 'float': floats}
|
[
"def",
"saveState",
"(",
"self",
")",
":",
"if",
"self",
".",
"topContainer",
"is",
"None",
":",
"main",
"=",
"None",
"else",
":",
"main",
"=",
"self",
".",
"childState",
"(",
"self",
".",
"topContainer",
")",
"state",
"=",
"{",
"'main'",
":",
"main",
",",
"'float'",
":",
"[",
"]",
"}",
"for",
"a",
"in",
"self",
".",
"tempAreas",
":",
"geo",
"=",
"a",
".",
"win",
".",
"geometry",
"(",
")",
"geo",
"=",
"(",
"geo",
".",
"x",
"(",
")",
",",
"geo",
".",
"y",
"(",
")",
",",
"geo",
".",
"width",
"(",
")",
",",
"geo",
".",
"height",
"(",
")",
")",
"state",
"[",
"'float'",
"]",
".",
"append",
"(",
"(",
"a",
".",
"saveState",
"(",
")",
",",
"geo",
")",
")",
"return",
"state"
] |
https://github.com/AutodeskRoboticsLab/Mimic/blob/85447f0d346be66988303a6a054473d92f1ed6f4/mimic/scripts/extern/pyqtgraph_0_11_0/pyqtgraph/dockarea/DockArea.py#L189-L204
|
|
leo-editor/leo-editor
|
383d6776d135ef17d73d935a2f0ecb3ac0e99494
|
leo/commands/commanderOutlineCommands.py
|
python
|
unmarkAll
|
(self, event=None)
|
Unmark all nodes in the entire outline.
|
Unmark all nodes in the entire outline.
|
[
"Unmark",
"all",
"nodes",
"in",
"the",
"entire",
"outline",
"."
] |
def unmarkAll(self, event=None):
    """Unmark all nodes in the entire outline.

    Clears the mark on every unique node, recording each clear with the
    undoer inside a single 'Unmark All' undo group, then fires the
    clear-all-marks hook and redraws the icons.
    """
    c, current, u = self, self.p, self.undoer
    undoType = 'Unmark All'
    if not current:
        return
    c.endEditing()
    u.beforeChangeGroup(current, undoType)
    changed = False
    p = None  # To keep pylint happy.
    for p in c.all_unique_positions():
        if p.isMarked():
            bunch = u.beforeMark(p, undoType)
            # c.clearMarked(p) # Very slow: calls a hook.
            p.v.clearMarked()
            p.setDirty()
            u.afterMark(p, undoType, bunch)
            changed = True
    if changed:
        # p here is the last position visited by the loop above.
        g.doHook("clear-all-marks", c=c, p=p)
        c.setChanged()
    u.afterChangeGroup(current, undoType)
    c.redraw_after_icons_changed()
|
[
"def",
"unmarkAll",
"(",
"self",
",",
"event",
"=",
"None",
")",
":",
"c",
",",
"current",
",",
"u",
"=",
"self",
",",
"self",
".",
"p",
",",
"self",
".",
"undoer",
"undoType",
"=",
"'Unmark All'",
"if",
"not",
"current",
":",
"return",
"c",
".",
"endEditing",
"(",
")",
"u",
".",
"beforeChangeGroup",
"(",
"current",
",",
"undoType",
")",
"changed",
"=",
"False",
"p",
"=",
"None",
"# To keep pylint happy.",
"for",
"p",
"in",
"c",
".",
"all_unique_positions",
"(",
")",
":",
"if",
"p",
".",
"isMarked",
"(",
")",
":",
"bunch",
"=",
"u",
".",
"beforeMark",
"(",
"p",
",",
"undoType",
")",
"# c.clearMarked(p) # Very slow: calls a hook.",
"p",
".",
"v",
".",
"clearMarked",
"(",
")",
"p",
".",
"setDirty",
"(",
")",
"u",
".",
"afterMark",
"(",
"p",
",",
"undoType",
",",
"bunch",
")",
"changed",
"=",
"True",
"if",
"changed",
":",
"g",
".",
"doHook",
"(",
"\"clear-all-marks\"",
",",
"c",
"=",
"c",
",",
"p",
"=",
"p",
")",
"c",
".",
"setChanged",
"(",
")",
"u",
".",
"afterChangeGroup",
"(",
"current",
",",
"undoType",
")",
"c",
".",
"redraw_after_icons_changed",
"(",
")"
] |
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/commands/commanderOutlineCommands.py#L1377-L1399
|
||
oracle/graalpython
|
577e02da9755d916056184ec441c26e00b70145c
|
graalpython/lib-python/3/xml/dom/minidom.py
|
python
|
ProcessingInstruction._set_nodeName
|
(self, value)
|
[] |
def _set_nodeName(self, value):
    """Setter backing ``nodeName``: for a processing instruction the node
    name *is* its target, so assigning one updates the other."""
    self.target = value
|
[
"def",
"_set_nodeName",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"target",
"=",
"value"
] |
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/xml/dom/minidom.py#L984-L985
|
||||
IronLanguages/ironpython3
|
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
|
Src/StdLib/Lib/asyncio/selector_events.py
|
python
|
_SelectorTransport.get_write_buffer_size
|
(self)
|
return len(self._buffer)
|
[] |
def get_write_buffer_size(self):
    """Return the number of bytes currently queued in the write buffer."""
    return len(self._buffer)
|
[
"def",
"get_write_buffer_size",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"_buffer",
")"
] |
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/asyncio/selector_events.py#L620-L621
|
|||
eth-brownie/brownie
|
754bda9f0a294b2beb86453d5eca4ff769a877c8
|
brownie/network/state.py
|
python
|
Chain.time
|
(self)
|
return int(time.time() + self._time_offset)
|
Return the current epoch time from the test RPC as an int
|
Return the current epoch time from the test RPC as an int
|
[
"Return",
"the",
"current",
"epoch",
"time",
"from",
"the",
"test",
"RPC",
"as",
"an",
"int"
] |
def time(self) -> int:
    """Return the current epoch time, as seen by the test RPC, as an int.

    The RPC clock is the real clock shifted by ``self._time_offset``.
    """
    shifted_epoch = time.time() + self._time_offset
    return int(shifted_epoch)
|
[
"def",
"time",
"(",
"self",
")",
"->",
"int",
":",
"return",
"int",
"(",
"time",
".",
"time",
"(",
")",
"+",
"self",
".",
"_time_offset",
")"
] |
https://github.com/eth-brownie/brownie/blob/754bda9f0a294b2beb86453d5eca4ff769a877c8/brownie/network/state.py#L360-L362
|
|
easyw/kicadStepUpMod
|
9d78e59b97cedc4915ee3a290126a88dcdf11277
|
constrainator.py
|
python
|
sanitizeSk
|
(s_name, edg_tol)
|
simplifying & sanitizing sketches
|
simplifying & sanitizing sketches
|
[
"simplifying",
"&",
"sanitizing",
"sketches"
] |
def sanitizeSk(s_name, edg_tol):
    ''' simplifying & sanitizing sketches '''
    # Removes degenerate geometry — lines no longer than edg_tol and
    # circles with radius no larger than edg_tol — from the named sketch
    # of the active FreeCAD document. No-op for non-Sketcher objects.
    #global edge_tolerance
    s=FreeCAD.ActiveDocument.getObject(s_name)
    FreeCAD.Console.PrintWarning('check to sanitize\n')
    if 'Sketcher' in s.TypeId:
        idx_to_del=[]
        for i,g in enumerate (s.Geometry):
            #print(g,i)
            if 'Line' in str(g):
                #print(g.length())
                if g.length() <= edg_tol:
                    FreeCAD.Console.PrintMessage(str(g)+' '+str(i)+' too short\n')
                    idx_to_del.append(i)
            elif 'Circle' in str(g):
                if g.Radius <= edg_tol:
                    FreeCAD.Console.PrintMessage(str(g)+' '+str(i)+' too short\n')
                    idx_to_del.append(i)
        j=0
        if len(idx_to_del) >0:
            FreeCAD.Console.PrintMessage(u'sanitizing '+s.Label)
            FreeCAD.Console.PrintMessage('\n')
            # Each delGeometry() shifts later indices down by one, so
            # subtract the running count j of deletions already done.
            for i in idx_to_del:
                s.delGeometry(i-j)
                j+=1
|
[
"def",
"sanitizeSk",
"(",
"s_name",
",",
"edg_tol",
")",
":",
"#global edge_tolerance",
"s",
"=",
"FreeCAD",
".",
"ActiveDocument",
".",
"getObject",
"(",
"s_name",
")",
"FreeCAD",
".",
"Console",
".",
"PrintWarning",
"(",
"'check to sanitize\\n'",
")",
"if",
"'Sketcher'",
"in",
"s",
".",
"TypeId",
":",
"idx_to_del",
"=",
"[",
"]",
"for",
"i",
",",
"g",
"in",
"enumerate",
"(",
"s",
".",
"Geometry",
")",
":",
"#print(g,i)",
"if",
"'Line'",
"in",
"str",
"(",
"g",
")",
":",
"#print(g.length())",
"if",
"g",
".",
"length",
"(",
")",
"<=",
"edg_tol",
":",
"FreeCAD",
".",
"Console",
".",
"PrintMessage",
"(",
"str",
"(",
"g",
")",
"+",
"' '",
"+",
"str",
"(",
"i",
")",
"+",
"' too short\\n'",
")",
"idx_to_del",
".",
"append",
"(",
"i",
")",
"elif",
"'Circle'",
"in",
"str",
"(",
"g",
")",
":",
"if",
"g",
".",
"Radius",
"<=",
"edg_tol",
":",
"FreeCAD",
".",
"Console",
".",
"PrintMessage",
"(",
"str",
"(",
"g",
")",
"+",
"' '",
"+",
"str",
"(",
"i",
")",
"+",
"' too short\\n'",
")",
"idx_to_del",
".",
"append",
"(",
"i",
")",
"j",
"=",
"0",
"if",
"len",
"(",
"idx_to_del",
")",
">",
"0",
":",
"FreeCAD",
".",
"Console",
".",
"PrintMessage",
"(",
"u'sanitizing '",
"+",
"s",
".",
"Label",
")",
"FreeCAD",
".",
"Console",
".",
"PrintMessage",
"(",
"'\\n'",
")",
"for",
"i",
"in",
"idx_to_del",
":",
"s",
".",
"delGeometry",
"(",
"i",
"-",
"j",
")",
"j",
"+=",
"1"
] |
https://github.com/easyw/kicadStepUpMod/blob/9d78e59b97cedc4915ee3a290126a88dcdf11277/constrainator.py#L124-L149
|
||
tobegit3hub/deep_image_model
|
8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e
|
java_predict_client/src/main/proto/tensorflow/python/ops/data_flow_ops.py
|
python
|
QueueBase.names
|
(self)
|
return self._names
|
The list of names for each component of a queue element.
|
The list of names for each component of a queue element.
|
[
"The",
"list",
"of",
"names",
"for",
"each",
"component",
"of",
"a",
"queue",
"element",
"."
] |
def names(self):
    """The list of names for each component of a queue element."""
    component_names = self._names
    return component_names
|
[
"def",
"names",
"(",
"self",
")",
":",
"return",
"self",
".",
"_names"
] |
https://github.com/tobegit3hub/deep_image_model/blob/8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e/java_predict_client/src/main/proto/tensorflow/python/ops/data_flow_ops.py#L235-L237
|
|
fab-jul/imgcomp-cvpr
|
f03ce0bfa846f7ba1bf9b7ba415b082efe5c192c
|
code/inputpipeline.py
|
python
|
RecordsDataset.__init__
|
(self, name, records_glob, feature_key, num_images, no_matches_hint)
|
[] |
def __init__(self, name, records_glob, feature_key, num_images, no_matches_hint):
    """Create a dataset over record files matching ``records_glob``.

    When ``no_matches_hint`` is truthy and the glob matches no files,
    RecordsDataset.NoRecordsFoundException is raised with the hint
    included in its message.
    """
    if no_matches_hint and not glob.glob(records_glob):
        raise RecordsDataset.NoRecordsFoundException(
            'No matches for {}. ({})'.format(records_glob, no_matches_hint))
    self.name = name
    self.records_glob = records_glob
    self.feature_key = feature_key
    self.num_images = num_images
|
[
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"records_glob",
",",
"feature_key",
",",
"num_images",
",",
"no_matches_hint",
")",
":",
"if",
"no_matches_hint",
"and",
"len",
"(",
"glob",
".",
"glob",
"(",
"records_glob",
")",
")",
"==",
"0",
":",
"raise",
"RecordsDataset",
".",
"NoRecordsFoundException",
"(",
"'No matches for {}. ({})'",
".",
"format",
"(",
"records_glob",
",",
"no_matches_hint",
")",
")",
"self",
".",
"name",
"=",
"name",
"self",
".",
"records_glob",
"=",
"records_glob",
"self",
".",
"feature_key",
"=",
"feature_key",
"self",
".",
"num_images",
"=",
"num_images"
] |
https://github.com/fab-jul/imgcomp-cvpr/blob/f03ce0bfa846f7ba1bf9b7ba415b082efe5c192c/code/inputpipeline.py#L46-L54
|
||||
jupyter/enterprise_gateway
|
1a529b13f3d9ab94411e4751d4bd35bafd6bbc2e
|
enterprise_gateway/services/processproxies/yarn.py
|
python
|
YarnClusterProcessProxy.handle_timeout
|
(self)
|
Checks to see if the kernel launch timeout has been exceeded while awaiting connection info.
|
Checks to see if the kernel launch timeout has been exceeded while awaiting connection info.
|
[
"Checks",
"to",
"see",
"if",
"the",
"kernel",
"launch",
"timeout",
"has",
"been",
"exceeded",
"while",
"awaiting",
"connection",
"info",
"."
] |
async def handle_timeout(self):
"""Checks to see if the kernel launch timeout has been exceeded while awaiting connection info."""
await asyncio.sleep(poll_interval)
time_interval = RemoteProcessProxy.get_time_diff(self.start_time, RemoteProcessProxy.get_current_time())
if time_interval > self.kernel_launch_timeout:
reason = "Application ID is None. Failed to submit a new application to YARN within {} seconds. " \
"Check Enterprise Gateway log for more information.". \
format(self.kernel_launch_timeout)
error_http_code = 500
if self._get_application_id(True):
if self._query_app_state_by_id(self.application_id) != "RUNNING":
reason = "YARN resources unavailable after {} seconds for app {}, launch timeout: {}! " \
"Check YARN configuration.".format(time_interval, self.application_id,
self.kernel_launch_timeout)
error_http_code = 503
else:
reason = "App {} is RUNNING, but waited too long ({} secs) to get connection file. " \
"Check YARN logs for more information.".format(self.application_id,
self.kernel_launch_timeout)
await asyncio.get_event_loop().run_in_executor(None, self.kill)
timeout_message = "KernelID: '{}' launch timeout due to: {}".format(self.kernel_id, reason)
self.log_and_raise(http_status_code=error_http_code, reason=timeout_message)
|
[
"async",
"def",
"handle_timeout",
"(",
"self",
")",
":",
"await",
"asyncio",
".",
"sleep",
"(",
"poll_interval",
")",
"time_interval",
"=",
"RemoteProcessProxy",
".",
"get_time_diff",
"(",
"self",
".",
"start_time",
",",
"RemoteProcessProxy",
".",
"get_current_time",
"(",
")",
")",
"if",
"time_interval",
">",
"self",
".",
"kernel_launch_timeout",
":",
"reason",
"=",
"\"Application ID is None. Failed to submit a new application to YARN within {} seconds. \"",
"\"Check Enterprise Gateway log for more information.\"",
".",
"format",
"(",
"self",
".",
"kernel_launch_timeout",
")",
"error_http_code",
"=",
"500",
"if",
"self",
".",
"_get_application_id",
"(",
"True",
")",
":",
"if",
"self",
".",
"_query_app_state_by_id",
"(",
"self",
".",
"application_id",
")",
"!=",
"\"RUNNING\"",
":",
"reason",
"=",
"\"YARN resources unavailable after {} seconds for app {}, launch timeout: {}! \"",
"\"Check YARN configuration.\"",
".",
"format",
"(",
"time_interval",
",",
"self",
".",
"application_id",
",",
"self",
".",
"kernel_launch_timeout",
")",
"error_http_code",
"=",
"503",
"else",
":",
"reason",
"=",
"\"App {} is RUNNING, but waited too long ({} secs) to get connection file. \"",
"\"Check YARN logs for more information.\"",
".",
"format",
"(",
"self",
".",
"application_id",
",",
"self",
".",
"kernel_launch_timeout",
")",
"await",
"asyncio",
".",
"get_event_loop",
"(",
")",
".",
"run_in_executor",
"(",
"None",
",",
"self",
".",
"kill",
")",
"timeout_message",
"=",
"\"KernelID: '{}' launch timeout due to: {}\"",
".",
"format",
"(",
"self",
".",
"kernel_id",
",",
"reason",
")",
"self",
".",
"log_and_raise",
"(",
"http_status_code",
"=",
"error_http_code",
",",
"reason",
"=",
"timeout_message",
")"
] |
https://github.com/jupyter/enterprise_gateway/blob/1a529b13f3d9ab94411e4751d4bd35bafd6bbc2e/enterprise_gateway/services/processproxies/yarn.py#L317-L339
|
||
Ultimaker/Cura
|
a1622c77ea7259ecb956acd6de07b7d34b7ac52b
|
cura/Machines/Models/FirstStartMachineActionsModel.py
|
python
|
FirstStartMachineActionsModel.reset
|
(self)
|
Resets the current action index to 0 so the wizard panel can show actions from the beginning.
|
Resets the current action index to 0 so the wizard panel can show actions from the beginning.
|
[
"Resets",
"the",
"current",
"action",
"index",
"to",
"0",
"so",
"the",
"wizard",
"panel",
"can",
"show",
"actions",
"from",
"the",
"beginning",
"."
] |
def reset(self) -> None:
"""Resets the current action index to 0 so the wizard panel can show actions from the beginning."""
self._current_action_index = 0
self.currentActionIndexChanged.emit()
if self.count == 0:
self.allFinished.emit()
|
[
"def",
"reset",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_current_action_index",
"=",
"0",
"self",
".",
"currentActionIndexChanged",
".",
"emit",
"(",
")",
"if",
"self",
".",
"count",
"==",
"0",
":",
"self",
".",
"allFinished",
".",
"emit",
"(",
")"
] |
https://github.com/Ultimaker/Cura/blob/a1622c77ea7259ecb956acd6de07b7d34b7ac52b/cura/Machines/Models/FirstStartMachineActionsModel.py#L77-L84
|
||
cslarsen/wpm
|
6e48d8b750c7828166b67a532ff03d62584fb953
|
wpm/stats.py
|
python
|
Timestamp.now
|
()
|
return datetime.datetime.utcnow()
|
Returns current UTC time.
|
Returns current UTC time.
|
[
"Returns",
"current",
"UTC",
"time",
"."
] |
def now():
"""Returns current UTC time."""
return datetime.datetime.utcnow()
|
[
"def",
"now",
"(",
")",
":",
"return",
"datetime",
".",
"datetime",
".",
"utcnow",
"(",
")"
] |
https://github.com/cslarsen/wpm/blob/6e48d8b750c7828166b67a532ff03d62584fb953/wpm/stats.py#L30-L32
|
|
maxpumperla/betago
|
ff06b467e16d7a7a22555d14181b723d853e1a70
|
betago/dataloader/index_processor.py
|
python
|
KGSIndex.__init__
|
(self,
kgs_url='http://u-go.net/gamerecords/',
index_page='kgs_index.html',
data_directory='data')
|
Create an index of zip files containing SGF data of actual Go Games on KGS.
Parameters:
-----------
kgs_url: URL with links to zip files of games
index_page: Name of local html file of kgs_url
data_directory: name of directory relative to current path to store SGF data
|
Create an index of zip files containing SGF data of actual Go Games on KGS.
|
[
"Create",
"an",
"index",
"of",
"zip",
"files",
"containing",
"SGF",
"data",
"of",
"actual",
"Go",
"Games",
"on",
"KGS",
"."
] |
def __init__(self,
kgs_url='http://u-go.net/gamerecords/',
index_page='kgs_index.html',
data_directory='data'):
'''
Create an index of zip files containing SGF data of actual Go Games on KGS.
Parameters:
-----------
kgs_url: URL with links to zip files of games
index_page: Name of local html file of kgs_url
data_directory: name of directory relative to current path to store SGF data
'''
self.kgs_url = kgs_url
self.index_page = index_page
self.data_directory = data_directory
self.file_info = []
self.urls = []
self.load_index()
|
[
"def",
"__init__",
"(",
"self",
",",
"kgs_url",
"=",
"'http://u-go.net/gamerecords/'",
",",
"index_page",
"=",
"'kgs_index.html'",
",",
"data_directory",
"=",
"'data'",
")",
":",
"self",
".",
"kgs_url",
"=",
"kgs_url",
"self",
".",
"index_page",
"=",
"index_page",
"self",
".",
"data_directory",
"=",
"data_directory",
"self",
".",
"file_info",
"=",
"[",
"]",
"self",
".",
"urls",
"=",
"[",
"]",
"self",
".",
"load_index",
"(",
")"
] |
https://github.com/maxpumperla/betago/blob/ff06b467e16d7a7a22555d14181b723d853e1a70/betago/dataloader/index_processor.py#L30-L48
|
||
dagwieers/mrepo
|
a55cbc737d8bade92070d38e4dbb9a24be4b477f
|
up2date_client/rhnChannel.py
|
python
|
rhnChannel.keys
|
(self)
|
return self.dict.keys()
|
[] |
def keys(self):
return self.dict.keys()
|
[
"def",
"keys",
"(",
"self",
")",
":",
"return",
"self",
".",
"dict",
".",
"keys",
"(",
")"
] |
https://github.com/dagwieers/mrepo/blob/a55cbc737d8bade92070d38e4dbb9a24be4b477f/up2date_client/rhnChannel.py#L52-L53
|
|||
bruderstein/PythonScript
|
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
|
PythonLib/min/_pydecimal.py
|
python
|
Context.__setattr__
|
(self, name, value)
|
[] |
def __setattr__(self, name, value):
if name == 'prec':
return self._set_integer_check(name, value, 1, 'inf')
elif name == 'Emin':
return self._set_integer_check(name, value, '-inf', 0)
elif name == 'Emax':
return self._set_integer_check(name, value, 0, 'inf')
elif name == 'capitals':
return self._set_integer_check(name, value, 0, 1)
elif name == 'clamp':
return self._set_integer_check(name, value, 0, 1)
elif name == 'rounding':
if not value in _rounding_modes:
# raise TypeError even for strings to have consistency
# among various implementations.
raise TypeError("%s: invalid rounding mode" % value)
return object.__setattr__(self, name, value)
elif name == 'flags' or name == 'traps':
return self._set_signal_dict(name, value)
elif name == '_ignored_flags':
return object.__setattr__(self, name, value)
else:
raise AttributeError(
"'decimal.Context' object has no attribute '%s'" % name)
|
[
"def",
"__setattr__",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"if",
"name",
"==",
"'prec'",
":",
"return",
"self",
".",
"_set_integer_check",
"(",
"name",
",",
"value",
",",
"1",
",",
"'inf'",
")",
"elif",
"name",
"==",
"'Emin'",
":",
"return",
"self",
".",
"_set_integer_check",
"(",
"name",
",",
"value",
",",
"'-inf'",
",",
"0",
")",
"elif",
"name",
"==",
"'Emax'",
":",
"return",
"self",
".",
"_set_integer_check",
"(",
"name",
",",
"value",
",",
"0",
",",
"'inf'",
")",
"elif",
"name",
"==",
"'capitals'",
":",
"return",
"self",
".",
"_set_integer_check",
"(",
"name",
",",
"value",
",",
"0",
",",
"1",
")",
"elif",
"name",
"==",
"'clamp'",
":",
"return",
"self",
".",
"_set_integer_check",
"(",
"name",
",",
"value",
",",
"0",
",",
"1",
")",
"elif",
"name",
"==",
"'rounding'",
":",
"if",
"not",
"value",
"in",
"_rounding_modes",
":",
"# raise TypeError even for strings to have consistency",
"# among various implementations.",
"raise",
"TypeError",
"(",
"\"%s: invalid rounding mode\"",
"%",
"value",
")",
"return",
"object",
".",
"__setattr__",
"(",
"self",
",",
"name",
",",
"value",
")",
"elif",
"name",
"==",
"'flags'",
"or",
"name",
"==",
"'traps'",
":",
"return",
"self",
".",
"_set_signal_dict",
"(",
"name",
",",
"value",
")",
"elif",
"name",
"==",
"'_ignored_flags'",
":",
"return",
"object",
".",
"__setattr__",
"(",
"self",
",",
"name",
",",
"value",
")",
"else",
":",
"raise",
"AttributeError",
"(",
"\"'decimal.Context' object has no attribute '%s'\"",
"%",
"name",
")"
] |
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/min/_pydecimal.py#L3949-L3972
|
||||
LexPredict/lexpredict-contraxsuite
|
1d5a2540d31f8f3f1adc442cfa13a7c007319899
|
contraxsuite_services/apps/task/celery_backend/managers.py
|
python
|
TaskManager.get_all_expired
|
(self, expires)
|
return self.filter(own_date_done__lt=now() - maybe_timedelta(expires))
|
Get all expired task results.
|
Get all expired task results.
|
[
"Get",
"all",
"expired",
"task",
"results",
"."
] |
def get_all_expired(self, expires):
"""Get all expired task results."""
return self.filter(own_date_done__lt=now() - maybe_timedelta(expires))
|
[
"def",
"get_all_expired",
"(",
"self",
",",
"expires",
")",
":",
"return",
"self",
".",
"filter",
"(",
"own_date_done__lt",
"=",
"now",
"(",
")",
"-",
"maybe_timedelta",
"(",
"expires",
")",
")"
] |
https://github.com/LexPredict/lexpredict-contraxsuite/blob/1d5a2540d31f8f3f1adc442cfa13a7c007319899/contraxsuite_services/apps/task/celery_backend/managers.py#L487-L489
|
|
pculture/miro
|
d8e4594441939514dd2ac29812bf37087bb3aea5
|
tv/lib/singleclick.py
|
python
|
filter_existing_feed_urls
|
(urls)
|
return [u for u in urls if feed.lookup_feed(u) is None]
|
Takes a list of feed urls and returns a list of urls that aren't
already being managed by Miro.
:param urls: list of urls to filter
:returns: list of urls not already in Miro
|
Takes a list of feed urls and returns a list of urls that aren't
already being managed by Miro.
|
[
"Takes",
"a",
"list",
"of",
"feed",
"urls",
"and",
"returns",
"a",
"list",
"of",
"urls",
"that",
"aren",
"t",
"already",
"being",
"managed",
"by",
"Miro",
"."
] |
def filter_existing_feed_urls(urls):
"""Takes a list of feed urls and returns a list of urls that aren't
already being managed by Miro.
:param urls: list of urls to filter
:returns: list of urls not already in Miro
"""
return [u for u in urls if feed.lookup_feed(u) is None]
|
[
"def",
"filter_existing_feed_urls",
"(",
"urls",
")",
":",
"return",
"[",
"u",
"for",
"u",
"in",
"urls",
"if",
"feed",
".",
"lookup_feed",
"(",
"u",
")",
"is",
"None",
"]"
] |
https://github.com/pculture/miro/blob/d8e4594441939514dd2ac29812bf37087bb3aea5/tv/lib/singleclick.py#L270-L278
|
|
lovelylain/pyctp
|
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
|
example/ctp/futures/ApiStruct.py
|
python
|
UserRightsAssign.__init__
|
(self, BrokerID='', UserID='', DRIdentityID=0)
|
[] |
def __init__(self, BrokerID='', UserID='', DRIdentityID=0):
self.BrokerID = '' #应用单元代码, char[11]
self.UserID = '' #用户代码, char[16]
self.DRIdentityID = ''
|
[
"def",
"__init__",
"(",
"self",
",",
"BrokerID",
"=",
"''",
",",
"UserID",
"=",
"''",
",",
"DRIdentityID",
"=",
"0",
")",
":",
"self",
".",
"BrokerID",
"=",
"''",
"#应用单元代码, char[11]",
"self",
".",
"UserID",
"=",
"''",
"#用户代码, char[16]",
"self",
".",
"DRIdentityID",
"=",
"''"
] |
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/example/ctp/futures/ApiStruct.py#L5881-L5884
|
||||
JordyZomer/autoSubTakeover
|
3825a35a34043d71843cd1ced8fa468198ae7587
|
build/lib/autosubtakeover/_version.py
|
python
|
git_get_keywords
|
(versionfile_abs)
|
return keywords
|
Extract version information from the given file.
|
Extract version information from the given file.
|
[
"Extract",
"version",
"information",
"from",
"the",
"given",
"file",
"."
] |
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
|
[
"def",
"git_get_keywords",
"(",
"versionfile_abs",
")",
":",
"# the code embedded in _version.py can just fetch the value of these",
"# keywords. When used from setup.py, we don't want to import _version.py,",
"# so we do it with a regexp instead. This function is not used from",
"# _version.py.",
"keywords",
"=",
"{",
"}",
"try",
":",
"f",
"=",
"open",
"(",
"versionfile_abs",
",",
"\"r\"",
")",
"for",
"line",
"in",
"f",
".",
"readlines",
"(",
")",
":",
"if",
"line",
".",
"strip",
"(",
")",
".",
"startswith",
"(",
"\"git_refnames =\"",
")",
":",
"mo",
"=",
"re",
".",
"search",
"(",
"r'=\\s*\"(.*)\"'",
",",
"line",
")",
"if",
"mo",
":",
"keywords",
"[",
"\"refnames\"",
"]",
"=",
"mo",
".",
"group",
"(",
"1",
")",
"if",
"line",
".",
"strip",
"(",
")",
".",
"startswith",
"(",
"\"git_full =\"",
")",
":",
"mo",
"=",
"re",
".",
"search",
"(",
"r'=\\s*\"(.*)\"'",
",",
"line",
")",
"if",
"mo",
":",
"keywords",
"[",
"\"full\"",
"]",
"=",
"mo",
".",
"group",
"(",
"1",
")",
"if",
"line",
".",
"strip",
"(",
")",
".",
"startswith",
"(",
"\"git_date =\"",
")",
":",
"mo",
"=",
"re",
".",
"search",
"(",
"r'=\\s*\"(.*)\"'",
",",
"line",
")",
"if",
"mo",
":",
"keywords",
"[",
"\"date\"",
"]",
"=",
"mo",
".",
"group",
"(",
"1",
")",
"f",
".",
"close",
"(",
")",
"except",
"EnvironmentError",
":",
"pass",
"return",
"keywords"
] |
https://github.com/JordyZomer/autoSubTakeover/blob/3825a35a34043d71843cd1ced8fa468198ae7587/build/lib/autosubtakeover/_version.py#L146-L171
|
|
minerllabs/minerl
|
0123527c334c96ebb3f0cf313df1552fa4302691
|
minerl/data/download.py
|
python
|
download
|
(
directory: Optional[str] = None,
environment: Optional[str] = None,
competition: Optional[str] = None,
resolution: str = 'low',
texture_pack: int = 0,
update_environment_variables: bool = True,
disable_cache: bool = False,
)
|
return directory
|
Low-level interface for downloading MineRL dataset.
Using the `python -m minerl.data.download` CLI script is preferred because it performs
more input validation and hides internal-use arguments.
Run this command with `environment=None` and `competition=None` to download a minimal
dataset with 2 demonstrations from each environment.
Provide the `environment` or `competition` arguments to download a full dataset for
a particular environment or competition.
Args:
directory: Destination folder for downloading MineRL datasets. If None, then use
the `MINERL_DATA_ROOT` environment variable, or error if this environment
variable is not set.
environment: The name of a MineRL environment or None. If this argument is the
name of a MineRL environment and `competition` is None, then this function
downloads the full dataset for the specifies MineRL environment.
If both `environment=None` and `competition=None`, then this function
downloads a minimal dataset.
competition: The name of a MineRL competition ("diamond" or "basalt") or None. If
this argument is the name of a MineRL environment and `competition` is None,
then this function downloads the full dataset for the specified MineRL
competition.
If both `environment=None` and `competition=None`, then this function
downloads a minimal dataset.
resolution: For internal use only. One of ['low', 'high'] corresponding to video
resolutions of [64x64,1024x1024] respectively (note: high resolution is not currently
supported).
texture_pack: For internal use only. 0: default Minecraft texture
pack, 1: flat semi-realistic texture pack.
update_environment_variables: For internal use only. If True, then export of
MINERL_DATA_ROOT environment variable (note: for some os this is only for the
current shell).
disable_cache: If False (default), then the tar download and other temporary
download files are saved inside `directory`.
If disable_cache is False on
a future call to this function and temporary download files are detected, then
the download is resumed from previous download progress. If disable_cache is
False on a future call to this function and the completed tar file is
detected, then the download is skipped entirely and we immediately extract the tar
to `directory`.
|
Low-level interface for downloading MineRL dataset.
|
[
"Low",
"-",
"level",
"interface",
"for",
"downloading",
"MineRL",
"dataset",
"."
] |
def download(
directory: Optional[str] = None,
environment: Optional[str] = None,
competition: Optional[str] = None,
resolution: str = 'low',
texture_pack: int = 0,
update_environment_variables: bool = True,
disable_cache: bool = False,
) -> None:
"""Low-level interface for downloading MineRL dataset.
Using the `python -m minerl.data.download` CLI script is preferred because it performs
more input validation and hides internal-use arguments.
Run this command with `environment=None` and `competition=None` to download a minimal
dataset with 2 demonstrations from each environment.
Provide the `environment` or `competition` arguments to download a full dataset for
a particular environment or competition.
Args:
directory: Destination folder for downloading MineRL datasets. If None, then use
the `MINERL_DATA_ROOT` environment variable, or error if this environment
variable is not set.
environment: The name of a MineRL environment or None. If this argument is the
name of a MineRL environment and `competition` is None, then this function
downloads the full dataset for the specifies MineRL environment.
If both `environment=None` and `competition=None`, then this function
downloads a minimal dataset.
competition: The name of a MineRL competition ("diamond" or "basalt") or None. If
this argument is the name of a MineRL environment and `competition` is None,
then this function downloads the full dataset for the specified MineRL
competition.
If both `environment=None` and `competition=None`, then this function
downloads a minimal dataset.
resolution: For internal use only. One of ['low', 'high'] corresponding to video
resolutions of [64x64,1024x1024] respectively (note: high resolution is not currently
supported).
texture_pack: For internal use only. 0: default Minecraft texture
pack, 1: flat semi-realistic texture pack.
update_environment_variables: For internal use only. If True, then export of
MINERL_DATA_ROOT environment variable (note: for some os this is only for the
current shell).
disable_cache: If False (default), then the tar download and other temporary
download files are saved inside `directory`.
If disable_cache is False on
a future call to this function and temporary download files are detected, then
the download is resumed from previous download progress. If disable_cache is
False on a future call to this function and the completed tar file is
detected, then the download is skipped entirely and we immediately extract the tar
to `directory`.
"""
assert texture_pack in (0, 1)
if competition is not None and environment is not None:
raise ValueError(
f"At most one of the `competition={competition}` and `environment={environment}` "
"arguments can be non-None."
)
if competition is None and environment is None:
logger.warning("DOWNLOADING ONLY THE MINIMAL DATASET by default.")
logger.info("For information on downloading full "
"datasets see the docstring for minerl.data.download or "
"https://minerl.readthedocs.io/en/latest/tutorials/data_sampling.html#downloading-the-minerl-dataset-with-minerl-data-download" # noqa: E501
)
if directory is None:
if 'MINERL_DATA_ROOT' in os.environ and len(os.environ['MINERL_DATA_ROOT']) > 0:
directory = os.environ['MINERL_DATA_ROOT']
else:
raise ValueError("Provided directory is None and $MINERL_DATA_ROOT is not defined")
elif update_environment_variables:
os.environ['MINERL_DATA_ROOT'] = os.path.expanduser(
os.path.expandvars(os.path.normpath(directory)))
if os.path.exists(directory):
try:
assert_version(directory)
except RuntimeError as r:
if r.comparison == "less":
raise r
logger.error(str(r))
logger.error("Deleting existing data and forcing a data update!")
try:
shutil.rmtree(directory)
except Exception as e:
logger.error("Could not delete {}. Do you have permission?".format(directory))
raise e
try:
os.makedirs(directory)
except:
pass
download_path = os.path.join(directory,
'download') if not disable_cache else tempfile.mkdtemp()
mirrors = [
"https://minerl.s3.amazonaws.com/",
"https://minerl-asia.s3.amazonaws.com/",
"https://minerl-europe.s3.amazonaws.com/"]
if environment is None and competition is None:
competition = 'minimal_all'
if competition is not None:
logger.info("Downloading dataset for {} competition(s)".format(competition))
competition_string = competition + '_'
if competition == 'minimal_all':
min_str = '_minimal'
competition_string = ''
else:
min_str = ''
filename = "v{}/{}data_texture_{}_{}_res{}.tar".format(DATA_VERSION,
competition_string,
texture_pack,
resolution,
min_str)
else:
logger.info(f"Downloading dataset for {environment} to {directory}")
filename = f"v{DATA_VERSION}/{environment}.tar"
urls = [mirror + filename for mirror in mirrors]
try:
# logger.info("Fetching download hash ...")
# obj.fetch_hash_sums()
# TODO: Add flag to verify hash
# logger.warning("As of MineRL 0.3.0 automatic hash checking has been disabled.")
logger.info("Starting download ...")
dest_file = os.path.join(download_path, filename)
os.makedirs(os.path.dirname(dest_file), exist_ok=True)
download_with_resume(urls, dest_file)
except HTTPError as e:
logger.error("HTTP {} error encountered when downloading files!".format(e.code))
if environment is not None:
logger.error("Is \"{}\" a valid minerl environment?".format(environment))
return None
except URLError as e:
logger.error("URL error encountered when downloading - please try again")
logger.error(e.errno)
return None
except TimeoutError as e:
logger.error("Timeout encountered when downloading - is your connection stable")
logger.error(e.errno)
return None
except IOError as e:
logger.error("IO error encountered when downloading - please try again")
logger.error(e.errno)
return None
except KeyboardInterrupt as e:
logger.error("Download canceled by user")
return None
logging.info('Success - downloaded {}'.format(dest_file))
logging.info('Extracting downloaded files - this may take some time')
with tarfile.open(dest_file, mode="r:*") as tf:
t = Thread(target=tf.extractall(path=directory))
t.start()
while t.is_alive():
time.sleep(5)
logging.info('.', end='')
logging.info('Success - extracted files to {}'.format(directory))
if disable_cache:
logging.info('Deleting cached tar file')
os.remove(dest_file)
try:
assert_version(directory)
except RuntimeError as r:
logger.error(str(r))
return directory
|
[
"def",
"download",
"(",
"directory",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"environment",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"competition",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"resolution",
":",
"str",
"=",
"'low'",
",",
"texture_pack",
":",
"int",
"=",
"0",
",",
"update_environment_variables",
":",
"bool",
"=",
"True",
",",
"disable_cache",
":",
"bool",
"=",
"False",
",",
")",
"->",
"None",
":",
"assert",
"texture_pack",
"in",
"(",
"0",
",",
"1",
")",
"if",
"competition",
"is",
"not",
"None",
"and",
"environment",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"f\"At most one of the `competition={competition}` and `environment={environment}` \"",
"\"arguments can be non-None.\"",
")",
"if",
"competition",
"is",
"None",
"and",
"environment",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"\"DOWNLOADING ONLY THE MINIMAL DATASET by default.\"",
")",
"logger",
".",
"info",
"(",
"\"For information on downloading full \"",
"\"datasets see the docstring for minerl.data.download or \"",
"\"https://minerl.readthedocs.io/en/latest/tutorials/data_sampling.html#downloading-the-minerl-dataset-with-minerl-data-download\"",
"# noqa: E501",
")",
"if",
"directory",
"is",
"None",
":",
"if",
"'MINERL_DATA_ROOT'",
"in",
"os",
".",
"environ",
"and",
"len",
"(",
"os",
".",
"environ",
"[",
"'MINERL_DATA_ROOT'",
"]",
")",
">",
"0",
":",
"directory",
"=",
"os",
".",
"environ",
"[",
"'MINERL_DATA_ROOT'",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"Provided directory is None and $MINERL_DATA_ROOT is not defined\"",
")",
"elif",
"update_environment_variables",
":",
"os",
".",
"environ",
"[",
"'MINERL_DATA_ROOT'",
"]",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"os",
".",
"path",
".",
"expandvars",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"directory",
")",
")",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"directory",
")",
":",
"try",
":",
"assert_version",
"(",
"directory",
")",
"except",
"RuntimeError",
"as",
"r",
":",
"if",
"r",
".",
"comparison",
"==",
"\"less\"",
":",
"raise",
"r",
"logger",
".",
"error",
"(",
"str",
"(",
"r",
")",
")",
"logger",
".",
"error",
"(",
"\"Deleting existing data and forcing a data update!\"",
")",
"try",
":",
"shutil",
".",
"rmtree",
"(",
"directory",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"Could not delete {}. Do you have permission?\"",
".",
"format",
"(",
"directory",
")",
")",
"raise",
"e",
"try",
":",
"os",
".",
"makedirs",
"(",
"directory",
")",
"except",
":",
"pass",
"download_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"'download'",
")",
"if",
"not",
"disable_cache",
"else",
"tempfile",
".",
"mkdtemp",
"(",
")",
"mirrors",
"=",
"[",
"\"https://minerl.s3.amazonaws.com/\"",
",",
"\"https://minerl-asia.s3.amazonaws.com/\"",
",",
"\"https://minerl-europe.s3.amazonaws.com/\"",
"]",
"if",
"environment",
"is",
"None",
"and",
"competition",
"is",
"None",
":",
"competition",
"=",
"'minimal_all'",
"if",
"competition",
"is",
"not",
"None",
":",
"logger",
".",
"info",
"(",
"\"Downloading dataset for {} competition(s)\"",
".",
"format",
"(",
"competition",
")",
")",
"competition_string",
"=",
"competition",
"+",
"'_'",
"if",
"competition",
"==",
"'minimal_all'",
":",
"min_str",
"=",
"'_minimal'",
"competition_string",
"=",
"''",
"else",
":",
"min_str",
"=",
"''",
"filename",
"=",
"\"v{}/{}data_texture_{}_{}_res{}.tar\"",
".",
"format",
"(",
"DATA_VERSION",
",",
"competition_string",
",",
"texture_pack",
",",
"resolution",
",",
"min_str",
")",
"else",
":",
"logger",
".",
"info",
"(",
"f\"Downloading dataset for {environment} to {directory}\"",
")",
"filename",
"=",
"f\"v{DATA_VERSION}/{environment}.tar\"",
"urls",
"=",
"[",
"mirror",
"+",
"filename",
"for",
"mirror",
"in",
"mirrors",
"]",
"try",
":",
"# logger.info(\"Fetching download hash ...\")",
"# obj.fetch_hash_sums() ",
"# TODO: Add flag to verify hash",
"# logger.warning(\"As of MineRL 0.3.0 automatic hash checking has been disabled.\")",
"logger",
".",
"info",
"(",
"\"Starting download ...\"",
")",
"dest_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"download_path",
",",
"filename",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"dest_file",
")",
",",
"exist_ok",
"=",
"True",
")",
"download_with_resume",
"(",
"urls",
",",
"dest_file",
")",
"except",
"HTTPError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"HTTP {} error encountered when downloading files!\"",
".",
"format",
"(",
"e",
".",
"code",
")",
")",
"if",
"environment",
"is",
"not",
"None",
":",
"logger",
".",
"error",
"(",
"\"Is \\\"{}\\\" a valid minerl environment?\"",
".",
"format",
"(",
"environment",
")",
")",
"return",
"None",
"except",
"URLError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"URL error encountered when downloading - please try again\"",
")",
"logger",
".",
"error",
"(",
"e",
".",
"errno",
")",
"return",
"None",
"except",
"TimeoutError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"Timeout encountered when downloading - is your connection stable\"",
")",
"logger",
".",
"error",
"(",
"e",
".",
"errno",
")",
"return",
"None",
"except",
"IOError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"IO error encountered when downloading - please try again\"",
")",
"logger",
".",
"error",
"(",
"e",
".",
"errno",
")",
"return",
"None",
"except",
"KeyboardInterrupt",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"Download canceled by user\"",
")",
"return",
"None",
"logging",
".",
"info",
"(",
"'Success - downloaded {}'",
".",
"format",
"(",
"dest_file",
")",
")",
"logging",
".",
"info",
"(",
"'Extracting downloaded files - this may take some time'",
")",
"with",
"tarfile",
".",
"open",
"(",
"dest_file",
",",
"mode",
"=",
"\"r:*\"",
")",
"as",
"tf",
":",
"t",
"=",
"Thread",
"(",
"target",
"=",
"tf",
".",
"extractall",
"(",
"path",
"=",
"directory",
")",
")",
"t",
".",
"start",
"(",
")",
"while",
"t",
".",
"is_alive",
"(",
")",
":",
"time",
".",
"sleep",
"(",
"5",
")",
"logging",
".",
"info",
"(",
"'.'",
",",
"end",
"=",
"''",
")",
"logging",
".",
"info",
"(",
"'Success - extracted files to {}'",
".",
"format",
"(",
"directory",
")",
")",
"if",
"disable_cache",
":",
"logging",
".",
"info",
"(",
"'Deleting cached tar file'",
")",
"os",
".",
"remove",
"(",
"dest_file",
")",
"try",
":",
"assert_version",
"(",
"directory",
")",
"except",
"RuntimeError",
"as",
"r",
":",
"logger",
".",
"error",
"(",
"str",
"(",
"r",
")",
")",
"return",
"directory"
] |
https://github.com/minerllabs/minerl/blob/0123527c334c96ebb3f0cf313df1552fa4302691/minerl/data/download.py#L26-L201
|
|
cloudlinux/kuberdock-platform
|
8b3923c19755f3868e4142b62578d9b9857d2704
|
kubedock/kapi/lbpoll.py
|
python
|
ExternalIPsService.update_publicIP
|
(self, service, publicIP=None)
|
return rv
|
Update publicIP in service
:param service: service to update
:param publicIP: new publicIP for service
:return: updated service
|
Update publicIP in service
:param service: service to update
:param publicIP: new publicIP for service
:return: updated service
|
[
"Update",
"publicIP",
"in",
"service",
":",
"param",
"service",
":",
"service",
"to",
"update",
":",
"param",
"publicIP",
":",
"new",
"publicIP",
"for",
"service",
":",
"return",
":",
"updated",
"service"
] |
def update_publicIP(self, service, publicIP=None):
"""Update publicIP in service
:param service: service to update
:param publicIP: new publicIP for service
:return: updated service
"""
name = service['metadata']['name']
namespace = service['metadata']['namespace']
data = {'spec': {'externalIPs': [publicIP]}}
rv = self.patch(name, namespace, data)
raise_if_failure(rv, "Couldn't patch service publicIP")
return rv
|
[
"def",
"update_publicIP",
"(",
"self",
",",
"service",
",",
"publicIP",
"=",
"None",
")",
":",
"name",
"=",
"service",
"[",
"'metadata'",
"]",
"[",
"'name'",
"]",
"namespace",
"=",
"service",
"[",
"'metadata'",
"]",
"[",
"'namespace'",
"]",
"data",
"=",
"{",
"'spec'",
":",
"{",
"'externalIPs'",
":",
"[",
"publicIP",
"]",
"}",
"}",
"rv",
"=",
"self",
".",
"patch",
"(",
"name",
",",
"namespace",
",",
"data",
")",
"raise_if_failure",
"(",
"rv",
",",
"\"Couldn't patch service publicIP\"",
")",
"return",
"rv"
] |
https://github.com/cloudlinux/kuberdock-platform/blob/8b3923c19755f3868e4142b62578d9b9857d2704/kubedock/kapi/lbpoll.py#L94-L105
|
|
datastax/python-driver
|
5fdb0061f56f53b9d8d8ad67b99110899653ad77
|
cassandra/policies.py
|
python
|
HostStateListener.on_remove
|
(self, host)
|
Called when a node is removed from the cluster.
|
Called when a node is removed from the cluster.
|
[
"Called",
"when",
"a",
"node",
"is",
"removed",
"from",
"the",
"cluster",
"."
] |
def on_remove(self, host):
""" Called when a node is removed from the cluster. """
raise NotImplementedError()
|
[
"def",
"on_remove",
"(",
"self",
",",
"host",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
https://github.com/datastax/python-driver/blob/5fdb0061f56f53b9d8d8ad67b99110899653ad77/cassandra/policies.py#L88-L90
|
||
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
|
cb692f527e4e819b6c228187c5702d990a180043
|
external/Scripting Engine/Xenotix Python Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/random.py
|
python
|
Random.gauss
|
(self, mu, sigma)
|
return mu + z*sigma
|
Gaussian distribution.
mu is the mean, and sigma is the standard deviation. This is
slightly faster than the normalvariate() function.
Not thread-safe without a lock around calls.
|
Gaussian distribution.
|
[
"Gaussian",
"distribution",
"."
] |
def gauss(self, mu, sigma):
"""Gaussian distribution.
mu is the mean, and sigma is the standard deviation. This is
slightly faster than the normalvariate() function.
Not thread-safe without a lock around calls.
"""
# When x and y are two variables from [0, 1), uniformly
# distributed, then
#
# cos(2*pi*x)*sqrt(-2*log(1-y))
# sin(2*pi*x)*sqrt(-2*log(1-y))
#
# are two *independent* variables with normal distribution
# (mu = 0, sigma = 1).
# (Lambert Meertens)
# (corrected version; bug discovered by Mike Miller, fixed by LM)
# Multithreading note: When two threads call this function
# simultaneously, it is possible that they will receive the
# same return value. The window is very small though. To
# avoid this, you have to use a lock around all calls. (I
# didn't want to slow this down in the serial case by using a
# lock here.)
random = self.random
z = self.gauss_next
self.gauss_next = None
if z is None:
x2pi = random() * TWOPI
g2rad = _sqrt(-2.0 * _log(1.0 - random()))
z = _cos(x2pi) * g2rad
self.gauss_next = _sin(x2pi) * g2rad
return mu + z*sigma
|
[
"def",
"gauss",
"(",
"self",
",",
"mu",
",",
"sigma",
")",
":",
"# When x and y are two variables from [0, 1), uniformly",
"# distributed, then",
"#",
"# cos(2*pi*x)*sqrt(-2*log(1-y))",
"# sin(2*pi*x)*sqrt(-2*log(1-y))",
"#",
"# are two *independent* variables with normal distribution",
"# (mu = 0, sigma = 1).",
"# (Lambert Meertens)",
"# (corrected version; bug discovered by Mike Miller, fixed by LM)",
"# Multithreading note: When two threads call this function",
"# simultaneously, it is possible that they will receive the",
"# same return value. The window is very small though. To",
"# avoid this, you have to use a lock around all calls. (I",
"# didn't want to slow this down in the serial case by using a",
"# lock here.)",
"random",
"=",
"self",
".",
"random",
"z",
"=",
"self",
".",
"gauss_next",
"self",
".",
"gauss_next",
"=",
"None",
"if",
"z",
"is",
"None",
":",
"x2pi",
"=",
"random",
"(",
")",
"*",
"TWOPI",
"g2rad",
"=",
"_sqrt",
"(",
"-",
"2.0",
"*",
"_log",
"(",
"1.0",
"-",
"random",
"(",
")",
")",
")",
"z",
"=",
"_cos",
"(",
"x2pi",
")",
"*",
"g2rad",
"self",
".",
"gauss_next",
"=",
"_sin",
"(",
"x2pi",
")",
"*",
"g2rad",
"return",
"mu",
"+",
"z",
"*",
"sigma"
] |
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/packages/IronPython.StdLib.2.7.4/content/Lib/random.py#L560-L597
|
|
khanhnamle1994/natural-language-processing
|
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
|
assignment1/.env/lib/python2.7/site-packages/numpy/lib/type_check.py
|
python
|
iscomplex
|
(x)
|
return +res
|
Returns a bool array, where True if input element is complex.
What is tested is whether the input has a non-zero imaginary part, not if
the input type is complex.
Parameters
----------
x : array_like
Input array.
Returns
-------
out : ndarray of bools
Output array.
See Also
--------
isreal
iscomplexobj : Return True if x is a complex type or an array of complex
numbers.
Examples
--------
>>> np.iscomplex([1+1j, 1+0j, 4.5, 3, 2, 2j])
array([ True, False, False, False, False, True], dtype=bool)
|
Returns a bool array, where True if input element is complex.
|
[
"Returns",
"a",
"bool",
"array",
"where",
"True",
"if",
"input",
"element",
"is",
"complex",
"."
] |
def iscomplex(x):
"""
Returns a bool array, where True if input element is complex.
What is tested is whether the input has a non-zero imaginary part, not if
the input type is complex.
Parameters
----------
x : array_like
Input array.
Returns
-------
out : ndarray of bools
Output array.
See Also
--------
isreal
iscomplexobj : Return True if x is a complex type or an array of complex
numbers.
Examples
--------
>>> np.iscomplex([1+1j, 1+0j, 4.5, 3, 2, 2j])
array([ True, False, False, False, False, True], dtype=bool)
"""
ax = asanyarray(x)
if issubclass(ax.dtype.type, _nx.complexfloating):
return ax.imag != 0
res = zeros(ax.shape, bool)
return +res
|
[
"def",
"iscomplex",
"(",
"x",
")",
":",
"ax",
"=",
"asanyarray",
"(",
"x",
")",
"if",
"issubclass",
"(",
"ax",
".",
"dtype",
".",
"type",
",",
"_nx",
".",
"complexfloating",
")",
":",
"return",
"ax",
".",
"imag",
"!=",
"0",
"res",
"=",
"zeros",
"(",
"ax",
".",
"shape",
",",
"bool",
")",
"return",
"+",
"res"
] |
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment1/.env/lib/python2.7/site-packages/numpy/lib/type_check.py#L172-L205
|
|
guillermooo/Vintageous
|
f958207009902052aed5fcac09745f1742648604
|
vi/variables.py
|
python
|
is_key_name
|
(name)
|
return name.lower() in _SPECIAL_STRINGS
|
[] |
def is_key_name(name):
return name.lower() in _SPECIAL_STRINGS
|
[
"def",
"is_key_name",
"(",
"name",
")",
":",
"return",
"name",
".",
"lower",
"(",
")",
"in",
"_SPECIAL_STRINGS"
] |
https://github.com/guillermooo/Vintageous/blob/f958207009902052aed5fcac09745f1742648604/vi/variables.py#L49-L50
|
|||
IdentityPython/pysaml2
|
6badb32d212257bd83ffcc816f9b625f68281b47
|
src/saml2/mongo_store.py
|
python
|
IdentMDB.find_local_id
|
(self, name_id)
|
return None
|
[] |
def find_local_id(self, name_id):
cnid = to_dict(name_id, MMODS, True)
for item in self.mdb.get(name_id=cnid):
return item[self.mdb.primary_key]
return None
|
[
"def",
"find_local_id",
"(",
"self",
",",
"name_id",
")",
":",
"cnid",
"=",
"to_dict",
"(",
"name_id",
",",
"MMODS",
",",
"True",
")",
"for",
"item",
"in",
"self",
".",
"mdb",
".",
"get",
"(",
"name_id",
"=",
"cnid",
")",
":",
"return",
"item",
"[",
"self",
".",
"mdb",
".",
"primary_key",
"]",
"return",
"None"
] |
https://github.com/IdentityPython/pysaml2/blob/6badb32d212257bd83ffcc816f9b625f68281b47/src/saml2/mongo_store.py#L163-L167
|
|||
IronLanguages/main
|
a949455434b1fda8c783289e897e78a9a0caabb5
|
External.LCA_RESTRICTED/Languages/IronPython/27/Lib/StringIO.py
|
python
|
StringIO.truncate
|
(self, size=None)
|
Truncate the file's size.
If the optional size argument is present, the file is truncated to
(at most) that size. The size defaults to the current position.
The current file position is not changed unless the position
is beyond the new file size.
If the specified size exceeds the file's current size, the
file remains unchanged.
|
Truncate the file's size.
|
[
"Truncate",
"the",
"file",
"s",
"size",
"."
] |
def truncate(self, size=None):
"""Truncate the file's size.
If the optional size argument is present, the file is truncated to
(at most) that size. The size defaults to the current position.
The current file position is not changed unless the position
is beyond the new file size.
If the specified size exceeds the file's current size, the
file remains unchanged.
"""
_complain_ifclosed(self.closed)
if size is None:
size = self.pos
elif size < 0:
raise IOError(EINVAL, "Negative size not allowed")
elif size < self.pos:
self.pos = size
self.buf = self.getvalue()[:size]
self.len = size
|
[
"def",
"truncate",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"_complain_ifclosed",
"(",
"self",
".",
"closed",
")",
"if",
"size",
"is",
"None",
":",
"size",
"=",
"self",
".",
"pos",
"elif",
"size",
"<",
"0",
":",
"raise",
"IOError",
"(",
"EINVAL",
",",
"\"Negative size not allowed\"",
")",
"elif",
"size",
"<",
"self",
".",
"pos",
":",
"self",
".",
"pos",
"=",
"size",
"self",
".",
"buf",
"=",
"self",
".",
"getvalue",
"(",
")",
"[",
":",
"size",
"]",
"self",
".",
"len",
"=",
"size"
] |
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/IronPython/27/Lib/StringIO.py#L187-L206
|
||
playframework/play1
|
0ecac3bc2421ae2dbec27a368bf671eda1c9cba5
|
python/Lib/telnetlib.py
|
python
|
Telnet.mt_interact
|
(self)
|
Multithreaded version of interact().
|
Multithreaded version of interact().
|
[
"Multithreaded",
"version",
"of",
"interact",
"()",
"."
] |
def mt_interact(self):
"""Multithreaded version of interact()."""
import thread
thread.start_new_thread(self.listener, ())
while 1:
line = sys.stdin.readline()
if not line:
break
self.write(line)
|
[
"def",
"mt_interact",
"(",
"self",
")",
":",
"import",
"thread",
"thread",
".",
"start_new_thread",
"(",
"self",
".",
"listener",
",",
"(",
")",
")",
"while",
"1",
":",
"line",
"=",
"sys",
".",
"stdin",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"break",
"self",
".",
"write",
"(",
"line",
")"
] |
https://github.com/playframework/play1/blob/0ecac3bc2421ae2dbec27a368bf671eda1c9cba5/python/Lib/telnetlib.py#L607-L615
|
||
Blazemeter/taurus
|
6e36b20397cf3e730e181cfebde0c8f19eb31fed
|
bzt/modules/_locustio.py
|
python
|
WorkersReader.point_from_locust
|
(timestamp, sid, data)
|
return point
|
:type timestamp: str
:type sid: str
:type data: dict
:rtype: DataPoint
|
:type timestamp: str
:type sid: str
:type data: dict
:rtype: DataPoint
|
[
":",
"type",
"timestamp",
":",
"str",
":",
"type",
"sid",
":",
"str",
":",
"type",
"data",
":",
"dict",
":",
"rtype",
":",
"DataPoint"
] |
def point_from_locust(timestamp, sid, data):
"""
:type timestamp: str
:type sid: str
:type data: dict
:rtype: DataPoint
"""
point = DataPoint(int(timestamp))
point[DataPoint.SOURCE_ID] = sid
overall = KPISet()
for item in data['stats']:
if timestamp not in item['num_reqs_per_sec']:
continue
kpiset = KPISet()
kpiset[KPISet.SAMPLE_COUNT] = item['num_reqs_per_sec'][timestamp]
kpiset[KPISet.CONCURRENCY] = data['user_count']
kpiset[KPISet.BYTE_COUNT] = item['total_content_length']
if item['num_requests']:
avg_rt = (item['total_response_time'] / 1000.0) / item['num_requests']
kpiset.sum_rt = item['num_reqs_per_sec'][timestamp] * avg_rt
for err in data['errors'].values():
if err['name'] == item['name']:
new_err = KPISet.error_item_skel(err['error'], None, err['occurences'], KPISet.ERRTYPE_ERROR,
Counter(), None)
KPISet.inc_list(kpiset[KPISet.ERRORS], ("msg", err['error']), new_err)
kpiset[KPISet.FAILURES] += err['occurences']
kpiset[KPISet.SUCCESSES] = kpiset[KPISet.SAMPLE_COUNT] - kpiset[KPISet.FAILURES]
point[DataPoint.CURRENT][item['name']] = kpiset
overall.merge_kpis(kpiset, sid)
point[DataPoint.CURRENT][''] = overall
point.recalculate()
return point
|
[
"def",
"point_from_locust",
"(",
"timestamp",
",",
"sid",
",",
"data",
")",
":",
"point",
"=",
"DataPoint",
"(",
"int",
"(",
"timestamp",
")",
")",
"point",
"[",
"DataPoint",
".",
"SOURCE_ID",
"]",
"=",
"sid",
"overall",
"=",
"KPISet",
"(",
")",
"for",
"item",
"in",
"data",
"[",
"'stats'",
"]",
":",
"if",
"timestamp",
"not",
"in",
"item",
"[",
"'num_reqs_per_sec'",
"]",
":",
"continue",
"kpiset",
"=",
"KPISet",
"(",
")",
"kpiset",
"[",
"KPISet",
".",
"SAMPLE_COUNT",
"]",
"=",
"item",
"[",
"'num_reqs_per_sec'",
"]",
"[",
"timestamp",
"]",
"kpiset",
"[",
"KPISet",
".",
"CONCURRENCY",
"]",
"=",
"data",
"[",
"'user_count'",
"]",
"kpiset",
"[",
"KPISet",
".",
"BYTE_COUNT",
"]",
"=",
"item",
"[",
"'total_content_length'",
"]",
"if",
"item",
"[",
"'num_requests'",
"]",
":",
"avg_rt",
"=",
"(",
"item",
"[",
"'total_response_time'",
"]",
"/",
"1000.0",
")",
"/",
"item",
"[",
"'num_requests'",
"]",
"kpiset",
".",
"sum_rt",
"=",
"item",
"[",
"'num_reqs_per_sec'",
"]",
"[",
"timestamp",
"]",
"*",
"avg_rt",
"for",
"err",
"in",
"data",
"[",
"'errors'",
"]",
".",
"values",
"(",
")",
":",
"if",
"err",
"[",
"'name'",
"]",
"==",
"item",
"[",
"'name'",
"]",
":",
"new_err",
"=",
"KPISet",
".",
"error_item_skel",
"(",
"err",
"[",
"'error'",
"]",
",",
"None",
",",
"err",
"[",
"'occurences'",
"]",
",",
"KPISet",
".",
"ERRTYPE_ERROR",
",",
"Counter",
"(",
")",
",",
"None",
")",
"KPISet",
".",
"inc_list",
"(",
"kpiset",
"[",
"KPISet",
".",
"ERRORS",
"]",
",",
"(",
"\"msg\"",
",",
"err",
"[",
"'error'",
"]",
")",
",",
"new_err",
")",
"kpiset",
"[",
"KPISet",
".",
"FAILURES",
"]",
"+=",
"err",
"[",
"'occurences'",
"]",
"kpiset",
"[",
"KPISet",
".",
"SUCCESSES",
"]",
"=",
"kpiset",
"[",
"KPISet",
".",
"SAMPLE_COUNT",
"]",
"-",
"kpiset",
"[",
"KPISet",
".",
"FAILURES",
"]",
"point",
"[",
"DataPoint",
".",
"CURRENT",
"]",
"[",
"item",
"[",
"'name'",
"]",
"]",
"=",
"kpiset",
"overall",
".",
"merge_kpis",
"(",
"kpiset",
",",
"sid",
")",
"point",
"[",
"DataPoint",
".",
"CURRENT",
"]",
"[",
"''",
"]",
"=",
"overall",
"point",
".",
"recalculate",
"(",
")",
"return",
"point"
] |
https://github.com/Blazemeter/taurus/blob/6e36b20397cf3e730e181cfebde0c8f19eb31fed/bzt/modules/_locustio.py#L265-L300
|
|
NISH1001/playx
|
9050f0c5f9fef7b9c9b14a7f26a055684e260d4c
|
playx/playlist/soundcloud.py
|
python
|
get_data
|
(URL, pl_start, pl_end)
|
return sound_cloud_playlist.list_content_tuple, sound_cloud_playlist.set_name
|
Generic function. Should be called only when
it is checked if the URL is a spotify playlist.
Returns a tuple containing the songs and name of
the playlist.
|
Generic function. Should be called only when
it is checked if the URL is a spotify playlist.
|
[
"Generic",
"function",
".",
"Should",
"be",
"called",
"only",
"when",
"it",
"is",
"checked",
"if",
"the",
"URL",
"is",
"a",
"spotify",
"playlist",
"."
] |
def get_data(URL, pl_start, pl_end):
"""Generic function. Should be called only when
it is checked if the URL is a spotify playlist.
Returns a tuple containing the songs and name of
the playlist.
"""
logger.debug("Extracting Playlist Contents")
sound_cloud_playlist = SoundCloudPlaylistExtractor(URL, pl_start, pl_end)
sound_cloud_playlist.get_tracks()
return sound_cloud_playlist.list_content_tuple, sound_cloud_playlist.set_name
|
[
"def",
"get_data",
"(",
"URL",
",",
"pl_start",
",",
"pl_end",
")",
":",
"logger",
".",
"debug",
"(",
"\"Extracting Playlist Contents\"",
")",
"sound_cloud_playlist",
"=",
"SoundCloudPlaylistExtractor",
"(",
"URL",
",",
"pl_start",
",",
"pl_end",
")",
"sound_cloud_playlist",
".",
"get_tracks",
"(",
")",
"return",
"sound_cloud_playlist",
".",
"list_content_tuple",
",",
"sound_cloud_playlist",
".",
"set_name"
] |
https://github.com/NISH1001/playx/blob/9050f0c5f9fef7b9c9b14a7f26a055684e260d4c/playx/playlist/soundcloud.py#L73-L83
|
|
TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
|
5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e
|
deep-learning/fastai-docs/fastai_docs-master/dev_nb/nb_004b.py
|
python
|
bn2float
|
(module:nn.Module)
|
return module
|
If a module is batchnorm don't use half precision
|
If a module is batchnorm don't use half precision
|
[
"If",
"a",
"module",
"is",
"batchnorm",
"don",
"t",
"use",
"half",
"precision"
] |
def bn2float(module:nn.Module)->nn.Module:
"If a module is batchnorm don't use half precision"
if isinstance(module, torch.nn.modules.batchnorm._BatchNorm): module.float()
for child in module.children(): bn2float(child)
return module
|
[
"def",
"bn2float",
"(",
"module",
":",
"nn",
".",
"Module",
")",
"->",
"nn",
".",
"Module",
":",
"if",
"isinstance",
"(",
"module",
",",
"torch",
".",
"nn",
".",
"modules",
".",
"batchnorm",
".",
"_BatchNorm",
")",
":",
"module",
".",
"float",
"(",
")",
"for",
"child",
"in",
"module",
".",
"children",
"(",
")",
":",
"bn2float",
"(",
"child",
")",
"return",
"module"
] |
https://github.com/TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials/blob/5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e/deep-learning/fastai-docs/fastai_docs-master/dev_nb/nb_004b.py#L20-L24
|
|
replit-archive/empythoned
|
977ec10ced29a3541a4973dc2b59910805695752
|
dist/lib/python2.7/lib2to3/refactor.py
|
python
|
_get_head_types
|
(pat)
|
Accepts a pytree Pattern Node and returns a set
of the pattern types which will match first.
|
Accepts a pytree Pattern Node and returns a set
of the pattern types which will match first.
|
[
"Accepts",
"a",
"pytree",
"Pattern",
"Node",
"and",
"returns",
"a",
"set",
"of",
"the",
"pattern",
"types",
"which",
"will",
"match",
"first",
"."
] |
def _get_head_types(pat):
""" Accepts a pytree Pattern Node and returns a set
of the pattern types which will match first. """
if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
# NodePatters must either have no type and no content
# or a type and content -- so they don't get any farther
# Always return leafs
if pat.type is None:
raise _EveryNode
return set([pat.type])
if isinstance(pat, pytree.NegatedPattern):
if pat.content:
return _get_head_types(pat.content)
raise _EveryNode # Negated Patterns don't have a type
if isinstance(pat, pytree.WildcardPattern):
# Recurse on each node in content
r = set()
for p in pat.content:
for x in p:
r.update(_get_head_types(x))
return r
raise Exception("Oh no! I don't understand pattern %s" %(pat))
|
[
"def",
"_get_head_types",
"(",
"pat",
")",
":",
"if",
"isinstance",
"(",
"pat",
",",
"(",
"pytree",
".",
"NodePattern",
",",
"pytree",
".",
"LeafPattern",
")",
")",
":",
"# NodePatters must either have no type and no content",
"# or a type and content -- so they don't get any farther",
"# Always return leafs",
"if",
"pat",
".",
"type",
"is",
"None",
":",
"raise",
"_EveryNode",
"return",
"set",
"(",
"[",
"pat",
".",
"type",
"]",
")",
"if",
"isinstance",
"(",
"pat",
",",
"pytree",
".",
"NegatedPattern",
")",
":",
"if",
"pat",
".",
"content",
":",
"return",
"_get_head_types",
"(",
"pat",
".",
"content",
")",
"raise",
"_EveryNode",
"# Negated Patterns don't have a type",
"if",
"isinstance",
"(",
"pat",
",",
"pytree",
".",
"WildcardPattern",
")",
":",
"# Recurse on each node in content",
"r",
"=",
"set",
"(",
")",
"for",
"p",
"in",
"pat",
".",
"content",
":",
"for",
"x",
"in",
"p",
":",
"r",
".",
"update",
"(",
"_get_head_types",
"(",
"x",
")",
")",
"return",
"r",
"raise",
"Exception",
"(",
"\"Oh no! I don't understand pattern %s\"",
"%",
"(",
"pat",
")",
")"
] |
https://github.com/replit-archive/empythoned/blob/977ec10ced29a3541a4973dc2b59910805695752/dist/lib/python2.7/lib2to3/refactor.py#L50-L75
|
||
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/docutils/utils/math/math2html.py
|
python
|
Position.identifier
|
(self)
|
return 'Error'
|
Return an identifier for the current position.
|
Return an identifier for the current position.
|
[
"Return",
"an",
"identifier",
"for",
"the",
"current",
"position",
"."
] |
def identifier(self):
"Return an identifier for the current position."
Trace.error('Unimplemented identifier()')
return 'Error'
|
[
"def",
"identifier",
"(",
"self",
")",
":",
"Trace",
".",
"error",
"(",
"'Unimplemented identifier()'",
")",
"return",
"'Error'"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/docutils/utils/math/math2html.py#L2032-L2035
|
|
frescobaldi/frescobaldi
|
301cc977fc4ba7caa3df9e4bf905212ad5d06912
|
frescobaldi_app/highlight2html.py
|
python
|
html
|
(cursor, scheme='editor', inline=False, number_lines=False, full_html=True,
wrap_tag="pre", wrap_attrib="id", wrap_attrib_name="document")
|
return w.html(cursor)
|
Return a HTML document with the syntax-highlighted region.
The tokens are marked with <span> tags. The cursor is a
ly.document.Cursor instance. The specified text formats scheme is used
(by default 'editor'). If inline is True, the span tags have inline
style attributes. If inline is False, the span tags have class
attributes and a stylesheet is included.
Set number_lines to True to add line numbers.
|
Return a HTML document with the syntax-highlighted region.
|
[
"Return",
"a",
"HTML",
"document",
"with",
"the",
"syntax",
"-",
"highlighted",
"region",
"."
] |
def html(cursor, scheme='editor', inline=False, number_lines=False, full_html=True,
wrap_tag="pre", wrap_attrib="id", wrap_attrib_name="document"):
"""Return a HTML document with the syntax-highlighted region.
The tokens are marked with <span> tags. The cursor is a
ly.document.Cursor instance. The specified text formats scheme is used
(by default 'editor'). If inline is True, the span tags have inline
style attributes. If inline is False, the span tags have class
attributes and a stylesheet is included.
Set number_lines to True to add line numbers.
"""
data = textformats.formatData(scheme) # the current highlighting scheme
w = ly.colorize.HtmlWriter()
w.set_wrapper_tag(wrap_tag)
w.set_wrapper_attribute(wrap_attrib)
w.document_id = wrap_attrib_name
w.inline_style = inline
w.number_lines = number_lines
w.full_html = full_html
w.fgcolor = data.baseColors['text'].name()
w.bgcolor = data.baseColors['background'].name()
w.css_scheme = data.css_scheme()
return w.html(cursor)
|
[
"def",
"html",
"(",
"cursor",
",",
"scheme",
"=",
"'editor'",
",",
"inline",
"=",
"False",
",",
"number_lines",
"=",
"False",
",",
"full_html",
"=",
"True",
",",
"wrap_tag",
"=",
"\"pre\"",
",",
"wrap_attrib",
"=",
"\"id\"",
",",
"wrap_attrib_name",
"=",
"\"document\"",
")",
":",
"data",
"=",
"textformats",
".",
"formatData",
"(",
"scheme",
")",
"# the current highlighting scheme",
"w",
"=",
"ly",
".",
"colorize",
".",
"HtmlWriter",
"(",
")",
"w",
".",
"set_wrapper_tag",
"(",
"wrap_tag",
")",
"w",
".",
"set_wrapper_attribute",
"(",
"wrap_attrib",
")",
"w",
".",
"document_id",
"=",
"wrap_attrib_name",
"w",
".",
"inline_style",
"=",
"inline",
"w",
".",
"number_lines",
"=",
"number_lines",
"w",
".",
"full_html",
"=",
"full_html",
"w",
".",
"fgcolor",
"=",
"data",
".",
"baseColors",
"[",
"'text'",
"]",
".",
"name",
"(",
")",
"w",
".",
"bgcolor",
"=",
"data",
".",
"baseColors",
"[",
"'background'",
"]",
".",
"name",
"(",
")",
"w",
".",
"css_scheme",
"=",
"data",
".",
"css_scheme",
"(",
")",
"return",
"w",
".",
"html",
"(",
"cursor",
")"
] |
https://github.com/frescobaldi/frescobaldi/blob/301cc977fc4ba7caa3df9e4bf905212ad5d06912/frescobaldi_app/highlight2html.py#L49-L73
|
|
abhik/pebl
|
5e7d694eb1e4f90e0f1410000b958ba62698a268
|
src/pebl/util.py
|
python
|
logsum
|
(lst)
|
return reduce(logadd, lst) + maxval
|
Sums a list of log values, ensuring accuracy.
|
Sums a list of log values, ensuring accuracy.
|
[
"Sums",
"a",
"list",
"of",
"log",
"values",
"ensuring",
"accuracy",
"."
] |
def logsum(lst):
"""Sums a list of log values, ensuring accuracy."""
if not isinstance(lst, N.ndarray):
lst = N.array(lst)
maxval = lst.max()
lst = lst - maxval
return reduce(logadd, lst) + maxval
|
[
"def",
"logsum",
"(",
"lst",
")",
":",
"if",
"not",
"isinstance",
"(",
"lst",
",",
"N",
".",
"ndarray",
")",
":",
"lst",
"=",
"N",
".",
"array",
"(",
"lst",
")",
"maxval",
"=",
"lst",
".",
"max",
"(",
")",
"lst",
"=",
"lst",
"-",
"maxval",
"return",
"reduce",
"(",
"logadd",
",",
"lst",
")",
"+",
"maxval"
] |
https://github.com/abhik/pebl/blob/5e7d694eb1e4f90e0f1410000b958ba62698a268/src/pebl/util.py#L89-L97
|
|
rferrazz/pyqt4topyqt5
|
c0630e1a3e1e2884d8c56127812c35854dbdf301
|
pyqt4topyqt5/__init__.py
|
python
|
Tools.read_python_source
|
(self, filename)
|
return self.get_content(filename)
|
Return the source code.
Args:
filename -- the file name
Returns:
list(lines)
|
Return the source code.
|
[
"Return",
"the",
"source",
"code",
"."
] |
def read_python_source(self, filename):
"""Return the source code.
Args:
filename -- the file name
Returns:
list(lines)
"""
self.encoding = self.get_encoding(filename)
if self.encoding is None:
return None
return self.get_content(filename)
|
[
"def",
"read_python_source",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"encoding",
"=",
"self",
".",
"get_encoding",
"(",
"filename",
")",
"if",
"self",
".",
"encoding",
"is",
"None",
":",
"return",
"None",
"return",
"self",
".",
"get_content",
"(",
"filename",
")"
] |
https://github.com/rferrazz/pyqt4topyqt5/blob/c0630e1a3e1e2884d8c56127812c35854dbdf301/pyqt4topyqt5/__init__.py#L2226-L2239
|
|
iopsgroup/imoocc
|
de810eb6d4c1697b7139305925a5b0ba21225f3f
|
scanhosts/modules/paramiko1_9/proxy.py
|
python
|
ProxyCommand.send
|
(self, content)
|
return len(content)
|
Write the content received from the SSH client to the standard
input of the forked command.
@param content: string to be sent to the forked command
@type content: str
|
Write the content received from the SSH client to the standard
input of the forked command.
|
[
"Write",
"the",
"content",
"received",
"from",
"the",
"SSH",
"client",
"to",
"the",
"standard",
"input",
"of",
"the",
"forked",
"command",
"."
] |
def send(self, content):
"""
Write the content received from the SSH client to the standard
input of the forked command.
@param content: string to be sent to the forked command
@type content: str
"""
try:
self.process.stdin.write(content)
except IOError, e:
# There was a problem with the child process. It probably
# died and we can't proceed. The best option here is to
# raise an exception informing the user that the informed
# ProxyCommand is not working.
raise BadProxyCommand(' '.join(self.cmd), e.strerror)
return len(content)
|
[
"def",
"send",
"(",
"self",
",",
"content",
")",
":",
"try",
":",
"self",
".",
"process",
".",
"stdin",
".",
"write",
"(",
"content",
")",
"except",
"IOError",
",",
"e",
":",
"# There was a problem with the child process. It probably",
"# died and we can't proceed. The best option here is to",
"# raise an exception informing the user that the informed",
"# ProxyCommand is not working.",
"raise",
"BadProxyCommand",
"(",
"' '",
".",
"join",
"(",
"self",
".",
"cmd",
")",
",",
"e",
".",
"strerror",
")",
"return",
"len",
"(",
"content",
")"
] |
https://github.com/iopsgroup/imoocc/blob/de810eb6d4c1697b7139305925a5b0ba21225f3f/scanhosts/modules/paramiko1_9/proxy.py#L52-L68
|
|
danecjensen/subscribely
|
4d6ac60358b5fe26f0c01be68f1ba063df3b1ea0
|
src/pkg_resources.py
|
python
|
Distribution.__getattr__
|
(self,attr)
|
return getattr(self._provider, attr)
|
Delegate all unrecognized public attributes to .metadata provider
|
Delegate all unrecognized public attributes to .metadata provider
|
[
"Delegate",
"all",
"unrecognized",
"public",
"attributes",
"to",
".",
"metadata",
"provider"
] |
def __getattr__(self,attr):
"""Delegate all unrecognized public attributes to .metadata provider"""
if attr.startswith('_'):
raise AttributeError,attr
return getattr(self._provider, attr)
|
[
"def",
"__getattr__",
"(",
"self",
",",
"attr",
")",
":",
"if",
"attr",
".",
"startswith",
"(",
"'_'",
")",
":",
"raise",
"AttributeError",
",",
"attr",
"return",
"getattr",
"(",
"self",
".",
"_provider",
",",
"attr",
")"
] |
https://github.com/danecjensen/subscribely/blob/4d6ac60358b5fe26f0c01be68f1ba063df3b1ea0/src/pkg_resources.py#L2206-L2210
|
|
facebookresearch/mmf
|
fb6fe390287e1da12c3bd28d4ab43c5f7dcdfc9f
|
mmf/trainers/core/training_loop.py
|
python
|
TrainerTrainingLoopMixin.run_training_batch
|
(self, batch: Dict[str, Tensor], loss_divisor: int)
|
return report
|
[] |
def run_training_batch(self, batch: Dict[str, Tensor], loss_divisor: int) -> None:
report = self._forward(batch)
if self.training_config.exit_on_nan_losses:
self._check_nan_losses(report)
loss = extract_loss(report, loss_divisor)
self._backward(loss)
return report
|
[
"def",
"run_training_batch",
"(",
"self",
",",
"batch",
":",
"Dict",
"[",
"str",
",",
"Tensor",
"]",
",",
"loss_divisor",
":",
"int",
")",
"->",
"None",
":",
"report",
"=",
"self",
".",
"_forward",
"(",
"batch",
")",
"if",
"self",
".",
"training_config",
".",
"exit_on_nan_losses",
":",
"self",
".",
"_check_nan_losses",
"(",
"report",
")",
"loss",
"=",
"extract_loss",
"(",
"report",
",",
"loss_divisor",
")",
"self",
".",
"_backward",
"(",
"loss",
")",
"return",
"report"
] |
https://github.com/facebookresearch/mmf/blob/fb6fe390287e1da12c3bd28d4ab43c5f7dcdfc9f/mmf/trainers/core/training_loop.py#L165-L171
|
|||
khanhnamle1994/natural-language-processing
|
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
|
assignment2/q2_NER.py
|
python
|
test_NER
|
()
|
Test NER model implementation.
You can use this function to test your implementation of the Named Entity
Recognition network. When debugging, set max_epochs in the Config object to 1
so you can rapidly iterate.
|
Test NER model implementation.
|
[
"Test",
"NER",
"model",
"implementation",
"."
] |
def test_NER():
"""Test NER model implementation.
You can use this function to test your implementation of the Named Entity
Recognition network. When debugging, set max_epochs in the Config object to 1
so you can rapidly iterate.
"""
config = Config()
with tf.Graph().as_default():
model = NERModel(config)
init = tf.initialize_all_variables()
saver = tf.train.Saver()
with tf.Session() as session:
best_val_loss = float('inf')
best_val_epoch = 0
session.run(init)
for epoch in xrange(config.max_epochs):
print 'Epoch {}'.format(epoch)
start = time.time()
###
train_loss, train_acc = model.run_epoch(session, model.X_train,
model.y_train)
val_loss, predictions = model.predict(session, model.X_dev, model.y_dev)
print 'Training loss: {}'.format(train_loss)
print 'Training acc: {}'.format(train_acc)
print 'Validation loss: {}'.format(val_loss)
if val_loss < best_val_loss:
best_val_loss = val_loss
best_val_epoch = epoch
if not os.path.exists("./weights"):
os.makedirs("./weights")
saver.save(session, './weights/ner.weights')
if epoch - best_val_epoch > config.early_stopping:
break
###
confusion = calculate_confusion(config, predictions, model.y_dev)
print_confusion(confusion, model.num_to_tag)
print 'Total time: {}'.format(time.time() - start)
saver.restore(session, './weights/ner.weights')
print 'Test'
print '=-=-='
print 'Writing predictions to q2_test.predicted'
_, predictions = model.predict(session, model.X_test, model.y_test)
save_predictions(predictions, "q2_test.predicted")
|
[
"def",
"test_NER",
"(",
")",
":",
"config",
"=",
"Config",
"(",
")",
"with",
"tf",
".",
"Graph",
"(",
")",
".",
"as_default",
"(",
")",
":",
"model",
"=",
"NERModel",
"(",
"config",
")",
"init",
"=",
"tf",
".",
"initialize_all_variables",
"(",
")",
"saver",
"=",
"tf",
".",
"train",
".",
"Saver",
"(",
")",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"session",
":",
"best_val_loss",
"=",
"float",
"(",
"'inf'",
")",
"best_val_epoch",
"=",
"0",
"session",
".",
"run",
"(",
"init",
")",
"for",
"epoch",
"in",
"xrange",
"(",
"config",
".",
"max_epochs",
")",
":",
"print",
"'Epoch {}'",
".",
"format",
"(",
"epoch",
")",
"start",
"=",
"time",
".",
"time",
"(",
")",
"###",
"train_loss",
",",
"train_acc",
"=",
"model",
".",
"run_epoch",
"(",
"session",
",",
"model",
".",
"X_train",
",",
"model",
".",
"y_train",
")",
"val_loss",
",",
"predictions",
"=",
"model",
".",
"predict",
"(",
"session",
",",
"model",
".",
"X_dev",
",",
"model",
".",
"y_dev",
")",
"print",
"'Training loss: {}'",
".",
"format",
"(",
"train_loss",
")",
"print",
"'Training acc: {}'",
".",
"format",
"(",
"train_acc",
")",
"print",
"'Validation loss: {}'",
".",
"format",
"(",
"val_loss",
")",
"if",
"val_loss",
"<",
"best_val_loss",
":",
"best_val_loss",
"=",
"val_loss",
"best_val_epoch",
"=",
"epoch",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"\"./weights\"",
")",
":",
"os",
".",
"makedirs",
"(",
"\"./weights\"",
")",
"saver",
".",
"save",
"(",
"session",
",",
"'./weights/ner.weights'",
")",
"if",
"epoch",
"-",
"best_val_epoch",
">",
"config",
".",
"early_stopping",
":",
"break",
"###",
"confusion",
"=",
"calculate_confusion",
"(",
"config",
",",
"predictions",
",",
"model",
".",
"y_dev",
")",
"print_confusion",
"(",
"confusion",
",",
"model",
".",
"num_to_tag",
")",
"print",
"'Total time: {}'",
".",
"format",
"(",
"time",
".",
"time",
"(",
")",
"-",
"start",
")",
"saver",
".",
"restore",
"(",
"session",
",",
"'./weights/ner.weights'",
")",
"print",
"'Test'",
"print",
"'=-=-='",
"print",
"'Writing predictions to q2_test.predicted'",
"_",
",",
"predictions",
"=",
"model",
".",
"predict",
"(",
"session",
",",
"model",
".",
"X_test",
",",
"model",
".",
"y_test",
")",
"save_predictions",
"(",
"predictions",
",",
"\"q2_test.predicted\"",
")"
] |
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment2/q2_NER.py#L343-L391
|
||
XX-net/XX-Net
|
a9898cfcf0084195fb7e69b6bc834e59aecdf14f
|
python3.8.2/Lib/locale.py
|
python
|
format_string
|
(f, val, grouping=False, monetary=False)
|
return new_f % val
|
Formats a string in the same way that the % formatting would use,
but takes the current locale into account.
Grouping is applied if the third parameter is true.
Conversion uses monetary thousands separator and grouping strings if
forth parameter monetary is true.
|
Formats a string in the same way that the % formatting would use,
but takes the current locale into account.
|
[
"Formats",
"a",
"string",
"in",
"the",
"same",
"way",
"that",
"the",
"%",
"formatting",
"would",
"use",
"but",
"takes",
"the",
"current",
"locale",
"into",
"account",
"."
] |
def format_string(f, val, grouping=False, monetary=False):
"""Formats a string in the same way that the % formatting would use,
but takes the current locale into account.
Grouping is applied if the third parameter is true.
Conversion uses monetary thousands separator and grouping strings if
forth parameter monetary is true."""
percents = list(_percent_re.finditer(f))
new_f = _percent_re.sub('%s', f)
if isinstance(val, _collections_abc.Mapping):
new_val = []
for perc in percents:
if perc.group()[-1]=='%':
new_val.append('%')
else:
new_val.append(_format(perc.group(), val, grouping, monetary))
else:
if not isinstance(val, tuple):
val = (val,)
new_val = []
i = 0
for perc in percents:
if perc.group()[-1]=='%':
new_val.append('%')
else:
starcount = perc.group('modifiers').count('*')
new_val.append(_format(perc.group(),
val[i],
grouping,
monetary,
*val[i+1:i+1+starcount]))
i += (1 + starcount)
val = tuple(new_val)
return new_f % val
|
[
"def",
"format_string",
"(",
"f",
",",
"val",
",",
"grouping",
"=",
"False",
",",
"monetary",
"=",
"False",
")",
":",
"percents",
"=",
"list",
"(",
"_percent_re",
".",
"finditer",
"(",
"f",
")",
")",
"new_f",
"=",
"_percent_re",
".",
"sub",
"(",
"'%s'",
",",
"f",
")",
"if",
"isinstance",
"(",
"val",
",",
"_collections_abc",
".",
"Mapping",
")",
":",
"new_val",
"=",
"[",
"]",
"for",
"perc",
"in",
"percents",
":",
"if",
"perc",
".",
"group",
"(",
")",
"[",
"-",
"1",
"]",
"==",
"'%'",
":",
"new_val",
".",
"append",
"(",
"'%'",
")",
"else",
":",
"new_val",
".",
"append",
"(",
"_format",
"(",
"perc",
".",
"group",
"(",
")",
",",
"val",
",",
"grouping",
",",
"monetary",
")",
")",
"else",
":",
"if",
"not",
"isinstance",
"(",
"val",
",",
"tuple",
")",
":",
"val",
"=",
"(",
"val",
",",
")",
"new_val",
"=",
"[",
"]",
"i",
"=",
"0",
"for",
"perc",
"in",
"percents",
":",
"if",
"perc",
".",
"group",
"(",
")",
"[",
"-",
"1",
"]",
"==",
"'%'",
":",
"new_val",
".",
"append",
"(",
"'%'",
")",
"else",
":",
"starcount",
"=",
"perc",
".",
"group",
"(",
"'modifiers'",
")",
".",
"count",
"(",
"'*'",
")",
"new_val",
".",
"append",
"(",
"_format",
"(",
"perc",
".",
"group",
"(",
")",
",",
"val",
"[",
"i",
"]",
",",
"grouping",
",",
"monetary",
",",
"*",
"val",
"[",
"i",
"+",
"1",
":",
"i",
"+",
"1",
"+",
"starcount",
"]",
")",
")",
"i",
"+=",
"(",
"1",
"+",
"starcount",
")",
"val",
"=",
"tuple",
"(",
"new_val",
")",
"return",
"new_f",
"%",
"val"
] |
https://github.com/XX-net/XX-Net/blob/a9898cfcf0084195fb7e69b6bc834e59aecdf14f/python3.8.2/Lib/locale.py#L207-L242
|
|
tensorflow/tensor2tensor
|
2a33b152d7835af66a6d20afe7961751047e28dd
|
tensor2tensor/rl/gym_utils.py
|
python
|
ActionDiscretizeWrapper._discretize_env
|
(self, env)
|
return action_map
|
Generates a discrete bounded spec and a linspace for the given limits.
Args:
env: An array to discretize.
Returns:
Tuple with the discrete_spec along with a list of lists mapping actions.
Raises:
ValueError: If not all limits value are >=2 or maximum or minimum of boxes
is equal to +- infinity.
|
Generates a discrete bounded spec and a linspace for the given limits.
|
[
"Generates",
"a",
"discrete",
"bounded",
"spec",
"and",
"a",
"linspace",
"for",
"the",
"given",
"limits",
"."
] |
def _discretize_env(self, env):
"""Generates a discrete bounded spec and a linspace for the given limits.
Args:
env: An array to discretize.
Returns:
Tuple with the discrete_spec along with a list of lists mapping actions.
Raises:
ValueError: If not all limits value are >=2 or maximum or minimum of boxes
is equal to +- infinity.
"""
if not np.all(self._num_actions >= 2):
raise ValueError("num_actions should all be at least size 2.")
if (math.isinf(np.min(env.action_space.low)) or
math.isinf(np.max(env.action_space.high))):
raise ValueError(
"""Minimum of boxes is {} and maximum of boxes is {},
but we expect that finite values are provided.""".
format(np.min(env.action_space.low),
np.max(env.action_space.high)))
limits = np.broadcast_to(self._num_actions,
env.action_space.shape)
minimum = np.broadcast_to(np.min(env.action_space.low),
env.action_space.shape)
maximum = np.broadcast_to(np.max(env.action_space.high),
env.action_space.shape)
action_map = [
np.linspace(env_min, env_max, num=n_actions)
for env_min, env_max, n_actions in zip(
np.nditer(minimum), np.nditer(maximum), np.nditer(limits))
]
return action_map
|
[
"def",
"_discretize_env",
"(",
"self",
",",
"env",
")",
":",
"if",
"not",
"np",
".",
"all",
"(",
"self",
".",
"_num_actions",
">=",
"2",
")",
":",
"raise",
"ValueError",
"(",
"\"num_actions should all be at least size 2.\"",
")",
"if",
"(",
"math",
".",
"isinf",
"(",
"np",
".",
"min",
"(",
"env",
".",
"action_space",
".",
"low",
")",
")",
"or",
"math",
".",
"isinf",
"(",
"np",
".",
"max",
"(",
"env",
".",
"action_space",
".",
"high",
")",
")",
")",
":",
"raise",
"ValueError",
"(",
"\"\"\"Minimum of boxes is {} and maximum of boxes is {},\n but we expect that finite values are provided.\"\"\"",
".",
"format",
"(",
"np",
".",
"min",
"(",
"env",
".",
"action_space",
".",
"low",
")",
",",
"np",
".",
"max",
"(",
"env",
".",
"action_space",
".",
"high",
")",
")",
")",
"limits",
"=",
"np",
".",
"broadcast_to",
"(",
"self",
".",
"_num_actions",
",",
"env",
".",
"action_space",
".",
"shape",
")",
"minimum",
"=",
"np",
".",
"broadcast_to",
"(",
"np",
".",
"min",
"(",
"env",
".",
"action_space",
".",
"low",
")",
",",
"env",
".",
"action_space",
".",
"shape",
")",
"maximum",
"=",
"np",
".",
"broadcast_to",
"(",
"np",
".",
"max",
"(",
"env",
".",
"action_space",
".",
"high",
")",
",",
"env",
".",
"action_space",
".",
"shape",
")",
"action_map",
"=",
"[",
"np",
".",
"linspace",
"(",
"env_min",
",",
"env_max",
",",
"num",
"=",
"n_actions",
")",
"for",
"env_min",
",",
"env_max",
",",
"n_actions",
"in",
"zip",
"(",
"np",
".",
"nditer",
"(",
"minimum",
")",
",",
"np",
".",
"nditer",
"(",
"maximum",
")",
",",
"np",
".",
"nditer",
"(",
"limits",
")",
")",
"]",
"return",
"action_map"
] |
https://github.com/tensorflow/tensor2tensor/blob/2a33b152d7835af66a6d20afe7961751047e28dd/tensor2tensor/rl/gym_utils.py#L122-L158
|
|
googleads/google-ads-python
|
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
|
google/ads/googleads/v8/services/services/batch_job_service/client.py
|
python
|
BatchJobServiceClient.user_list_path
|
(customer_id: str, user_list_id: str,)
|
return "customers/{customer_id}/userLists/{user_list_id}".format(
customer_id=customer_id, user_list_id=user_list_id,
)
|
Return a fully-qualified user_list string.
|
Return a fully-qualified user_list string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"user_list",
"string",
"."
] |
def user_list_path(customer_id: str, user_list_id: str,) -> str:
"""Return a fully-qualified user_list string."""
return "customers/{customer_id}/userLists/{user_list_id}".format(
customer_id=customer_id, user_list_id=user_list_id,
)
|
[
"def",
"user_list_path",
"(",
"customer_id",
":",
"str",
",",
"user_list_id",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"customers/{customer_id}/userLists/{user_list_id}\"",
".",
"format",
"(",
"customer_id",
"=",
"customer_id",
",",
"user_list_id",
"=",
"user_list_id",
",",
")"
] |
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v8/services/services/batch_job_service/client.py#L1254-L1258
|
|
postlund/pyatv
|
4ed1f5539f37d86d80272663d1f2ea34a6c41ec4
|
pyatv/core/facade.py
|
python
|
FacadeRemoteControl.volume_up
|
(self)
|
return await self.relay("volume_up")()
|
Press key volume up.
|
Press key volume up.
|
[
"Press",
"key",
"volume",
"up",
"."
] |
async def volume_up(self) -> None:
"""Press key volume up."""
return await self.relay("volume_up")()
|
[
"async",
"def",
"volume_up",
"(",
"self",
")",
"->",
"None",
":",
"return",
"await",
"self",
".",
"relay",
"(",
"\"volume_up\"",
")",
"(",
")"
] |
https://github.com/postlund/pyatv/blob/4ed1f5539f37d86d80272663d1f2ea34a6c41ec4/pyatv/core/facade.py#L115-L117
|
|
ranaroussi/qtpylib
|
2c1af0eef85fb3205d8fcefc41a421b074b371de
|
qtpylib/reports.py
|
python
|
Reports.run
|
(self)
|
Starts the reporting module
Makes the dashboard web app available via localhost:port, and exposes
a REST API for trade information, open positions and market data.
|
Starts the reporting module
|
[
"Starts",
"the",
"reporting",
"module"
] |
def run(self):
"""Starts the reporting module
Makes the dashboard web app available via localhost:port, and exposes
a REST API for trade information, open positions and market data.
"""
# -----------------------------------
# assign view
app.add_url_rule('/', 'index', view_func=self.index)
app.add_url_rule('/<path:start>', 'index', view_func=self.index)
app.add_url_rule('/<start>/<path:end>', 'index', view_func=self.index)
app.add_url_rule('/algos', 'algos', view_func=self.algos)
app.add_url_rule('/symbols', 'symbols', view_func=self.symbols)
app.add_url_rule('/positions', 'positions', view_func=self.positions)
app.add_url_rule('/positions/<path:algo_id>',
'positions', view_func=self.positions)
app.add_url_rule('/algo/<path:algo_id>',
'trades_by_algo', view_func=self.trades_by_algo)
app.add_url_rule('/algo/<algo_id>/<path:start>',
'trades_by_algo', view_func=self.trades_by_algo)
app.add_url_rule('/algo/<algo_id>/<start>/<path:end>',
'trades_by_algo', view_func=self.trades_by_algo)
app.add_url_rule('/bars/<resolution>/<symbol>',
'bars', view_func=self.bars)
app.add_url_rule(
'/bars/<resolution>/<symbol>/<path:start>', 'bars', view_func=self.bars)
app.add_url_rule('/bars/<resolution>/<symbol>/<start>/<path:end>',
'bars', view_func=self.bars)
app.add_url_rule('/trades', 'trades', view_func=self.trades)
app.add_url_rule('/trades/<path:start>',
'trades', view_func=self.trades)
app.add_url_rule('/trades/<start>/<path:end>',
'trades', view_func=self.trades)
app.add_url_rule('/login/<password>', 'login', view_func=self.login)
app.add_url_rule('/static/<url_path>', 'send_static',
view_func=self.send_static)
# let user know what the temp password is
if 'nopass' not in self.args and self._password != "":
print(" * Web app password is:", self._password)
# notice
# print(" * Running on http://"+ str(self.host) +":"+str(self.port)+"/ (CTRL+C to quit)")
# -----------------------------------
# run flask app
app.run(
debug=True,
host=str(self.host),
port=int(self.port)
)
|
[
"def",
"run",
"(",
"self",
")",
":",
"# -----------------------------------",
"# assign view",
"app",
".",
"add_url_rule",
"(",
"'/'",
",",
"'index'",
",",
"view_func",
"=",
"self",
".",
"index",
")",
"app",
".",
"add_url_rule",
"(",
"'/<path:start>'",
",",
"'index'",
",",
"view_func",
"=",
"self",
".",
"index",
")",
"app",
".",
"add_url_rule",
"(",
"'/<start>/<path:end>'",
",",
"'index'",
",",
"view_func",
"=",
"self",
".",
"index",
")",
"app",
".",
"add_url_rule",
"(",
"'/algos'",
",",
"'algos'",
",",
"view_func",
"=",
"self",
".",
"algos",
")",
"app",
".",
"add_url_rule",
"(",
"'/symbols'",
",",
"'symbols'",
",",
"view_func",
"=",
"self",
".",
"symbols",
")",
"app",
".",
"add_url_rule",
"(",
"'/positions'",
",",
"'positions'",
",",
"view_func",
"=",
"self",
".",
"positions",
")",
"app",
".",
"add_url_rule",
"(",
"'/positions/<path:algo_id>'",
",",
"'positions'",
",",
"view_func",
"=",
"self",
".",
"positions",
")",
"app",
".",
"add_url_rule",
"(",
"'/algo/<path:algo_id>'",
",",
"'trades_by_algo'",
",",
"view_func",
"=",
"self",
".",
"trades_by_algo",
")",
"app",
".",
"add_url_rule",
"(",
"'/algo/<algo_id>/<path:start>'",
",",
"'trades_by_algo'",
",",
"view_func",
"=",
"self",
".",
"trades_by_algo",
")",
"app",
".",
"add_url_rule",
"(",
"'/algo/<algo_id>/<start>/<path:end>'",
",",
"'trades_by_algo'",
",",
"view_func",
"=",
"self",
".",
"trades_by_algo",
")",
"app",
".",
"add_url_rule",
"(",
"'/bars/<resolution>/<symbol>'",
",",
"'bars'",
",",
"view_func",
"=",
"self",
".",
"bars",
")",
"app",
".",
"add_url_rule",
"(",
"'/bars/<resolution>/<symbol>/<path:start>'",
",",
"'bars'",
",",
"view_func",
"=",
"self",
".",
"bars",
")",
"app",
".",
"add_url_rule",
"(",
"'/bars/<resolution>/<symbol>/<start>/<path:end>'",
",",
"'bars'",
",",
"view_func",
"=",
"self",
".",
"bars",
")",
"app",
".",
"add_url_rule",
"(",
"'/trades'",
",",
"'trades'",
",",
"view_func",
"=",
"self",
".",
"trades",
")",
"app",
".",
"add_url_rule",
"(",
"'/trades/<path:start>'",
",",
"'trades'",
",",
"view_func",
"=",
"self",
".",
"trades",
")",
"app",
".",
"add_url_rule",
"(",
"'/trades/<start>/<path:end>'",
",",
"'trades'",
",",
"view_func",
"=",
"self",
".",
"trades",
")",
"app",
".",
"add_url_rule",
"(",
"'/login/<password>'",
",",
"'login'",
",",
"view_func",
"=",
"self",
".",
"login",
")",
"app",
".",
"add_url_rule",
"(",
"'/static/<url_path>'",
",",
"'send_static'",
",",
"view_func",
"=",
"self",
".",
"send_static",
")",
"# let user know what the temp password is",
"if",
"'nopass'",
"not",
"in",
"self",
".",
"args",
"and",
"self",
".",
"_password",
"!=",
"\"\"",
":",
"print",
"(",
"\" * Web app password is:\"",
",",
"self",
".",
"_password",
")",
"# notice",
"# print(\" * Running on http://\"+ str(self.host) +\":\"+str(self.port)+\"/ (CTRL+C to quit)\")",
"# -----------------------------------",
"# run flask app",
"app",
".",
"run",
"(",
"debug",
"=",
"True",
",",
"host",
"=",
"str",
"(",
"self",
".",
"host",
")",
",",
"port",
"=",
"int",
"(",
"self",
".",
"port",
")",
")"
] |
https://github.com/ranaroussi/qtpylib/blob/2c1af0eef85fb3205d8fcefc41a421b074b371de/qtpylib/reports.py#L314-L370
|
||
mme/vergeml
|
3dc30ba4e0f3d038743b6d468860cbcf3681acc6
|
vergeml/dataset.py
|
python
|
DatasetPlugin.__init__
|
(self)
|
[] |
def __init__(self):
self.progress_bar = None
self.progress = None
self.start_time = None
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"progress_bar",
"=",
"None",
"self",
".",
"progress",
"=",
"None",
"self",
".",
"start_time",
"=",
"None"
] |
https://github.com/mme/vergeml/blob/3dc30ba4e0f3d038743b6d468860cbcf3681acc6/vergeml/dataset.py#L23-L26
|
||||
JetBrains/python-skeletons
|
95ad24b666e475998e5d1cc02ed53a2188036167
|
builtins.py
|
python
|
str.rstrip
|
(self, chars=None)
|
return ''
|
Return a copy of the string with trailing characters removed.
:type chars: str | None
:rtype: str
|
Return a copy of the string with trailing characters removed.
|
[
"Return",
"a",
"copy",
"of",
"the",
"string",
"with",
"trailing",
"characters",
"removed",
"."
] |
def rstrip(self, chars=None):
"""Return a copy of the string with trailing characters removed.
:type chars: str | None
:rtype: str
"""
return ''
|
[
"def",
"rstrip",
"(",
"self",
",",
"chars",
"=",
"None",
")",
":",
"return",
"''"
] |
https://github.com/JetBrains/python-skeletons/blob/95ad24b666e475998e5d1cc02ed53a2188036167/builtins.py#L1654-L1660
|
|
google/vulncode-db
|
d7ca0bda764c4011e49a1d88a89b31d2f9aa32c8
|
migrations/env.py
|
python
|
run_migrations_online
|
()
|
Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
|
Run migrations in 'online' mode.
|
[
"Run",
"migrations",
"in",
"online",
"mode",
"."
] |
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, "autogenerate", False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info("No changes in schema detected.")
engine = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
connection = engine.connect()
def include_object(object, name, type_, reflected, compare_to):
skipped_schemas = ["sys", "mysql", "performance_schema"]
if type_ == "table" and object.schema in skipped_schemas:
return False
return True
context.configure(
connection=connection,
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
include_schemas=True,
include_object=include_object,
**current_app.extensions["migrate"].configure_args
)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()
|
[
"def",
"run_migrations_online",
"(",
")",
":",
"# this callback is used to prevent an auto-migration from being generated",
"# when there are no changes to the schema",
"# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html",
"def",
"process_revision_directives",
"(",
"context",
",",
"revision",
",",
"directives",
")",
":",
"if",
"getattr",
"(",
"config",
".",
"cmd_opts",
",",
"\"autogenerate\"",
",",
"False",
")",
":",
"script",
"=",
"directives",
"[",
"0",
"]",
"if",
"script",
".",
"upgrade_ops",
".",
"is_empty",
"(",
")",
":",
"directives",
"[",
":",
"]",
"=",
"[",
"]",
"logger",
".",
"info",
"(",
"\"No changes in schema detected.\"",
")",
"engine",
"=",
"engine_from_config",
"(",
"config",
".",
"get_section",
"(",
"config",
".",
"config_ini_section",
")",
",",
"prefix",
"=",
"\"sqlalchemy.\"",
",",
"poolclass",
"=",
"pool",
".",
"NullPool",
",",
")",
"connection",
"=",
"engine",
".",
"connect",
"(",
")",
"def",
"include_object",
"(",
"object",
",",
"name",
",",
"type_",
",",
"reflected",
",",
"compare_to",
")",
":",
"skipped_schemas",
"=",
"[",
"\"sys\"",
",",
"\"mysql\"",
",",
"\"performance_schema\"",
"]",
"if",
"type_",
"==",
"\"table\"",
"and",
"object",
".",
"schema",
"in",
"skipped_schemas",
":",
"return",
"False",
"return",
"True",
"context",
".",
"configure",
"(",
"connection",
"=",
"connection",
",",
"target_metadata",
"=",
"target_metadata",
",",
"process_revision_directives",
"=",
"process_revision_directives",
",",
"include_schemas",
"=",
"True",
",",
"include_object",
"=",
"include_object",
",",
"*",
"*",
"current_app",
".",
"extensions",
"[",
"\"migrate\"",
"]",
".",
"configure_args",
")",
"try",
":",
"with",
"context",
".",
"begin_transaction",
"(",
")",
":",
"context",
".",
"run_migrations",
"(",
")",
"finally",
":",
"connection",
".",
"close",
"(",
")"
] |
https://github.com/google/vulncode-db/blob/d7ca0bda764c4011e49a1d88a89b31d2f9aa32c8/migrations/env.py#L52-L97
|
||
KalleHallden/AutoTimer
|
2d954216700c4930baa154e28dbddc34609af7ce
|
env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py
|
python
|
_main
|
()
|
Display all information sysconfig detains.
|
Display all information sysconfig detains.
|
[
"Display",
"all",
"information",
"sysconfig",
"detains",
"."
] |
def _main():
"""Display all information sysconfig detains."""
print('Platform: "%s"' % get_platform())
print('Python version: "%s"' % get_python_version())
print('Current installation scheme: "%s"' % _get_default_scheme())
print()
_print_dict('Paths', get_paths())
print()
_print_dict('Variables', get_config_vars())
|
[
"def",
"_main",
"(",
")",
":",
"print",
"(",
"'Platform: \"%s\"'",
"%",
"get_platform",
"(",
")",
")",
"print",
"(",
"'Python version: \"%s\"'",
"%",
"get_python_version",
"(",
")",
")",
"print",
"(",
"'Current installation scheme: \"%s\"'",
"%",
"_get_default_scheme",
"(",
")",
")",
"print",
"(",
")",
"_print_dict",
"(",
"'Paths'",
",",
"get_paths",
"(",
")",
")",
"print",
"(",
")",
"_print_dict",
"(",
"'Variables'",
",",
"get_config_vars",
"(",
")",
")"
] |
https://github.com/KalleHallden/AutoTimer/blob/2d954216700c4930baa154e28dbddc34609af7ce/env/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py#L776-L784
|
||
JDAI-CV/Partial-Person-ReID
|
fb94dbfbec1105bbc22a442702bc6e385427d416
|
fastreid/data/datasets/bases.py
|
python
|
Dataset.parse_data
|
(self, data)
|
return len(pids), len(cams)
|
Parses data list and returns the number of person IDs
and the number of camera views.
Args:
data (list): contains tuples of (img_path(s), pid, camid)
|
Parses data list and returns the number of person IDs
and the number of camera views.
Args:
data (list): contains tuples of (img_path(s), pid, camid)
|
[
"Parses",
"data",
"list",
"and",
"returns",
"the",
"number",
"of",
"person",
"IDs",
"and",
"the",
"number",
"of",
"camera",
"views",
".",
"Args",
":",
"data",
"(",
"list",
")",
":",
"contains",
"tuples",
"of",
"(",
"img_path",
"(",
"s",
")",
"pid",
"camid",
")"
] |
def parse_data(self, data):
"""Parses data list and returns the number of person IDs
and the number of camera views.
Args:
data (list): contains tuples of (img_path(s), pid, camid)
"""
pids = set()
cams = set()
for _, pid, camid in data:
pids.add(pid)
cams.add(camid)
return len(pids), len(cams)
|
[
"def",
"parse_data",
"(",
"self",
",",
"data",
")",
":",
"pids",
"=",
"set",
"(",
")",
"cams",
"=",
"set",
"(",
")",
"for",
"_",
",",
"pid",
",",
"camid",
"in",
"data",
":",
"pids",
".",
"add",
"(",
"pid",
")",
"cams",
".",
"add",
"(",
"camid",
")",
"return",
"len",
"(",
"pids",
")",
",",
"len",
"(",
"cams",
")"
] |
https://github.com/JDAI-CV/Partial-Person-ReID/blob/fb94dbfbec1105bbc22a442702bc6e385427d416/fastreid/data/datasets/bases.py#L105-L116
|
|
IronLanguages/ironpython3
|
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
|
Src/StdLib/Lib/bdb.py
|
python
|
Tdb.user_return
|
(self, frame, retval)
|
[] |
def user_return(self, frame, retval):
print('+++ return', retval)
|
[
"def",
"user_return",
"(",
"self",
",",
"frame",
",",
"retval",
")",
":",
"print",
"(",
"'+++ return'",
",",
"retval",
")"
] |
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/bdb.py#L656-L657
|
||||
volatilityfoundation/volatility3
|
168b0d0b053ab97a7cb096ef2048795cc54d885f
|
volatility3/framework/objects/__init__.py
|
python
|
Bytes.__init__
|
(self,
context: interfaces.context.ContextInterface,
type_name: str,
object_info: interfaces.objects.ObjectInformation,
length: int = 1)
|
[] |
def __init__(self,
context: interfaces.context.ContextInterface,
type_name: str,
object_info: interfaces.objects.ObjectInformation,
length: int = 1) -> None:
super().__init__(context = context,
type_name = type_name,
object_info = object_info,
data_format = DataFormatInfo(length, "big", False))
self._vol['length'] = length
|
[
"def",
"__init__",
"(",
"self",
",",
"context",
":",
"interfaces",
".",
"context",
".",
"ContextInterface",
",",
"type_name",
":",
"str",
",",
"object_info",
":",
"interfaces",
".",
"objects",
".",
"ObjectInformation",
",",
"length",
":",
"int",
"=",
"1",
")",
"->",
"None",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"context",
"=",
"context",
",",
"type_name",
"=",
"type_name",
",",
"object_info",
"=",
"object_info",
",",
"data_format",
"=",
"DataFormatInfo",
"(",
"length",
",",
"\"big\"",
",",
"False",
")",
")",
"self",
".",
"_vol",
"[",
"'length'",
"]",
"=",
"length"
] |
https://github.com/volatilityfoundation/volatility3/blob/168b0d0b053ab97a7cb096ef2048795cc54d885f/volatility3/framework/objects/__init__.py#L188-L197
|
||||
wxWidgets/Phoenix
|
b2199e299a6ca6d866aa6f3d0888499136ead9d6
|
wx/lib/agw/hyperlink.py
|
python
|
HyperLinkCtrl.GetURL
|
(self)
|
return self._URL
|
Retrieve the URL associated to the :class:`HyperLinkCtrl`.
|
Retrieve the URL associated to the :class:`HyperLinkCtrl`.
|
[
"Retrieve",
"the",
"URL",
"associated",
"to",
"the",
":",
"class",
":",
"HyperLinkCtrl",
"."
] |
def GetURL(self):
""" Retrieve the URL associated to the :class:`HyperLinkCtrl`. """
return self._URL
|
[
"def",
"GetURL",
"(",
"self",
")",
":",
"return",
"self",
".",
"_URL"
] |
https://github.com/wxWidgets/Phoenix/blob/b2199e299a6ca6d866aa6f3d0888499136ead9d6/wx/lib/agw/hyperlink.py#L562-L565
|
|
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/sympy/solvers/solveset.py
|
python
|
linear_eq_to_matrix
|
(equations, *symbols)
|
return A, b
|
r"""
Converts a given System of Equations into Matrix form.
Here `equations` must be a linear system of equations in
`symbols`. Element M[i, j] corresponds to the coefficient
of the jth symbol in the ith equation.
The Matrix form corresponds to the augmented matrix form.
For example:
.. math:: 4x + 2y + 3z = 1
.. math:: 3x + y + z = -6
.. math:: 2x + 4y + 9z = 2
This system would return `A` & `b` as given below:
::
[ 4 2 3 ] [ 1 ]
A = [ 3 1 1 ] b = [-6 ]
[ 2 4 9 ] [ 2 ]
The only simplification performed is to convert
`Eq(a, b) -> a - b`.
Raises
======
ValueError
The equations contain a nonlinear term.
The symbols are not given or are not unique.
Examples
========
>>> from sympy import linear_eq_to_matrix, symbols
>>> c, x, y, z = symbols('c, x, y, z')
The coefficients (numerical or symbolic) of the symbols will
be returned as matrices:
>>> eqns = [c*x + z - 1 - c, y + z, x - y]
>>> A, b = linear_eq_to_matrix(eqns, [x, y, z])
>>> A
Matrix([
[c, 0, 1],
[0, 1, 1],
[1, -1, 0]])
>>> b
Matrix([
[c + 1],
[ 0],
[ 0]])
This routine does not simplify expressions and will raise an error
if nonlinearity is encountered:
>>> eqns = [
... (x**2 - 3*x)/(x - 3) - 3,
... y**2 - 3*y - y*(y - 4) + x - 4]
>>> linear_eq_to_matrix(eqns, [x, y])
Traceback (most recent call last):
...
ValueError:
The term (x**2 - 3*x)/(x - 3) is nonlinear in {x, y}
Simplifying these equations will discard the removable singularity
in the first, reveal the linear structure of the second:
>>> [e.simplify() for e in eqns]
[x - 3, x + y - 4]
Any such simplification needed to eliminate nonlinear terms must
be done before calling this routine.
|
r"""
Converts a given System of Equations into Matrix form.
Here `equations` must be a linear system of equations in
`symbols`. Element M[i, j] corresponds to the coefficient
of the jth symbol in the ith equation.
|
[
"r",
"Converts",
"a",
"given",
"System",
"of",
"Equations",
"into",
"Matrix",
"form",
".",
"Here",
"equations",
"must",
"be",
"a",
"linear",
"system",
"of",
"equations",
"in",
"symbols",
".",
"Element",
"M",
"[",
"i",
"j",
"]",
"corresponds",
"to",
"the",
"coefficient",
"of",
"the",
"jth",
"symbol",
"in",
"the",
"ith",
"equation",
"."
] |
def linear_eq_to_matrix(equations, *symbols):
r"""
Converts a given System of Equations into Matrix form.
Here `equations` must be a linear system of equations in
`symbols`. Element M[i, j] corresponds to the coefficient
of the jth symbol in the ith equation.
The Matrix form corresponds to the augmented matrix form.
For example:
.. math:: 4x + 2y + 3z = 1
.. math:: 3x + y + z = -6
.. math:: 2x + 4y + 9z = 2
This system would return `A` & `b` as given below:
::
[ 4 2 3 ] [ 1 ]
A = [ 3 1 1 ] b = [-6 ]
[ 2 4 9 ] [ 2 ]
The only simplification performed is to convert
`Eq(a, b) -> a - b`.
Raises
======
ValueError
The equations contain a nonlinear term.
The symbols are not given or are not unique.
Examples
========
>>> from sympy import linear_eq_to_matrix, symbols
>>> c, x, y, z = symbols('c, x, y, z')
The coefficients (numerical or symbolic) of the symbols will
be returned as matrices:
>>> eqns = [c*x + z - 1 - c, y + z, x - y]
>>> A, b = linear_eq_to_matrix(eqns, [x, y, z])
>>> A
Matrix([
[c, 0, 1],
[0, 1, 1],
[1, -1, 0]])
>>> b
Matrix([
[c + 1],
[ 0],
[ 0]])
This routine does not simplify expressions and will raise an error
if nonlinearity is encountered:
>>> eqns = [
... (x**2 - 3*x)/(x - 3) - 3,
... y**2 - 3*y - y*(y - 4) + x - 4]
>>> linear_eq_to_matrix(eqns, [x, y])
Traceback (most recent call last):
...
ValueError:
The term (x**2 - 3*x)/(x - 3) is nonlinear in {x, y}
Simplifying these equations will discard the removable singularity
in the first, reveal the linear structure of the second:
>>> [e.simplify() for e in eqns]
[x - 3, x + y - 4]
Any such simplification needed to eliminate nonlinear terms must
be done before calling this routine.
"""
if not symbols:
raise ValueError(filldedent('''
Symbols must be given, for which coefficients
are to be found.
'''))
if hasattr(symbols[0], '__iter__'):
symbols = symbols[0]
for i in symbols:
if not isinstance(i, Symbol):
raise ValueError(filldedent('''
Expecting a Symbol but got %s
''' % i))
if has_dups(symbols):
raise ValueError('Symbols must be unique')
equations = sympify(equations)
if isinstance(equations, MatrixBase):
equations = list(equations)
elif isinstance(equations, Expr):
equations = [equations]
elif not is_sequence(equations):
raise ValueError(filldedent('''
Equation(s) must be given as a sequence, Expr,
Eq or Matrix.
'''))
A, b = [], []
for i, f in enumerate(equations):
if isinstance(f, Equality):
f = f.rewrite(Add, evaluate=False)
coeff_list = linear_coeffs(f, *symbols)
b.append(-coeff_list.pop())
A.append(coeff_list)
A, b = map(Matrix, (A, b))
return A, b
|
[
"def",
"linear_eq_to_matrix",
"(",
"equations",
",",
"*",
"symbols",
")",
":",
"if",
"not",
"symbols",
":",
"raise",
"ValueError",
"(",
"filldedent",
"(",
"'''\n Symbols must be given, for which coefficients\n are to be found.\n '''",
")",
")",
"if",
"hasattr",
"(",
"symbols",
"[",
"0",
"]",
",",
"'__iter__'",
")",
":",
"symbols",
"=",
"symbols",
"[",
"0",
"]",
"for",
"i",
"in",
"symbols",
":",
"if",
"not",
"isinstance",
"(",
"i",
",",
"Symbol",
")",
":",
"raise",
"ValueError",
"(",
"filldedent",
"(",
"'''\n Expecting a Symbol but got %s\n '''",
"%",
"i",
")",
")",
"if",
"has_dups",
"(",
"symbols",
")",
":",
"raise",
"ValueError",
"(",
"'Symbols must be unique'",
")",
"equations",
"=",
"sympify",
"(",
"equations",
")",
"if",
"isinstance",
"(",
"equations",
",",
"MatrixBase",
")",
":",
"equations",
"=",
"list",
"(",
"equations",
")",
"elif",
"isinstance",
"(",
"equations",
",",
"Expr",
")",
":",
"equations",
"=",
"[",
"equations",
"]",
"elif",
"not",
"is_sequence",
"(",
"equations",
")",
":",
"raise",
"ValueError",
"(",
"filldedent",
"(",
"'''\n Equation(s) must be given as a sequence, Expr,\n Eq or Matrix.\n '''",
")",
")",
"A",
",",
"b",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"i",
",",
"f",
"in",
"enumerate",
"(",
"equations",
")",
":",
"if",
"isinstance",
"(",
"f",
",",
"Equality",
")",
":",
"f",
"=",
"f",
".",
"rewrite",
"(",
"Add",
",",
"evaluate",
"=",
"False",
")",
"coeff_list",
"=",
"linear_coeffs",
"(",
"f",
",",
"*",
"symbols",
")",
"b",
".",
"append",
"(",
"-",
"coeff_list",
".",
"pop",
"(",
")",
")",
"A",
".",
"append",
"(",
"coeff_list",
")",
"A",
",",
"b",
"=",
"map",
"(",
"Matrix",
",",
"(",
"A",
",",
"b",
")",
")",
"return",
"A",
",",
"b"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/solvers/solveset.py#L2224-L2336
|
|
PttCodingMan/PyPtt
|
07037ccc31116d0699285d9d00f61d9288a608b7
|
PyPtt/PTT.py
|
python
|
API.get_user
|
(self, user_id)
|
return self._get_user(user_id)
|
[] |
def get_user(self, user_id) -> data_type.UserInfo:
self._one_thread()
if not self._login_status:
raise exceptions.Requirelogin(i18n.Requirelogin)
if self.unregistered_user:
raise exceptions.UnregisteredUser(lib_util.get_current_func_name())
self.config.log_last_value = None
return self._get_user(user_id)
|
[
"def",
"get_user",
"(",
"self",
",",
"user_id",
")",
"->",
"data_type",
".",
"UserInfo",
":",
"self",
".",
"_one_thread",
"(",
")",
"if",
"not",
"self",
".",
"_login_status",
":",
"raise",
"exceptions",
".",
"Requirelogin",
"(",
"i18n",
".",
"Requirelogin",
")",
"if",
"self",
".",
"unregistered_user",
":",
"raise",
"exceptions",
".",
"UnregisteredUser",
"(",
"lib_util",
".",
"get_current_func_name",
"(",
")",
")",
"self",
".",
"config",
".",
"log_last_value",
"=",
"None",
"return",
"self",
".",
"_get_user",
"(",
"user_id",
")"
] |
https://github.com/PttCodingMan/PyPtt/blob/07037ccc31116d0699285d9d00f61d9288a608b7/PyPtt/PTT.py#L1210-L1221
|
|||
LittletreeZou/Question-Pairs-Matching
|
e85ff93f4a0cb922bc801cf9d2e9259c786e9482
|
post_processing_with_graph_features.py
|
python
|
gen_graph
|
(train)
|
return graph
|
把输入数据转化为以字典表示的无向图
|
把输入数据转化为以字典表示的无向图
|
[
"把输入数据转化为以字典表示的无向图"
] |
def gen_graph(train):
"""
把输入数据转化为以字典表示的无向图
"""
data = train[train['label']==1][['q1','q2']]
graph = {}
for i in range(len(data)):
if data.iloc[i,0] not in graph.keys():
graph[data.iloc[i,0]] = set([data.iloc[i,1]])
else:
graph[data.iloc[i,0]].add(data.iloc[i,1])
if data.iloc[i,1] not in graph.keys():
graph[data.iloc[i,1]] = set([data.iloc[i,0]])
else:
graph[data.iloc[i,1]].add(data.iloc[i,0])
return graph
|
[
"def",
"gen_graph",
"(",
"train",
")",
":",
"data",
"=",
"train",
"[",
"train",
"[",
"'label'",
"]",
"==",
"1",
"]",
"[",
"[",
"'q1'",
",",
"'q2'",
"]",
"]",
"graph",
"=",
"{",
"}",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data",
")",
")",
":",
"if",
"data",
".",
"iloc",
"[",
"i",
",",
"0",
"]",
"not",
"in",
"graph",
".",
"keys",
"(",
")",
":",
"graph",
"[",
"data",
".",
"iloc",
"[",
"i",
",",
"0",
"]",
"]",
"=",
"set",
"(",
"[",
"data",
".",
"iloc",
"[",
"i",
",",
"1",
"]",
"]",
")",
"else",
":",
"graph",
"[",
"data",
".",
"iloc",
"[",
"i",
",",
"0",
"]",
"]",
".",
"add",
"(",
"data",
".",
"iloc",
"[",
"i",
",",
"1",
"]",
")",
"if",
"data",
".",
"iloc",
"[",
"i",
",",
"1",
"]",
"not",
"in",
"graph",
".",
"keys",
"(",
")",
":",
"graph",
"[",
"data",
".",
"iloc",
"[",
"i",
",",
"1",
"]",
"]",
"=",
"set",
"(",
"[",
"data",
".",
"iloc",
"[",
"i",
",",
"0",
"]",
"]",
")",
"else",
":",
"graph",
"[",
"data",
".",
"iloc",
"[",
"i",
",",
"1",
"]",
"]",
".",
"add",
"(",
"data",
".",
"iloc",
"[",
"i",
",",
"0",
"]",
")",
"return",
"graph"
] |
https://github.com/LittletreeZou/Question-Pairs-Matching/blob/e85ff93f4a0cb922bc801cf9d2e9259c786e9482/post_processing_with_graph_features.py#L26-L43
|
|
gbeced/pyalgotrade
|
ad2bcc6b25c06c66eee4a8d522ce844504d8ec62
|
pyalgotrade/talibext/indicator.py
|
python
|
KAMA
|
(ds, count, timeperiod=-2**31)
|
return call_talib_with_ds(ds, count, talib.KAMA, timeperiod)
|
Kaufman Adaptive Moving Average
|
Kaufman Adaptive Moving Average
|
[
"Kaufman",
"Adaptive",
"Moving",
"Average"
] |
def KAMA(ds, count, timeperiod=-2**31):
"""Kaufman Adaptive Moving Average"""
return call_talib_with_ds(ds, count, talib.KAMA, timeperiod)
|
[
"def",
"KAMA",
"(",
"ds",
",",
"count",
",",
"timeperiod",
"=",
"-",
"2",
"**",
"31",
")",
":",
"return",
"call_talib_with_ds",
"(",
"ds",
",",
"count",
",",
"talib",
".",
"KAMA",
",",
"timeperiod",
")"
] |
https://github.com/gbeced/pyalgotrade/blob/ad2bcc6b25c06c66eee4a8d522ce844504d8ec62/pyalgotrade/talibext/indicator.py#L592-L594
|
|
leancloud/satori
|
701caccbd4fe45765001ca60435c0cb499477c03
|
satori-rules/plugin/libs/bson/son.py
|
python
|
SON.__init__
|
(self, data=None, **kwargs)
|
[] |
def __init__(self, data=None, **kwargs):
self.__keys = []
dict.__init__(self)
self.update(data)
self.update(kwargs)
|
[
"def",
"__init__",
"(",
"self",
",",
"data",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"__keys",
"=",
"[",
"]",
"dict",
".",
"__init__",
"(",
"self",
")",
"self",
".",
"update",
"(",
"data",
")",
"self",
".",
"update",
"(",
"kwargs",
")"
] |
https://github.com/leancloud/satori/blob/701caccbd4fe45765001ca60435c0cb499477c03/satori-rules/plugin/libs/bson/son.py#L89-L93
|
||||
quantumlib/Cirq
|
89f88b01d69222d3f1ec14d649b7b3a85ed9211f
|
cirq-core/cirq/_compat.py
|
python
|
DeprecatedModuleFinder.find_spec
|
(self, fullname: str, path: Any = None, target: Any = None)
|
return spec
|
Finds the specification of a module.
This is an implementation of the importlib.abc.MetaPathFinder.find_spec method.
See https://docs.python.org/3/library/importlib.html#importlib.abc.MetaPathFinder.
Args:
fullname: name of the module.
path: if presented, this is the parent module's submodule search path.
target: When passed in, target is a module object that the finder may use to make a more
educated guess about what spec to return. We don't use it here, just pass it along
to the wrapped finder.
|
Finds the specification of a module.
|
[
"Finds",
"the",
"specification",
"of",
"a",
"module",
"."
] |
def find_spec(self, fullname: str, path: Any = None, target: Any = None) -> Any:
"""Finds the specification of a module.
This is an implementation of the importlib.abc.MetaPathFinder.find_spec method.
See https://docs.python.org/3/library/importlib.html#importlib.abc.MetaPathFinder.
Args:
fullname: name of the module.
path: if presented, this is the parent module's submodule search path.
target: When passed in, target is a module object that the finder may use to make a more
educated guess about what spec to return. We don't use it here, just pass it along
to the wrapped finder.
"""
if fullname != self.old_module_name and not fullname.startswith(self.old_module_name + "."):
return None
if self.broken_module_exception is not None:
raise self.broken_module_exception
# warn for deprecation
_deduped_module_warn_or_error(self.old_module_name, self.new_module_name, self.deadline)
new_fullname = self.new_module_name + fullname[len(self.old_module_name) :]
# use normal import mechanism for the new module specs
spec = importlib.util.find_spec(new_fullname)
# if the spec exists, return the DeprecatedModuleLoader that will do the loading as well
# as set the alias(es) in sys.modules as necessary
if spec is not None:
# change back the name to the deprecated module name
spec.name = fullname
# some loaders do a check to ensure the module's name is the same
# as the loader was created for
if getattr(spec.loader, "name", None) == new_fullname:
setattr(spec.loader, "name", fullname)
spec.loader = DeprecatedModuleLoader(spec.loader, fullname, new_fullname)
return spec
|
[
"def",
"find_spec",
"(",
"self",
",",
"fullname",
":",
"str",
",",
"path",
":",
"Any",
"=",
"None",
",",
"target",
":",
"Any",
"=",
"None",
")",
"->",
"Any",
":",
"if",
"fullname",
"!=",
"self",
".",
"old_module_name",
"and",
"not",
"fullname",
".",
"startswith",
"(",
"self",
".",
"old_module_name",
"+",
"\".\"",
")",
":",
"return",
"None",
"if",
"self",
".",
"broken_module_exception",
"is",
"not",
"None",
":",
"raise",
"self",
".",
"broken_module_exception",
"# warn for deprecation",
"_deduped_module_warn_or_error",
"(",
"self",
".",
"old_module_name",
",",
"self",
".",
"new_module_name",
",",
"self",
".",
"deadline",
")",
"new_fullname",
"=",
"self",
".",
"new_module_name",
"+",
"fullname",
"[",
"len",
"(",
"self",
".",
"old_module_name",
")",
":",
"]",
"# use normal import mechanism for the new module specs",
"spec",
"=",
"importlib",
".",
"util",
".",
"find_spec",
"(",
"new_fullname",
")",
"# if the spec exists, return the DeprecatedModuleLoader that will do the loading as well",
"# as set the alias(es) in sys.modules as necessary",
"if",
"spec",
"is",
"not",
"None",
":",
"# change back the name to the deprecated module name",
"spec",
".",
"name",
"=",
"fullname",
"# some loaders do a check to ensure the module's name is the same",
"# as the loader was created for",
"if",
"getattr",
"(",
"spec",
".",
"loader",
",",
"\"name\"",
",",
"None",
")",
"==",
"new_fullname",
":",
"setattr",
"(",
"spec",
".",
"loader",
",",
"\"name\"",
",",
"fullname",
")",
"spec",
".",
"loader",
"=",
"DeprecatedModuleLoader",
"(",
"spec",
".",
"loader",
",",
"fullname",
",",
"new_fullname",
")",
"return",
"spec"
] |
https://github.com/quantumlib/Cirq/blob/89f88b01d69222d3f1ec14d649b7b3a85ed9211f/cirq-core/cirq/_compat.py#L518-L554
|
|
chdsbd/kodiak
|
4c705cea8edaa2792f2a59700a2f7c3d75b6e918
|
bot/kodiak/cli.py
|
python
|
refresh_pull_requests
|
()
|
Listen on a Redis list for messages triggering pull request reevaluations.
|
Listen on a Redis list for messages triggering pull request reevaluations.
|
[
"Listen",
"on",
"a",
"Redis",
"list",
"for",
"messages",
"triggering",
"pull",
"request",
"reevaluations",
"."
] |
def refresh_pull_requests() -> None:
"""
Listen on a Redis list for messages triggering pull request reevaluations.
"""
from kodiak.refresh_pull_requests import main
main()
|
[
"def",
"refresh_pull_requests",
"(",
")",
"->",
"None",
":",
"from",
"kodiak",
".",
"refresh_pull_requests",
"import",
"main",
"main",
"(",
")"
] |
https://github.com/chdsbd/kodiak/blob/4c705cea8edaa2792f2a59700a2f7c3d75b6e918/bot/kodiak/cli.py#L89-L95
|
||
edfungus/Crouton
|
ada98b3930192938a48909072b45cb84b945f875
|
clients/python_clients/cf_demo_client/cf_env/lib/python2.7/site-packages/pkg_resources/__init__.py
|
python
|
WorkingSet.__contains__
|
(self, dist)
|
return self.by_key.get(dist.key) == dist
|
True if `dist` is the active distribution for its project
|
True if `dist` is the active distribution for its project
|
[
"True",
"if",
"dist",
"is",
"the",
"active",
"distribution",
"for",
"its",
"project"
] |
def __contains__(self, dist):
"""True if `dist` is the active distribution for its project"""
return self.by_key.get(dist.key) == dist
|
[
"def",
"__contains__",
"(",
"self",
",",
"dist",
")",
":",
"return",
"self",
".",
"by_key",
".",
"get",
"(",
"dist",
".",
"key",
")",
"==",
"dist"
] |
https://github.com/edfungus/Crouton/blob/ada98b3930192938a48909072b45cb84b945f875/clients/python_clients/cf_demo_client/cf_env/lib/python2.7/site-packages/pkg_resources/__init__.py#L694-L696
|
|
sahana/eden
|
1696fa50e90ce967df69f66b571af45356cc18da
|
modules/templates/historic/DRKCM/dvr.py
|
python
|
dvr_response_default_status
|
()
|
return default
|
Helper to get/set the default status for response records
@return: the default status_id
|
Helper to get/set the default status for response records
|
[
"Helper",
"to",
"get",
"/",
"set",
"the",
"default",
"status",
"for",
"response",
"records"
] |
def dvr_response_default_status():
"""
Helper to get/set the default status for response records
@return: the default status_id
"""
s3db = current.s3db
rtable = s3db.dvr_response_action
field = rtable.status_id
default = field.default
if not default:
stable = s3db.dvr_response_status
if current.deployment_settings.get_dvr_response_planning():
# Actions are planned ahead, so initial status by default
query = (stable.is_default == True)
else:
# Actions are documented in hindsight, so closed by default
query = (stable.is_default_closure == True)
# Look up the default status
query = query & (stable.deleted != True)
row = current.db(query).select(stable.id,
cache = s3db.cache,
limitby = (0, 1),
).first()
if row:
# Set as field default in responses table
default = field.default = row.id
return default
|
[
"def",
"dvr_response_default_status",
"(",
")",
":",
"s3db",
"=",
"current",
".",
"s3db",
"rtable",
"=",
"s3db",
".",
"dvr_response_action",
"field",
"=",
"rtable",
".",
"status_id",
"default",
"=",
"field",
".",
"default",
"if",
"not",
"default",
":",
"stable",
"=",
"s3db",
".",
"dvr_response_status",
"if",
"current",
".",
"deployment_settings",
".",
"get_dvr_response_planning",
"(",
")",
":",
"# Actions are planned ahead, so initial status by default",
"query",
"=",
"(",
"stable",
".",
"is_default",
"==",
"True",
")",
"else",
":",
"# Actions are documented in hindsight, so closed by default",
"query",
"=",
"(",
"stable",
".",
"is_default_closure",
"==",
"True",
")",
"# Look up the default status",
"query",
"=",
"query",
"&",
"(",
"stable",
".",
"deleted",
"!=",
"True",
")",
"row",
"=",
"current",
".",
"db",
"(",
"query",
")",
".",
"select",
"(",
"stable",
".",
"id",
",",
"cache",
"=",
"s3db",
".",
"cache",
",",
"limitby",
"=",
"(",
"0",
",",
"1",
")",
",",
")",
".",
"first",
"(",
")",
"if",
"row",
":",
"# Set as field default in responses table",
"default",
"=",
"field",
".",
"default",
"=",
"row",
".",
"id",
"return",
"default"
] |
https://github.com/sahana/eden/blob/1696fa50e90ce967df69f66b571af45356cc18da/modules/templates/historic/DRKCM/dvr.py#L6077-L6112
|
|
pypa/setuptools
|
9f37366aab9cd8f6baa23e6a77cfdb8daf97757e
|
setuptools/_distutils/_msvccompiler.py
|
python
|
_find_vc2015
|
()
|
return best_version, best_dir
|
[] |
def _find_vc2015():
try:
key = winreg.OpenKeyEx(
winreg.HKEY_LOCAL_MACHINE,
r"Software\Microsoft\VisualStudio\SxS\VC7",
access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY
)
except OSError:
log.debug("Visual C++ is not registered")
return None, None
best_version = 0
best_dir = None
with key:
for i in count():
try:
v, vc_dir, vt = winreg.EnumValue(key, i)
except OSError:
break
if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir):
try:
version = int(float(v))
except (ValueError, TypeError):
continue
if version >= 14 and version > best_version:
best_version, best_dir = version, vc_dir
return best_version, best_dir
|
[
"def",
"_find_vc2015",
"(",
")",
":",
"try",
":",
"key",
"=",
"winreg",
".",
"OpenKeyEx",
"(",
"winreg",
".",
"HKEY_LOCAL_MACHINE",
",",
"r\"Software\\Microsoft\\VisualStudio\\SxS\\VC7\"",
",",
"access",
"=",
"winreg",
".",
"KEY_READ",
"|",
"winreg",
".",
"KEY_WOW64_32KEY",
")",
"except",
"OSError",
":",
"log",
".",
"debug",
"(",
"\"Visual C++ is not registered\"",
")",
"return",
"None",
",",
"None",
"best_version",
"=",
"0",
"best_dir",
"=",
"None",
"with",
"key",
":",
"for",
"i",
"in",
"count",
"(",
")",
":",
"try",
":",
"v",
",",
"vc_dir",
",",
"vt",
"=",
"winreg",
".",
"EnumValue",
"(",
"key",
",",
"i",
")",
"except",
"OSError",
":",
"break",
"if",
"v",
"and",
"vt",
"==",
"winreg",
".",
"REG_SZ",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"vc_dir",
")",
":",
"try",
":",
"version",
"=",
"int",
"(",
"float",
"(",
"v",
")",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"continue",
"if",
"version",
">=",
"14",
"and",
"version",
">",
"best_version",
":",
"best_version",
",",
"best_dir",
"=",
"version",
",",
"vc_dir",
"return",
"best_version",
",",
"best_dir"
] |
https://github.com/pypa/setuptools/blob/9f37366aab9cd8f6baa23e6a77cfdb8daf97757e/setuptools/_distutils/_msvccompiler.py#L32-L58
|
|||
Jenyay/outwiker
|
50530cf7b3f71480bb075b2829bc0669773b835b
|
plugins/updatenotifier/updatenotifier/libs/jinja2/compiler.py
|
python
|
CodeGenerator.pop_parameter_definitions
|
(self)
|
Pops the current parameter definitions set.
|
Pops the current parameter definitions set.
|
[
"Pops",
"the",
"current",
"parameter",
"definitions",
"set",
"."
] |
def pop_parameter_definitions(self):
"""Pops the current parameter definitions set."""
self._param_def_block.pop()
|
[
"def",
"pop_parameter_definitions",
"(",
"self",
")",
":",
"self",
".",
"_param_def_block",
".",
"pop",
"(",
")"
] |
https://github.com/Jenyay/outwiker/blob/50530cf7b3f71480bb075b2829bc0669773b835b/plugins/updatenotifier/updatenotifier/libs/jinja2/compiler.py#L617-L619
|
||
oilshell/oil
|
94388e7d44a9ad879b12615f6203b38596b5a2d3
|
Python-2.7.13/Lib/pstats.py
|
python
|
add_callers
|
(target, source)
|
return new_callers
|
Combine two caller lists in a single list.
|
Combine two caller lists in a single list.
|
[
"Combine",
"two",
"caller",
"lists",
"in",
"a",
"single",
"list",
"."
] |
def add_callers(target, source):
"""Combine two caller lists in a single list."""
new_callers = {}
for func, caller in target.iteritems():
new_callers[func] = caller
for func, caller in source.iteritems():
if func in new_callers:
if isinstance(caller, tuple):
# format used by cProfile
new_callers[func] = tuple([i[0] + i[1] for i in
zip(caller, new_callers[func])])
else:
# format used by profile
new_callers[func] += caller
else:
new_callers[func] = caller
return new_callers
|
[
"def",
"add_callers",
"(",
"target",
",",
"source",
")",
":",
"new_callers",
"=",
"{",
"}",
"for",
"func",
",",
"caller",
"in",
"target",
".",
"iteritems",
"(",
")",
":",
"new_callers",
"[",
"func",
"]",
"=",
"caller",
"for",
"func",
",",
"caller",
"in",
"source",
".",
"iteritems",
"(",
")",
":",
"if",
"func",
"in",
"new_callers",
":",
"if",
"isinstance",
"(",
"caller",
",",
"tuple",
")",
":",
"# format used by cProfile",
"new_callers",
"[",
"func",
"]",
"=",
"tuple",
"(",
"[",
"i",
"[",
"0",
"]",
"+",
"i",
"[",
"1",
"]",
"for",
"i",
"in",
"zip",
"(",
"caller",
",",
"new_callers",
"[",
"func",
"]",
")",
"]",
")",
"else",
":",
"# format used by profile",
"new_callers",
"[",
"func",
"]",
"+=",
"caller",
"else",
":",
"new_callers",
"[",
"func",
"]",
"=",
"caller",
"return",
"new_callers"
] |
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/pstats.py#L506-L522
|
|
kvh/ramp
|
8618ce673e49b95f40c9659319c3cb72281dacac
|
ramp/reporters.py
|
python
|
MetricReporter.__init__
|
(self, metric, **kwargs)
|
Accepts a Metric object and evaluates it at each fold.
|
Accepts a Metric object and evaluates it at each fold.
|
[
"Accepts",
"a",
"Metric",
"object",
"and",
"evaluates",
"it",
"at",
"each",
"fold",
"."
] |
def __init__(self, metric, **kwargs):
"""
Accepts a Metric object and evaluates it at each fold.
"""
Reporter.__init__(self, **kwargs)
self.metric = metrics.as_ramp_metric(metric)
|
[
"def",
"__init__",
"(",
"self",
",",
"metric",
",",
"*",
"*",
"kwargs",
")",
":",
"Reporter",
".",
"__init__",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"metric",
"=",
"metrics",
".",
"as_ramp_metric",
"(",
"metric",
")"
] |
https://github.com/kvh/ramp/blob/8618ce673e49b95f40c9659319c3cb72281dacac/ramp/reporters.py#L124-L129
|
||
Juniper/ansible-junos-stdlib
|
442ada386c1a2d5a38f342dc9810d5836fa1e5e6
|
ansible_collections/juniper/device/plugins/modules/software.py
|
python
|
define_progress_callback
|
(junos_module)
|
return myprogress
|
Create callback which can be passed to SW.install(progress=progress)
|
Create callback which can be passed to SW.install(progress=progress)
|
[
"Create",
"callback",
"which",
"can",
"be",
"passed",
"to",
"SW",
".",
"install",
"(",
"progress",
"=",
"progress",
")"
] |
def define_progress_callback(junos_module):
"""Create callback which can be passed to SW.install(progress=progress)
"""
def myprogress(_, report):
"""A progress function which logs report at level INFO.
Args:
_: The PyEZ device object. Unused because the logger already knows.
report: The string to be logged.
"""
junos_module.logger.info(report)
return myprogress
|
[
"def",
"define_progress_callback",
"(",
"junos_module",
")",
":",
"def",
"myprogress",
"(",
"_",
",",
"report",
")",
":",
"\"\"\"A progress function which logs report at level INFO.\n\n Args:\n _: The PyEZ device object. Unused because the logger already knows.\n report: The string to be logged.\n \"\"\"",
"junos_module",
".",
"logger",
".",
"info",
"(",
"report",
")",
"return",
"myprogress"
] |
https://github.com/Juniper/ansible-junos-stdlib/blob/442ada386c1a2d5a38f342dc9810d5836fa1e5e6/ansible_collections/juniper/device/plugins/modules/software.py#L430-L441
|
|
albertz/music-player
|
d23586f5bf657cbaea8147223be7814d117ae73d
|
mac/pyobjc-framework-Quartz/distribute_setup.py
|
python
|
_after_install
|
(dist)
|
[] |
def _after_install(dist):
log.warn('After install bootstrap.')
placeholder = dist.get_command_obj('install').install_purelib
_create_fake_setuptools_pkg_info(placeholder)
|
[
"def",
"_after_install",
"(",
"dist",
")",
":",
"log",
".",
"warn",
"(",
"'After install bootstrap.'",
")",
"placeholder",
"=",
"dist",
".",
"get_command_obj",
"(",
"'install'",
")",
".",
"install_purelib",
"_create_fake_setuptools_pkg_info",
"(",
"placeholder",
")"
] |
https://github.com/albertz/music-player/blob/d23586f5bf657cbaea8147223be7814d117ae73d/mac/pyobjc-framework-Quartz/distribute_setup.py#L273-L276
|
||||
aneisch/home-assistant-config
|
86e381fde9609cb8871c439c433c12989e4e225d
|
custom_components/localtuya/config_flow.py
|
python
|
LocaltuyaConfigFlow.async_step_basic_info
|
(self, user_input=None)
|
return self.async_show_form(
step_id="basic_info",
data_schema=schema_defaults(BASIC_INFO_SCHEMA, **defaults),
errors=errors,
)
|
Handle input of basic info.
|
Handle input of basic info.
|
[
"Handle",
"input",
"of",
"basic",
"info",
"."
] |
async def async_step_basic_info(self, user_input=None):
"""Handle input of basic info."""
errors = {}
if user_input is not None:
await self.async_set_unique_id(user_input[CONF_DEVICE_ID])
try:
self.basic_info = user_input
if self.selected_device is not None:
self.basic_info[CONF_PRODUCT_KEY] = self.devices[
self.selected_device
]["productKey"]
self.dps_strings = await validate_input(self.hass, user_input)
return await self.async_step_pick_entity_type()
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except EmptyDpsList:
errors["base"] = "empty_dps"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
# If selected device exists as a config entry, load config from it
if self.selected_device in self._async_current_ids():
entry = async_config_entry_by_device_id(self.hass, self.selected_device)
await self.async_set_unique_id(entry.data[CONF_DEVICE_ID])
self.basic_info = entry.data.copy()
self.dps_strings = self.basic_info.pop(CONF_DPS_STRINGS).copy()
self.entities = self.basic_info.pop(CONF_ENTITIES).copy()
return await self.async_step_pick_entity_type()
# Insert default values from discovery if present
defaults = {}
defaults.update(user_input or {})
if self.selected_device is not None:
device = self.devices[self.selected_device]
defaults[CONF_HOST] = device.get("ip")
defaults[CONF_DEVICE_ID] = device.get("gwId")
defaults[CONF_PROTOCOL_VERSION] = device.get("version")
return self.async_show_form(
step_id="basic_info",
data_schema=schema_defaults(BASIC_INFO_SCHEMA, **defaults),
errors=errors,
)
|
[
"async",
"def",
"async_step_basic_info",
"(",
"self",
",",
"user_input",
"=",
"None",
")",
":",
"errors",
"=",
"{",
"}",
"if",
"user_input",
"is",
"not",
"None",
":",
"await",
"self",
".",
"async_set_unique_id",
"(",
"user_input",
"[",
"CONF_DEVICE_ID",
"]",
")",
"try",
":",
"self",
".",
"basic_info",
"=",
"user_input",
"if",
"self",
".",
"selected_device",
"is",
"not",
"None",
":",
"self",
".",
"basic_info",
"[",
"CONF_PRODUCT_KEY",
"]",
"=",
"self",
".",
"devices",
"[",
"self",
".",
"selected_device",
"]",
"[",
"\"productKey\"",
"]",
"self",
".",
"dps_strings",
"=",
"await",
"validate_input",
"(",
"self",
".",
"hass",
",",
"user_input",
")",
"return",
"await",
"self",
".",
"async_step_pick_entity_type",
"(",
")",
"except",
"CannotConnect",
":",
"errors",
"[",
"\"base\"",
"]",
"=",
"\"cannot_connect\"",
"except",
"InvalidAuth",
":",
"errors",
"[",
"\"base\"",
"]",
"=",
"\"invalid_auth\"",
"except",
"EmptyDpsList",
":",
"errors",
"[",
"\"base\"",
"]",
"=",
"\"empty_dps\"",
"except",
"Exception",
":",
"# pylint: disable=broad-except",
"_LOGGER",
".",
"exception",
"(",
"\"Unexpected exception\"",
")",
"errors",
"[",
"\"base\"",
"]",
"=",
"\"unknown\"",
"# If selected device exists as a config entry, load config from it",
"if",
"self",
".",
"selected_device",
"in",
"self",
".",
"_async_current_ids",
"(",
")",
":",
"entry",
"=",
"async_config_entry_by_device_id",
"(",
"self",
".",
"hass",
",",
"self",
".",
"selected_device",
")",
"await",
"self",
".",
"async_set_unique_id",
"(",
"entry",
".",
"data",
"[",
"CONF_DEVICE_ID",
"]",
")",
"self",
".",
"basic_info",
"=",
"entry",
".",
"data",
".",
"copy",
"(",
")",
"self",
".",
"dps_strings",
"=",
"self",
".",
"basic_info",
".",
"pop",
"(",
"CONF_DPS_STRINGS",
")",
".",
"copy",
"(",
")",
"self",
".",
"entities",
"=",
"self",
".",
"basic_info",
".",
"pop",
"(",
"CONF_ENTITIES",
")",
".",
"copy",
"(",
")",
"return",
"await",
"self",
".",
"async_step_pick_entity_type",
"(",
")",
"# Insert default values from discovery if present",
"defaults",
"=",
"{",
"}",
"defaults",
".",
"update",
"(",
"user_input",
"or",
"{",
"}",
")",
"if",
"self",
".",
"selected_device",
"is",
"not",
"None",
":",
"device",
"=",
"self",
".",
"devices",
"[",
"self",
".",
"selected_device",
"]",
"defaults",
"[",
"CONF_HOST",
"]",
"=",
"device",
".",
"get",
"(",
"\"ip\"",
")",
"defaults",
"[",
"CONF_DEVICE_ID",
"]",
"=",
"device",
".",
"get",
"(",
"\"gwId\"",
")",
"defaults",
"[",
"CONF_PROTOCOL_VERSION",
"]",
"=",
"device",
".",
"get",
"(",
"\"version\"",
")",
"return",
"self",
".",
"async_show_form",
"(",
"step_id",
"=",
"\"basic_info\"",
",",
"data_schema",
"=",
"schema_defaults",
"(",
"BASIC_INFO_SCHEMA",
",",
"*",
"*",
"defaults",
")",
",",
"errors",
"=",
"errors",
",",
")"
] |
https://github.com/aneisch/home-assistant-config/blob/86e381fde9609cb8871c439c433c12989e4e225d/custom_components/localtuya/config_flow.py#L271-L317
|
|
readthedocs/readthedocs.org
|
0852d7c10d725d954d3e9a93513171baa1116d9f
|
readthedocs/core/resolver.py
|
python
|
ResolverBase.resolve_path
|
(
self,
project,
filename='',
version_slug=None,
language=None,
single_version=None,
subdomain=None,
cname=None,
urlconf=None,
)
|
return self.base_resolve_path(
project_slug=project_slug,
filename=filename,
version_slug=version_slug,
language=language,
single_version=single_version,
subproject_slug=subproject_slug,
cname=cname,
subdomain=subdomain,
urlconf=urlconf or project.urlconf,
)
|
Resolve a URL with a subset of fields defined.
|
Resolve a URL with a subset of fields defined.
|
[
"Resolve",
"a",
"URL",
"with",
"a",
"subset",
"of",
"fields",
"defined",
"."
] |
def resolve_path(
self,
project,
filename='',
version_slug=None,
language=None,
single_version=None,
subdomain=None,
cname=None,
urlconf=None,
):
"""Resolve a URL with a subset of fields defined."""
version_slug = version_slug or project.get_default_version()
language = language or project.language
filename = self._fix_filename(project, filename)
main_project, subproject_slug = self._get_canonical_project_data(project)
project_slug = main_project.slug
cname = (
cname
or self._use_subdomain()
or main_project.get_canonical_custom_domain()
)
single_version = bool(project.single_version or single_version)
return self.base_resolve_path(
project_slug=project_slug,
filename=filename,
version_slug=version_slug,
language=language,
single_version=single_version,
subproject_slug=subproject_slug,
cname=cname,
subdomain=subdomain,
urlconf=urlconf or project.urlconf,
)
|
[
"def",
"resolve_path",
"(",
"self",
",",
"project",
",",
"filename",
"=",
"''",
",",
"version_slug",
"=",
"None",
",",
"language",
"=",
"None",
",",
"single_version",
"=",
"None",
",",
"subdomain",
"=",
"None",
",",
"cname",
"=",
"None",
",",
"urlconf",
"=",
"None",
",",
")",
":",
"version_slug",
"=",
"version_slug",
"or",
"project",
".",
"get_default_version",
"(",
")",
"language",
"=",
"language",
"or",
"project",
".",
"language",
"filename",
"=",
"self",
".",
"_fix_filename",
"(",
"project",
",",
"filename",
")",
"main_project",
",",
"subproject_slug",
"=",
"self",
".",
"_get_canonical_project_data",
"(",
"project",
")",
"project_slug",
"=",
"main_project",
".",
"slug",
"cname",
"=",
"(",
"cname",
"or",
"self",
".",
"_use_subdomain",
"(",
")",
"or",
"main_project",
".",
"get_canonical_custom_domain",
"(",
")",
")",
"single_version",
"=",
"bool",
"(",
"project",
".",
"single_version",
"or",
"single_version",
")",
"return",
"self",
".",
"base_resolve_path",
"(",
"project_slug",
"=",
"project_slug",
",",
"filename",
"=",
"filename",
",",
"version_slug",
"=",
"version_slug",
",",
"language",
"=",
"language",
",",
"single_version",
"=",
"single_version",
",",
"subproject_slug",
"=",
"subproject_slug",
",",
"cname",
"=",
"cname",
",",
"subdomain",
"=",
"subdomain",
",",
"urlconf",
"=",
"urlconf",
"or",
"project",
".",
"urlconf",
",",
")"
] |
https://github.com/readthedocs/readthedocs.org/blob/0852d7c10d725d954d3e9a93513171baa1116d9f/readthedocs/core/resolver.py#L117-L153
|
|
pwnieexpress/pwn_plug_sources
|
1a23324f5dc2c3de20f9c810269b6a29b2758cad
|
src/set/src/core/scapy.py
|
python
|
get_if_hwaddr
|
(iff)
|
[] |
def get_if_hwaddr(iff):
addrfamily, mac = get_if_raw_hwaddr(iff)
if addrfamily in [ARPHDR_ETHER,ARPHDR_LOOPBACK]:
return str2mac(mac)
else:
raise Scapy_Exception("Unsupported address family (%i) for interface [%s]" % (addrfamily,iff))
|
[
"def",
"get_if_hwaddr",
"(",
"iff",
")",
":",
"addrfamily",
",",
"mac",
"=",
"get_if_raw_hwaddr",
"(",
"iff",
")",
"if",
"addrfamily",
"in",
"[",
"ARPHDR_ETHER",
",",
"ARPHDR_LOOPBACK",
"]",
":",
"return",
"str2mac",
"(",
"mac",
")",
"else",
":",
"raise",
"Scapy_Exception",
"(",
"\"Unsupported address family (%i) for interface [%s]\"",
"%",
"(",
"addrfamily",
",",
"iff",
")",
")"
] |
https://github.com/pwnieexpress/pwn_plug_sources/blob/1a23324f5dc2c3de20f9c810269b6a29b2758cad/src/set/src/core/scapy.py#L1294-L1299
|
||||
idapython/src
|
839d93ac969bc1a152982464907445bc0d18a1f8
|
pywraps/py_idaapi.py
|
python
|
require
|
(modulename, package=None)
|
Load, or reload a module.
When under heavy development, a user's tool might consist of multiple
modules. If those are imported using the standard 'import' mechanism,
there is no guarantee that the Python implementation will re-read
and re-evaluate the module's Python code. In fact, it usually doesn't.
What should be done instead is 'reload()'-ing that module.
This is a simple helper function that will do just that: In case the
module doesn't exist, it 'import's it, and if it does exist,
'reload()'s it.
The importing module (i.e., the module calling require()) will have
the loaded module bound to its globals(), under the name 'modulename'.
(If require() is called from the command line, the importing module
will be '__main__'.)
For more information, see: <http://www.hexblog.com/?p=749>.
|
Load, or reload a module.
|
[
"Load",
"or",
"reload",
"a",
"module",
"."
] |
def require(modulename, package=None):
"""
Load, or reload a module.
When under heavy development, a user's tool might consist of multiple
modules. If those are imported using the standard 'import' mechanism,
there is no guarantee that the Python implementation will re-read
and re-evaluate the module's Python code. In fact, it usually doesn't.
What should be done instead is 'reload()'-ing that module.
This is a simple helper function that will do just that: In case the
module doesn't exist, it 'import's it, and if it does exist,
'reload()'s it.
The importing module (i.e., the module calling require()) will have
the loaded module bound to its globals(), under the name 'modulename'.
(If require() is called from the command line, the importing module
will be '__main__'.)
For more information, see: <http://www.hexblog.com/?p=749>.
"""
import inspect
frame_obj, filename, line_number, function_name, lines, index = inspect.stack()[1]
importer_module = inspect.getmodule(frame_obj)
if importer_module is None: # No importer module; called from command line
importer_module = sys.modules['__main__']
if modulename in sys.modules.keys():
m = sys.modules[modulename]
if sys.version_info.major >= 3:
import importlib
importlib.reload(m)
else:
reload(m)
m = sys.modules[modulename]
else:
import importlib
m = importlib.import_module(modulename, package)
sys.modules[modulename] = m
setattr(importer_module, modulename, m)
|
[
"def",
"require",
"(",
"modulename",
",",
"package",
"=",
"None",
")",
":",
"import",
"inspect",
"frame_obj",
",",
"filename",
",",
"line_number",
",",
"function_name",
",",
"lines",
",",
"index",
"=",
"inspect",
".",
"stack",
"(",
")",
"[",
"1",
"]",
"importer_module",
"=",
"inspect",
".",
"getmodule",
"(",
"frame_obj",
")",
"if",
"importer_module",
"is",
"None",
":",
"# No importer module; called from command line",
"importer_module",
"=",
"sys",
".",
"modules",
"[",
"'__main__'",
"]",
"if",
"modulename",
"in",
"sys",
".",
"modules",
".",
"keys",
"(",
")",
":",
"m",
"=",
"sys",
".",
"modules",
"[",
"modulename",
"]",
"if",
"sys",
".",
"version_info",
".",
"major",
">=",
"3",
":",
"import",
"importlib",
"importlib",
".",
"reload",
"(",
"m",
")",
"else",
":",
"reload",
"(",
"m",
")",
"m",
"=",
"sys",
".",
"modules",
"[",
"modulename",
"]",
"else",
":",
"import",
"importlib",
"m",
"=",
"importlib",
".",
"import_module",
"(",
"modulename",
",",
"package",
")",
"sys",
".",
"modules",
"[",
"modulename",
"]",
"=",
"m",
"setattr",
"(",
"importer_module",
",",
"modulename",
",",
"m",
")"
] |
https://github.com/idapython/src/blob/839d93ac969bc1a152982464907445bc0d18a1f8/pywraps/py_idaapi.py#L37-L75
|
||
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_hxb2/lib/python3.5/site-packages/compressor/contrib/jinja2ext.py
|
python
|
CompressorExtension.parse
|
(self, parser)
|
return nodes.CallBlock(self.call_method('_compress_normal', args), [], [],
body).set_lineno(lineno)
|
[] |
def parse(self, parser):
lineno = next(parser.stream).lineno
kindarg = parser.parse_expression()
# Allow kind to be defined as jinja2 name node
if isinstance(kindarg, nodes.Name):
kindarg = nodes.Const(kindarg.name)
args = [kindarg]
if args[0].value not in self.compressors:
raise TemplateSyntaxError('compress kind may be one of: %s' %
(', '.join(self.compressors.keys())),
lineno)
if parser.stream.skip_if('comma'):
modearg = parser.parse_expression()
# Allow mode to be defined as jinja2 name node
if isinstance(modearg, nodes.Name):
modearg = nodes.Const(modearg.name)
args.append(modearg)
else:
args.append(nodes.Const('file'))
body = parser.parse_statements(['name:endcompress'], drop_needle=True)
# Skip the kind if used in the endblock, by using the kind in the
# endblock the templates are slightly more readable.
parser.stream.skip_if('name:' + kindarg.value)
return nodes.CallBlock(self.call_method('_compress_normal', args), [], [],
body).set_lineno(lineno)
|
[
"def",
"parse",
"(",
"self",
",",
"parser",
")",
":",
"lineno",
"=",
"next",
"(",
"parser",
".",
"stream",
")",
".",
"lineno",
"kindarg",
"=",
"parser",
".",
"parse_expression",
"(",
")",
"# Allow kind to be defined as jinja2 name node",
"if",
"isinstance",
"(",
"kindarg",
",",
"nodes",
".",
"Name",
")",
":",
"kindarg",
"=",
"nodes",
".",
"Const",
"(",
"kindarg",
".",
"name",
")",
"args",
"=",
"[",
"kindarg",
"]",
"if",
"args",
"[",
"0",
"]",
".",
"value",
"not",
"in",
"self",
".",
"compressors",
":",
"raise",
"TemplateSyntaxError",
"(",
"'compress kind may be one of: %s'",
"%",
"(",
"', '",
".",
"join",
"(",
"self",
".",
"compressors",
".",
"keys",
"(",
")",
")",
")",
",",
"lineno",
")",
"if",
"parser",
".",
"stream",
".",
"skip_if",
"(",
"'comma'",
")",
":",
"modearg",
"=",
"parser",
".",
"parse_expression",
"(",
")",
"# Allow mode to be defined as jinja2 name node",
"if",
"isinstance",
"(",
"modearg",
",",
"nodes",
".",
"Name",
")",
":",
"modearg",
"=",
"nodes",
".",
"Const",
"(",
"modearg",
".",
"name",
")",
"args",
".",
"append",
"(",
"modearg",
")",
"else",
":",
"args",
".",
"append",
"(",
"nodes",
".",
"Const",
"(",
"'file'",
")",
")",
"body",
"=",
"parser",
".",
"parse_statements",
"(",
"[",
"'name:endcompress'",
"]",
",",
"drop_needle",
"=",
"True",
")",
"# Skip the kind if used in the endblock, by using the kind in the",
"# endblock the templates are slightly more readable.",
"parser",
".",
"stream",
".",
"skip_if",
"(",
"'name:'",
"+",
"kindarg",
".",
"value",
")",
"return",
"nodes",
".",
"CallBlock",
"(",
"self",
".",
"call_method",
"(",
"'_compress_normal'",
",",
"args",
")",
",",
"[",
"]",
",",
"[",
"]",
",",
"body",
")",
".",
"set_lineno",
"(",
"lineno",
")"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/compressor/contrib/jinja2ext.py#L12-L38
|
|||
nadineproject/nadine
|
c41c8ef7ffe18f1853029c97eecc329039b4af6c
|
doors/core.py
|
python
|
Gatekeeper.pull_event_logs
|
(self, record_count=-1)
|
return event_logs
|
[] |
def pull_event_logs(self, record_count=-1):
logging.debug("Gatekeeper: Pulling event logs from the doors...")
if record_count <= 0:
record_count = self.event_count
event_logs = {}
for door_name, door in list(self.get_doors().items()):
logging.debug("Gatekeeper: Pulling %d logs from '%s'" % (record_count, door_name))
controller = door['controller']
door_events = controller.pull_events(record_count)
if self.card_secret:
for e in door_events:
if 'cardNumber' in e:
# e['cardNumber'] = self.encode_door_code(e['cardNumber']).decode('utf-8')
e['cardNumber'] = self.encode_door_code(e['cardNumber'])
event_logs[door_name] = door_events
return event_logs
|
[
"def",
"pull_event_logs",
"(",
"self",
",",
"record_count",
"=",
"-",
"1",
")",
":",
"logging",
".",
"debug",
"(",
"\"Gatekeeper: Pulling event logs from the doors...\"",
")",
"if",
"record_count",
"<=",
"0",
":",
"record_count",
"=",
"self",
".",
"event_count",
"event_logs",
"=",
"{",
"}",
"for",
"door_name",
",",
"door",
"in",
"list",
"(",
"self",
".",
"get_doors",
"(",
")",
".",
"items",
"(",
")",
")",
":",
"logging",
".",
"debug",
"(",
"\"Gatekeeper: Pulling %d logs from '%s'\"",
"%",
"(",
"record_count",
",",
"door_name",
")",
")",
"controller",
"=",
"door",
"[",
"'controller'",
"]",
"door_events",
"=",
"controller",
".",
"pull_events",
"(",
"record_count",
")",
"if",
"self",
".",
"card_secret",
":",
"for",
"e",
"in",
"door_events",
":",
"if",
"'cardNumber'",
"in",
"e",
":",
"# e['cardNumber'] = self.encode_door_code(e['cardNumber']).decode('utf-8')",
"e",
"[",
"'cardNumber'",
"]",
"=",
"self",
".",
"encode_door_code",
"(",
"e",
"[",
"'cardNumber'",
"]",
")",
"event_logs",
"[",
"door_name",
"]",
"=",
"door_events",
"return",
"event_logs"
] |
https://github.com/nadineproject/nadine/blob/c41c8ef7ffe18f1853029c97eecc329039b4af6c/doors/core.py#L481-L496
|
|||
airspeed-velocity/asv
|
9d5af5713357ccea00a518758fae6822cc69f539
|
asv/extern/asizeof.py
|
python
|
_len_code
|
(obj)
|
return (obj.co_stacksize + obj.co_nlocals +
_len(obj.co_freevars) + _len(obj.co_cellvars) - 1)
|
Length of code object (stack and variables only).
|
Length of code object (stack and variables only).
|
[
"Length",
"of",
"code",
"object",
"(",
"stack",
"and",
"variables",
"only",
")",
"."
] |
def _len_code(obj): # see .../Lib/test/test_sys.py
'''Length of code object (stack and variables only).
'''
return (obj.co_stacksize + obj.co_nlocals +
_len(obj.co_freevars) + _len(obj.co_cellvars) - 1)
|
[
"def",
"_len_code",
"(",
"obj",
")",
":",
"# see .../Lib/test/test_sys.py",
"return",
"(",
"obj",
".",
"co_stacksize",
"+",
"obj",
".",
"co_nlocals",
"+",
"_len",
"(",
"obj",
".",
"co_freevars",
")",
"+",
"_len",
"(",
"obj",
".",
"co_cellvars",
")",
"-",
"1",
")"
] |
https://github.com/airspeed-velocity/asv/blob/9d5af5713357ccea00a518758fae6822cc69f539/asv/extern/asizeof.py#L877-L881
|
|
megvii-model/MABN
|
db1ef7bc396c8aa6f4eec9e3c5875d73f74da3de
|
det/demo/predictor.py
|
python
|
COCODemo.build_transform
|
(self)
|
return transform
|
Creates a basic transformation that was used to train the models
|
Creates a basic transformation that was used to train the models
|
[
"Creates",
"a",
"basic",
"transformation",
"that",
"was",
"used",
"to",
"train",
"the",
"models"
] |
def build_transform(self):
"""
Creates a basic transformation that was used to train the models
"""
cfg = self.cfg
# we are loading images with OpenCV, so we don't need to convert them
# to BGR, they are already! So all we need to do is to normalize
# by 255 if we want to convert to BGR255 format, or flip the channels
# if we want it to be in RGB in [0-1] range.
if cfg.INPUT.TO_BGR255:
to_bgr_transform = T.Lambda(lambda x: x * 255)
else:
to_bgr_transform = T.Lambda(lambda x: x[[2, 1, 0]])
normalize_transform = T.Normalize(
mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD
)
min_size = cfg.INPUT.MIN_SIZE_TEST
max_size = cfg.INPUT.MAX_SIZE_TEST
transform = T.Compose(
[
T.ToPILImage(),
Resize(min_size, max_size),
T.ToTensor(),
to_bgr_transform,
normalize_transform,
]
)
return transform
|
[
"def",
"build_transform",
"(",
"self",
")",
":",
"cfg",
"=",
"self",
".",
"cfg",
"# we are loading images with OpenCV, so we don't need to convert them",
"# to BGR, they are already! So all we need to do is to normalize",
"# by 255 if we want to convert to BGR255 format, or flip the channels",
"# if we want it to be in RGB in [0-1] range.",
"if",
"cfg",
".",
"INPUT",
".",
"TO_BGR255",
":",
"to_bgr_transform",
"=",
"T",
".",
"Lambda",
"(",
"lambda",
"x",
":",
"x",
"*",
"255",
")",
"else",
":",
"to_bgr_transform",
"=",
"T",
".",
"Lambda",
"(",
"lambda",
"x",
":",
"x",
"[",
"[",
"2",
",",
"1",
",",
"0",
"]",
"]",
")",
"normalize_transform",
"=",
"T",
".",
"Normalize",
"(",
"mean",
"=",
"cfg",
".",
"INPUT",
".",
"PIXEL_MEAN",
",",
"std",
"=",
"cfg",
".",
"INPUT",
".",
"PIXEL_STD",
")",
"min_size",
"=",
"cfg",
".",
"INPUT",
".",
"MIN_SIZE_TEST",
"max_size",
"=",
"cfg",
".",
"INPUT",
".",
"MAX_SIZE_TEST",
"transform",
"=",
"T",
".",
"Compose",
"(",
"[",
"T",
".",
"ToPILImage",
"(",
")",
",",
"Resize",
"(",
"min_size",
",",
"max_size",
")",
",",
"T",
".",
"ToTensor",
"(",
")",
",",
"to_bgr_transform",
",",
"normalize_transform",
",",
"]",
")",
"return",
"transform"
] |
https://github.com/megvii-model/MABN/blob/db1ef7bc396c8aa6f4eec9e3c5875d73f74da3de/det/demo/predictor.py#L168-L197
|
|
david-abel/simple_rl
|
d8fe6007efb4840377f085a4e35ba89aaa2cdf6d
|
simple_rl/experiments/ExperimentClass.py
|
python
|
Experiment.write_exp_info_to_file
|
(self)
|
Summary:
Writes relevant experiment information to a file for reproducibility.
|
Summary:
Writes relevant experiment information to a file for reproducibility.
|
[
"Summary",
":",
"Writes",
"relevant",
"experiment",
"information",
"to",
"a",
"file",
"for",
"reproducibility",
"."
] |
def write_exp_info_to_file(self):
'''
Summary:
Writes relevant experiment information to a file for reproducibility.
'''
out_file = open(os.path.join(self.exp_directory, Experiment.EXP_PARAM_FILE_NAME), "w+")
to_write_to_file = self._get_exp_file_string()
out_file.write(to_write_to_file)
out_file.close()
|
[
"def",
"write_exp_info_to_file",
"(",
"self",
")",
":",
"out_file",
"=",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"exp_directory",
",",
"Experiment",
".",
"EXP_PARAM_FILE_NAME",
")",
",",
"\"w+\"",
")",
"to_write_to_file",
"=",
"self",
".",
"_get_exp_file_string",
"(",
")",
"out_file",
".",
"write",
"(",
"to_write_to_file",
")",
"out_file",
".",
"close",
"(",
")"
] |
https://github.com/david-abel/simple_rl/blob/d8fe6007efb4840377f085a4e35ba89aaa2cdf6d/simple_rl/experiments/ExperimentClass.py#L305-L313
|
||
karpathy/neuraltalk
|
c36bd0aa23907638a1217f03a13bdba5e1a8cada
|
py_caffe_feat_extract.py
|
python
|
reduce_along_dim
|
(img , dim , weights , indicies)
|
return out_img
|
Perform bilinear interpolation given along the image dimension dim
-weights are the kernel weights
-indicies are the crossponding indicies location
return img resize along dimension dim
|
Perform bilinear interpolation given along the image dimension dim
-weights are the kernel weights
-indicies are the crossponding indicies location
return img resize along dimension dim
|
[
"Perform",
"bilinear",
"interpolation",
"given",
"along",
"the",
"image",
"dimension",
"dim",
"-",
"weights",
"are",
"the",
"kernel",
"weights",
"-",
"indicies",
"are",
"the",
"crossponding",
"indicies",
"location",
"return",
"img",
"resize",
"along",
"dimension",
"dim"
] |
def reduce_along_dim(img , dim , weights , indicies):
'''
Perform bilinear interpolation given along the image dimension dim
-weights are the kernel weights
-indicies are the crossponding indicies location
return img resize along dimension dim
'''
other_dim = abs(dim-1)
if other_dim == 0: #resizing image width
weights = np.tile(weights[np.newaxis,:,:,np.newaxis],(img.shape[other_dim],1,1,3))
out_img = img[:,indicies,:]*weights
out_img = np.sum(out_img,axis=2)
else: # resize image height
weights = np.tile(weights[:,:,np.newaxis,np.newaxis],(1,1,img.shape[other_dim],3))
out_img = img[indicies,:,:]*weights
out_img = np.sum(out_img,axis=1)
return out_img
|
[
"def",
"reduce_along_dim",
"(",
"img",
",",
"dim",
",",
"weights",
",",
"indicies",
")",
":",
"other_dim",
"=",
"abs",
"(",
"dim",
"-",
"1",
")",
"if",
"other_dim",
"==",
"0",
":",
"#resizing image width",
"weights",
"=",
"np",
".",
"tile",
"(",
"weights",
"[",
"np",
".",
"newaxis",
",",
":",
",",
":",
",",
"np",
".",
"newaxis",
"]",
",",
"(",
"img",
".",
"shape",
"[",
"other_dim",
"]",
",",
"1",
",",
"1",
",",
"3",
")",
")",
"out_img",
"=",
"img",
"[",
":",
",",
"indicies",
",",
":",
"]",
"*",
"weights",
"out_img",
"=",
"np",
".",
"sum",
"(",
"out_img",
",",
"axis",
"=",
"2",
")",
"else",
":",
"# resize image height ",
"weights",
"=",
"np",
".",
"tile",
"(",
"weights",
"[",
":",
",",
":",
",",
"np",
".",
"newaxis",
",",
"np",
".",
"newaxis",
"]",
",",
"(",
"1",
",",
"1",
",",
"img",
".",
"shape",
"[",
"other_dim",
"]",
",",
"3",
")",
")",
"out_img",
"=",
"img",
"[",
"indicies",
",",
":",
",",
":",
"]",
"*",
"weights",
"out_img",
"=",
"np",
".",
"sum",
"(",
"out_img",
",",
"axis",
"=",
"1",
")",
"return",
"out_img"
] |
https://github.com/karpathy/neuraltalk/blob/c36bd0aa23907638a1217f03a13bdba5e1a8cada/py_caffe_feat_extract.py#L14-L31
|
|
frePPLe/frepple
|
57aa612030b4fcd03cb9c613f83a7dac4f0e8d6d
|
freppledb/output/widget.py
|
python
|
LateOrdersWidget.render
|
(cls, request=None)
|
return HttpResponse("\n".join(result))
|
[] |
def render(cls, request=None):
limit = int(request.GET.get("limit", cls.limit))
try:
db = _thread_locals.request.database or DEFAULT_DB_ALIAS
except Exception:
db = DEFAULT_DB_ALIAS
cursor = connections[db].cursor()
result = [
'<div class="table-responsive"><table class="table table-condensed table-hover">',
'<thead><tr><th class="alignleft">%s</th><th class="alignleft">%s</th>'
'<th class="alignleft">%s</th><th class="alignleft">%s</th>'
'<th class="aligncenter">%s</th><th class="aligncenter">%s</th>'
'<th class="aligncenter">%s</th></tr></thead>'
% (
capfirst(force_text(_("name"))),
capfirst(force_text(_("item"))),
capfirst(force_text(_("location"))),
capfirst(force_text(_("customer"))),
capfirst(force_text(_("due"))),
capfirst(force_text(_("planned date"))),
capfirst(force_text(_("delay"))),
),
]
alt = False
cursor.execute(cls.query, (limit,))
for rec in cursor.fetchall():
result.append(
'<tr%s><td class="underline"><a href="%s/demandpegging/%s/">%s</a></td>'
'<td class="alignleft">%s</td><td class="alignleft">%s</td>'
'<td class="alignleft">%s</td><td class="alignleft">%s</td>'
'<td class="aligncenter">%s</td><td class="aligncenter">%s</td></tr>'
% (
alt and ' class="altRow"' or "",
request.prefix,
quote(rec[0]),
escape(rec[0]),
escape(rec[1]),
escape(rec[2]),
escape(rec[3]),
rec[4].date(),
rec[5].date(),
int(rec[6]),
)
)
alt = not alt
result.append("</table></div>")
return HttpResponse("\n".join(result))
|
[
"def",
"render",
"(",
"cls",
",",
"request",
"=",
"None",
")",
":",
"limit",
"=",
"int",
"(",
"request",
".",
"GET",
".",
"get",
"(",
"\"limit\"",
",",
"cls",
".",
"limit",
")",
")",
"try",
":",
"db",
"=",
"_thread_locals",
".",
"request",
".",
"database",
"or",
"DEFAULT_DB_ALIAS",
"except",
"Exception",
":",
"db",
"=",
"DEFAULT_DB_ALIAS",
"cursor",
"=",
"connections",
"[",
"db",
"]",
".",
"cursor",
"(",
")",
"result",
"=",
"[",
"'<div class=\"table-responsive\"><table class=\"table table-condensed table-hover\">'",
",",
"'<thead><tr><th class=\"alignleft\">%s</th><th class=\"alignleft\">%s</th>'",
"'<th class=\"alignleft\">%s</th><th class=\"alignleft\">%s</th>'",
"'<th class=\"aligncenter\">%s</th><th class=\"aligncenter\">%s</th>'",
"'<th class=\"aligncenter\">%s</th></tr></thead>'",
"%",
"(",
"capfirst",
"(",
"force_text",
"(",
"_",
"(",
"\"name\"",
")",
")",
")",
",",
"capfirst",
"(",
"force_text",
"(",
"_",
"(",
"\"item\"",
")",
")",
")",
",",
"capfirst",
"(",
"force_text",
"(",
"_",
"(",
"\"location\"",
")",
")",
")",
",",
"capfirst",
"(",
"force_text",
"(",
"_",
"(",
"\"customer\"",
")",
")",
")",
",",
"capfirst",
"(",
"force_text",
"(",
"_",
"(",
"\"due\"",
")",
")",
")",
",",
"capfirst",
"(",
"force_text",
"(",
"_",
"(",
"\"planned date\"",
")",
")",
")",
",",
"capfirst",
"(",
"force_text",
"(",
"_",
"(",
"\"delay\"",
")",
")",
")",
",",
")",
",",
"]",
"alt",
"=",
"False",
"cursor",
".",
"execute",
"(",
"cls",
".",
"query",
",",
"(",
"limit",
",",
")",
")",
"for",
"rec",
"in",
"cursor",
".",
"fetchall",
"(",
")",
":",
"result",
".",
"append",
"(",
"'<tr%s><td class=\"underline\"><a href=\"%s/demandpegging/%s/\">%s</a></td>'",
"'<td class=\"alignleft\">%s</td><td class=\"alignleft\">%s</td>'",
"'<td class=\"alignleft\">%s</td><td class=\"alignleft\">%s</td>'",
"'<td class=\"aligncenter\">%s</td><td class=\"aligncenter\">%s</td></tr>'",
"%",
"(",
"alt",
"and",
"' class=\"altRow\"'",
"or",
"\"\"",
",",
"request",
".",
"prefix",
",",
"quote",
"(",
"rec",
"[",
"0",
"]",
")",
",",
"escape",
"(",
"rec",
"[",
"0",
"]",
")",
",",
"escape",
"(",
"rec",
"[",
"1",
"]",
")",
",",
"escape",
"(",
"rec",
"[",
"2",
"]",
")",
",",
"escape",
"(",
"rec",
"[",
"3",
"]",
")",
",",
"rec",
"[",
"4",
"]",
".",
"date",
"(",
")",
",",
"rec",
"[",
"5",
"]",
".",
"date",
"(",
")",
",",
"int",
"(",
"rec",
"[",
"6",
"]",
")",
",",
")",
")",
"alt",
"=",
"not",
"alt",
"result",
".",
"append",
"(",
"\"</table></div>\"",
")",
"return",
"HttpResponse",
"(",
"\"\\n\"",
".",
"join",
"(",
"result",
")",
")"
] |
https://github.com/frePPLe/frepple/blob/57aa612030b4fcd03cb9c613f83a7dac4f0e8d6d/freppledb/output/widget.py#L65-L111
|
|||
Autodesk/molecular-design-toolkit
|
5f45a47fea21d3603899a6366cb163024f0e2ec4
|
moldesign/external/transformations.py
|
python
|
translation_matrix
|
(direction)
|
return M
|
Return matrix to translate by direction vector.
>>> v = numpy.random.random(3) - 0.5
>>> numpy.allclose(v, translation_matrix(v)[:3, 3])
True
|
Return matrix to translate by direction vector.
|
[
"Return",
"matrix",
"to",
"translate",
"by",
"direction",
"vector",
"."
] |
def translation_matrix(direction):
"""Return matrix to translate by direction vector.
>>> v = numpy.random.random(3) - 0.5
>>> numpy.allclose(v, translation_matrix(v)[:3, 3])
True
"""
M = numpy.identity(4)
M[:3, 3] = direction[:3]
return M
|
[
"def",
"translation_matrix",
"(",
"direction",
")",
":",
"M",
"=",
"numpy",
".",
"identity",
"(",
"4",
")",
"M",
"[",
":",
"3",
",",
"3",
"]",
"=",
"direction",
"[",
":",
"3",
"]",
"return",
"M"
] |
https://github.com/Autodesk/molecular-design-toolkit/blob/5f45a47fea21d3603899a6366cb163024f0e2ec4/moldesign/external/transformations.py#L223-L233
|
|
open-cogsci/OpenSesame
|
c4a3641b097a80a76937edbd8c365f036bcc9705
|
libopensesame/sketchpad_elements/_textline.py
|
python
|
textline.__init__
|
(self, sketchpad, string)
|
desc:
Constructor.
arguments:
sketchpad: A sketchpad object.
string: A definition string.
|
desc:
Constructor.
|
[
"desc",
":",
"Constructor",
"."
] |
def __init__(self, sketchpad, string):
"""
desc:
Constructor.
arguments:
sketchpad: A sketchpad object.
string: A definition string.
"""
defaults = [
(u'x' , None),
(u'y' , None),
(u'text' , None),
(u'center' , 1),
(u'color' , sketchpad.var.get(u'foreground')),
(u'font_family' , sketchpad.var.get(u'font_family')),
(u'font_size' , sketchpad.var.get(u'font_size')),
(u'font_bold' , sketchpad.var.get(u'font_bold')),
(u'font_italic' , sketchpad.var.get(u'font_italic')),
(u'html' , u'yes'),
]
super(textline, self).__init__(sketchpad, string, defaults=defaults)
|
[
"def",
"__init__",
"(",
"self",
",",
"sketchpad",
",",
"string",
")",
":",
"defaults",
"=",
"[",
"(",
"u'x'",
",",
"None",
")",
",",
"(",
"u'y'",
",",
"None",
")",
",",
"(",
"u'text'",
",",
"None",
")",
",",
"(",
"u'center'",
",",
"1",
")",
",",
"(",
"u'color'",
",",
"sketchpad",
".",
"var",
".",
"get",
"(",
"u'foreground'",
")",
")",
",",
"(",
"u'font_family'",
",",
"sketchpad",
".",
"var",
".",
"get",
"(",
"u'font_family'",
")",
")",
",",
"(",
"u'font_size'",
",",
"sketchpad",
".",
"var",
".",
"get",
"(",
"u'font_size'",
")",
")",
",",
"(",
"u'font_bold'",
",",
"sketchpad",
".",
"var",
".",
"get",
"(",
"u'font_bold'",
")",
")",
",",
"(",
"u'font_italic'",
",",
"sketchpad",
".",
"var",
".",
"get",
"(",
"u'font_italic'",
")",
")",
",",
"(",
"u'html'",
",",
"u'yes'",
")",
",",
"]",
"super",
"(",
"textline",
",",
"self",
")",
".",
"__init__",
"(",
"sketchpad",
",",
"string",
",",
"defaults",
"=",
"defaults",
")"
] |
https://github.com/open-cogsci/OpenSesame/blob/c4a3641b097a80a76937edbd8c365f036bcc9705/libopensesame/sketchpad_elements/_textline.py#L31-L54
|
||
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/library/oc_clusterrole.py
|
python
|
ClusterRole.builder
|
(name='default_clusterrole', rules=None)
|
return ClusterRole(content=content)
|
return a clusterrole with name and/or rules
|
return a clusterrole with name and/or rules
|
[
"return",
"a",
"clusterrole",
"with",
"name",
"and",
"/",
"or",
"rules"
] |
def builder(name='default_clusterrole', rules=None):
'''return a clusterrole with name and/or rules'''
if rules is None:
rules = [{'apiGroups': [""],
'attributeRestrictions': None,
'verbs': [],
'resources': []}]
content = {
'apiVersion': 'v1',
'kind': 'ClusterRole',
'metadata': {'name': '{}'.format(name)},
'rules': rules,
}
return ClusterRole(content=content)
|
[
"def",
"builder",
"(",
"name",
"=",
"'default_clusterrole'",
",",
"rules",
"=",
"None",
")",
":",
"if",
"rules",
"is",
"None",
":",
"rules",
"=",
"[",
"{",
"'apiGroups'",
":",
"[",
"\"\"",
"]",
",",
"'attributeRestrictions'",
":",
"None",
",",
"'verbs'",
":",
"[",
"]",
",",
"'resources'",
":",
"[",
"]",
"}",
"]",
"content",
"=",
"{",
"'apiVersion'",
":",
"'v1'",
",",
"'kind'",
":",
"'ClusterRole'",
",",
"'metadata'",
":",
"{",
"'name'",
":",
"'{}'",
".",
"format",
"(",
"name",
")",
"}",
",",
"'rules'",
":",
"rules",
",",
"}",
"return",
"ClusterRole",
"(",
"content",
"=",
"content",
")"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/library/oc_clusterrole.py#L1652-L1666
|
|
rhinstaller/anaconda
|
63edc8680f1b05cbfe11bef28703acba808c5174
|
pyanaconda/modules/common/task/task.py
|
python
|
Task.start
|
(self)
|
Start the task in a new thread.
|
Start the task in a new thread.
|
[
"Start",
"the",
"task",
"in",
"a",
"new",
"thread",
"."
] |
def start(self):
"""Start the task in a new thread."""
threadMgr.add(
AnacondaThread(
name=self._thread_name,
target=self._thread_run_callback,
target_started=self._task_started_callback,
target_stopped=self._task_stopped_callback,
target_failed=self._thread_failed_callback,
fatal=False
)
)
|
[
"def",
"start",
"(",
"self",
")",
":",
"threadMgr",
".",
"add",
"(",
"AnacondaThread",
"(",
"name",
"=",
"self",
".",
"_thread_name",
",",
"target",
"=",
"self",
".",
"_thread_run_callback",
",",
"target_started",
"=",
"self",
".",
"_task_started_callback",
",",
"target_stopped",
"=",
"self",
".",
"_task_stopped_callback",
",",
"target_failed",
"=",
"self",
".",
"_thread_failed_callback",
",",
"fatal",
"=",
"False",
")",
")"
] |
https://github.com/rhinstaller/anaconda/blob/63edc8680f1b05cbfe11bef28703acba808c5174/pyanaconda/modules/common/task/task.py#L81-L92
|
||
JaniceWuo/MovieRecommend
|
4c86db64ca45598917d304f535413df3bc9fea65
|
movierecommend/venv1/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/pkg_resources/__init__.py
|
python
|
normalize_path
|
(filename)
|
return os.path.normcase(os.path.realpath(filename))
|
Normalize a file/dir name for comparison purposes
|
Normalize a file/dir name for comparison purposes
|
[
"Normalize",
"a",
"file",
"/",
"dir",
"name",
"for",
"comparison",
"purposes"
] |
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(filename))
|
[
"def",
"normalize_path",
"(",
"filename",
")",
":",
"return",
"os",
".",
"path",
".",
"normcase",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"filename",
")",
")"
] |
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/pkg_resources/__init__.py#L2197-L2199
|
|
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/physics/quantum/sho1d.py
|
python
|
RaisingOp._eval_adjoint
|
(self)
|
return LoweringOp(*self.args)
|
[] |
def _eval_adjoint(self):
return LoweringOp(*self.args)
|
[
"def",
"_eval_adjoint",
"(",
"self",
")",
":",
"return",
"LoweringOp",
"(",
"*",
"self",
".",
"args",
")"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/physics/quantum/sho1d.py#L111-L112
|
|||
PyHDI/veriloggen
|
2382d200deabf59cfcfd741f5eba371010aaf2bb
|
veriloggen/verilog/from_verilog.py
|
python
|
VerilogReadVisitor.visit_GreaterThan
|
(self, node)
|
return vtypes.GreaterThan(left, right)
|
[] |
def visit_GreaterThan(self, node):
left = self.visit(node.left)
right = self.visit(node.right)
return vtypes.GreaterThan(left, right)
|
[
"def",
"visit_GreaterThan",
"(",
"self",
",",
"node",
")",
":",
"left",
"=",
"self",
".",
"visit",
"(",
"node",
".",
"left",
")",
"right",
"=",
"self",
".",
"visit",
"(",
"node",
".",
"right",
")",
"return",
"vtypes",
".",
"GreaterThan",
"(",
"left",
",",
"right",
")"
] |
https://github.com/PyHDI/veriloggen/blob/2382d200deabf59cfcfd741f5eba371010aaf2bb/veriloggen/verilog/from_verilog.py#L521-L524
|
|||
dimagi/commcare-hq
|
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
|
corehq/ex-submodules/casexml/apps/phone/models.py
|
python
|
SimplifiedSyncLog.is_formplayer
|
(self)
|
return self.device_id and self.device_id.startswith("WebAppsLogin")
|
[] |
def is_formplayer(self):
return self.device_id and self.device_id.startswith("WebAppsLogin")
|
[
"def",
"is_formplayer",
"(",
"self",
")",
":",
"return",
"self",
".",
"device_id",
"and",
"self",
".",
"device_id",
".",
"startswith",
"(",
"\"WebAppsLogin\"",
")"
] |
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/ex-submodules/casexml/apps/phone/models.py#L656-L657
|
|||
microsoft/tf2-gnn
|
182eb6b337cecf1f0d6dce237a4a8ff4e5599e67
|
tf2_gnn/cli_utils/training_utils.py
|
python
|
make_run_id
|
(model_name: str, task_name: str, run_name: Optional[str] = None)
|
Choose a run ID, based on the --run-name parameter and the current time.
|
Choose a run ID, based on the --run-name parameter and the current time.
|
[
"Choose",
"a",
"run",
"ID",
"based",
"on",
"the",
"--",
"run",
"-",
"name",
"parameter",
"and",
"the",
"current",
"time",
"."
] |
def make_run_id(model_name: str, task_name: str, run_name: Optional[str] = None) -> str:
"""Choose a run ID, based on the --run-name parameter and the current time."""
if run_name is not None:
return run_name
else:
return "%s_%s__%s" % (model_name, task_name, time.strftime("%Y-%m-%d_%H-%M-%S"))
|
[
"def",
"make_run_id",
"(",
"model_name",
":",
"str",
",",
"task_name",
":",
"str",
",",
"run_name",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"str",
":",
"if",
"run_name",
"is",
"not",
"None",
":",
"return",
"run_name",
"else",
":",
"return",
"\"%s_%s__%s\"",
"%",
"(",
"model_name",
",",
"task_name",
",",
"time",
".",
"strftime",
"(",
"\"%Y-%m-%d_%H-%M-%S\"",
")",
")"
] |
https://github.com/microsoft/tf2-gnn/blob/182eb6b337cecf1f0d6dce237a4a8ff4e5599e67/tf2_gnn/cli_utils/training_utils.py#L20-L25
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.