nwo (string, 5-106 chars) | sha (string, 40 chars) | path (string, 4-174 chars) | language (string, 1 class) | identifier (string, 1-140 chars) | parameters (string, 0-87.7k chars) | argument_list (string, 1 class) | return_statement (string, 0-426k chars) | docstring (string, 0-64.3k chars) | docstring_summary (string, 0-26.3k chars) | docstring_tokens (list) | function (string, 18-4.83M chars) | function_tokens (list) | url (string, 83-304 chars)
---|---|---|---|---|---|---|---|---|---|---|---|---|---
maqp/tfc
|
4bb13da1f19671e1e723db7e8a21be58847209af
|
src/common/reed_solomon.py
|
python
|
gf_neg
|
(x: int)
|
return x
|
Do negation in binary Galois Field.
|
Do negation in binary Galois Field.
|
[
"Do",
"negation",
"in",
"binary",
"Galois",
"Field",
"."
] |
def gf_neg(x: int) -> int:
    """Do negation in binary Galois Field."""
    return x
|
[
"def",
"gf_neg",
"(",
"x",
":",
"int",
")",
"->",
"int",
":",
"return",
"x"
] |
https://github.com/maqp/tfc/blob/4bb13da1f19671e1e723db7e8a21be58847209af/src/common/reed_solomon.py#L479-L481
|
|
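An annotation on the `maqp/tfc` record above (not part of the dataset row): `gf_neg` returning its argument unchanged is correct because, in a binary Galois field GF(2^k), addition is carry-less bitwise XOR, so every element is its own additive inverse. A minimal sketch, assuming plain integers as field elements:

import itertools

def gf_add(a: int, b: int) -> int:
    # Addition in GF(2^k) is bitwise XOR.
    return a ^ b

def gf_neg(x: int) -> int:
    # x + x == x ^ x == 0, so negation is the identity.
    return x

for x in range(16):
    assert gf_add(x, gf_neg(x)) == 0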
bukun/TorCMS
|
f7b44e8650aa54774f6b57e7b178edebbbf57e8e
|
torcms/model/user_model.py
|
python
|
MUser.delete_by_user_name
|
(user_name)
|
Delete user in the database by `user_name`.
|
Delete user in the database by `user_name`.
|
[
"Delete",
"user",
"in",
"the",
"database",
"by",
"user_name",
"."
] |
def delete_by_user_name(user_name):
    '''
    Delete user in the database by `user_name`.
    '''
    try:
        del_count = TabMember.delete().where(
            TabMember.user_name == user_name)
        del_count.execute()
        return True
    except Exception as err:
        print(repr(err))
        return False
|
[
"def",
"delete_by_user_name",
"(",
"user_name",
")",
":",
"try",
":",
"del_count",
"=",
"TabMember",
".",
"delete",
"(",
")",
".",
"where",
"(",
"TabMember",
".",
"user_name",
"==",
"user_name",
")",
"del_count",
".",
"execute",
"(",
")",
"return",
"True",
"except",
"Exception",
"as",
"err",
":",
"print",
"(",
"repr",
"(",
"err",
")",
")",
"return",
"False"
] |
https://github.com/bukun/TorCMS/blob/f7b44e8650aa54774f6b57e7b178edebbbf57e8e/torcms/model/user_model.py#L367-L378
|
||
ibm-research-tokyo/dybm
|
a6d308c896c2f66680ee9c5d05a3d7826cc27c64
|
src/pydybm/time_series/esn.py
|
python
|
ESN._update_state
|
(self, in_pattern)
|
Updating internal state
Parameters
----------
in_pattern : array, or list of arrays
pattern used to update the state
|
Updating internal state
|
[
"Updating",
"internal",
"state"
] |
def _update_state(self, in_pattern):
    """Updating internal state
    Parameters
    ----------
    in_pattern : array, or list of arrays
        pattern used to update the state
    """
    new_si = amath.tanh(in_pattern.dot(self.Win) + self.si.dot(self.Wrec))
    self.si = (1 - self.leak) * self.si + self.leak * new_si
|
[
"def",
"_update_state",
"(",
"self",
",",
"in_pattern",
")",
":",
"new_si",
"=",
"amath",
".",
"tanh",
"(",
"in_pattern",
".",
"dot",
"(",
"self",
".",
"Win",
")",
"+",
"self",
".",
"si",
".",
"dot",
"(",
"self",
".",
"Wrec",
")",
")",
"self",
".",
"si",
"=",
"(",
"1",
"-",
"self",
".",
"leak",
")",
"*",
"self",
".",
"si",
"+",
"self",
".",
"leak",
"*",
"new_si"
] |
https://github.com/ibm-research-tokyo/dybm/blob/a6d308c896c2f66680ee9c5d05a3d7826cc27c64/src/pydybm/time_series/esn.py#L122-L131
|
||
TM0831/Spiders
|
89ba07667a1e729b67f012f2f7cf71034243ad28
|
WeRead/spider.py
|
python
|
save_data
|
(data: list)
|
use MongoDB to save data
:param data: data need to save
:return:
|
use MongoDB to save data
:param data: data need to save
:return:
|
[
"use",
"MongoDB",
"to",
"save",
"data",
":",
"param",
"data",
":",
"data",
"need",
"to",
"save",
":",
"return",
":"
] |
def save_data(data: list):
    """
    use MongoDB to save data
    :param data: data need to save
    :return:
    """
    client = pymongo.MongoClient(host=MONGO_HOST, port=MONGO_PORT)
    col = client[MONGO_DB][MONGO_COL]
    try:
        col.insert_many(data)
    except Exception as e:
        logging.error(e)
    finally:
        client.close()
|
[
"def",
"save_data",
"(",
"data",
":",
"list",
")",
":",
"client",
"=",
"pymongo",
".",
"MongoClient",
"(",
"host",
"=",
"MONGO_HOST",
",",
"port",
"=",
"MONGO_PORT",
")",
"col",
"=",
"client",
"[",
"MONGO_DB",
"]",
"[",
"MONGO_COL",
"]",
"try",
":",
"col",
".",
"insert_many",
"(",
"data",
")",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"error",
"(",
"e",
")",
"finally",
":",
"client",
".",
"close",
"(",
")"
] |
https://github.com/TM0831/Spiders/blob/89ba07667a1e729b67f012f2f7cf71034243ad28/WeRead/spider.py#L92-L105
|
||
eliben/code-for-blog
|
06d6887eccd84ca5703b792a85ab6c1ebfc5393e
|
2018/type-inference/typing.py
|
python
|
apply_unifier
|
(typ, subst)
|
Applies the unifier subst to typ.
Returns a type where all occurrences of variables bound in subst
were replaced (recursively); on failure returns None.
|
Applies the unifier subst to typ.
|
[
"Applies",
"the",
"unifier",
"subst",
"to",
"typ",
"."
] |
def apply_unifier(typ, subst):
    """Applies the unifier subst to typ.
    Returns a type where all occurrences of variables bound in subst
    were replaced (recursively); on failure returns None.
    """
    if subst is None:
        return None
    elif len(subst) == 0:
        return typ
    elif isinstance(typ, (BoolType, IntType)):
        return typ
    elif isinstance(typ, TypeVar):
        if typ.name in subst:
            return apply_unifier(subst[typ.name], subst)
        else:
            return typ
    elif isinstance(typ, FuncType):
        newargtypes = [apply_unifier(arg, subst) for arg in typ.argtypes]
        return FuncType(newargtypes,
                        apply_unifier(typ.rettype, subst))
    else:
        return None
|
[
"def",
"apply_unifier",
"(",
"typ",
",",
"subst",
")",
":",
"if",
"subst",
"is",
"None",
":",
"return",
"None",
"elif",
"len",
"(",
"subst",
")",
"==",
"0",
":",
"return",
"typ",
"elif",
"isinstance",
"(",
"typ",
",",
"(",
"BoolType",
",",
"IntType",
")",
")",
":",
"return",
"typ",
"elif",
"isinstance",
"(",
"typ",
",",
"TypeVar",
")",
":",
"if",
"typ",
".",
"name",
"in",
"subst",
":",
"return",
"apply_unifier",
"(",
"subst",
"[",
"typ",
".",
"name",
"]",
",",
"subst",
")",
"else",
":",
"return",
"typ",
"elif",
"isinstance",
"(",
"typ",
",",
"FuncType",
")",
":",
"newargtypes",
"=",
"[",
"apply_unifier",
"(",
"arg",
",",
"subst",
")",
"for",
"arg",
"in",
"typ",
".",
"argtypes",
"]",
"return",
"FuncType",
"(",
"newargtypes",
",",
"apply_unifier",
"(",
"typ",
".",
"rettype",
",",
"subst",
")",
")",
"else",
":",
"return",
"None"
] |
https://github.com/eliben/code-for-blog/blob/06d6887eccd84ca5703b792a85ab6c1ebfc5393e/2018/type-inference/typing.py#L311-L333
|
||
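A side note on the `apply_unifier` record above: the recursion simply chases substitution bindings until a concrete type is reached, rebuilding function types from their substituted pieces. A simplified sketch of the same idea, assuming types are represented as strings (variables and base types) and ('->', args, ret) tuples instead of the repository's TypeVar/FuncType classes:

def apply_subst(typ, subst):
    # Strings are variables or base types; function types are ('->', [arg_types], ret_type).
    if isinstance(typ, str):
        return apply_subst(subst[typ], subst) if typ in subst else typ
    tag, args, ret = typ
    return (tag, [apply_subst(a, subst) for a in args], apply_subst(ret, subst))

subst = {'a': 'Int', 'b': ('->', ['a'], 'Bool')}
assert apply_subst(('->', ['b'], 'a'), subst) == ('->', [('->', ['Int'], 'Bool')], 'Int')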
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_process.py
|
python
|
Utils.create_tmpfile
|
(prefix='tmp')
|
Generates and returns a temporary file name
|
Generates and returns a temporary file name
|
[
"Generates",
"and",
"returns",
"a",
"temporary",
"file",
"name"
] |
def create_tmpfile(prefix='tmp'):
    ''' Generates and returns a temporary file name '''
    with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
        return tmp.name
|
[
"def",
"create_tmpfile",
"(",
"prefix",
"=",
"'tmp'",
")",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"prefix",
"=",
"prefix",
",",
"delete",
"=",
"False",
")",
"as",
"tmp",
":",
"return",
"tmp",
".",
"name"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_process.py#L1213-L1217
|
||
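A usage note on the `create_tmpfile` record above: because the file is created with `delete=False`, it outlives the `with` block and the caller owns the cleanup. A minimal sketch of that contract:

import os
import tempfile

def create_tmpfile(prefix='tmp'):
    with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
        return tmp.name

path = create_tmpfile()
try:
    assert os.path.exists(path)   # still on disk after the function returns
finally:
    os.remove(path)               # caller is responsible for removal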
Mellcap/MellPlayer
|
90b3210eaaed675552fd69717b6b953fd0e0b07d
|
mellplayer/api.py
|
python
|
Netease.playlist_detail
|
(self, playlist_id)
|
return result
|
歌单详情
http://music.163.com/api/playlist/detail?id=xxx
|
歌单详情
http://music.163.com/api/playlist/detail?id=xxx
|
[
"歌单详情",
"http",
":",
"//",
"music",
".",
"163",
".",
"com",
"/",
"api",
"/",
"playlist",
"/",
"detail?id",
"=",
"xxx"
] |
def playlist_detail(self, playlist_id):
    '''
    歌单详情
    http://music.163.com/api/playlist/detail?id=xxx
    '''
    url = 'http://music.163.com/api/playlist/detail?id=%s' % playlist_id
    result = self._request(url)
    return result
|
[
"def",
"playlist_detail",
"(",
"self",
",",
"playlist_id",
")",
":",
"url",
"=",
"'http://music.163.com/api/playlist/detail?id=%s'",
"%",
"playlist_id",
"result",
"=",
"self",
".",
"_request",
"(",
"url",
")",
"return",
"result"
] |
https://github.com/Mellcap/MellPlayer/blob/90b3210eaaed675552fd69717b6b953fd0e0b07d/mellplayer/api.py#L57-L64
|
|
conan-io/conan
|
28ec09f6cbf1d7e27ec27393fd7bbc74891e74a8
|
conans/model/info.py
|
python
|
ConanInfo.copy
|
(self)
|
return result
|
Useful for build_id implementation
|
Useful for build_id implementation
|
[
"Useful",
"for",
"build_id",
"implementation"
] |
def copy(self):
    """ Useful for build_id implementation
    """
    result = ConanInfo()
    result.invalid = self.invalid
    result.settings = self.settings.copy()
    result.options = self.options.copy()
    result.requires = self.requires.copy()
    result.python_requires = self.python_requires.copy()
    return result
|
[
"def",
"copy",
"(",
"self",
")",
":",
"result",
"=",
"ConanInfo",
"(",
")",
"result",
".",
"invalid",
"=",
"self",
".",
"invalid",
"result",
".",
"settings",
"=",
"self",
".",
"settings",
".",
"copy",
"(",
")",
"result",
".",
"options",
"=",
"self",
".",
"options",
".",
"copy",
"(",
")",
"result",
".",
"requires",
"=",
"self",
".",
"requires",
".",
"copy",
"(",
")",
"result",
".",
"python_requires",
"=",
"self",
".",
"python_requires",
".",
"copy",
"(",
")",
"return",
"result"
] |
https://github.com/conan-io/conan/blob/28ec09f6cbf1d7e27ec27393fd7bbc74891e74a8/conans/model/info.py#L430-L439
|
|
bbaibowen/computer-vision
|
c105db5aa4cb4398bc7b6b84cb8ba63d2f2d88cc
|
FaceNet + MTCNN/utils.py
|
python
|
pad
|
(bboxes, w, h)
|
return return_list
|
将超出图像的box进行处理
参数:
bboxes:人脸框
w,h:图像长宽
x:候选框的左上角x坐标
y:候选框的左上角y坐标
ex:候选框的右下角x坐标
ey:候选框的与下角y坐标
dx:经过对齐之后的候选框左上角x坐标
dy:经过对齐之后的候选框左上角y坐标
edx:修改之后的候选框右下角x
edy:修改之后的候选框右下角y
tmpw:候选框的宽度
tmph:候选框的长度
|
将超出图像的box进行处理
参数:
bboxes:人脸框
w,h:图像长宽
x:候选框的左上角x坐标
y:候选框的左上角y坐标
ex:候选框的右下角x坐标
ey:候选框的与下角y坐标
dx:经过对齐之后的候选框左上角x坐标
dy:经过对齐之后的候选框左上角y坐标
edx:修改之后的候选框右下角x
edy:修改之后的候选框右下角y
tmpw:候选框的宽度
tmph:候选框的长度
|
[
"将超出图像的box进行处理",
"参数:",
"bboxes",
":",
"人脸框",
"w",
"h",
":",
"图像长宽",
"x:候选框的左上角x坐标",
"y:候选框的左上角y坐标",
"ex:候选框的右下角x坐标",
"ey:候选框的与下角y坐标",
"dx:经过对齐之后的候选框左上角x坐标",
"dy:经过对齐之后的候选框左上角y坐标",
"edx:修改之后的候选框右下角x",
"edy:修改之后的候选框右下角y",
"tmpw:候选框的宽度",
"tmph:候选框的长度"
] |
def pad(bboxes, w, h):
    '''将超出图像的box进行处理
    参数:
    bboxes:人脸框
    w,h:图像长宽
    x:候选框的左上角x坐标
    y:候选框的左上角y坐标
    ex:候选框的右下角x坐标
    ey:候选框的与下角y坐标
    dx:经过对齐之后的候选框左上角x坐标
    dy:经过对齐之后的候选框左上角y坐标
    edx:修改之后的候选框右下角x
    edy:修改之后的候选框右下角y
    tmpw:候选框的宽度
    tmph:候选框的长度
    '''
    # box的长宽
    tmpw, tmph = bboxes[:, 2] - bboxes[:, 0] + 1, bboxes[:, 3] - bboxes[:, 1] + 1
    num_box = bboxes.shape[0]
    dx, dy = np.zeros((num_box,)), np.zeros((num_box,))
    edx, edy = tmpw.copy() - 1, tmph.copy() - 1
    # box左上右下的坐标
    x, y, ex, ey = bboxes[:, 0], bboxes[:, 1], bboxes[:, 2], bboxes[:, 3]
    # 找到超出右下边界的box并将ex,ey归为图像的w,h
    # edx,edy为调整后的box右下角相对原box左上角的相对坐标
    tmp_index = np.where(ex > w - 1)
    edx[tmp_index] = tmpw[tmp_index] + w - ex[tmp_index] - 2
    ex[tmp_index] = w - 1
    tmp_index = np.where(ey > h - 1)
    edy[tmp_index] = tmph[tmp_index] + h - ey[tmp_index] - 2
    ey[tmp_index] = h - 1
    # 找到超出左上角的box并将x,y归为0
    # dx,dy为调整后的box的左上角坐标相对于原box左上角的坐标
    tmp_index = np.where(x < 0)
    dx[tmp_index] = 0 - x[tmp_index]
    x[tmp_index] = 0
    tmp_index = np.where(y < 0)
    dy[tmp_index] = 0 - y[tmp_index]
    y[tmp_index] = 0
    return_list = [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph]
    return_list = [item.astype(np.int32) for item in return_list]
    return return_list
|
[
"def",
"pad",
"(",
"bboxes",
",",
"w",
",",
"h",
")",
":",
"# box的长宽",
"tmpw",
",",
"tmph",
"=",
"bboxes",
"[",
":",
",",
"2",
"]",
"-",
"bboxes",
"[",
":",
",",
"0",
"]",
"+",
"1",
",",
"bboxes",
"[",
":",
",",
"3",
"]",
"-",
"bboxes",
"[",
":",
",",
"1",
"]",
"+",
"1",
"num_box",
"=",
"bboxes",
".",
"shape",
"[",
"0",
"]",
"dx",
",",
"dy",
"=",
"np",
".",
"zeros",
"(",
"(",
"num_box",
",",
")",
")",
",",
"np",
".",
"zeros",
"(",
"(",
"num_box",
",",
")",
")",
"edx",
",",
"edy",
"=",
"tmpw",
".",
"copy",
"(",
")",
"-",
"1",
",",
"tmph",
".",
"copy",
"(",
")",
"-",
"1",
"# box左上右下的坐标",
"x",
",",
"y",
",",
"ex",
",",
"ey",
"=",
"bboxes",
"[",
":",
",",
"0",
"]",
",",
"bboxes",
"[",
":",
",",
"1",
"]",
",",
"bboxes",
"[",
":",
",",
"2",
"]",
",",
"bboxes",
"[",
":",
",",
"3",
"]",
"# 找到超出右下边界的box并将ex,ey归为图像的w,h",
"# edx,edy为调整后的box右下角相对原box左上角的相对坐标",
"tmp_index",
"=",
"np",
".",
"where",
"(",
"ex",
">",
"w",
"-",
"1",
")",
"edx",
"[",
"tmp_index",
"]",
"=",
"tmpw",
"[",
"tmp_index",
"]",
"+",
"w",
"-",
"ex",
"[",
"tmp_index",
"]",
"-",
"2",
"ex",
"[",
"tmp_index",
"]",
"=",
"w",
"-",
"1",
"tmp_index",
"=",
"np",
".",
"where",
"(",
"ey",
">",
"h",
"-",
"1",
")",
"edy",
"[",
"tmp_index",
"]",
"=",
"tmph",
"[",
"tmp_index",
"]",
"+",
"h",
"-",
"ey",
"[",
"tmp_index",
"]",
"-",
"2",
"ey",
"[",
"tmp_index",
"]",
"=",
"h",
"-",
"1",
"# 找到超出左上角的box并将x,y归为0",
"# dx,dy为调整后的box的左上角坐标相对于原box左上角的坐标",
"tmp_index",
"=",
"np",
".",
"where",
"(",
"x",
"<",
"0",
")",
"dx",
"[",
"tmp_index",
"]",
"=",
"0",
"-",
"x",
"[",
"tmp_index",
"]",
"x",
"[",
"tmp_index",
"]",
"=",
"0",
"tmp_index",
"=",
"np",
".",
"where",
"(",
"y",
"<",
"0",
")",
"dy",
"[",
"tmp_index",
"]",
"=",
"0",
"-",
"y",
"[",
"tmp_index",
"]",
"y",
"[",
"tmp_index",
"]",
"=",
"0",
"return_list",
"=",
"[",
"dy",
",",
"edy",
",",
"dx",
",",
"edx",
",",
"y",
",",
"ey",
",",
"x",
",",
"ex",
",",
"tmpw",
",",
"tmph",
"]",
"return_list",
"=",
"[",
"item",
".",
"astype",
"(",
"np",
".",
"int32",
")",
"for",
"item",
"in",
"return_list",
"]",
"return",
"return_list"
] |
https://github.com/bbaibowen/computer-vision/blob/c105db5aa4cb4398bc7b6b84cb8ba63d2f2d88cc/FaceNet + MTCNN/utils.py#L88-L134
|
|
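A small worked example for the `pad` record above (a sketch, not code from the repository): for a box that sticks out past the right image border, the function reports both the clipped image coordinates (x, ex) and where the valid pixels land inside a blank tmpw-wide crop (dx, edx). The same bookkeeping for a single box:

w, h = 100, 100                        # image size
x, y, ex, ey = 80, 10, 119, 49         # 40x40 box, 20 px past the right border
tmpw, tmph = ex - x + 1, ey - y + 1    # 40, 40
dx, dy, edx, edy = 0, 0, tmpw - 1, tmph - 1

if ex > w - 1:                         # clip against the right border
    edx = tmpw + w - ex - 2            # 40 + 100 - 119 - 2 = 19
    ex = w - 1                         # 99
# image[y:ey+1, x:ex+1] fills crop[dy:edy+1, dx:edx+1]: both regions are 20 px wide
assert (ex - x + 1, edx - dx + 1) == (20, 20)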
apache/tvm
|
6eb4ed813ebcdcd9558f0906a1870db8302ff1e0
|
python/tvm/auto_scheduler/relay_integration.py
|
python
|
enter_layout_rewrite
|
()
|
Enter layout rewrite tracing environment
|
Enter layout rewrite tracing environment
|
[
"Enter",
"layout",
"rewrite",
"tracing",
"environment"
] |
def enter_layout_rewrite():
    """Enter layout rewrite tracing environment"""
    env = TracingEnvironment(TracingMode.PREPARE_LAYOUT_REWRITE)
    env.__enter__()
|
[
"def",
"enter_layout_rewrite",
"(",
")",
":",
"env",
"=",
"TracingEnvironment",
"(",
"TracingMode",
".",
"PREPARE_LAYOUT_REWRITE",
")",
"env",
".",
"__enter__",
"(",
")"
] |
https://github.com/apache/tvm/blob/6eb4ed813ebcdcd9558f0906a1870db8302ff1e0/python/tvm/auto_scheduler/relay_integration.py#L242-L245
|
||
RasaHQ/rasa
|
54823b68c1297849ba7ae841a4246193cd1223a1
|
rasa/core/processor.py
|
python
|
MessageProcessor.should_predict_another_action
|
(action_name: Text)
|
return action_name not in (ACTION_LISTEN_NAME, ACTION_SESSION_START_NAME)
|
Determine whether the processor should predict another action.
Args:
action_name: Name of the latest executed action.
Returns:
`False` if `action_name` is `ACTION_LISTEN_NAME` or
`ACTION_SESSION_START_NAME`, otherwise `True`.
|
Determine whether the processor should predict another action.
|
[
"Determine",
"whether",
"the",
"processor",
"should",
"predict",
"another",
"action",
"."
] |
def should_predict_another_action(action_name: Text) -> bool:
    """Determine whether the processor should predict another action.
    Args:
        action_name: Name of the latest executed action.
    Returns:
        `False` if `action_name` is `ACTION_LISTEN_NAME` or
        `ACTION_SESSION_START_NAME`, otherwise `True`.
    """
    return action_name not in (ACTION_LISTEN_NAME, ACTION_SESSION_START_NAME)
|
[
"def",
"should_predict_another_action",
"(",
"action_name",
":",
"Text",
")",
"->",
"bool",
":",
"return",
"action_name",
"not",
"in",
"(",
"ACTION_LISTEN_NAME",
",",
"ACTION_SESSION_START_NAME",
")"
] |
https://github.com/RasaHQ/rasa/blob/54823b68c1297849ba7ae841a4246193cd1223a1/rasa/core/processor.py#L773-L784
|
|
Nicotine-Plus/nicotine-plus
|
6583532193e132206bb2096c77c6ad1ce96c21fa
|
pynicotine/pynicotine.py
|
python
|
NicotineCore.place_in_queue_request
|
(self, msg)
|
Peer code: 51
|
Peer code: 51
|
[
"Peer",
"code",
":",
"51"
] |
def place_in_queue_request(self, msg):
    """ Peer code: 51 """
    log.add_msg_contents(msg)
    self.transfers.place_in_queue_request(msg)
|
[
"def",
"place_in_queue_request",
"(",
"self",
",",
"msg",
")",
":",
"log",
".",
"add_msg_contents",
"(",
"msg",
")",
"self",
".",
"transfers",
".",
"place_in_queue_request",
"(",
"msg",
")"
] |
https://github.com/Nicotine-Plus/nicotine-plus/blob/6583532193e132206bb2096c77c6ad1ce96c21fa/pynicotine/pynicotine.py#L1164-L1168
|
||
Fizzadar/pyinfra
|
ff0913d6a172966760b63fe59e55dff9ea852e0d
|
pyinfra/facts/windows.py
|
python
|
WindowsService.command
|
(self, name)
|
return 'Get-Service -Name {} | Format-List -Property *'.format(name)
|
[] |
def command(self, name):
    return 'Get-Service -Name {} | Format-List -Property *'.format(name)
|
[
"def",
"command",
"(",
"self",
",",
"name",
")",
":",
"return",
"'Get-Service -Name {} | Format-List -Property *'",
".",
"format",
"(",
"name",
")"
] |
https://github.com/Fizzadar/pyinfra/blob/ff0913d6a172966760b63fe59e55dff9ea852e0d/pyinfra/facts/windows.py#L311-L312
|
|||
Fizzadar/pyinfra
|
ff0913d6a172966760b63fe59e55dff9ea852e0d
|
pyinfra/api/connectors/mech.py
|
python
|
make_names_data
|
(limit=None)
|
return hosts
|
[] |
def make_names_data(limit=None):
    mech_ssh_info = get_mech_config(limit)
    logger.debug('Got Mech SSH info: \n{0}'.format(mech_ssh_info))
    hosts = []
    current_host = None
    for line in mech_ssh_info:
        if not line:
            if current_host:
                hosts.append(_make_name_data(current_host))
            current_host = None
            continue
        key, value = line.strip().split(' ', 1)
        if key == 'Host':
            if current_host:
                hosts.append(_make_name_data(current_host))
            # Set the new host
            current_host = {
                key: value,
            }
        elif current_host:
            current_host[key] = value
        else:
            logger.debug('Extra Mech SSH key/value ({0}={1})'.format(
                key, value,
            ))
    if current_host:
        hosts.append(_make_name_data(current_host))
    if not hosts:
        raise InventoryError('No running Mech instances found!')
    return hosts
|
[
"def",
"make_names_data",
"(",
"limit",
"=",
"None",
")",
":",
"mech_ssh_info",
"=",
"get_mech_config",
"(",
"limit",
")",
"logger",
".",
"debug",
"(",
"'Got Mech SSH info: \\n{0}'",
".",
"format",
"(",
"mech_ssh_info",
")",
")",
"hosts",
"=",
"[",
"]",
"current_host",
"=",
"None",
"for",
"line",
"in",
"mech_ssh_info",
":",
"if",
"not",
"line",
":",
"if",
"current_host",
":",
"hosts",
".",
"append",
"(",
"_make_name_data",
"(",
"current_host",
")",
")",
"current_host",
"=",
"None",
"continue",
"key",
",",
"value",
"=",
"line",
".",
"strip",
"(",
")",
".",
"split",
"(",
"' '",
",",
"1",
")",
"if",
"key",
"==",
"'Host'",
":",
"if",
"current_host",
":",
"hosts",
".",
"append",
"(",
"_make_name_data",
"(",
"current_host",
")",
")",
"# Set the new host",
"current_host",
"=",
"{",
"key",
":",
"value",
",",
"}",
"elif",
"current_host",
":",
"current_host",
"[",
"key",
"]",
"=",
"value",
"else",
":",
"logger",
".",
"debug",
"(",
"'Extra Mech SSH key/value ({0}={1})'",
".",
"format",
"(",
"key",
",",
"value",
",",
")",
")",
"if",
"current_host",
":",
"hosts",
".",
"append",
"(",
"_make_name_data",
"(",
"current_host",
")",
")",
"if",
"not",
"hosts",
":",
"raise",
"InventoryError",
"(",
"'No running Mech instances found!'",
")",
"return",
"hosts"
] |
https://github.com/Fizzadar/pyinfra/blob/ff0913d6a172966760b63fe59e55dff9ea852e0d/pyinfra/api/connectors/mech.py#L133-L174
|
|||
oracle/oci-python-sdk
|
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
|
src/oci/mysql/mysqlaas_client.py
|
python
|
MysqlaasClient.list_configurations
|
(self, compartment_id, **kwargs)
|
Lists the Configurations available when creating a DB System.
This may include DEFAULT configurations per Shape and CUSTOM configurations.
The default sort order is a multi-part sort by:
- shapeName, ascending
- DEFAULT-before-CUSTOM
- displayName ascending
:param str compartment_id: (required)
The compartment `OCID`__.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param str opc_request_id: (optional)
Customer-defined unique identifier for the request. If you need to
contact Oracle about a specific request, please provide the request
ID that you supplied in this header with the request.
:param str configuration_id: (optional)
The requested Configuration instance.
:param str lifecycle_state: (optional)
Configuration Lifecycle State
Allowed values are: "ACTIVE", "DELETED"
:param list[str] type: (optional)
The requested Configuration types.
Allowed values are: "DEFAULT", "CUSTOM"
:param str display_name: (optional)
A filter to return only the resource matching the given display name exactly.
:param str shape_name: (optional)
The requested Shape name.
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Time fields are default ordered as descending. Display name is default ordered as ascending.
Allowed values are: "displayName", "shapeName", "timeCreated", "timeUpdated"
:param str sort_order: (optional)
The sort order to use (ASC or DESC).
Allowed values are: "ASC", "DESC"
:param int limit: (optional)
The maximum number of items to return in a paginated list call. For information about pagination, see
`List Pagination`__.
__ https://docs.cloud.oracle.com/#API/Concepts/usingapi.htm#List_Pagination
:param str page: (optional)
The value of the `opc-next-page` or `opc-prev-page` response header from
the previous list call. For information about pagination, see `List
Pagination`__.
__ https://docs.cloud.oracle.com/#API/Concepts/usingapi.htm#List_Pagination
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type list of :class:`~oci.mysql.models.ConfigurationSummary`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/mysql/list_configurations.py.html>`__ to see an example of how to use list_configurations API.
|
Lists the Configurations available when creating a DB System.
|
[
"Lists",
"the",
"Configurations",
"available",
"when",
"creating",
"a",
"DB",
"System",
"."
] |
def list_configurations(self, compartment_id, **kwargs):
    """
    Lists the Configurations available when creating a DB System.
    This may include DEFAULT configurations per Shape and CUSTOM configurations.
    The default sort order is a multi-part sort by:
    - shapeName, ascending
    - DEFAULT-before-CUSTOM
    - displayName ascending
    :param str compartment_id: (required)
        The compartment `OCID`__.
        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
    :param str opc_request_id: (optional)
        Customer-defined unique identifier for the request. If you need to
        contact Oracle about a specific request, please provide the request
        ID that you supplied in this header with the request.
    :param str configuration_id: (optional)
        The requested Configuration instance.
    :param str lifecycle_state: (optional)
        Configuration Lifecycle State
        Allowed values are: "ACTIVE", "DELETED"
    :param list[str] type: (optional)
        The requested Configuration types.
        Allowed values are: "DEFAULT", "CUSTOM"
    :param str display_name: (optional)
        A filter to return only the resource matching the given display name exactly.
    :param str shape_name: (optional)
        The requested Shape name.
    :param str sort_by: (optional)
        The field to sort by. Only one sort order may be provided. Time fields are default ordered as descending. Display name is default ordered as ascending.
        Allowed values are: "displayName", "shapeName", "timeCreated", "timeUpdated"
    :param str sort_order: (optional)
        The sort order to use (ASC or DESC).
        Allowed values are: "ASC", "DESC"
    :param int limit: (optional)
        The maximum number of items to return in a paginated list call. For information about pagination, see
        `List Pagination`__.
        __ https://docs.cloud.oracle.com/#API/Concepts/usingapi.htm#List_Pagination
    :param str page: (optional)
        The value of the `opc-next-page` or `opc-prev-page` response header from
        the previous list call. For information about pagination, see `List
        Pagination`__.
        __ https://docs.cloud.oracle.com/#API/Concepts/usingapi.htm#List_Pagination
    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
    :return: A :class:`~oci.response.Response` object with data of type list of :class:`~oci.mysql.models.ConfigurationSummary`
    :rtype: :class:`~oci.response.Response`
    :example:
    Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/mysql/list_configurations.py.html>`__ to see an example of how to use list_configurations API.
    """
    resource_path = "/configurations"
    method = "GET"
    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "opc_request_id",
        "configuration_id",
        "lifecycle_state",
        "type",
        "display_name",
        "shape_name",
        "sort_by",
        "sort_order",
        "limit",
        "page"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "list_configurations got unknown kwargs: {!r}".format(extra_kwargs))
    if 'lifecycle_state' in kwargs:
        lifecycle_state_allowed_values = ["ACTIVE", "DELETED"]
        if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
            raise ValueError(
                "Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
            )
    if 'type' in kwargs:
        type_allowed_values = ["DEFAULT", "CUSTOM"]
        for type_item in kwargs['type']:
            if type_item not in type_allowed_values:
                raise ValueError(
                    "Invalid value for `type`, must be one of {0}".format(type_allowed_values)
                )
    if 'sort_by' in kwargs:
        sort_by_allowed_values = ["displayName", "shapeName", "timeCreated", "timeUpdated"]
        if kwargs['sort_by'] not in sort_by_allowed_values:
            raise ValueError(
                "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
            )
    if 'sort_order' in kwargs:
        sort_order_allowed_values = ["ASC", "DESC"]
        if kwargs['sort_order'] not in sort_order_allowed_values:
            raise ValueError(
                "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
            )
    query_params = {
        "compartmentId": compartment_id,
        "configurationId": kwargs.get("configuration_id", missing),
        "lifecycleState": kwargs.get("lifecycle_state", missing),
        "type": self.base_client.generate_collection_format_param(kwargs.get("type", missing), 'multi'),
        "displayName": kwargs.get("display_name", missing),
        "shapeName": kwargs.get("shape_name", missing),
        "sortBy": kwargs.get("sort_by", missing),
        "sortOrder": kwargs.get("sort_order", missing),
        "limit": kwargs.get("limit", missing),
        "page": kwargs.get("page", missing)
    }
    query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_client_retries_header(header_params)
            retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="list[ConfigurationSummary]")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="list[ConfigurationSummary]")
|
[
"def",
"list_configurations",
"(",
"self",
",",
"compartment_id",
",",
"*",
"*",
"kwargs",
")",
":",
"resource_path",
"=",
"\"/configurations\"",
"method",
"=",
"\"GET\"",
"# Don't accept unknown kwargs",
"expected_kwargs",
"=",
"[",
"\"retry_strategy\"",
",",
"\"opc_request_id\"",
",",
"\"configuration_id\"",
",",
"\"lifecycle_state\"",
",",
"\"type\"",
",",
"\"display_name\"",
",",
"\"shape_name\"",
",",
"\"sort_by\"",
",",
"\"sort_order\"",
",",
"\"limit\"",
",",
"\"page\"",
"]",
"extra_kwargs",
"=",
"[",
"_key",
"for",
"_key",
"in",
"six",
".",
"iterkeys",
"(",
"kwargs",
")",
"if",
"_key",
"not",
"in",
"expected_kwargs",
"]",
"if",
"extra_kwargs",
":",
"raise",
"ValueError",
"(",
"\"list_configurations got unknown kwargs: {!r}\"",
".",
"format",
"(",
"extra_kwargs",
")",
")",
"if",
"'lifecycle_state'",
"in",
"kwargs",
":",
"lifecycle_state_allowed_values",
"=",
"[",
"\"ACTIVE\"",
",",
"\"DELETED\"",
"]",
"if",
"kwargs",
"[",
"'lifecycle_state'",
"]",
"not",
"in",
"lifecycle_state_allowed_values",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `lifecycle_state`, must be one of {0}\"",
".",
"format",
"(",
"lifecycle_state_allowed_values",
")",
")",
"if",
"'type'",
"in",
"kwargs",
":",
"type_allowed_values",
"=",
"[",
"\"DEFAULT\"",
",",
"\"CUSTOM\"",
"]",
"for",
"type_item",
"in",
"kwargs",
"[",
"'type'",
"]",
":",
"if",
"type_item",
"not",
"in",
"type_allowed_values",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `type`, must be one of {0}\"",
".",
"format",
"(",
"type_allowed_values",
")",
")",
"if",
"'sort_by'",
"in",
"kwargs",
":",
"sort_by_allowed_values",
"=",
"[",
"\"displayName\"",
",",
"\"shapeName\"",
",",
"\"timeCreated\"",
",",
"\"timeUpdated\"",
"]",
"if",
"kwargs",
"[",
"'sort_by'",
"]",
"not",
"in",
"sort_by_allowed_values",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `sort_by`, must be one of {0}\"",
".",
"format",
"(",
"sort_by_allowed_values",
")",
")",
"if",
"'sort_order'",
"in",
"kwargs",
":",
"sort_order_allowed_values",
"=",
"[",
"\"ASC\"",
",",
"\"DESC\"",
"]",
"if",
"kwargs",
"[",
"'sort_order'",
"]",
"not",
"in",
"sort_order_allowed_values",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `sort_order`, must be one of {0}\"",
".",
"format",
"(",
"sort_order_allowed_values",
")",
")",
"query_params",
"=",
"{",
"\"compartmentId\"",
":",
"compartment_id",
",",
"\"configurationId\"",
":",
"kwargs",
".",
"get",
"(",
"\"configuration_id\"",
",",
"missing",
")",
",",
"\"lifecycleState\"",
":",
"kwargs",
".",
"get",
"(",
"\"lifecycle_state\"",
",",
"missing",
")",
",",
"\"type\"",
":",
"self",
".",
"base_client",
".",
"generate_collection_format_param",
"(",
"kwargs",
".",
"get",
"(",
"\"type\"",
",",
"missing",
")",
",",
"'multi'",
")",
",",
"\"displayName\"",
":",
"kwargs",
".",
"get",
"(",
"\"display_name\"",
",",
"missing",
")",
",",
"\"shapeName\"",
":",
"kwargs",
".",
"get",
"(",
"\"shape_name\"",
",",
"missing",
")",
",",
"\"sortBy\"",
":",
"kwargs",
".",
"get",
"(",
"\"sort_by\"",
",",
"missing",
")",
",",
"\"sortOrder\"",
":",
"kwargs",
".",
"get",
"(",
"\"sort_order\"",
",",
"missing",
")",
",",
"\"limit\"",
":",
"kwargs",
".",
"get",
"(",
"\"limit\"",
",",
"missing",
")",
",",
"\"page\"",
":",
"kwargs",
".",
"get",
"(",
"\"page\"",
",",
"missing",
")",
"}",
"query_params",
"=",
"{",
"k",
":",
"v",
"for",
"(",
"k",
",",
"v",
")",
"in",
"six",
".",
"iteritems",
"(",
"query_params",
")",
"if",
"v",
"is",
"not",
"missing",
"and",
"v",
"is",
"not",
"None",
"}",
"header_params",
"=",
"{",
"\"accept\"",
":",
"\"application/json\"",
",",
"\"content-type\"",
":",
"\"application/json\"",
",",
"\"opc-request-id\"",
":",
"kwargs",
".",
"get",
"(",
"\"opc_request_id\"",
",",
"missing",
")",
"}",
"header_params",
"=",
"{",
"k",
":",
"v",
"for",
"(",
"k",
",",
"v",
")",
"in",
"six",
".",
"iteritems",
"(",
"header_params",
")",
"if",
"v",
"is",
"not",
"missing",
"and",
"v",
"is",
"not",
"None",
"}",
"retry_strategy",
"=",
"self",
".",
"base_client",
".",
"get_preferred_retry_strategy",
"(",
"operation_retry_strategy",
"=",
"kwargs",
".",
"get",
"(",
"'retry_strategy'",
")",
",",
"client_retry_strategy",
"=",
"self",
".",
"retry_strategy",
")",
"if",
"retry_strategy",
":",
"if",
"not",
"isinstance",
"(",
"retry_strategy",
",",
"retry",
".",
"NoneRetryStrategy",
")",
":",
"self",
".",
"base_client",
".",
"add_opc_client_retries_header",
"(",
"header_params",
")",
"retry_strategy",
".",
"add_circuit_breaker_callback",
"(",
"self",
".",
"circuit_breaker_callback",
")",
"return",
"retry_strategy",
".",
"make_retrying_call",
"(",
"self",
".",
"base_client",
".",
"call_api",
",",
"resource_path",
"=",
"resource_path",
",",
"method",
"=",
"method",
",",
"query_params",
"=",
"query_params",
",",
"header_params",
"=",
"header_params",
",",
"response_type",
"=",
"\"list[ConfigurationSummary]\"",
")",
"else",
":",
"return",
"self",
".",
"base_client",
".",
"call_api",
"(",
"resource_path",
"=",
"resource_path",
",",
"method",
"=",
"method",
",",
"query_params",
"=",
"query_params",
",",
"header_params",
"=",
"header_params",
",",
"response_type",
"=",
"\"list[ConfigurationSummary]\"",
")"
] |
https://github.com/oracle/oci-python-sdk/blob/3c1604e4e212008fb6718e2f68cdb5ef71fd5793/src/oci/mysql/mysqlaas_client.py#L362-L534
|
||
chanyn/Reasoning-RCNN
|
4bcad9f8338271f502598f6b880dcfea90ed6ad2
|
mmdet/core/evaluation/eval_hooks.py
|
python
|
DistEvalHook._barrier
|
(self, rank, world_size)
|
Due to some issues with `torch.distributed.barrier()`, we have to
implement this ugly barrier function.
|
Due to some issues with `torch.distributed.barrier()`, we have to
implement this ugly barrier function.
|
[
"Due",
"to",
"some",
"issues",
"with",
"torch",
".",
"distributed",
".",
"barrier",
"()",
"we",
"have",
"to",
"implement",
"this",
"ugly",
"barrier",
"function",
"."
] |
def _barrier(self, rank, world_size):
    """Due to some issues with `torch.distributed.barrier()`, we have to
    implement this ugly barrier function.
    """
    if rank == 0:
        for i in range(1, world_size):
            tmp = osp.join(self.lock_dir, '{}.pkl'.format(i))
            while not (osp.exists(tmp)):
                time.sleep(1)
        for i in range(1, world_size):
            tmp = osp.join(self.lock_dir, '{}.pkl'.format(i))
            os.remove(tmp)
    else:
        tmp = osp.join(self.lock_dir, '{}.pkl'.format(rank))
        mmcv.dump([], tmp)
        while osp.exists(tmp):
            time.sleep(1)
|
[
"def",
"_barrier",
"(",
"self",
",",
"rank",
",",
"world_size",
")",
":",
"if",
"rank",
"==",
"0",
":",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"world_size",
")",
":",
"tmp",
"=",
"osp",
".",
"join",
"(",
"self",
".",
"lock_dir",
",",
"'{}.pkl'",
".",
"format",
"(",
"i",
")",
")",
"while",
"not",
"(",
"osp",
".",
"exists",
"(",
"tmp",
")",
")",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"world_size",
")",
":",
"tmp",
"=",
"osp",
".",
"join",
"(",
"self",
".",
"lock_dir",
",",
"'{}.pkl'",
".",
"format",
"(",
"i",
")",
")",
"os",
".",
"remove",
"(",
"tmp",
")",
"else",
":",
"tmp",
"=",
"osp",
".",
"join",
"(",
"self",
".",
"lock_dir",
",",
"'{}.pkl'",
".",
"format",
"(",
"rank",
")",
")",
"mmcv",
".",
"dump",
"(",
"[",
"]",
",",
"tmp",
")",
"while",
"osp",
".",
"exists",
"(",
"tmp",
")",
":",
"time",
".",
"sleep",
"(",
"1",
")"
] |
https://github.com/chanyn/Reasoning-RCNN/blob/4bcad9f8338271f502598f6b880dcfea90ed6ad2/mmdet/core/evaluation/eval_hooks.py#L34-L50
|
||
bendmorris/static-python
|
2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473
|
Lib/decimal.py
|
python
|
Context.max_mag
|
(self, a, b)
|
return a.max_mag(b, context=self)
|
Compares the values numerically with their sign ignored.
>>> ExtendedContext.max_mag(Decimal('7'), Decimal('NaN'))
Decimal('7')
>>> ExtendedContext.max_mag(Decimal('7'), Decimal('-10'))
Decimal('-10')
>>> ExtendedContext.max_mag(1, -2)
Decimal('-2')
>>> ExtendedContext.max_mag(Decimal(1), -2)
Decimal('-2')
>>> ExtendedContext.max_mag(1, Decimal(-2))
Decimal('-2')
|
Compares the values numerically with their sign ignored.
|
[
"Compares",
"the",
"values",
"numerically",
"with",
"their",
"sign",
"ignored",
"."
] |
def max_mag(self, a, b):
    """Compares the values numerically with their sign ignored.
    >>> ExtendedContext.max_mag(Decimal('7'), Decimal('NaN'))
    Decimal('7')
    >>> ExtendedContext.max_mag(Decimal('7'), Decimal('-10'))
    Decimal('-10')
    >>> ExtendedContext.max_mag(1, -2)
    Decimal('-2')
    >>> ExtendedContext.max_mag(Decimal(1), -2)
    Decimal('-2')
    >>> ExtendedContext.max_mag(1, Decimal(-2))
    Decimal('-2')
    """
    a = _convert_other(a, raiseit=True)
    return a.max_mag(b, context=self)
|
[
"def",
"max_mag",
"(",
"self",
",",
"a",
",",
"b",
")",
":",
"a",
"=",
"_convert_other",
"(",
"a",
",",
"raiseit",
"=",
"True",
")",
"return",
"a",
".",
"max_mag",
"(",
"b",
",",
"context",
"=",
"self",
")"
] |
https://github.com/bendmorris/static-python/blob/2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473/Lib/decimal.py#L4863-L4878
|
|
django-nonrel/django-nonrel
|
4fbfe7344481a5eab8698f79207f09124310131b
|
django/contrib/gis/geos/prototypes/errcheck.py
|
python
|
check_zero
|
(result, func, cargs)
|
Error checking on routines that should not return 0.
|
Error checking on routines that should not return 0.
|
[
"Error",
"checking",
"on",
"routines",
"that",
"should",
"not",
"return",
"0",
"."
] |
def check_zero(result, func, cargs):
    "Error checking on routines that should not return 0."
    if result == 0:
        raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
    else:
        return result
|
[
"def",
"check_zero",
"(",
"result",
",",
"func",
",",
"cargs",
")",
":",
"if",
"result",
"==",
"0",
":",
"raise",
"GEOSException",
"(",
"'Error encountered in GEOS C function \"%s\".'",
"%",
"func",
".",
"__name__",
")",
"else",
":",
"return",
"result"
] |
https://github.com/django-nonrel/django-nonrel/blob/4fbfe7344481a5eab8698f79207f09124310131b/django/contrib/gis/geos/prototypes/errcheck.py#L90-L95
|
||
getsentry/sentry
|
83b1f25aac3e08075e0e2495bc29efaf35aca18a
|
src/sentry/mediators/param.py
|
python
|
Param._eval_string_type
|
(self)
|
return getattr(sys.modules[mod], klass)
|
Converts a class path in string form to the actual class object.
Example:
>>> self._type = 'sentry.models.Project'
>>> self._eval_string_type()
sentry.models.project.Project
|
Converts a class path in string form to the actual class object.
|
[
"Converts",
"a",
"class",
"path",
"in",
"string",
"form",
"to",
"the",
"actual",
"class",
"object",
"."
] |
def _eval_string_type(self):
    """
    Converts a class path in string form to the actual class object.
    Example:
    >>> self._type = 'sentry.models.Project'
    >>> self._eval_string_type()
    sentry.models.project.Project
    """
    mod, klass = self._type.rsplit(".", 1)
    return getattr(sys.modules[mod], klass)
|
[
"def",
"_eval_string_type",
"(",
"self",
")",
":",
"mod",
",",
"klass",
"=",
"self",
".",
"_type",
".",
"rsplit",
"(",
"\".\"",
",",
"1",
")",
"return",
"getattr",
"(",
"sys",
".",
"modules",
"[",
"mod",
"]",
",",
"klass",
")"
] |
https://github.com/getsentry/sentry/blob/83b1f25aac3e08075e0e2495bc29efaf35aca18a/src/sentry/mediators/param.py#L131-L141
|
|
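A note on the `_eval_string_type` record above: the lookup goes through `sys.modules`, so it only resolves classes whose module has already been imported; `importlib.import_module` is the usual fallback when that is not guaranteed. A minimal standalone sketch of the same idea (the mediator's Param class is not reproduced here):

import sys
import importlib

def eval_string_type(path):
    # Split "package.module.Class" into module path and class name.
    mod, klass = path.rsplit(".", 1)
    if mod in sys.modules:                      # same trick as the Sentry helper
        return getattr(sys.modules[mod], klass)
    return getattr(importlib.import_module(mod), klass)

assert eval_string_type("collections.OrderedDict") is importlib.import_module("collections").OrderedDict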
Theano/Theano
|
8fd9203edfeecebced9344b0c70193be292a9ade
|
theano/gof/cmodule.py
|
python
|
ModuleCache.module_from_key
|
(self, key, lnk=None, keep_lock=False)
|
return module
|
Return a module from the cache, compiling it if necessary.
Parameters
----------
key
The key object associated with the module. If this hits a match,
we avoid compilation.
lnk
Usually a CLinker instance, but it can be any object that defines
the `get_src_code()` and `compile_cmodule(location)` functions. The
first one returns the source code of the module to load/compile and
the second performs the actual compilation.
keep_lock : bool
If True, the compilation lock will not be released if taken.
|
Return a module from the cache, compiling it if necessary.
|
[
"Return",
"a",
"module",
"from",
"the",
"cache",
"compiling",
"it",
"if",
"necessary",
"."
] |
def module_from_key(self, key, lnk=None, keep_lock=False):
    """
    Return a module from the cache, compiling it if necessary.
    Parameters
    ----------
    key
        The key object associated with the module. If this hits a match,
        we avoid compilation.
    lnk
        Usually a CLinker instance, but it can be any object that defines
        the `get_src_code()` and `compile_cmodule(location)` functions. The
        first one returns the source code of the module to load/compile and
        the second performs the actual compilation.
    keep_lock : bool
        If True, the compilation lock will not be released if taken.
    """
    # Is the module in the cache?
    module = self._get_from_key(key)
    if module is not None:
        return module
    src_code = lnk.get_src_code()
    # Is the source code already in the cache?
    module_hash = get_module_hash(src_code, key)
    module = self._get_from_hash(module_hash, key, keep_lock=keep_lock)
    if module is not None:
        return module
    with compilelock.lock_ctx(keep_lock=keep_lock):
        # 1) Maybe somebody else compiled it for us while we
        # where waiting for the lock. Try to load it again.
        # 2) If other repo that import Theano have Theano ops defined,
        # we need to refresh the cache here. Otherwise, there are import
        # order problems.
        # When device=gpu, we compile during Theano
        # import. This triggers the loading of the cache. But
        # unpickling the cache asks that the external Ops are
        # completly loaded, which isn't always the case!
        # If a module isn't completly loaded and its unpickling
        # fails, it means it is safe for this function
        # compilation to skip them, but not for future
        # compilations. So reloading the cache here
        # compilation fixes this problem. (we could do that only once)
        self.refresh(cleanup=False)
        module = self._get_from_key(key)
        if module is not None:
            return module
        module = self._get_from_hash(module_hash, key)
        if module is not None:
            return module
        hash_key = hash(key)
        nocleanup = False
        try:
            location = dlimport_workdir(self.dirname)
            module = lnk.compile_cmodule(location)
            name = module.__file__
            assert name.startswith(location)
            assert name not in self.module_from_name
            self.module_from_name[name] = module
            nocleanup = True
        except OSError as e:
            _logger.error(e)
            if e.errno == 31:
                _logger.error('There are %i files in %s',
                              len(os.listdir(config.compiledir)),
                              config.compiledir)
            raise
        finally:
            if not nocleanup:
                _rmtree(location, ignore_if_missing=True,
                        msg='exception during compilation')
        # Changing the hash of the key is not allowed during
        # compilation.
        assert hash(key) == hash_key
        key_data = self._add_to_cache(module, key, module_hash)
        self.module_hash_to_key_data[module_hash] = key_data
        self.stats[2] += 1
        return module
|
[
"def",
"module_from_key",
"(",
"self",
",",
"key",
",",
"lnk",
"=",
"None",
",",
"keep_lock",
"=",
"False",
")",
":",
"# Is the module in the cache?",
"module",
"=",
"self",
".",
"_get_from_key",
"(",
"key",
")",
"if",
"module",
"is",
"not",
"None",
":",
"return",
"module",
"src_code",
"=",
"lnk",
".",
"get_src_code",
"(",
")",
"# Is the source code already in the cache?",
"module_hash",
"=",
"get_module_hash",
"(",
"src_code",
",",
"key",
")",
"module",
"=",
"self",
".",
"_get_from_hash",
"(",
"module_hash",
",",
"key",
",",
"keep_lock",
"=",
"keep_lock",
")",
"if",
"module",
"is",
"not",
"None",
":",
"return",
"module",
"with",
"compilelock",
".",
"lock_ctx",
"(",
"keep_lock",
"=",
"keep_lock",
")",
":",
"# 1) Maybe somebody else compiled it for us while we",
"# where waiting for the lock. Try to load it again.",
"# 2) If other repo that import Theano have Theano ops defined,",
"# we need to refresh the cache here. Otherwise, there are import",
"# order problems.",
"# When device=gpu, we compile during Theano",
"# import. This triggers the loading of the cache. But",
"# unpickling the cache asks that the external Ops are",
"# completly loaded, which isn't always the case!",
"# If a module isn't completly loaded and its unpickling",
"# fails, it means it is safe for this function",
"# compilation to skip them, but not for future",
"# compilations. So reloading the cache here",
"# compilation fixes this problem. (we could do that only once)",
"self",
".",
"refresh",
"(",
"cleanup",
"=",
"False",
")",
"module",
"=",
"self",
".",
"_get_from_key",
"(",
"key",
")",
"if",
"module",
"is",
"not",
"None",
":",
"return",
"module",
"module",
"=",
"self",
".",
"_get_from_hash",
"(",
"module_hash",
",",
"key",
")",
"if",
"module",
"is",
"not",
"None",
":",
"return",
"module",
"hash_key",
"=",
"hash",
"(",
"key",
")",
"nocleanup",
"=",
"False",
"try",
":",
"location",
"=",
"dlimport_workdir",
"(",
"self",
".",
"dirname",
")",
"module",
"=",
"lnk",
".",
"compile_cmodule",
"(",
"location",
")",
"name",
"=",
"module",
".",
"__file__",
"assert",
"name",
".",
"startswith",
"(",
"location",
")",
"assert",
"name",
"not",
"in",
"self",
".",
"module_from_name",
"self",
".",
"module_from_name",
"[",
"name",
"]",
"=",
"module",
"nocleanup",
"=",
"True",
"except",
"OSError",
"as",
"e",
":",
"_logger",
".",
"error",
"(",
"e",
")",
"if",
"e",
".",
"errno",
"==",
"31",
":",
"_logger",
".",
"error",
"(",
"'There are %i files in %s'",
",",
"len",
"(",
"os",
".",
"listdir",
"(",
"config",
".",
"compiledir",
")",
")",
",",
"config",
".",
"compiledir",
")",
"raise",
"finally",
":",
"if",
"not",
"nocleanup",
":",
"_rmtree",
"(",
"location",
",",
"ignore_if_missing",
"=",
"True",
",",
"msg",
"=",
"'exception during compilation'",
")",
"# Changing the hash of the key is not allowed during",
"# compilation.",
"assert",
"hash",
"(",
"key",
")",
"==",
"hash_key",
"key_data",
"=",
"self",
".",
"_add_to_cache",
"(",
"module",
",",
"key",
",",
"module_hash",
")",
"self",
".",
"module_hash_to_key_data",
"[",
"module_hash",
"]",
"=",
"key_data",
"self",
".",
"stats",
"[",
"2",
"]",
"+=",
"1",
"return",
"module"
] |
https://github.com/Theano/Theano/blob/8fd9203edfeecebced9344b0c70193be292a9ade/theano/gof/cmodule.py#L1129-L1215
|
|
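An annotation on the Theano `module_from_key` record above: stripped of locking and error handling, the control flow is a two-level cache lookup (first by the exact key, then by a hash of the generated source) with compilation as the fallback. A schematic sketch of that shape; the names and in-memory dicts here are illustrative, not Theano's:

import hashlib

_by_key, _by_hash = {}, {}

def module_from_key(key, get_src_code, compile_src):
    if key in _by_key:                           # fast path: exact key match
        return _by_key[key]
    src = get_src_code()
    h = hashlib.sha256(src.encode()).hexdigest()
    module = _by_hash.get(h)
    if module is None:                           # slow path: actually compile
        module = compile_src(src)
        _by_hash[h] = module
    _by_key[key] = module
    return module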
dimagi/commcare-hq
|
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
|
custom/abt/reports/late_pmt.py
|
python
|
LatePmtReport.rows
|
(self)
|
return rows
|
[] |
def rows(self):
    def _to_report_format(date, user, error_msg):
        return [
            date.strftime("%Y-%m-%d"),
            user['username'].split('@')[0],
            user['phone_number'],
            user['country'],
            user['level_1'],
            user['level_2'],
            user['level_3'],
            user['level_4'],
            error_msg
        ]
    users = self.get_users
    dates = rrule(
        DAILY,
        dtstart=self.startdate,
        until=self.enddate,
        byweekday=(MO, TU, WE, TH, FR, SA)
    )
    include_missing_pmt_data = self.report_config['submission_status'] != 'group_b'
    include_incorrect_pmt_data = self.report_config['submission_status'] != 'group_a'
    rows = []
    if users:
        for date in dates:
            for user in users:
                sms_received = (date.date(), user['user_id']) in self.smss_received
                valid_sms = (date.date(), user['user_id']) in self.valid_smss_received
                if not sms_received and include_missing_pmt_data:
                    error_msg = _('No PMT data Submitted')
                elif sms_received and not valid_sms and include_incorrect_pmt_data:
                    error_msg = _('Incorrect PMT data Submitted')
                else:
                    continue
                rows.append(_to_report_format(date, user, error_msg))
    return rows
|
[
"def",
"rows",
"(",
"self",
")",
":",
"def",
"_to_report_format",
"(",
"date",
",",
"user",
",",
"error_msg",
")",
":",
"return",
"[",
"date",
".",
"strftime",
"(",
"\"%Y-%m-%d\"",
")",
",",
"user",
"[",
"'username'",
"]",
".",
"split",
"(",
"'@'",
")",
"[",
"0",
"]",
",",
"user",
"[",
"'phone_number'",
"]",
",",
"user",
"[",
"'country'",
"]",
",",
"user",
"[",
"'level_1'",
"]",
",",
"user",
"[",
"'level_2'",
"]",
",",
"user",
"[",
"'level_3'",
"]",
",",
"user",
"[",
"'level_4'",
"]",
",",
"error_msg",
"]",
"users",
"=",
"self",
".",
"get_users",
"dates",
"=",
"rrule",
"(",
"DAILY",
",",
"dtstart",
"=",
"self",
".",
"startdate",
",",
"until",
"=",
"self",
".",
"enddate",
",",
"byweekday",
"=",
"(",
"MO",
",",
"TU",
",",
"WE",
",",
"TH",
",",
"FR",
",",
"SA",
")",
")",
"include_missing_pmt_data",
"=",
"self",
".",
"report_config",
"[",
"'submission_status'",
"]",
"!=",
"'group_b'",
"include_incorrect_pmt_data",
"=",
"self",
".",
"report_config",
"[",
"'submission_status'",
"]",
"!=",
"'group_a'",
"rows",
"=",
"[",
"]",
"if",
"users",
":",
"for",
"date",
"in",
"dates",
":",
"for",
"user",
"in",
"users",
":",
"sms_received",
"=",
"(",
"date",
".",
"date",
"(",
")",
",",
"user",
"[",
"'user_id'",
"]",
")",
"in",
"self",
".",
"smss_received",
"valid_sms",
"=",
"(",
"date",
".",
"date",
"(",
")",
",",
"user",
"[",
"'user_id'",
"]",
")",
"in",
"self",
".",
"valid_smss_received",
"if",
"not",
"sms_received",
"and",
"include_missing_pmt_data",
":",
"error_msg",
"=",
"_",
"(",
"'No PMT data Submitted'",
")",
"elif",
"sms_received",
"and",
"not",
"valid_sms",
"and",
"include_incorrect_pmt_data",
":",
"error_msg",
"=",
"_",
"(",
"'Incorrect PMT data Submitted'",
")",
"else",
":",
"continue",
"rows",
".",
"append",
"(",
"_to_report_format",
"(",
"date",
",",
"user",
",",
"error_msg",
")",
")",
"return",
"rows"
] |
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/custom/abt/reports/late_pmt.py#L195-L231
|
|||
aleju/self-driving-truck
|
0d6870ea8d00eb5daa89deee2ce0b8fe4d04783b
|
train_steering_wheel/train.py
|
python
|
downscale_image
|
(steering_wheel_image)
|
return ia.imresize_single_image(
steering_wheel_image,
(MODEL_HEIGHT, MODEL_WIDTH),
interpolation="linear"
)
|
Downscale an image to the model's input sizes (height, width).
|
Downscale an image to the model's input sizes (height, width).
|
[
"Downscale",
"an",
"image",
"to",
"the",
"model",
"s",
"input",
"sizes",
"(",
"height",
"width",
")",
"."
] |
def downscale_image(steering_wheel_image):
    """Downscale an image to the model's input sizes (height, width)."""
    return ia.imresize_single_image(
        steering_wheel_image,
        (MODEL_HEIGHT, MODEL_WIDTH),
        interpolation="linear"
    )
|
[
"def",
"downscale_image",
"(",
"steering_wheel_image",
")",
":",
"return",
"ia",
".",
"imresize_single_image",
"(",
"steering_wheel_image",
",",
"(",
"MODEL_HEIGHT",
",",
"MODEL_WIDTH",
")",
",",
"interpolation",
"=",
"\"linear\"",
")"
] |
https://github.com/aleju/self-driving-truck/blob/0d6870ea8d00eb5daa89deee2ce0b8fe4d04783b/train_steering_wheel/train.py#L239-L245
|
|
magenta/magenta
|
be6558f1a06984faff6d6949234f5fe9ad0ffdb5
|
magenta/models/latent_transfer/common_joint.py
|
python
|
config_is_wavegan
|
(config)
|
return config['dataset'].lower() == 'wavegan'
|
[] |
def config_is_wavegan(config):
    return config['dataset'].lower() == 'wavegan'
|
[
"def",
"config_is_wavegan",
"(",
"config",
")",
":",
"return",
"config",
"[",
"'dataset'",
"]",
".",
"lower",
"(",
")",
"==",
"'wavegan'"
] |
https://github.com/magenta/magenta/blob/be6558f1a06984faff6d6949234f5fe9ad0ffdb5/magenta/models/latent_transfer/common_joint.py#L356-L357
|
|||
indigo-dc/udocker
|
87fb41cb5bcdb211d70f2b7f067c8e33d8959a1f
|
udocker/utils/uenv.py
|
python
|
Uenv.__iter__
|
(self)
|
return UenvIterator(self)
|
Returns the Uenv iterator
|
Returns the Uenv iterator
|
[
"Returns",
"the",
"Uenv",
"iterator"
] |
def __iter__(self):
    """Returns the Uenv iterator"""
    return UenvIterator(self)
|
[
"def",
"__iter__",
"(",
"self",
")",
":",
"return",
"UenvIterator",
"(",
"self",
")"
] |
https://github.com/indigo-dc/udocker/blob/87fb41cb5bcdb211d70f2b7f067c8e33d8959a1f/udocker/utils/uenv.py#L66-L68
|
|
DonnchaC/shadowbrokers-exploits
|
42d8265db860b634717da4faa668b2670457cf7e
|
windows/fuzzbunch/pyreadline/modes/basemode.py
|
python
|
BaseMode.paste
|
(self,e)
|
Paste windows clipboard.
Assume single line strip other lines and end of line markers and trailing spaces
|
Paste windows clipboard.
Assume single line strip other lines and end of line markers and trailing spaces
|
[
"Paste",
"windows",
"clipboard",
".",
"Assume",
"single",
"line",
"strip",
"other",
"lines",
"and",
"end",
"of",
"line",
"markers",
"and",
"trailing",
"spaces"
] |
def paste(self,e):
    '''Paste windows clipboard.
    Assume single line strip other lines and end of line markers and trailing spaces''' #(Control-v)
    if self.enable_win32_clipboard:
        txt=clipboard.get_clipboard_text_and_convert(False)
        txt=txt.split("\n")[0].strip("\r").strip("\n")
        log("paste: >%s<"%map(ord,txt))
        self.insert_text(txt)
|
[
"def",
"paste",
"(",
"self",
",",
"e",
")",
":",
"#(Control-v)",
"if",
"self",
".",
"enable_win32_clipboard",
":",
"txt",
"=",
"clipboard",
".",
"get_clipboard_text_and_convert",
"(",
"False",
")",
"txt",
"=",
"txt",
".",
"split",
"(",
"\"\\n\"",
")",
"[",
"0",
"]",
".",
"strip",
"(",
"\"\\r\"",
")",
".",
"strip",
"(",
"\"\\n\"",
")",
"log",
"(",
"\"paste: >%s<\"",
"%",
"map",
"(",
"ord",
",",
"txt",
")",
")",
"self",
".",
"insert_text",
"(",
"txt",
")"
] |
https://github.com/DonnchaC/shadowbrokers-exploits/blob/42d8265db860b634717da4faa668b2670457cf7e/windows/fuzzbunch/pyreadline/modes/basemode.py#L381-L388
|
||
Parsely/pykafka
|
e7665bf36bfe521050fdcb017c68e92365bd89ed
|
pykafka/protocol/produce.py
|
python
|
ProduceRequest.get_bytes
|
(self)
|
return output
|
Serialize the message
:returns: Serialized message
:rtype: :class:`bytearray`
|
Serialize the message
:returns: Serialized message
:rtype: :class:`bytearray`
|
[
"Serialize",
"the",
"message",
":",
"returns",
":",
"Serialized",
"message",
":",
"rtype",
":",
":",
"class",
":",
"bytearray"
] |
def get_bytes(self):
    """Serialize the message
    :returns: Serialized message
    :rtype: :class:`bytearray`
    """
    output = bytearray(len(self))
    self._write_header(output)
    offset = self.HEADER_LEN
    struct.pack_into('!hii', output, offset,
                     self.required_acks, self.timeout, len(self.msets))
    offset += 10
    for topic_name, partitions in iteritems(self.msets):
        fmt = '!h%dsi' % len(topic_name)
        struct.pack_into(fmt, output, offset, len(topic_name),
                         topic_name, len(partitions))
        offset += struct.calcsize(fmt)
        for partition_id, message_set in iteritems(partitions):
            mset_len = len(message_set)
            struct.pack_into('!ii', output, offset, partition_id, mset_len)
            offset += 8
            message_set.pack_into(output, offset)
            offset += mset_len
    return output
|
[
"def",
"get_bytes",
"(",
"self",
")",
":",
"output",
"=",
"bytearray",
"(",
"len",
"(",
"self",
")",
")",
"self",
".",
"_write_header",
"(",
"output",
")",
"offset",
"=",
"self",
".",
"HEADER_LEN",
"struct",
".",
"pack_into",
"(",
"'!hii'",
",",
"output",
",",
"offset",
",",
"self",
".",
"required_acks",
",",
"self",
".",
"timeout",
",",
"len",
"(",
"self",
".",
"msets",
")",
")",
"offset",
"+=",
"10",
"for",
"topic_name",
",",
"partitions",
"in",
"iteritems",
"(",
"self",
".",
"msets",
")",
":",
"fmt",
"=",
"'!h%dsi'",
"%",
"len",
"(",
"topic_name",
")",
"struct",
".",
"pack_into",
"(",
"fmt",
",",
"output",
",",
"offset",
",",
"len",
"(",
"topic_name",
")",
",",
"topic_name",
",",
"len",
"(",
"partitions",
")",
")",
"offset",
"+=",
"struct",
".",
"calcsize",
"(",
"fmt",
")",
"for",
"partition_id",
",",
"message_set",
"in",
"iteritems",
"(",
"partitions",
")",
":",
"mset_len",
"=",
"len",
"(",
"message_set",
")",
"struct",
".",
"pack_into",
"(",
"'!ii'",
",",
"output",
",",
"offset",
",",
"partition_id",
",",
"mset_len",
")",
"offset",
"+=",
"8",
"message_set",
".",
"pack_into",
"(",
"output",
",",
"offset",
")",
"offset",
"+=",
"mset_len",
"return",
"output"
] |
https://github.com/Parsely/pykafka/blob/e7665bf36bfe521050fdcb017c68e92365bd89ed/pykafka/protocol/produce.py#L81-L103
|
|
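A note on the pykafka `get_bytes` record above: the hard-coded offset increments are just the packed sizes of the corresponding `struct` format strings (10 bytes for '!hii', 8 for '!ii'), which `struct.calcsize` makes explicit:

import struct

assert struct.calcsize('!hii') == 10   # required_acks (h, 2) + timeout (i, 4) + topic count (i, 4)
assert struct.calcsize('!ii') == 8     # partition_id (i, 4) + message-set length (i, 4)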
huxiaoman7/leetcodebook
|
2bb52713522a0b4b64ad1f3e064d6045d40dfac8
|
python/40_CombinationSumII.py
|
python
|
Solution.combinationSum2
|
(self, candidates, target)
|
return res
|
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
|
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
|
[
":",
"type",
"candidates",
":",
"List",
"[",
"int",
"]",
":",
"type",
"target",
":",
"int",
":",
"rtype",
":",
"List",
"[",
"List",
"[",
"int",
"]]"
] |
def combinationSum2(self, candidates, target):
    """
    :type candidates: List[int]
    :type target: int
    :rtype: List[List[int]]
    """
    #方法一:DFS
    candidates.sort()
    print candidates
    res=[]
    self.dfs(candidates, target, 0, res, [])
    return res
|
[
"def",
"combinationSum2",
"(",
"self",
",",
"candidates",
",",
"target",
")",
":",
"#方法一:DFS",
"candidates",
".",
"sort",
"(",
")",
"print",
"candidates",
"res",
"=",
"[",
"]",
"self",
".",
"dfs",
"(",
"candidates",
",",
"target",
",",
"0",
",",
"res",
",",
"[",
"]",
")",
"return",
"res"
] |
https://github.com/huxiaoman7/leetcodebook/blob/2bb52713522a0b4b64ad1f3e064d6045d40dfac8/python/40_CombinationSumII.py#L11-L22
|
|
TencentCloud/tencentcloud-sdk-python
|
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
|
tencentcloud/yunjing/v20180228/yunjing_client.py
|
python
|
YunjingClient.ExportNonlocalLoginPlaces
|
(self, request)
|
本接口 (ExportNonlocalLoginPlaces) 用于导出异地登录事件记录CSV文件。
:param request: Request instance for ExportNonlocalLoginPlaces.
:type request: :class:`tencentcloud.yunjing.v20180228.models.ExportNonlocalLoginPlacesRequest`
:rtype: :class:`tencentcloud.yunjing.v20180228.models.ExportNonlocalLoginPlacesResponse`
|
本接口 (ExportNonlocalLoginPlaces) 用于导出异地登录事件记录CSV文件。
|
[
"本接口",
"(",
"ExportNonlocalLoginPlaces",
")",
"用于导出异地登录事件记录CSV文件。"
] |
def ExportNonlocalLoginPlaces(self, request):
"""本接口 (ExportNonlocalLoginPlaces) 用于导出异地登录事件记录CSV文件。
:param request: Request instance for ExportNonlocalLoginPlaces.
:type request: :class:`tencentcloud.yunjing.v20180228.models.ExportNonlocalLoginPlacesRequest`
:rtype: :class:`tencentcloud.yunjing.v20180228.models.ExportNonlocalLoginPlacesResponse`
"""
try:
params = request._serialize()
body = self.call("ExportNonlocalLoginPlaces", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ExportNonlocalLoginPlacesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
|
[
"def",
"ExportNonlocalLoginPlaces",
"(",
"self",
",",
"request",
")",
":",
"try",
":",
"params",
"=",
"request",
".",
"_serialize",
"(",
")",
"body",
"=",
"self",
".",
"call",
"(",
"\"ExportNonlocalLoginPlaces\"",
",",
"params",
")",
"response",
"=",
"json",
".",
"loads",
"(",
"body",
")",
"if",
"\"Error\"",
"not",
"in",
"response",
"[",
"\"Response\"",
"]",
":",
"model",
"=",
"models",
".",
"ExportNonlocalLoginPlacesResponse",
"(",
")",
"model",
".",
"_deserialize",
"(",
"response",
"[",
"\"Response\"",
"]",
")",
"return",
"model",
"else",
":",
"code",
"=",
"response",
"[",
"\"Response\"",
"]",
"[",
"\"Error\"",
"]",
"[",
"\"Code\"",
"]",
"message",
"=",
"response",
"[",
"\"Response\"",
"]",
"[",
"\"Error\"",
"]",
"[",
"\"Message\"",
"]",
"reqid",
"=",
"response",
"[",
"\"Response\"",
"]",
"[",
"\"RequestId\"",
"]",
"raise",
"TencentCloudSDKException",
"(",
"code",
",",
"message",
",",
"reqid",
")",
"except",
"Exception",
"as",
"e",
":",
"if",
"isinstance",
"(",
"e",
",",
"TencentCloudSDKException",
")",
":",
"raise",
"else",
":",
"raise",
"TencentCloudSDKException",
"(",
"e",
".",
"message",
",",
"e",
".",
"message",
")"
] |
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/yunjing/v20180228/yunjing_client.py#L2213-L2238
|
||
KhronosGroup/glTF-Blender-Exporter
|
dd7a3dbd8f43a79d572e7c45f4215f770bb92a37
|
scripts/addons/io_scene_gltf2/gltf2_get.py
|
python
|
get_scalar
|
(default_value, init_value = 0.0)
|
return return_value
|
Return scalar with a given default/fallback value.
|
Return scalar with a given default/fallback value.
|
[
"Return",
"scalar",
"with",
"a",
"given",
"default",
"/",
"fallback",
"value",
"."
] |
def get_scalar(default_value, init_value = 0.0):
"""
Return scalar with a given default/fallback value.
"""
return_value = init_value
if default_value is None:
return return_value
return_value = default_value
return return_value
|
[
"def",
"get_scalar",
"(",
"default_value",
",",
"init_value",
"=",
"0.0",
")",
":",
"return_value",
"=",
"init_value",
"if",
"default_value",
"is",
"None",
":",
"return",
"return_value",
"return_value",
"=",
"default_value",
"return",
"return_value"
] |
https://github.com/KhronosGroup/glTF-Blender-Exporter/blob/dd7a3dbd8f43a79d572e7c45f4215f770bb92a37/scripts/addons/io_scene_gltf2/gltf2_get.py#L436-L448
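Usage sketch of the fallback behaviour, assuming get_scalar is in scope; the numbers are arbitrary.
print(get_scalar(None, 1.0))   # -> 1.0, falls back to init_value
print(get_scalar(0.25, 1.0))   # -> 0.25, default_value wins when it is not None
print(get_scalar(0.0))         # -> 0.0, an explicit 0.0 is returned, not treated as missing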
|
|
openstack/ironic
|
b392dc19bcd29cef5a69ec00d2f18a7a19a679e5
|
ironic/drivers/hardware_type.py
|
python
|
AbstractHardwareType.supported_storage_interfaces
|
(self)
|
return [noop_storage.NoopStorage]
|
List of supported storage interfaces.
|
List of supported storage interfaces.
|
[
"List",
"of",
"supported",
"storage",
"interfaces",
"."
] |
def supported_storage_interfaces(self):
"""List of supported storage interfaces."""
return [noop_storage.NoopStorage]
|
[
"def",
"supported_storage_interfaces",
"(",
"self",
")",
":",
"return",
"[",
"noop_storage",
".",
"NoopStorage",
"]"
] |
https://github.com/openstack/ironic/blob/b392dc19bcd29cef5a69ec00d2f18a7a19a679e5/ironic/drivers/hardware_type.py#L97-L99
|
|
hellojialee/Improved-Body-Parts
|
0fa17dff1ea829c2951c18185d5f1ce7fe0072fc
|
parallel_encoding/paralle.py
|
python
|
DataParallelModel.replicate
|
(self, module, device_ids)
|
return modules
|
[] |
def replicate(self, module, device_ids):
modules = super(DataParallelModel, self).replicate(module, device_ids)
execute_replication_callbacks(modules)
return modules
|
[
"def",
"replicate",
"(",
"self",
",",
"module",
",",
"device_ids",
")",
":",
"modules",
"=",
"super",
"(",
"DataParallelModel",
",",
"self",
")",
".",
"replicate",
"(",
"module",
",",
"device_ids",
")",
"execute_replication_callbacks",
"(",
"modules",
")",
"return",
"modules"
] |
https://github.com/hellojialee/Improved-Body-Parts/blob/0fa17dff1ea829c2951c18185d5f1ce7fe0072fc/parallel_encoding/paralle.py#L127-L130
|
|||
eternnoir/pyTelegramBotAPI
|
fdbc0e6a619f671c2ac97afa2f694c17c6dce7d9
|
telebot/async_telebot.py
|
python
|
AsyncTeleBot.forward_message
|
(
self, chat_id: Union[int, str], from_chat_id: Union[int, str],
message_id: int, disable_notification: Optional[bool]=None,
protect_content: Optional[bool]=None,
timeout: Optional[int]=None)
|
return types.Message.de_json(
await asyncio_helper.forward_message(self.token, chat_id, from_chat_id, message_id, disable_notification, timeout, protect_content))
|
Use this method to forward messages of any kind.
:param disable_notification:
:param chat_id: which chat to forward
:param from_chat_id: which chat message from
:param message_id: message id
:param protect_content:
:param timeout:
:return: API reply.
|
Use this method to forward messages of any kind.
:param disable_notification:
:param chat_id: which chat to forward
:param from_chat_id: which chat message from
:param message_id: message id
:param protect_content:
:param timeout:
:return: API reply.
|
[
"Use",
"this",
"method",
"to",
"forward",
"messages",
"of",
"any",
"kind",
".",
":",
"param",
"disable_notification",
":",
":",
"param",
"chat_id",
":",
"which",
"chat",
"to",
"forward",
":",
"param",
"from_chat_id",
":",
"which",
"chat",
"message",
"from",
":",
"param",
"message_id",
":",
"message",
"id",
":",
"param",
"protect_content",
":",
":",
"param",
"timeout",
":",
":",
"return",
":",
"API",
"reply",
"."
] |
async def forward_message(
self, chat_id: Union[int, str], from_chat_id: Union[int, str],
message_id: int, disable_notification: Optional[bool]=None,
protect_content: Optional[bool]=None,
timeout: Optional[int]=None) -> types.Message:
"""
Use this method to forward messages of any kind.
:param disable_notification:
:param chat_id: which chat to forward
:param from_chat_id: which chat message from
:param message_id: message id
:param protect_content:
:param timeout:
:return: API reply.
"""
return types.Message.de_json(
await asyncio_helper.forward_message(self.token, chat_id, from_chat_id, message_id, disable_notification, timeout, protect_content))
|
[
"async",
"def",
"forward_message",
"(",
"self",
",",
"chat_id",
":",
"Union",
"[",
"int",
",",
"str",
"]",
",",
"from_chat_id",
":",
"Union",
"[",
"int",
",",
"str",
"]",
",",
"message_id",
":",
"int",
",",
"disable_notification",
":",
"Optional",
"[",
"bool",
"]",
"=",
"None",
",",
"protect_content",
":",
"Optional",
"[",
"bool",
"]",
"=",
"None",
",",
"timeout",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
")",
"->",
"types",
".",
"Message",
":",
"return",
"types",
".",
"Message",
".",
"de_json",
"(",
"await",
"asyncio_helper",
".",
"forward_message",
"(",
"self",
".",
"token",
",",
"chat_id",
",",
"from_chat_id",
",",
"message_id",
",",
"disable_notification",
",",
"timeout",
",",
"protect_content",
")",
")"
] |
https://github.com/eternnoir/pyTelegramBotAPI/blob/fdbc0e6a619f671c2ac97afa2f694c17c6dce7d9/telebot/async_telebot.py#L1528-L1544
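A usage sketch for the coroutine above. The token and chat ids are placeholders, so the call only succeeds against the real Bot API with valid values; the import path follows the module this record comes from.
import asyncio
from telebot.async_telebot import AsyncTeleBot

bot = AsyncTeleBot("123456:PLACEHOLDER-TOKEN")   # hypothetical token

async def main():
    # Forward message 42 between two chats; the ids below are made up.
    forwarded = await bot.forward_message(chat_id=111, from_chat_id=222, message_id=42)
    print(forwarded.message_id)

asyncio.run(main())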
|
|
robclewley/pydstool
|
939e3abc9dd1f180d35152bacbde57e24c85ff26
|
PyDSTool/parseUtils.py
|
python
|
findNumTailPos
|
(s)
|
Find position of numeric tail in alphanumeric string.
e.g. findNumTailPos('abc678') = 3
|
Find position of numeric tail in alphanumeric string.
|
[
"Find",
"position",
"of",
"numeric",
"tail",
"in",
"alphanumeric",
"string",
"."
] |
def findNumTailPos(s):
"""Find position of numeric tail in alphanumeric string.
e.g. findNumTailPos('abc678') = 3"""
try:
l = len(s)
if l > 1:
if s[-1] not in num_chars or s[0] in num_chars:
raise ValueError("Argument must be an alphanumeric string "
"starting with a letter and ending in a number")
for i in range(1, l+1):
if s[-i] not in num_chars:
return l-i+1
else:
raise ValueError("Argument must be alphanumeric string starting "
"with a letter and ending in a number")
except TypeError:
raise ValueError("Argument must be alphanumeric string starting "
"with a letter and ending in a number")
|
[
"def",
"findNumTailPos",
"(",
"s",
")",
":",
"try",
":",
"l",
"=",
"len",
"(",
"s",
")",
"if",
"l",
">",
"1",
":",
"if",
"s",
"[",
"-",
"1",
"]",
"not",
"in",
"num_chars",
"or",
"s",
"[",
"0",
"]",
"in",
"num_chars",
":",
"raise",
"ValueError",
"(",
"\"Argument must be an alphanumeric string \"",
"\"starting with a letter and ending in a number\"",
")",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"l",
"+",
"1",
")",
":",
"if",
"s",
"[",
"-",
"i",
"]",
"not",
"in",
"num_chars",
":",
"return",
"l",
"-",
"i",
"+",
"1",
"else",
":",
"raise",
"ValueError",
"(",
"\"Argument must be alphanumeric string starting \"",
"\"with a letter and ending in a number\"",
")",
"except",
"TypeError",
":",
"raise",
"ValueError",
"(",
"\"Argument must be alphanumeric string starting \"",
"\"with a letter and ending in a number\"",
")"
] |
https://github.com/robclewley/pydstool/blob/939e3abc9dd1f180d35152bacbde57e24c85ff26/PyDSTool/parseUtils.py#L1762-L1780
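Usage sketch, assuming findNumTailPos is in scope: the returned index splits the name from its numeric tail.
pos = findNumTailPos('abc678')
print(pos)                               # -> 3
print('abc678'[:pos], 'abc678'[pos:])    # -> abc 678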
|
||
hellohaptik/chatbot_ner
|
742104790170ae5b73c583c94db6786549337dc4
|
ner_v1/detectors/textual/city/city_detection.py
|
python
|
CityDetector._city_dict_from_text
|
(self, text, from_property=False, to_property=False, via_property=False,
normal_property=False, detection_method=FROM_MESSAGE)
|
return city_dict_list
|
Takes the text and the property values and creates a list of dictionaries based on number of cities detected
Attributes:
text: Text on which TextDetection needs to run on
from_property: True if the text is belonging to "from" property". for example, From Mumbai
to_property: True if the text is belonging to "to" property". for example, To Mumbai
via_property: True if the text is belonging to "via" property". for example, via Mumbai
normal_property: True if the text is belonging to "normal" property". for example, atms in Mumbai
detection_method: method through which it got detected whether its through message or model
Returns:
It returns the list of dictionary containing the fields like detection_method, from, normal, to,
text, value, via
For example:
[
{
'detection_method': 'message',
'from': False,
'normal': True,
'text': 'mumbai',
'to': False,
'value': u'BOM',
'via': False
}
]
|
Takes the text and the property values and creates a list of dictionaries based on number of cities detected
|
[
"Takes",
"the",
"text",
"and",
"the",
"property",
"values",
"and",
"creates",
"a",
"list",
"of",
"dictionaries",
"based",
"on",
"number",
"of",
"cities",
"detected"
] |
def _city_dict_from_text(self, text, from_property=False, to_property=False, via_property=False,
normal_property=False, detection_method=FROM_MESSAGE):
"""
Takes the text and the property values and creates a list of dictionaries based on number of cities detected
Attributes:
text: Text on which TextDetection needs to run on
from_property: True if the text is belonging to "from" property". for example, From Mumbai
to_property: True if the text is belonging to "to" property". for example, To Mumbai
via_property: True if the text is belonging to "via" property". for example, via Mumbai
normal_property: True if the text is belonging to "normal" property". for example, atms in Mumbai
detection_method: method through which it got detected whether its through message or model
Returns:
It returns the list of dictionary containing the fields like detection_method, from, normal, to,
text, value, via
For example:
[
{
'detection_method': 'message',
'from': False,
'normal': True,
'text': 'mumbai',
'to': False,
'value': u'BOM',
'via': False
}
]
"""
city_dict_list = []
city_list, original_list = self._city_value(text=text)
index = 0
for city in city_list:
city_dict_list.append(
{
detector_constant.CITY_VALUE: city,
detector_constant.ORIGINAL_CITY_TEXT: original_list[index],
detector_constant.CITY_FROM_PROPERTY: from_property,
detector_constant.CITY_TO_PROPERTY: to_property,
detector_constant.CITY_VIA_PROPERTY: via_property,
detector_constant.CITY_NORMAL_PROPERTY: normal_property,
detector_constant.CITY_DETECTION_METHOD: detection_method
}
)
index += 1
return city_dict_list
|
[
"def",
"_city_dict_from_text",
"(",
"self",
",",
"text",
",",
"from_property",
"=",
"False",
",",
"to_property",
"=",
"False",
",",
"via_property",
"=",
"False",
",",
"normal_property",
"=",
"False",
",",
"detection_method",
"=",
"FROM_MESSAGE",
")",
":",
"city_dict_list",
"=",
"[",
"]",
"city_list",
",",
"original_list",
"=",
"self",
".",
"_city_value",
"(",
"text",
"=",
"text",
")",
"index",
"=",
"0",
"for",
"city",
"in",
"city_list",
":",
"city_dict_list",
".",
"append",
"(",
"{",
"detector_constant",
".",
"CITY_VALUE",
":",
"city",
",",
"detector_constant",
".",
"ORIGINAL_CITY_TEXT",
":",
"original_list",
"[",
"index",
"]",
",",
"detector_constant",
".",
"CITY_FROM_PROPERTY",
":",
"from_property",
",",
"detector_constant",
".",
"CITY_TO_PROPERTY",
":",
"to_property",
",",
"detector_constant",
".",
"CITY_VIA_PROPERTY",
":",
"via_property",
",",
"detector_constant",
".",
"CITY_NORMAL_PROPERTY",
":",
"normal_property",
",",
"detector_constant",
".",
"CITY_DETECTION_METHOD",
":",
"detection_method",
"}",
")",
"index",
"+=",
"1",
"return",
"city_dict_list"
] |
https://github.com/hellohaptik/chatbot_ner/blob/742104790170ae5b73c583c94db6786549337dc4/ner_v1/detectors/textual/city/city_detection.py#L329-L378
|
|
bgavran/DNC
|
668a2f1ecc676748c68cedab78dd198aca190d3b
|
src/controller_implementations/dnc/memory.py
|
python
|
Memory.reshape_and_softmax
|
(self, r_read_modes)
|
return tf.nn.softmax(r_read_modes, dim=2)
|
[] |
def reshape_and_softmax(self, r_read_modes):
r_read_modes = tf.reshape(r_read_modes, [self.batch_size, self.num_read_heads, 3])
return tf.nn.softmax(r_read_modes, dim=2)
|
[
"def",
"reshape_and_softmax",
"(",
"self",
",",
"r_read_modes",
")",
":",
"r_read_modes",
"=",
"tf",
".",
"reshape",
"(",
"r_read_modes",
",",
"[",
"self",
".",
"batch_size",
",",
"self",
".",
"num_read_heads",
",",
"3",
"]",
")",
"return",
"tf",
".",
"nn",
".",
"softmax",
"(",
"r_read_modes",
",",
"dim",
"=",
"2",
")"
] |
https://github.com/bgavran/DNC/blob/668a2f1ecc676748c68cedab78dd198aca190d3b/src/controller_implementations/dnc/memory.py#L226-L228
|
|||
xfgryujk/blivedm
|
d8f7f6b7828069cb6c1fd13f756cfd891f0b1a46
|
blivedm/models.py
|
python
|
DanmakuMessage.emoticon_options_dict
|
(self)
|
示例:
{'bulge_display': 0, 'emoticon_unique': 'official_13', 'height': 60, 'in_player_area': 1, 'is_dynamic': 1,
'url': 'https://i0.hdslb.com/bfs/live/a98e35996545509188fe4d24bd1a56518ea5af48.png', 'width': 183}
|
示例:
{'bulge_display': 0, 'emoticon_unique': 'official_13', 'height': 60, 'in_player_area': 1, 'is_dynamic': 1,
'url': 'https://i0.hdslb.com/bfs/live/a98e35996545509188fe4d24bd1a56518ea5af48.png', 'width': 183}
|
[
"示例:",
"{",
"bulge_display",
":",
"0",
"emoticon_unique",
":",
"official_13",
"height",
":",
"60",
"in_player_area",
":",
"1",
"is_dynamic",
":",
"1",
"url",
":",
"https",
":",
"//",
"i0",
".",
"hdslb",
".",
"com",
"/",
"bfs",
"/",
"live",
"/",
"a98e35996545509188fe4d24bd1a56518ea5af48",
".",
"png",
"width",
":",
"183",
"}"
] |
def emoticon_options_dict(self) -> dict:
"""
示例:
{'bulge_display': 0, 'emoticon_unique': 'official_13', 'height': 60, 'in_player_area': 1, 'is_dynamic': 1,
'url': 'https://i0.hdslb.com/bfs/live/a98e35996545509188fe4d24bd1a56518ea5af48.png', 'width': 183}
"""
if isinstance(self.emoticon_options, dict):
return self.emoticon_options
try:
return json.loads(self.emoticon_options)
except (json.JSONDecodeError, TypeError):
return {}
|
[
"def",
"emoticon_options_dict",
"(",
"self",
")",
"->",
"dict",
":",
"if",
"isinstance",
"(",
"self",
".",
"emoticon_options",
",",
"dict",
")",
":",
"return",
"self",
".",
"emoticon_options",
"try",
":",
"return",
"json",
".",
"loads",
"(",
"self",
".",
"emoticon_options",
")",
"except",
"(",
"json",
".",
"JSONDecodeError",
",",
"TypeError",
")",
":",
"return",
"{",
"}"
] |
https://github.com/xfgryujk/blivedm/blob/d8f7f6b7828069cb6c1fd13f756cfd891f0b1a46/blivedm/models.py#L222-L233
|
||
thatbrguy/Pedestrian-Detection
|
b11c7d6bed0ff320811726fe1c429be26a87da9e
|
slim/datasets/dataset_utils.py
|
python
|
bytes_feature
|
(values)
|
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))
|
Returns a TF-Feature of bytes.
Args:
values: A string.
Returns:
A TF-Feature.
|
Returns a TF-Feature of bytes.
|
[
"Returns",
"a",
"TF",
"-",
"Feature",
"of",
"bytes",
"."
] |
def bytes_feature(values):
"""Returns a TF-Feature of bytes.
Args:
values: A string.
Returns:
A TF-Feature.
"""
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))
|
[
"def",
"bytes_feature",
"(",
"values",
")",
":",
"return",
"tf",
".",
"train",
".",
"Feature",
"(",
"bytes_list",
"=",
"tf",
".",
"train",
".",
"BytesList",
"(",
"value",
"=",
"[",
"values",
"]",
")",
")"
] |
https://github.com/thatbrguy/Pedestrian-Detection/blob/b11c7d6bed0ff320811726fe1c429be26a87da9e/slim/datasets/dataset_utils.py#L44-L53
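A sketch of folding such a feature into a tf.train.Example; TensorFlow is assumed to be installed, and bytes_feature is repeated so the snippet stands alone. The feature keys and payload bytes are placeholders.
import tensorflow as tf

def bytes_feature(values):
    # same body as the record above
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))

example = tf.train.Example(features=tf.train.Features(feature={
    'image/encoded': bytes_feature(b'\x89PNG...'),   # placeholder image bytes
    'image/format': bytes_feature(b'png'),
}))
serialized = example.SerializeToString()
print(len(serialized))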
|
|
richardaecn/class-balanced-loss
|
1d7857208a2abc03d84e35a9d5383af8225d4b4d
|
tpu/models/official/amoeba_net/inception_preprocessing.py
|
python
|
apply_with_random_selector
|
(x, func, num_cases)
|
return control_flow_ops.merge([
func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case)
for case in range(num_cases)])[0]
|
Computes func(x, sel), with sel sampled from [0...num_cases-1].
Args:
x: input Tensor.
func: Python function to apply.
num_cases: Python int32, number of cases to sample sel from.
Returns:
The result of func(x, sel), where func receives the value of the
selector as a python integer, but sel is sampled dynamically.
|
Computes func(x, sel), with sel sampled from [0...num_cases-1].
|
[
"Computes",
"func",
"(",
"x",
"sel",
")",
"with",
"sel",
"sampled",
"from",
"[",
"0",
"...",
"num_cases",
"-",
"1",
"]",
"."
] |
def apply_with_random_selector(x, func, num_cases):
"""Computes func(x, sel), with sel sampled from [0...num_cases-1].
Args:
x: input Tensor.
func: Python function to apply.
num_cases: Python int32, number of cases to sample sel from.
Returns:
The result of func(x, sel), where func receives the value of the
selector as a python integer, but sel is sampled dynamically.
"""
sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32)
# Pass the real x only to one of the func calls.
return control_flow_ops.merge([
func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case)
for case in range(num_cases)])[0]
|
[
"def",
"apply_with_random_selector",
"(",
"x",
",",
"func",
",",
"num_cases",
")",
":",
"sel",
"=",
"tf",
".",
"random_uniform",
"(",
"[",
"]",
",",
"maxval",
"=",
"num_cases",
",",
"dtype",
"=",
"tf",
".",
"int32",
")",
"# Pass the real x only to one of the func calls.",
"return",
"control_flow_ops",
".",
"merge",
"(",
"[",
"func",
"(",
"control_flow_ops",
".",
"switch",
"(",
"x",
",",
"tf",
".",
"equal",
"(",
"sel",
",",
"case",
")",
")",
"[",
"1",
"]",
",",
"case",
")",
"for",
"case",
"in",
"range",
"(",
"num_cases",
")",
"]",
")",
"[",
"0",
"]"
] |
https://github.com/richardaecn/class-balanced-loss/blob/1d7857208a2abc03d84e35a9d5383af8225d4b4d/tpu/models/official/amoeba_net/inception_preprocessing.py#L42-L58
|
|
funcwj/setk
|
392c72966ab9f97088955c0bbe6436d7fd59168c
|
scripts/sptk/rir_generate_1d.py
|
python
|
Room.set_mic
|
(self, topo, center, vertical=False)
|
Place microphone array
topo: tuple like (x1, x2, ...)
center: center 3D postion for microphone array
|
Place microphone array
topo: tuple like (x1, x2, ...)
center: center 3D postion for microphone array
|
[
"Place",
"microphone",
"array",
"topo",
":",
"tuple",
"like",
"(",
"x1",
"x2",
"...",
")",
"center",
":",
"center",
"3D",
"postion",
"for",
"microphone",
"array"
] |
def set_mic(self, topo, center, vertical=False):
"""
Place microphone array
topo: tuple like (x1, x2, ...)
center: center 3D postion for microphone array
"""
Mx, My, Mz = center
Mc = (topo[-1] - topo[0]) / 2
if not vertical:
self.rpos = [(Mx - Mc + x, My, Mz) for x in topo]
else:
self.rpos = [(Mx, My - Mc + x, Mz) for x in topo]
self.topo = topo
self.rcen = (Mx, My)
|
[
"def",
"set_mic",
"(",
"self",
",",
"topo",
",",
"center",
",",
"vertical",
"=",
"False",
")",
":",
"Mx",
",",
"My",
",",
"Mz",
"=",
"center",
"Mc",
"=",
"(",
"topo",
"[",
"-",
"1",
"]",
"-",
"topo",
"[",
"0",
"]",
")",
"/",
"2",
"if",
"not",
"vertical",
":",
"self",
".",
"rpos",
"=",
"[",
"(",
"Mx",
"-",
"Mc",
"+",
"x",
",",
"My",
",",
"Mz",
")",
"for",
"x",
"in",
"topo",
"]",
"else",
":",
"self",
".",
"rpos",
"=",
"[",
"(",
"Mx",
",",
"My",
"-",
"Mc",
"+",
"x",
",",
"Mz",
")",
"for",
"x",
"in",
"topo",
"]",
"self",
".",
"topo",
"=",
"topo",
"self",
".",
"rcen",
"=",
"(",
"Mx",
",",
"My",
")"
] |
https://github.com/funcwj/setk/blob/392c72966ab9f97088955c0bbe6436d7fd59168c/scripts/sptk/rir_generate_1d.py#L63-L76
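A standalone recomputation of the placement arithmetic above (no class needed), assuming a 4-element linear topology in metres and the horizontal (vertical=False) branch.
topo = (0.0, 0.5, 1.0, 1.5)          # hypothetical element offsets (metres)
Mx, My, Mz = 2.0, 3.0, 1.25          # array centre
Mc = (topo[-1] - topo[0]) / 2        # half the aperture: 0.75

rpos = [(Mx - Mc + x, My, Mz) for x in topo]    # horizontal placement
print(rpos[0], rpos[-1])             # -> (1.25, 3.0, 1.25) (2.75, 3.0, 1.25)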
|
||
blackye/webdirdig
|
11eb3df84d228127dde1dd4afcb922f5075903a2
|
thirdparty_libs/requests/packages/urllib3/packages/ordered_dict.py
|
python
|
OrderedDict.__setitem__
|
(self, key, value, dict_setitem=dict.__setitem__)
|
od.__setitem__(i, y) <==> od[i]=y
|
od.__setitem__(i, y) <==> od[i]=y
|
[
"od",
".",
"__setitem__",
"(",
"i",
"y",
")",
"<",
"==",
">",
"od",
"[",
"i",
"]",
"=",
"y"
] |
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
|
[
"def",
"__setitem__",
"(",
"self",
",",
"key",
",",
"value",
",",
"dict_setitem",
"=",
"dict",
".",
"__setitem__",
")",
":",
"# Setting a new item creates a new link which goes at the end of the linked",
"# list, and the inherited dictionary is updated with the new key/value pair.",
"if",
"key",
"not",
"in",
"self",
":",
"root",
"=",
"self",
".",
"__root",
"last",
"=",
"root",
"[",
"0",
"]",
"last",
"[",
"1",
"]",
"=",
"root",
"[",
"0",
"]",
"=",
"self",
".",
"__map",
"[",
"key",
"]",
"=",
"[",
"last",
",",
"root",
",",
"key",
"]",
"dict_setitem",
"(",
"self",
",",
"key",
",",
"value",
")"
] |
https://github.com/blackye/webdirdig/blob/11eb3df84d228127dde1dd4afcb922f5075903a2/thirdparty_libs/requests/packages/urllib3/packages/ordered_dict.py#L45-L53
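The __root/__map bookkeeping above is what preserves insertion order; the stdlib collections.OrderedDict behaves the same way, so a quick sketch with it shows the effect.
from collections import OrderedDict   # stdlib counterpart of this vendored class

od = OrderedDict()
od['b'] = 1
od['a'] = 2
od['b'] = 3                        # updating an existing key keeps its original position
print(list(od.items()))            # -> [('b', 3), ('a', 2)]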
|
||
lqkweb/sqlflow
|
bc2169cf78bb44879f6a7464dca9174fb02a29b8
|
sqlflow/dsl/parser.py
|
python
|
p_dropindex
|
(p)
|
dropindex : DROP INDEX ID '(' ID ')'
|
dropindex : DROP INDEX ID '(' ID ')'
|
[
"dropindex",
":",
"DROP",
"INDEX",
"ID",
"(",
"ID",
")"
] |
def p_dropindex(p):
""" dropindex : DROP INDEX ID '(' ID ')' """
p[0] = DropIndexNode(p[3], p[5])
|
[
"def",
"p_dropindex",
"(",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"DropIndexNode",
"(",
"p",
"[",
"3",
"]",
",",
"p",
"[",
"5",
"]",
")"
] |
https://github.com/lqkweb/sqlflow/blob/bc2169cf78bb44879f6a7464dca9174fb02a29b8/sqlflow/dsl/parser.py#L129-L131
|
||
zzzeek/sqlalchemy
|
fc5c54fcd4d868c2a4c7ac19668d72f506fe821e
|
lib/sqlalchemy/util/langhelpers.py
|
python
|
getargspec_init
|
(method)
|
inspect.getargspec with considerations for typical __init__ methods
Wraps inspect.getargspec with error handling for typical __init__ cases::
object.__init__ -> (self)
other unreflectable (usually C) -> (self, *args, **kwargs)
|
inspect.getargspec with considerations for typical __init__ methods
|
[
"inspect",
".",
"getargspec",
"with",
"considerations",
"for",
"typical",
"__init__",
"methods"
] |
def getargspec_init(method):
"""inspect.getargspec with considerations for typical __init__ methods
Wraps inspect.getargspec with error handling for typical __init__ cases::
object.__init__ -> (self)
other unreflectable (usually C) -> (self, *args, **kwargs)
"""
try:
return compat.inspect_getfullargspec(method)
except TypeError:
if method is object.__init__:
return (["self"], None, None, None)
else:
return (["self"], "args", "kwargs", None)
|
[
"def",
"getargspec_init",
"(",
"method",
")",
":",
"try",
":",
"return",
"compat",
".",
"inspect_getfullargspec",
"(",
"method",
")",
"except",
"TypeError",
":",
"if",
"method",
"is",
"object",
".",
"__init__",
":",
"return",
"(",
"[",
"\"self\"",
"]",
",",
"None",
",",
"None",
",",
"None",
")",
"else",
":",
"return",
"(",
"[",
"\"self\"",
"]",
",",
"\"args\"",
",",
"\"kwargs\"",
",",
"None",
")"
] |
https://github.com/zzzeek/sqlalchemy/blob/fc5c54fcd4d868c2a4c7ac19668d72f506fe821e/lib/sqlalchemy/util/langhelpers.py#L746-L761
|
||
overhangio/tutor
|
d45b36394af46de29d7817e2b45694d226d5677d
|
tutor/config.py
|
python
|
get_user
|
(root: str)
|
return config
|
Get the user configuration from the tutor root.
Overrides from environment variables are loaded as well.
|
Get the user configuration from the tutor root.
|
[
"Get",
"the",
"user",
"configuration",
"from",
"the",
"tutor",
"root",
"."
] |
def get_user(root: str) -> Config:
"""
Get the user configuration from the tutor root.
Overrides from environment variables are loaded as well.
"""
convert_json2yml(root)
path = config_path(root)
config = {}
if os.path.exists(path):
config = get_yaml_file(path)
upgrade_obsolete(config)
update_with_env(config)
return config
|
[
"def",
"get_user",
"(",
"root",
":",
"str",
")",
"->",
"Config",
":",
"convert_json2yml",
"(",
"root",
")",
"path",
"=",
"config_path",
"(",
"root",
")",
"config",
"=",
"{",
"}",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"config",
"=",
"get_yaml_file",
"(",
"path",
")",
"upgrade_obsolete",
"(",
"config",
")",
"update_with_env",
"(",
"config",
")",
"return",
"config"
] |
https://github.com/overhangio/tutor/blob/d45b36394af46de29d7817e2b45694d226d5677d/tutor/config.py#L87-L100
|
|
jazzband/django-downloadview
|
6dbf06c4ea430b305fba97a2c56482f595360f8d
|
django_downloadview/views/base.py
|
python
|
DownloadMixin.not_modified_response
|
(self, *response_args, **response_kwargs)
|
return HttpResponseNotModified(*response_args, **response_kwargs)
|
Return :class:`django.http.HttpResponseNotModified` instance.
|
Return :class:`django.http.HttpResponseNotModified` instance.
|
[
"Return",
":",
"class",
":",
"django",
".",
"http",
".",
"HttpResponseNotModified",
"instance",
"."
] |
def not_modified_response(self, *response_args, **response_kwargs):
"""Return :class:`django.http.HttpResponseNotModified` instance."""
return HttpResponseNotModified(*response_args, **response_kwargs)
|
[
"def",
"not_modified_response",
"(",
"self",
",",
"*",
"response_args",
",",
"*",
"*",
"response_kwargs",
")",
":",
"return",
"HttpResponseNotModified",
"(",
"*",
"response_args",
",",
"*",
"*",
"response_kwargs",
")"
] |
https://github.com/jazzband/django-downloadview/blob/6dbf06c4ea430b305fba97a2c56482f595360f8d/django_downloadview/views/base.py#L126-L128
|
|
PySimpleGUI/PySimpleGUI
|
6c0d1fb54f493d45e90180b322fbbe70f7a5af3c
|
PySimpleGUI.py
|
python
|
_no_titlebar_setup
|
(window)
|
Does the operations required to turn off the titlebar for the window.
The Raspberry Pi required the settings to be make after the window's creation.
Calling twice seems to have had better overall results so that's what's currently done.
The MAC has been the problem with this feature. It's been a chronic problem on the Mac.
:param window: window to turn off the titlebar if indicated in the settings
:type window: Window
|
Does the operations required to turn off the titlebar for the window.
The Raspberry Pi required the settings to be make after the window's creation.
Calling twice seems to have had better overall results so that's what's currently done.
The MAC has been the problem with this feature. It's been a chronic problem on the Mac.
:param window: window to turn off the titlebar if indicated in the settings
:type window: Window
|
[
"Does",
"the",
"operations",
"required",
"to",
"turn",
"off",
"the",
"titlebar",
"for",
"the",
"window",
".",
"The",
"Raspberry",
"Pi",
"required",
"the",
"settings",
"to",
"be",
"make",
"after",
"the",
"window",
"s",
"creation",
".",
"Calling",
"twice",
"seems",
"to",
"have",
"had",
"better",
"overall",
"results",
"so",
"that",
"s",
"what",
"s",
"currently",
"done",
".",
"The",
"MAC",
"has",
"been",
"the",
"problem",
"with",
"this",
"feature",
".",
"It",
"s",
"been",
"a",
"chronic",
"problem",
"on",
"the",
"Mac",
".",
":",
"param",
"window",
":",
"window",
"to",
"turn",
"off",
"the",
"titlebar",
"if",
"indicated",
"in",
"the",
"settings",
":",
"type",
"window",
":",
"Window"
] |
def _no_titlebar_setup(window):
"""
Does the operations required to turn off the titlebar for the window.
The Raspberry Pi required the settings to be make after the window's creation.
Calling twice seems to have had better overall results so that's what's currently done.
The MAC has been the problem with this feature. It's been a chronic problem on the Mac.
:param window: window to turn off the titlebar if indicated in the settings
:type window: Window
"""
try:
if window.NoTitleBar:
if running_linux():
# window.TKroot.wm_attributes("-type", 'splash')
window.TKroot.wm_attributes("-type", 'dock')
else:
window.TKroot.wm_overrideredirect(True)
# Special case for Mac. Need to clear flag again if not tkinter version 8.6.10+
# Previously restricted patch to only certain tkinter versions. Now use the patch setting exclusively regardless of tk ver
# if running_mac() and ENABLE_MAC_NOTITLEBAR_PATCH and (sum([int(i) for i in tclversion_detailed.split('.')]) < 24):
# if running_mac() and ENABLE_MAC_NOTITLEBAR_PATCH:
if _mac_should_apply_notitlebar_patch():
print('* Applying Mac no_titlebar patch *')
window.TKroot.wm_overrideredirect(False)
except Exception as e:
warnings.warn('** Problem setting no titlebar {} **'.format(e), UserWarning)
|
[
"def",
"_no_titlebar_setup",
"(",
"window",
")",
":",
"try",
":",
"if",
"window",
".",
"NoTitleBar",
":",
"if",
"running_linux",
"(",
")",
":",
"# window.TKroot.wm_attributes(\"-type\", 'splash')",
"window",
".",
"TKroot",
".",
"wm_attributes",
"(",
"\"-type\"",
",",
"'dock'",
")",
"else",
":",
"window",
".",
"TKroot",
".",
"wm_overrideredirect",
"(",
"True",
")",
"# Special case for Mac. Need to clear flag again if not tkinter version 8.6.10+",
"# Previously restricted patch to only certain tkinter versions. Now use the patch setting exclusively regardless of tk ver",
"# if running_mac() and ENABLE_MAC_NOTITLEBAR_PATCH and (sum([int(i) for i in tclversion_detailed.split('.')]) < 24):",
"# if running_mac() and ENABLE_MAC_NOTITLEBAR_PATCH:",
"if",
"_mac_should_apply_notitlebar_patch",
"(",
")",
":",
"print",
"(",
"'* Applying Mac no_titlebar patch *'",
")",
"window",
".",
"TKroot",
".",
"wm_overrideredirect",
"(",
"False",
")",
"except",
"Exception",
"as",
"e",
":",
"warnings",
".",
"warn",
"(",
"'** Problem setting no titlebar {} **'",
".",
"format",
"(",
"e",
")",
",",
"UserWarning",
")"
] |
https://github.com/PySimpleGUI/PySimpleGUI/blob/6c0d1fb54f493d45e90180b322fbbe70f7a5af3c/PySimpleGUI.py#L15655-L15679
|
||
mozilla/addons-server
|
cbfb29e5be99539c30248d70b93bb15e1c1bc9d7
|
src/olympia/stats/views.py
|
python
|
get_daterange_or_404
|
(start, end)
|
return (dates.cleaned_data['start'], dates.cleaned_data['end'])
|
Parse and validate a pair of YYYYMMDD date strings.
|
Parse and validate a pair of YYYYMMDD date strings.
|
[
"Parse",
"and",
"validate",
"a",
"pair",
"of",
"YYYYMMDD",
"date",
"strings",
"."
] |
def get_daterange_or_404(start, end):
"""Parse and validate a pair of YYYYMMDD date strings."""
dates = DateForm(data={'start': start, 'end': end})
if not dates.is_valid():
logger.info('Dates parsed were not valid.')
raise http.Http404
return (dates.cleaned_data['start'], dates.cleaned_data['end'])
|
[
"def",
"get_daterange_or_404",
"(",
"start",
",",
"end",
")",
":",
"dates",
"=",
"DateForm",
"(",
"data",
"=",
"{",
"'start'",
":",
"start",
",",
"'end'",
":",
"end",
"}",
")",
"if",
"not",
"dates",
".",
"is_valid",
"(",
")",
":",
"logger",
".",
"info",
"(",
"'Dates parsed were not valid.'",
")",
"raise",
"http",
".",
"Http404",
"return",
"(",
"dates",
".",
"cleaned_data",
"[",
"'start'",
"]",
",",
"dates",
".",
"cleaned_data",
"[",
"'end'",
"]",
")"
] |
https://github.com/mozilla/addons-server/blob/cbfb29e5be99539c30248d70b93bb15e1c1bc9d7/src/olympia/stats/views.py#L380-L387
|
|
niosus/EasyClangComplete
|
3b16eb17735aaa3f56bb295fc5481b269ee9f2ef
|
plugin/clang/cindex32.py
|
python
|
Type.get_result
|
(self)
|
return conf.lib.clang_getResultType(self)
|
Retrieve the result type associated with a function type.
|
Retrieve the result type associated with a function type.
|
[
"Retrieve",
"the",
"result",
"type",
"associated",
"with",
"a",
"function",
"type",
"."
] |
def get_result(self):
"""
Retrieve the result type associated with a function type.
"""
return conf.lib.clang_getResultType(self)
|
[
"def",
"get_result",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_getResultType",
"(",
"self",
")"
] |
https://github.com/niosus/EasyClangComplete/blob/3b16eb17735aaa3f56bb295fc5481b269ee9f2ef/plugin/clang/cindex32.py#L1596-L1600
|
|
golismero/golismero
|
7d605b937e241f51c1ca4f47b20f755eeefb9d76
|
golismero/api/parallel.py
|
python
|
Task.index
|
(self)
|
return self.__index
|
:returns: Key for the output dictionary. Used later to sort the results.
:rtype: int
|
:returns: Key for the output dictionary. Used later to sort the results.
:rtype: int
|
[
":",
"returns",
":",
"Key",
"for",
"the",
"output",
"dictionary",
".",
"Used",
"later",
"to",
"sort",
"the",
"results",
".",
":",
"rtype",
":",
"int"
] |
def index(self):
"""
:returns: Key for the output dictionary. Used later to sort the results.
:rtype: int
"""
return self.__index
|
[
"def",
"index",
"(",
"self",
")",
":",
"return",
"self",
".",
"__index"
] |
https://github.com/golismero/golismero/blob/7d605b937e241f51c1ca4f47b20f755eeefb9d76/golismero/api/parallel.py#L378-L383
|
|
CLUEbenchmark/CLUEPretrainedModels
|
b384fd41665a8261f9c689c940cf750b3bc21fce
|
baselines/models_pytorch/classifier_pytorch/tools/common.py
|
python
|
seed_everything
|
(seed=1029)
|
设置整个开发环境的seed
:param seed:
:param device:
:return:
|
设置整个开发环境的seed
:param seed:
:param device:
:return:
|
[
"设置整个开发环境的seed",
":",
"param",
"seed",
":",
":",
"param",
"device",
":",
":",
"return",
":"
] |
def seed_everything(seed=1029):
'''
设置整个开发环境的seed
:param seed:
:param device:
:return:
'''
random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
# some cudnn methods can be random even after fixing the seed
# unless you tell it to be deterministic
torch.backends.cudnn.deterministic = True
|
[
"def",
"seed_everything",
"(",
"seed",
"=",
"1029",
")",
":",
"random",
".",
"seed",
"(",
"seed",
")",
"os",
".",
"environ",
"[",
"'PYTHONHASHSEED'",
"]",
"=",
"str",
"(",
"seed",
")",
"np",
".",
"random",
".",
"seed",
"(",
"seed",
")",
"torch",
".",
"manual_seed",
"(",
"seed",
")",
"torch",
".",
"cuda",
".",
"manual_seed",
"(",
"seed",
")",
"torch",
".",
"cuda",
".",
"manual_seed_all",
"(",
"seed",
")",
"# some cudnn methods can be random even after fixing the seed",
"# unless you tell it to be deterministic",
"torch",
".",
"backends",
".",
"cudnn",
".",
"deterministic",
"=",
"True"
] |
https://github.com/CLUEbenchmark/CLUEPretrainedModels/blob/b384fd41665a8261f9c689c940cf750b3bc21fce/baselines/models_pytorch/classifier_pytorch/tools/common.py#L43-L58
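Typical call pattern, assuming seed_everything is importable along with numpy and torch; re-seeding with the same value reproduces the same draws.
import numpy as np
import torch

seed_everything(1029)                      # fix python, numpy and torch RNGs
a = np.random.rand(3)
t = torch.rand(3)

seed_everything(1029)                      # re-seeding reproduces the same sequence
assert np.allclose(a, np.random.rand(3))
assert torch.equal(t, torch.rand(3))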
|
||
Netflix/security_monkey
|
c28592ffd518fa399527d26262683fc860c30eef
|
security_monkey/auditors/elbv2.py
|
python
|
ELBv2Auditor._get_listener_ports_and_protocols
|
(self, item)
|
return protocol_and_ports
|
"Listeners": [
{
"Protocol": "HTTP",
"Port": 80,
}
],
|
"Listeners": [
{
"Protocol": "HTTP",
"Port": 80,
}
],
|
[
"Listeners",
":",
"[",
"{",
"Protocol",
":",
"HTTP",
"Port",
":",
"80",
"}",
"]"
] |
def _get_listener_ports_and_protocols(self, item):
"""
"Listeners": [
{
"Protocol": "HTTP",
"Port": 80,
}
],
"""
protocol_and_ports = defaultdict(set)
for listener in item.config.get('Listeners', []):
protocol = listener.get('Protocol')
if not protocol:
continue
if protocol == '-1':
protocol = 'ALL_PROTOCOLS'
elif 'HTTP' in protocol:
protocol = 'TCP'
protocol_and_ports[protocol].add(listener.get('Port'))
return protocol_and_ports
|
[
"def",
"_get_listener_ports_and_protocols",
"(",
"self",
",",
"item",
")",
":",
"protocol_and_ports",
"=",
"defaultdict",
"(",
"set",
")",
"for",
"listener",
"in",
"item",
".",
"config",
".",
"get",
"(",
"'Listeners'",
",",
"[",
"]",
")",
":",
"protocol",
"=",
"listener",
".",
"get",
"(",
"'Protocol'",
")",
"if",
"not",
"protocol",
":",
"continue",
"if",
"protocol",
"==",
"'-1'",
":",
"protocol",
"=",
"'ALL_PROTOCOLS'",
"elif",
"'HTTP'",
"in",
"protocol",
":",
"protocol",
"=",
"'TCP'",
"protocol_and_ports",
"[",
"protocol",
"]",
".",
"add",
"(",
"listener",
".",
"get",
"(",
"'Port'",
")",
")",
"return",
"protocol_and_ports"
] |
https://github.com/Netflix/security_monkey/blob/c28592ffd518fa399527d26262683fc860c30eef/security_monkey/auditors/elbv2.py#L42-L61
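The grouping logic can be exercised outside the auditor; the listener dicts below are hypothetical but follow the shape shown in the docstring.
from collections import defaultdict

listeners = [                              # hypothetical ELBv2 config snippet
    {"Protocol": "HTTP", "Port": 80},
    {"Protocol": "HTTPS", "Port": 443},
    {"Protocol": "-1", "Port": 0},
]

protocol_and_ports = defaultdict(set)
for listener in listeners:
    protocol = listener.get("Protocol")
    if not protocol:
        continue
    if protocol == "-1":
        protocol = "ALL_PROTOCOLS"
    elif "HTTP" in protocol:
        protocol = "TCP"                   # HTTP/HTTPS listeners are audited as TCP
    protocol_and_ports[protocol].add(listener.get("Port"))

print(dict(protocol_and_ports))            # -> {'TCP': {80, 443}, 'ALL_PROTOCOLS': {0}}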
|
|
cloudera/hue
|
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
|
desktop/core/ext-py/pycryptodomex-3.9.7/lib/Cryptodome/Cipher/PKCS1_v1_5.py
|
python
|
PKCS115_Cipher.__init__
|
(self, key, randfunc)
|
Initialize this PKCS#1 v1.5 cipher object.
:Parameters:
key : an RSA key object
If a private half is given, both encryption and decryption are possible.
If a public half is given, only encryption is possible.
randfunc : callable
Function that returns random bytes.
|
Initialize this PKCS#1 v1.5 cipher object.
|
[
"Initialize",
"this",
"PKCS#1",
"v1",
".",
"5",
"cipher",
"object",
"."
] |
def __init__(self, key, randfunc):
"""Initialize this PKCS#1 v1.5 cipher object.
:Parameters:
key : an RSA key object
If a private half is given, both encryption and decryption are possible.
If a public half is given, only encryption is possible.
randfunc : callable
Function that returns random bytes.
"""
self._key = key
self._randfunc = randfunc
|
[
"def",
"__init__",
"(",
"self",
",",
"key",
",",
"randfunc",
")",
":",
"self",
".",
"_key",
"=",
"key",
"self",
".",
"_randfunc",
"=",
"randfunc"
] |
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/pycryptodomex-3.9.7/lib/Cryptodome/Cipher/PKCS1_v1_5.py#L34-L46
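A round-trip sketch using the module-level PKCS1_v1_5.new() factory (which constructs PKCS115_Cipher) rather than calling __init__ directly; assumes the Cryptodome package from the record's path is installed.
from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import PKCS1_v1_5
from Cryptodome.Random import get_random_bytes

key = RSA.generate(2048)
cipher = PKCS1_v1_5.new(key)                     # wraps PKCS115_Cipher(key, randfunc)
ciphertext = cipher.encrypt(b"attack at dawn")

sentinel = get_random_bytes(16)                  # returned instead of raising on bad padding
plaintext = PKCS1_v1_5.new(key).decrypt(ciphertext, sentinel)
print(plaintext)                                 # -> b'attack at dawn'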
|
||
freedombox/FreedomBox
|
335a7f92cc08f27981f838a7cddfc67740598e54
|
plinth/app.py
|
python
|
App.needs_setup
|
(self)
|
return self.get_setup_state() == self.SetupState.NEEDS_SETUP
|
Return whether the app needs to be setup.
A simple shortcut for get_setup_state() == NEEDS_SETUP
|
Return whether the app needs to be setup.
|
[
"Return",
"whether",
"the",
"app",
"needs",
"to",
"be",
"setup",
"."
] |
def needs_setup(self) -> bool:
"""Return whether the app needs to be setup.
A simple shortcut for get_setup_state() == NEEDS_SETUP
"""
return self.get_setup_state() == self.SetupState.NEEDS_SETUP
|
[
"def",
"needs_setup",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"self",
".",
"get_setup_state",
"(",
")",
"==",
"self",
".",
"SetupState",
".",
"NEEDS_SETUP"
] |
https://github.com/freedombox/FreedomBox/blob/335a7f92cc08f27981f838a7cddfc67740598e54/plinth/app.py#L167-L172
|
|
log2timeline/plaso
|
fe2e316b8c76a0141760c0f2f181d84acb83abc2
|
plaso/parsers/winlnk.py
|
python
|
WinLnkLinkEventData.__init__
|
(self)
|
Initializes event data.
|
Initializes event data.
|
[
"Initializes",
"event",
"data",
"."
] |
def __init__(self):
"""Initializes event data."""
super(WinLnkLinkEventData, self).__init__(data_type=self.DATA_TYPE)
self.birth_droid_file_identifier = None
self.birth_droid_volume_identifier = None
self.command_line_arguments = None
self.description = None
self.drive_serial_number = None
self.drive_type = None
self.droid_file_identifier = None
self.droid_volume_identifier = None
self.env_var_location = None
self.file_attribute_flags = None
self.file_size = None
self.icon_location = None
self.link_target = None
self.local_path = None
self.network_path = None
self.relative_path = None
self.volume_label = None
self.working_directory = None
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"super",
"(",
"WinLnkLinkEventData",
",",
"self",
")",
".",
"__init__",
"(",
"data_type",
"=",
"self",
".",
"DATA_TYPE",
")",
"self",
".",
"birth_droid_file_identifier",
"=",
"None",
"self",
".",
"birth_droid_volume_identifier",
"=",
"None",
"self",
".",
"command_line_arguments",
"=",
"None",
"self",
".",
"description",
"=",
"None",
"self",
".",
"drive_serial_number",
"=",
"None",
"self",
".",
"drive_type",
"=",
"None",
"self",
".",
"droid_file_identifier",
"=",
"None",
"self",
".",
"droid_volume_identifier",
"=",
"None",
"self",
".",
"env_var_location",
"=",
"None",
"self",
".",
"file_attribute_flags",
"=",
"None",
"self",
".",
"file_size",
"=",
"None",
"self",
".",
"icon_location",
"=",
"None",
"self",
".",
"link_target",
"=",
"None",
"self",
".",
"local_path",
"=",
"None",
"self",
".",
"network_path",
"=",
"None",
"self",
".",
"relative_path",
"=",
"None",
"self",
".",
"volume_label",
"=",
"None",
"self",
".",
"working_directory",
"=",
"None"
] |
https://github.com/log2timeline/plaso/blob/fe2e316b8c76a0141760c0f2f181d84acb83abc2/plaso/parsers/winlnk.py#L53-L73
|
||
openstack/magnum
|
fa298eeab19b1d87070d72c7c4fb26cd75b0781e
|
magnum/api/controllers/v1/federation.py
|
python
|
FederationsController.post
|
(self, federation)
|
return FederationID(new_federation.uuid)
|
Create a new federation.
:param federation: a federation within the request body.
|
Create a new federation.
|
[
"Create",
"a",
"new",
"federation",
"."
] |
def post(self, federation):
"""Create a new federation.
:param federation: a federation within the request body.
"""
context = pecan.request.context
policy.enforce(context, 'federation:create',
action='federation:create')
federation_dict = federation.as_dict()
# Validate `hostcluster_id`
hostcluster_id = federation_dict.get('hostcluster_id')
attr_validator.validate_federation_hostcluster(hostcluster_id)
# Validate `properties` dict.
properties_dict = federation_dict.get('properties')
attr_validator.validate_federation_properties(properties_dict)
federation_dict['project_id'] = context.project_id
# If no name is specified, generate a random human-readable name
name = (federation_dict.get('name') or
self._generate_name_for_federation(context))
federation_dict['name'] = name
new_federation = objects.Federation(context, **federation_dict)
new_federation.uuid = uuid.uuid4()
# TODO(clenimar): remove hard-coded `create_timeout`.
pecan.request.rpcapi.federation_create_async(new_federation,
create_timeout=15)
return FederationID(new_federation.uuid)
|
[
"def",
"post",
"(",
"self",
",",
"federation",
")",
":",
"context",
"=",
"pecan",
".",
"request",
".",
"context",
"policy",
".",
"enforce",
"(",
"context",
",",
"'federation:create'",
",",
"action",
"=",
"'federation:create'",
")",
"federation_dict",
"=",
"federation",
".",
"as_dict",
"(",
")",
"# Validate `hostcluster_id`",
"hostcluster_id",
"=",
"federation_dict",
".",
"get",
"(",
"'hostcluster_id'",
")",
"attr_validator",
".",
"validate_federation_hostcluster",
"(",
"hostcluster_id",
")",
"# Validate `properties` dict.",
"properties_dict",
"=",
"federation_dict",
".",
"get",
"(",
"'properties'",
")",
"attr_validator",
".",
"validate_federation_properties",
"(",
"properties_dict",
")",
"federation_dict",
"[",
"'project_id'",
"]",
"=",
"context",
".",
"project_id",
"# If no name is specified, generate a random human-readable name",
"name",
"=",
"(",
"federation_dict",
".",
"get",
"(",
"'name'",
")",
"or",
"self",
".",
"_generate_name_for_federation",
"(",
"context",
")",
")",
"federation_dict",
"[",
"'name'",
"]",
"=",
"name",
"new_federation",
"=",
"objects",
".",
"Federation",
"(",
"context",
",",
"*",
"*",
"federation_dict",
")",
"new_federation",
".",
"uuid",
"=",
"uuid",
".",
"uuid4",
"(",
")",
"# TODO(clenimar): remove hard-coded `create_timeout`.",
"pecan",
".",
"request",
".",
"rpcapi",
".",
"federation_create_async",
"(",
"new_federation",
",",
"create_timeout",
"=",
"15",
")",
"return",
"FederationID",
"(",
"new_federation",
".",
"uuid",
")"
] |
https://github.com/openstack/magnum/blob/fa298eeab19b1d87070d72c7c4fb26cd75b0781e/magnum/api/controllers/v1/federation.py#L265-L298
|
|
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_vendored_deps/library/oc_secret.py
|
python
|
Yedit.yaml_dict
|
(self)
|
return self.__yaml_dict
|
getter method for yaml_dict
|
getter method for yaml_dict
|
[
"getter",
"method",
"for",
"yaml_dict"
] |
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
|
[
"def",
"yaml_dict",
"(",
"self",
")",
":",
"return",
"self",
".",
"__yaml_dict"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_vendored_deps/library/oc_secret.py#L228-L230
|
|
naftaliharris/tauthon
|
5587ceec329b75f7caf6d65a036db61ac1bae214
|
Lib/compiler/transformer.py
|
python
|
Transformer.parseexpr
|
(self, text)
|
return self.transform(parser.expr(text))
|
Return a modified parse tree for the given expression text.
|
Return a modified parse tree for the given expression text.
|
[
"Return",
"a",
"modified",
"parse",
"tree",
"for",
"the",
"given",
"expression",
"text",
"."
] |
def parseexpr(self, text):
"""Return a modified parse tree for the given expression text."""
return self.transform(parser.expr(text))
|
[
"def",
"parseexpr",
"(",
"self",
",",
"text",
")",
":",
"return",
"self",
".",
"transform",
"(",
"parser",
".",
"expr",
"(",
"text",
")",
")"
] |
https://github.com/naftaliharris/tauthon/blob/5587ceec329b75f7caf6d65a036db61ac1bae214/Lib/compiler/transformer.py#L130-L132
|
|
AnalogJ/lexicon
|
c7bedfed6ed34c96950954933b07ca3ce081d0e5
|
lexicon/providers/netcup.py
|
python
|
Provider._apicall
|
(self, method, **params)
|
return data.get("responsedata", {})
|
Call an API method and return response data. For more info, see:
https://ccp.netcup.net/run/webservice/servers/endpoint
|
Call an API method and return response data. For more info, see:
https://ccp.netcup.net/run/webservice/servers/endpoint
|
[
"Call",
"an",
"API",
"method",
"and",
"return",
"response",
"data",
".",
"For",
"more",
"info",
"see",
":",
"https",
":",
"//",
"ccp",
".",
"netcup",
".",
"net",
"/",
"run",
"/",
"webservice",
"/",
"servers",
"/",
"endpoint"
] |
def _apicall(self, method, **params):
"""Call an API method and return response data. For more info, see:
https://ccp.netcup.net/run/webservice/servers/endpoint"""
LOGGER.debug("%s(%r)", method, params)
auth = {
"customernumber": self._get_provider_option("auth_customer_id"),
"apikey": self._get_provider_option("auth_api_key"),
}
if method == "login":
auth["apipassword"] = self._get_provider_option("auth_api_password")
else:
auth["apisessionid"] = self.api_session_id
if not all(auth.values()):
raise Exception("No valid authentication mechanism found")
data = self._request(
"POST", url="", data={"action": method, "param": dict(params, **auth)}
)
if data["status"] != "success":
raise Exception(f"{data['longmessage']} ({data['statuscode']})")
return data.get("responsedata", {})
|
[
"def",
"_apicall",
"(",
"self",
",",
"method",
",",
"*",
"*",
"params",
")",
":",
"LOGGER",
".",
"debug",
"(",
"\"%s(%r)\"",
",",
"method",
",",
"params",
")",
"auth",
"=",
"{",
"\"customernumber\"",
":",
"self",
".",
"_get_provider_option",
"(",
"\"auth_customer_id\"",
")",
",",
"\"apikey\"",
":",
"self",
".",
"_get_provider_option",
"(",
"\"auth_api_key\"",
")",
",",
"}",
"if",
"method",
"==",
"\"login\"",
":",
"auth",
"[",
"\"apipassword\"",
"]",
"=",
"self",
".",
"_get_provider_option",
"(",
"\"auth_api_password\"",
")",
"else",
":",
"auth",
"[",
"\"apisessionid\"",
"]",
"=",
"self",
".",
"api_session_id",
"if",
"not",
"all",
"(",
"auth",
".",
"values",
"(",
")",
")",
":",
"raise",
"Exception",
"(",
"\"No valid authentication mechanism found\"",
")",
"data",
"=",
"self",
".",
"_request",
"(",
"\"POST\"",
",",
"url",
"=",
"\"\"",
",",
"data",
"=",
"{",
"\"action\"",
":",
"method",
",",
"\"param\"",
":",
"dict",
"(",
"params",
",",
"*",
"*",
"auth",
")",
"}",
")",
"if",
"data",
"[",
"\"status\"",
"]",
"!=",
"\"success\"",
":",
"raise",
"Exception",
"(",
"f\"{data['longmessage']} ({data['statuscode']})\"",
")",
"return",
"data",
".",
"get",
"(",
"\"responsedata\"",
",",
"{",
"}",
")"
] |
https://github.com/AnalogJ/lexicon/blob/c7bedfed6ed34c96950954933b07ca3ce081d0e5/lexicon/providers/netcup.py#L151-L170
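The JSON envelope the method posts can be previewed without a network call. The helper below mirrors the non-login branch; the action name, parameter and credential values are illustrative placeholders, not verified netcup API details.
import json

def build_envelope(method, apisessionid, customernumber, apikey, **params):
    # Mirrors _apicall's non-login branch: user params merged with the auth block.
    auth = {"customernumber": customernumber, "apikey": apikey, "apisessionid": apisessionid}
    return {"action": method, "param": dict(params, **auth)}

envelope = build_envelope("infoDnsRecords", "SESSION-ID", "123456", "API-KEY",
                          domainname="example.com")
print(json.dumps(envelope, indent=2))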
|
|
openstack/cinder
|
23494a6d6c51451688191e1847a458f1d3cdcaa5
|
cinder/volume/drivers/macrosan/devop_client.py
|
python
|
Client.unmap_target_from_initiator
|
(self, tgt_port_name, initr_wwn)
|
return self.send_request(method='delete', url='/itl', data=data)
|
Unmap target from initiator.
|
Unmap target from initiator.
|
[
"Unmap",
"target",
"from",
"initiator",
"."
] |
def unmap_target_from_initiator(self, tgt_port_name, initr_wwn):
"""Unmap target from initiator."""
data = {
'attr': 'unmaptarget',
'initr_wwn': initr_wwn,
'tgt_port_name': tgt_port_name
}
return self.send_request(method='delete', url='/itl', data=data)
|
[
"def",
"unmap_target_from_initiator",
"(",
"self",
",",
"tgt_port_name",
",",
"initr_wwn",
")",
":",
"data",
"=",
"{",
"'attr'",
":",
"'unmaptarget'",
",",
"'initr_wwn'",
":",
"initr_wwn",
",",
"'tgt_port_name'",
":",
"tgt_port_name",
"}",
"return",
"self",
".",
"send_request",
"(",
"method",
"=",
"'delete'",
",",
"url",
"=",
"'/itl'",
",",
"data",
"=",
"data",
")"
] |
https://github.com/openstack/cinder/blob/23494a6d6c51451688191e1847a458f1d3cdcaa5/cinder/volume/drivers/macrosan/devop_client.py#L407-L414
|
|
jython/frozen-mirror
|
b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99
|
lib-python/2.7/lib2to3/pytree.py
|
python
|
Node.insert_child
|
(self, i, child)
|
Equivalent to 'node.children.insert(i, child)'. This method also sets
the child's parent attribute appropriately.
|
Equivalent to 'node.children.insert(i, child)'. This method also sets
the child's parent attribute appropriately.
|
[
"Equivalent",
"to",
"node",
".",
"children",
".",
"insert",
"(",
"i",
"child",
")",
".",
"This",
"method",
"also",
"sets",
"the",
"child",
"s",
"parent",
"attribute",
"appropriately",
"."
] |
def insert_child(self, i, child):
"""
Equivalent to 'node.children.insert(i, child)'. This method also sets
the child's parent attribute appropriately.
"""
child.parent = self
self.children.insert(i, child)
self.changed()
|
[
"def",
"insert_child",
"(",
"self",
",",
"i",
",",
"child",
")",
":",
"child",
".",
"parent",
"=",
"self",
"self",
".",
"children",
".",
"insert",
"(",
"i",
",",
"child",
")",
"self",
".",
"changed",
"(",
")"
] |
https://github.com/jython/frozen-mirror/blob/b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99/lib-python/2.7/lib2to3/pytree.py#L332-L339
|
||
apple/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
txdav/common/datastore/podding/migration/home_sync.py
|
python
|
CrossPodHomeSync.makeNotifications
|
(self, txn, records)
|
Create L{NotificationObjectRecord} records in the local store.
|
Create L{NotificationObjectRecord} records in the local store.
|
[
"Create",
"L",
"{",
"NotificationObjectRecord",
"}",
"records",
"in",
"the",
"local",
"store",
"."
] |
def makeNotifications(self, txn, records):
"""
Create L{NotificationObjectRecord} records in the local store.
"""
notifications = yield NotificationCollection.notificationsWithUID(txn, self.diruid, status=_HOME_STATUS_MIGRATING, create=True)
for record in records:
# Do this via the "write" API so that sync revisions are updated properly, rather than just
# inserting the records directly.
notification = yield notifications.writeNotificationObject(record.notificationUID, record.notificationType, record.notificationData)
self.accounting(" Added notification local-id={}.".format(notification.id()))
|
[
"def",
"makeNotifications",
"(",
"self",
",",
"txn",
",",
"records",
")",
":",
"notifications",
"=",
"yield",
"NotificationCollection",
".",
"notificationsWithUID",
"(",
"txn",
",",
"self",
".",
"diruid",
",",
"status",
"=",
"_HOME_STATUS_MIGRATING",
",",
"create",
"=",
"True",
")",
"for",
"record",
"in",
"records",
":",
"# Do this via the \"write\" API so that sync revisions are updated properly, rather than just",
"# inserting the records directly.",
"notification",
"=",
"yield",
"notifications",
".",
"writeNotificationObject",
"(",
"record",
".",
"notificationUID",
",",
"record",
".",
"notificationType",
",",
"record",
".",
"notificationData",
")",
"self",
".",
"accounting",
"(",
"\" Added notification local-id={}.\"",
".",
"format",
"(",
"notification",
".",
"id",
"(",
")",
")",
")"
] |
https://github.com/apple/ccs-calendarserver/blob/13c706b985fb728b9aab42dc0fef85aae21921c3/txdav/common/datastore/podding/migration/home_sync.py#L1065-L1075
|
||
JaniceWuo/MovieRecommend
|
4c86db64ca45598917d304f535413df3bc9fea65
|
movierecommend/venv1/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/index.py
|
python
|
Link.__init__
|
(self, url, comes_from=None, requires_python=None)
|
Object representing a parsed link from https://pypi.python.org/simple/*
url:
url of the resource pointed to (href of the link)
comes_from:
instance of HTMLPage where the link was found, or string.
requires_python:
String containing the `Requires-Python` metadata field, specified
in PEP 345. This may be specified by a data-requires-python
attribute in the HTML link tag, as described in PEP 503.
|
Object representing a parsed link from https://pypi.python.org/simple/*
|
[
"Object",
"representing",
"a",
"parsed",
"link",
"from",
"https",
":",
"//",
"pypi",
".",
"python",
".",
"org",
"/",
"simple",
"/",
"*"
] |
def __init__(self, url, comes_from=None, requires_python=None):
"""
Object representing a parsed link from https://pypi.python.org/simple/*
url:
url of the resource pointed to (href of the link)
comes_from:
instance of HTMLPage where the link was found, or string.
requires_python:
String containing the `Requires-Python` metadata field, specified
in PEP 345. This may be specified by a data-requires-python
attribute in the HTML link tag, as described in PEP 503.
"""
# url can be a UNC windows share
if url.startswith('\\\\'):
url = path_to_url(url)
self.url = url
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
|
[
"def",
"__init__",
"(",
"self",
",",
"url",
",",
"comes_from",
"=",
"None",
",",
"requires_python",
"=",
"None",
")",
":",
"# url can be a UNC windows share",
"if",
"url",
".",
"startswith",
"(",
"'\\\\\\\\'",
")",
":",
"url",
"=",
"path_to_url",
"(",
"url",
")",
"self",
".",
"url",
"=",
"url",
"self",
".",
"comes_from",
"=",
"comes_from",
"self",
".",
"requires_python",
"=",
"requires_python",
"if",
"requires_python",
"else",
"None"
] |
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/index.py#L882-L902
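A small construction sketch matching the initializer above; Link is assumed to be importable from this vendored pip module and the URL is a placeholder.
link = Link("https://example.org/packages/demo-1.0-py3-none-any.whl",
            comes_from="https://example.org/simple/demo/",
            requires_python=">=3.6")
print(link.url)                  # the UNC branch is skipped for http(s) URLs
print(link.requires_python)      # -> '>=3.6'; falsy values collapse to None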
|
||
CLUEbenchmark/CLUE
|
5bd39732734afecb490cf18a5212e692dbf2c007
|
baselines/models/roberta_wwm_large_ext/run_squad.py
|
python
|
InputFeatures.__init__
|
(self,
unique_id,
example_index,
doc_span_index,
tokens,
token_to_orig_map,
token_is_max_context,
input_ids,
input_mask,
segment_ids,
start_position=None,
end_position=None,
is_impossible=None)
|
[] |
def __init__(self,
unique_id,
example_index,
doc_span_index,
tokens,
token_to_orig_map,
token_is_max_context,
input_ids,
input_mask,
segment_ids,
start_position=None,
end_position=None,
is_impossible=None):
self.unique_id = unique_id
self.example_index = example_index
self.doc_span_index = doc_span_index
self.tokens = tokens
self.token_to_orig_map = token_to_orig_map
self.token_is_max_context = token_is_max_context
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.start_position = start_position
self.end_position = end_position
self.is_impossible = is_impossible
|
[
"def",
"__init__",
"(",
"self",
",",
"unique_id",
",",
"example_index",
",",
"doc_span_index",
",",
"tokens",
",",
"token_to_orig_map",
",",
"token_is_max_context",
",",
"input_ids",
",",
"input_mask",
",",
"segment_ids",
",",
"start_position",
"=",
"None",
",",
"end_position",
"=",
"None",
",",
"is_impossible",
"=",
"None",
")",
":",
"self",
".",
"unique_id",
"=",
"unique_id",
"self",
".",
"example_index",
"=",
"example_index",
"self",
".",
"doc_span_index",
"=",
"doc_span_index",
"self",
".",
"tokens",
"=",
"tokens",
"self",
".",
"token_to_orig_map",
"=",
"token_to_orig_map",
"self",
".",
"token_is_max_context",
"=",
"token_is_max_context",
"self",
".",
"input_ids",
"=",
"input_ids",
"self",
".",
"input_mask",
"=",
"input_mask",
"self",
".",
"segment_ids",
"=",
"segment_ids",
"self",
".",
"start_position",
"=",
"start_position",
"self",
".",
"end_position",
"=",
"end_position",
"self",
".",
"is_impossible",
"=",
"is_impossible"
] |
https://github.com/CLUEbenchmark/CLUE/blob/5bd39732734afecb490cf18a5212e692dbf2c007/baselines/models/roberta_wwm_large_ext/run_squad.py#L200-L224
|
||||
microsoft/azure-devops-python-api
|
451cade4c475482792cbe9e522c1fee32393139e
|
azure-devops/azure/devops/v5_1/work_item_tracking/work_item_tracking_client.py
|
python
|
WorkItemTrackingClient.get_reporting_links_by_link_type
|
(self, project=None, link_types=None, types=None, continuation_token=None, start_date_time=None)
|
return self._deserialize('ReportingWorkItemLinksBatch', response)
|
GetReportingLinksByLinkType.
Get a batch of work item links
:param str project: Project ID or project name
:param [str] link_types: A list of types to filter the results to specific link types. Omit this parameter to get work item links of all link types.
:param [str] types: A list of types to filter the results to specific work item types. Omit this parameter to get work item links of all work item types.
:param str continuation_token: Specifies the continuationToken to start the batch from. Omit this parameter to get the first batch of links.
:param datetime start_date_time: Date/time to use as a starting point for link changes. Only link changes that occurred after that date/time will be returned. Cannot be used in conjunction with 'watermark' parameter.
:rtype: :class:`<ReportingWorkItemLinksBatch> <azure.devops.v5_1.work_item_tracking.models.ReportingWorkItemLinksBatch>`
|
GetReportingLinksByLinkType.
Get a batch of work item links
:param str project: Project ID or project name
:param [str] link_types: A list of types to filter the results to specific link types. Omit this parameter to get work item links of all link types.
:param [str] types: A list of types to filter the results to specific work item types. Omit this parameter to get work item links of all work item types.
:param str continuation_token: Specifies the continuationToken to start the batch from. Omit this parameter to get the first batch of links.
:param datetime start_date_time: Date/time to use as a starting point for link changes. Only link changes that occurred after that date/time will be returned. Cannot be used in conjunction with 'watermark' parameter.
:rtype: :class:`<ReportingWorkItemLinksBatch> <azure.devops.v5_1.work_item_tracking.models.ReportingWorkItemLinksBatch>`
|
[
"GetReportingLinksByLinkType",
".",
"Get",
"a",
"batch",
"of",
"work",
"item",
"links",
":",
"param",
"str",
"project",
":",
"Project",
"ID",
"or",
"project",
"name",
":",
"param",
"[",
"str",
"]",
"link_types",
":",
"A",
"list",
"of",
"types",
"to",
"filter",
"the",
"results",
"to",
"specific",
"link",
"types",
".",
"Omit",
"this",
"parameter",
"to",
"get",
"work",
"item",
"links",
"of",
"all",
"link",
"types",
".",
":",
"param",
"[",
"str",
"]",
"types",
":",
"A",
"list",
"of",
"types",
"to",
"filter",
"the",
"results",
"to",
"specific",
"work",
"item",
"types",
".",
"Omit",
"this",
"parameter",
"to",
"get",
"work",
"item",
"links",
"of",
"all",
"work",
"item",
"types",
".",
":",
"param",
"str",
"continuation_token",
":",
"Specifies",
"the",
"continuationToken",
"to",
"start",
"the",
"batch",
"from",
".",
"Omit",
"this",
"parameter",
"to",
"get",
"the",
"first",
"batch",
"of",
"links",
".",
":",
"param",
"datetime",
"start_date_time",
":",
"Date",
"/",
"time",
"to",
"use",
"as",
"a",
"starting",
"point",
"for",
"link",
"changes",
".",
"Only",
"link",
"changes",
"that",
"occurred",
"after",
"that",
"date",
"/",
"time",
"will",
"be",
"returned",
".",
"Cannot",
"be",
"used",
"in",
"conjunction",
"with",
"watermark",
"parameter",
".",
":",
"rtype",
":",
":",
"class",
":",
"<ReportingWorkItemLinksBatch",
">",
"<azure",
".",
"devops",
".",
"v5_1",
".",
"work_item_tracking",
".",
"models",
".",
"ReportingWorkItemLinksBatch",
">"
] |
def get_reporting_links_by_link_type(self, project=None, link_types=None, types=None, continuation_token=None, start_date_time=None):
"""GetReportingLinksByLinkType.
Get a batch of work item links
:param str project: Project ID or project name
:param [str] link_types: A list of types to filter the results to specific link types. Omit this parameter to get work item links of all link types.
:param [str] types: A list of types to filter the results to specific work item types. Omit this parameter to get work item links of all work item types.
:param str continuation_token: Specifies the continuationToken to start the batch from. Omit this parameter to get the first batch of links.
:param datetime start_date_time: Date/time to use as a starting point for link changes. Only link changes that occurred after that date/time will be returned. Cannot be used in conjunction with 'watermark' parameter.
:rtype: :class:`<ReportingWorkItemLinksBatch> <azure.devops.v5_1.work_item_tracking.models.ReportingWorkItemLinksBatch>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if link_types is not None:
link_types = ",".join(link_types)
query_parameters['linkTypes'] = self._serialize.query('link_types', link_types, 'str')
if types is not None:
types = ",".join(types)
query_parameters['types'] = self._serialize.query('types', types, 'str')
if continuation_token is not None:
query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
if start_date_time is not None:
query_parameters['startDateTime'] = self._serialize.query('start_date_time', start_date_time, 'iso-8601')
response = self._send(http_method='GET',
location_id='b5b5b6d0-0308-40a1-b3f4-b9bb3c66878f',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ReportingWorkItemLinksBatch', response)
|
[
"def",
"get_reporting_links_by_link_type",
"(",
"self",
",",
"project",
"=",
"None",
",",
"link_types",
"=",
"None",
",",
"types",
"=",
"None",
",",
"continuation_token",
"=",
"None",
",",
"start_date_time",
"=",
"None",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"project",
"is",
"not",
"None",
":",
"route_values",
"[",
"'project'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'project'",
",",
"project",
",",
"'str'",
")",
"query_parameters",
"=",
"{",
"}",
"if",
"link_types",
"is",
"not",
"None",
":",
"link_types",
"=",
"\",\"",
".",
"join",
"(",
"link_types",
")",
"query_parameters",
"[",
"'linkTypes'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'link_types'",
",",
"link_types",
",",
"'str'",
")",
"if",
"types",
"is",
"not",
"None",
":",
"types",
"=",
"\",\"",
".",
"join",
"(",
"types",
")",
"query_parameters",
"[",
"'types'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'types'",
",",
"types",
",",
"'str'",
")",
"if",
"continuation_token",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'continuationToken'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'continuation_token'",
",",
"continuation_token",
",",
"'str'",
")",
"if",
"start_date_time",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'startDateTime'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'start_date_time'",
",",
"start_date_time",
",",
"'iso-8601'",
")",
"response",
"=",
"self",
".",
"_send",
"(",
"http_method",
"=",
"'GET'",
",",
"location_id",
"=",
"'b5b5b6d0-0308-40a1-b3f4-b9bb3c66878f'",
",",
"version",
"=",
"'5.1'",
",",
"route_values",
"=",
"route_values",
",",
"query_parameters",
"=",
"query_parameters",
")",
"return",
"self",
".",
"_deserialize",
"(",
"'ReportingWorkItemLinksBatch'",
",",
"response",
")"
] |
https://github.com/microsoft/azure-devops-python-api/blob/451cade4c475482792cbe9e522c1fee32393139e/azure-devops/azure/devops/v5_1/work_item_tracking/work_item_tracking_client.py#L1418-L1447
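A hedged usage sketch for the reporting-links call above. The organization URL, personal access token, project name and link type are placeholders; client construction follows the azure-devops package's documented Connection pattern, and the attributes read off the result assume the ReportingWorkItemLinksBatch model.

from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication

# Placeholder organization URL and personal access token.
credentials = BasicAuthentication("", "my-personal-access-token")
connection = Connection(base_url="https://dev.azure.com/my-org", creds=credentials)
client = connection.clients.get_work_item_tracking_client()

# First batch of hierarchy links for one project; pass continuation_token to page on.
batch = client.get_reporting_links_by_link_type(
    project="MyProject",
    link_types=["System.LinkTypes.Hierarchy-Forward"])
print(batch.is_last_batch, batch.continuation_token)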
|
|
arrayfire/arrayfire-python
|
96fa9768ee02e5fb5ffcaf3d1f744c898b141637
|
arrayfire/signal.py
|
python
|
ifft2
|
(signal, dim0 = None, dim1 = None , scale = None)
|
return output
|
Inverse Fast Fourier Transform: 2D
Parameters
----------
signal: af.Array
A 2 dimensional signal or a batch of 2 dimensional signals.
dim0: optional: int. default: None.
- Specifies the size of the output.
- If None, dim0 is calculated to be the first dimension of `signal`.
dim1: optional: int. default: None.
- Specifies the size of the output.
- If None, dim1 is calculated to be the second dimension of `signal`.
scale: optional: scalar. default: None.
- Specifies the scaling factor.
- If None, scale is set to 1.0 / (dim0 * dim1)
Returns
-------
output: af.Array
A complex af.Array containing the full output of the inverse fft.
Note
----
The output is always complex.
|
Inverse Fast Fourier Transform: 2D
|
[
"Inverse",
"Fast",
"Fourier",
"Transform",
":",
"2D"
] |
def ifft2(signal, dim0 = None, dim1 = None , scale = None):
"""
Inverse Fast Fourier Transform: 2D
Parameters
----------
signal: af.Array
A 2 dimensional signal or a batch of 2 dimensional signals.
dim0: optional: int. default: None.
- Specifies the size of the output.
- If None, dim0 is calculated to be the first dimension of `signal`.
dim1: optional: int. default: None.
- Specifies the size of the output.
- If None, dim1 is calculated to be the second dimension of `signal`.
scale: optional: scalar. default: None.
- Specifies the scaling factor.
- If None, scale is set to 1.0 / (dim0 * dim1)
Returns
-------
output: af.Array
A complex af.Array containing the full output of the inverse fft.
Note
----
The output is always complex.
"""
dims = signal.dims()
if dim0 is None:
dim0 = dims[0]
if dim1 is None:
dim1 = dims[1]
if scale is None:
scale = 1.0/float(dim0 * dim1)
output = Array()
safe_call(backend.get().af_ifft2(c_pointer(output.arr), signal.arr, c_double_t(scale),
c_dim_t(dim0), c_dim_t(dim1)))
return output
|
[
"def",
"ifft2",
"(",
"signal",
",",
"dim0",
"=",
"None",
",",
"dim1",
"=",
"None",
",",
"scale",
"=",
"None",
")",
":",
"dims",
"=",
"signal",
".",
"dims",
"(",
")",
"if",
"dim0",
"is",
"None",
":",
"dim0",
"=",
"dims",
"[",
"0",
"]",
"if",
"dim1",
"is",
"None",
":",
"dim1",
"=",
"dims",
"[",
"1",
"]",
"if",
"scale",
"is",
"None",
":",
"scale",
"=",
"1.0",
"/",
"float",
"(",
"dim0",
"*",
"dim1",
")",
"output",
"=",
"Array",
"(",
")",
"safe_call",
"(",
"backend",
".",
"get",
"(",
")",
".",
"af_ifft2",
"(",
"c_pointer",
"(",
"output",
".",
"arr",
")",
",",
"signal",
".",
"arr",
",",
"c_double_t",
"(",
"scale",
")",
",",
"c_dim_t",
"(",
"dim0",
")",
",",
"c_dim_t",
"(",
"dim1",
")",
")",
")",
"return",
"output"
] |
https://github.com/arrayfire/arrayfire-python/blob/96fa9768ee02e5fb5ffcaf3d1f744c898b141637/arrayfire/signal.py#L482-L531
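A minimal round-trip sketch for ifft2, assuming the arrayfire package and a working backend are installed; the array size is illustrative.

import arrayfire as af

a = af.randu(4, 4)              # real 4x4 input
freq = af.fft2(a)               # forward 2D FFT (complex)
back = af.ifft2(freq)           # inverse, default scale = 1.0 / (4 * 4)
# The real part of `back` matches `a` up to floating-point error.
print(af.max(af.abs(af.real(back) - a)))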
|
|
saltstack/salt
|
fae5bc757ad0f1716483ce7ae180b451545c2058
|
salt/ext/tornado/gen.py
|
python
|
coroutine
|
(func, replace_callback=True)
|
return _make_coroutine_wrapper(func, replace_callback=True)
|
Decorator for asynchronous generators.
Any generator that yields objects from this module must be wrapped
in either this decorator or `engine`.
Coroutines may "return" by raising the special exception
`Return(value) <Return>`. In Python 3.3+, it is also possible for
the function to simply use the ``return value`` statement (prior to
Python 3.3 generators were not allowed to also return values).
In all versions of Python a coroutine that simply wishes to exit
early may use the ``return`` statement without a value.
Functions with this decorator return a `.Future`. Additionally,
they may be called with a ``callback`` keyword argument, which
will be invoked with the future's result when it resolves. If the
coroutine fails, the callback will not be run and an exception
will be raised into the surrounding `.StackContext`. The
``callback`` argument is not visible inside the decorated
function; it is handled by the decorator itself.
From the caller's perspective, ``@gen.coroutine`` is similar to
the combination of ``@return_future`` and ``@gen.engine``.
.. warning::
When exceptions occur inside a coroutine, the exception
information will be stored in the `.Future` object. You must
examine the result of the `.Future` object, or the exception
may go unnoticed by your code. This means yielding the function
if called from another coroutine, using something like
`.IOLoop.run_sync` for top-level calls, or passing the `.Future`
to `.IOLoop.add_future`.
|
Decorator for asynchronous generators.
|
[
"Decorator",
"for",
"asynchronous",
"generators",
"."
] |
def coroutine(func, replace_callback=True):
"""Decorator for asynchronous generators.
Any generator that yields objects from this module must be wrapped
in either this decorator or `engine`.
Coroutines may "return" by raising the special exception
`Return(value) <Return>`. In Python 3.3+, it is also possible for
the function to simply use the ``return value`` statement (prior to
Python 3.3 generators were not allowed to also return values).
In all versions of Python a coroutine that simply wishes to exit
early may use the ``return`` statement without a value.
Functions with this decorator return a `.Future`. Additionally,
they may be called with a ``callback`` keyword argument, which
will be invoked with the future's result when it resolves. If the
coroutine fails, the callback will not be run and an exception
will be raised into the surrounding `.StackContext`. The
``callback`` argument is not visible inside the decorated
function; it is handled by the decorator itself.
From the caller's perspective, ``@gen.coroutine`` is similar to
the combination of ``@return_future`` and ``@gen.engine``.
.. warning::
When exceptions occur inside a coroutine, the exception
information will be stored in the `.Future` object. You must
examine the result of the `.Future` object, or the exception
may go unnoticed by your code. This means yielding the function
if called from another coroutine, using something like
`.IOLoop.run_sync` for top-level calls, or passing the `.Future`
to `.IOLoop.add_future`.
"""
return _make_coroutine_wrapper(func, replace_callback=True)
|
[
"def",
"coroutine",
"(",
"func",
",",
"replace_callback",
"=",
"True",
")",
":",
"return",
"_make_coroutine_wrapper",
"(",
"func",
",",
"replace_callback",
"=",
"True",
")"
] |
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/ext/tornado/gen.py#L213-L248
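A small usage sketch for the decorator above, written against the public tornado API that salt.ext.tornado vendors; the delay and values are arbitrary.

from tornado import gen
from tornado.ioloop import IOLoop

@gen.coroutine
def add_later(x, y):
    yield gen.sleep(0.1)         # any yielded Future suspends the coroutine
    raise gen.Return(x + y)      # or simply `return x + y` on Python 3.3+

result = IOLoop.current().run_sync(lambda: add_later(2, 3))
print(result)                    # 5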
|
|
edfungus/Crouton
|
ada98b3930192938a48909072b45cb84b945f875
|
clients/python_clients/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py
|
python
|
OrderedDict.__reduce__
|
(self)
|
return self.__class__, (items,)
|
Return state information for pickling
|
Return state information for pickling
|
[
"Return",
"state",
"information",
"for",
"pickling"
] |
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
|
[
"def",
"__reduce__",
"(",
"self",
")",
":",
"items",
"=",
"[",
"[",
"k",
",",
"self",
"[",
"k",
"]",
"]",
"for",
"k",
"in",
"self",
"]",
"inst_dict",
"=",
"vars",
"(",
"self",
")",
".",
"copy",
"(",
")",
"for",
"k",
"in",
"vars",
"(",
"OrderedDict",
"(",
")",
")",
":",
"inst_dict",
".",
"pop",
"(",
"k",
",",
"None",
")",
"if",
"inst_dict",
":",
"return",
"(",
"self",
".",
"__class__",
",",
"(",
"items",
",",
")",
",",
"inst_dict",
")",
"return",
"self",
".",
"__class__",
",",
"(",
"items",
",",
")"
] |
https://github.com/edfungus/Crouton/blob/ada98b3930192938a48909072b45cb84b945f875/clients/python_clients/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py#L210-L218
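The __reduce__ above is what lets instances round-trip through pickle; the same behaviour can be checked against the standard-library collections.OrderedDict, which follows the same protocol.

import pickle
from collections import OrderedDict

d = OrderedDict([("b", 2), ("a", 1)])
blob = pickle.dumps(d)               # pickle consults __reduce__ for (class, (items,))
restored = pickle.loads(blob)
assert restored == d
assert list(restored) == ["b", "a"]  # insertion order survives the round trip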
|
|
xiaolonw/TimeCycle
|
16d33ac0fb0a08105a9ca781c7b1b36898e3b601
|
utils/imutils2.py
|
python
|
load_image
|
(img_path)
|
return im_to_torch(img)
|
[] |
def load_image(img_path):
# H x W x C => C x H x W
img = cv2.imread(img_path)
img = img.astype(np.float32)
img = img / 255.0
img = img[:,:,::-1]
img = img.copy()
return im_to_torch(img)
|
[
"def",
"load_image",
"(",
"img_path",
")",
":",
"# H x W x C => C x H x W",
"img",
"=",
"cv2",
".",
"imread",
"(",
"img_path",
")",
"img",
"=",
"img",
".",
"astype",
"(",
"np",
".",
"float32",
")",
"img",
"=",
"img",
"/",
"255.0",
"img",
"=",
"img",
"[",
":",
",",
":",
",",
":",
":",
"-",
"1",
"]",
"img",
"=",
"img",
".",
"copy",
"(",
")",
"return",
"im_to_torch",
"(",
"img",
")"
] |
https://github.com/xiaolonw/TimeCycle/blob/16d33ac0fb0a08105a9ca781c7b1b36898e3b601/utils/imutils2.py#L20-L27
|
|||
projecthamster/hamster
|
19d160090de30e756bdc3122ff935bdaa86e2843
|
waflib/Context.py
|
python
|
Context.exec_command
|
(self, cmd, **kw)
|
return ret
|
Runs an external process and returns the exit status::
def run(tsk):
ret = tsk.generator.bld.exec_command('touch foo.txt')
return ret
If the context has the attribute 'log', then captures and logs the process stderr/stdout.
Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
stdout/stderr values captured.
:param cmd: command argument for subprocess.Popen
:type cmd: string or list
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
:returns: process exit status
:rtype: integer
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure
|
Runs an external process and returns the exit status::
|
[
"Runs",
"an",
"external",
"process",
"and",
"returns",
"the",
"exit",
"status",
"::"
] |
def exec_command(self, cmd, **kw):
"""
Runs an external process and returns the exit status::
def run(tsk):
ret = tsk.generator.bld.exec_command('touch foo.txt')
return ret
If the context has the attribute 'log', then captures and logs the process stderr/stdout.
Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
stdout/stderr values captured.
:param cmd: command argument for subprocess.Popen
:type cmd: string or list
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
:returns: process exit status
:rtype: integer
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
self.log_command(cmd, kw)
if self.logger:
self.logger.info(cmd)
if 'stdout' not in kw:
kw['stdout'] = subprocess.PIPE
if 'stderr' not in kw:
kw['stderr'] = subprocess.PIPE
if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
raise Errors.WafError('Program %s not found!' % cmd[0])
cargs = {}
if 'timeout' in kw:
if sys.hexversion >= 0x3030000:
cargs['timeout'] = kw['timeout']
if not 'start_new_session' in kw:
kw['start_new_session'] = True
del kw['timeout']
if 'input' in kw:
if kw['input']:
cargs['input'] = kw['input']
kw['stdin'] = subprocess.PIPE
del kw['input']
if 'cwd' in kw:
if not isinstance(kw['cwd'], str):
kw['cwd'] = kw['cwd'].abspath()
encoding = kw.pop('decode_as', default_encoding)
try:
ret, out, err = Utils.run_process(cmd, kw, cargs)
except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
if out:
if not isinstance(out, str):
out = out.decode(encoding, errors='replace')
if self.logger:
self.logger.debug('out: %s', out)
else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err:
if not isinstance(err, str):
err = err.decode(encoding, errors='replace')
if self.logger:
self.logger.error('err: %s' % err)
else:
Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
return ret
|
[
"def",
"exec_command",
"(",
"self",
",",
"cmd",
",",
"*",
"*",
"kw",
")",
":",
"subprocess",
"=",
"Utils",
".",
"subprocess",
"kw",
"[",
"'shell'",
"]",
"=",
"isinstance",
"(",
"cmd",
",",
"str",
")",
"self",
".",
"log_command",
"(",
"cmd",
",",
"kw",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"cmd",
")",
"if",
"'stdout'",
"not",
"in",
"kw",
":",
"kw",
"[",
"'stdout'",
"]",
"=",
"subprocess",
".",
"PIPE",
"if",
"'stderr'",
"not",
"in",
"kw",
":",
"kw",
"[",
"'stderr'",
"]",
"=",
"subprocess",
".",
"PIPE",
"if",
"Logs",
".",
"verbose",
"and",
"not",
"kw",
"[",
"'shell'",
"]",
"and",
"not",
"Utils",
".",
"check_exe",
"(",
"cmd",
"[",
"0",
"]",
")",
":",
"raise",
"Errors",
".",
"WafError",
"(",
"'Program %s not found!'",
"%",
"cmd",
"[",
"0",
"]",
")",
"cargs",
"=",
"{",
"}",
"if",
"'timeout'",
"in",
"kw",
":",
"if",
"sys",
".",
"hexversion",
">=",
"0x3030000",
":",
"cargs",
"[",
"'timeout'",
"]",
"=",
"kw",
"[",
"'timeout'",
"]",
"if",
"not",
"'start_new_session'",
"in",
"kw",
":",
"kw",
"[",
"'start_new_session'",
"]",
"=",
"True",
"del",
"kw",
"[",
"'timeout'",
"]",
"if",
"'input'",
"in",
"kw",
":",
"if",
"kw",
"[",
"'input'",
"]",
":",
"cargs",
"[",
"'input'",
"]",
"=",
"kw",
"[",
"'input'",
"]",
"kw",
"[",
"'stdin'",
"]",
"=",
"subprocess",
".",
"PIPE",
"del",
"kw",
"[",
"'input'",
"]",
"if",
"'cwd'",
"in",
"kw",
":",
"if",
"not",
"isinstance",
"(",
"kw",
"[",
"'cwd'",
"]",
",",
"str",
")",
":",
"kw",
"[",
"'cwd'",
"]",
"=",
"kw",
"[",
"'cwd'",
"]",
".",
"abspath",
"(",
")",
"encoding",
"=",
"kw",
".",
"pop",
"(",
"'decode_as'",
",",
"default_encoding",
")",
"try",
":",
"ret",
",",
"out",
",",
"err",
"=",
"Utils",
".",
"run_process",
"(",
"cmd",
",",
"kw",
",",
"cargs",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"Errors",
".",
"WafError",
"(",
"'Execution failure: %s'",
"%",
"str",
"(",
"e",
")",
",",
"ex",
"=",
"e",
")",
"if",
"out",
":",
"if",
"not",
"isinstance",
"(",
"out",
",",
"str",
")",
":",
"out",
"=",
"out",
".",
"decode",
"(",
"encoding",
",",
"errors",
"=",
"'replace'",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'out: %s'",
",",
"out",
")",
"else",
":",
"Logs",
".",
"info",
"(",
"out",
",",
"extra",
"=",
"{",
"'stream'",
":",
"sys",
".",
"stdout",
",",
"'c1'",
":",
"''",
"}",
")",
"if",
"err",
":",
"if",
"not",
"isinstance",
"(",
"err",
",",
"str",
")",
":",
"err",
"=",
"err",
".",
"decode",
"(",
"encoding",
",",
"errors",
"=",
"'replace'",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"error",
"(",
"'err: %s'",
"%",
"err",
")",
"else",
":",
"Logs",
".",
"info",
"(",
"err",
",",
"extra",
"=",
"{",
"'stream'",
":",
"sys",
".",
"stderr",
",",
"'c1'",
":",
"''",
"}",
")",
"return",
"ret"
] |
https://github.com/projecthamster/hamster/blob/19d160090de30e756bdc3122ff935bdaa86e2843/waflib/Context.py#L307-L382
|
|
JDAI-CV/Partial-Person-ReID
|
fb94dbfbec1105bbc22a442702bc6e385427d416
|
fastreid/solver/optim/swa.py
|
python
|
SWA.update_swa
|
(self)
|
r"""Updates the SWA running averages of all optimized parameters.
|
r"""Updates the SWA running averages of all optimized parameters.
|
[
"r",
"Updates",
"the",
"SWA",
"running",
"averages",
"of",
"all",
"optimized",
"parameters",
"."
] |
def update_swa(self):
r"""Updates the SWA running averages of all optimized parameters.
"""
for group in self.param_groups:
self.update_swa_group(group)
|
[
"def",
"update_swa",
"(",
"self",
")",
":",
"for",
"group",
"in",
"self",
".",
"param_groups",
":",
"self",
".",
"update_swa_group",
"(",
"group",
")"
] |
https://github.com/JDAI-CV/Partial-Person-ReID/blob/fb94dbfbec1105bbc22a442702bc6e385427d416/fastreid/solver/optim/swa.py#L163-L167
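A hedged sketch of how update_swa is typically driven. The fastreid optimizer is adapted from torchcontrib's SWA, so the loop below uses torchcontrib's documented manual mode (update_swa plus swap_swa_sgd); the model and data are synthetic placeholders.

import torch
from torch import nn
from torchcontrib.optim import SWA    # assumption: torchcontrib is installed

model = nn.Linear(4, 1)
opt = SWA(torch.optim.SGD(model.parameters(), lr=0.01))
x, y = torch.randn(64, 4), torch.randn(64, 1)
for step in range(200):
    opt.zero_grad()
    nn.functional.mse_loss(model(x), y).backward()
    opt.step()
    if step >= 150:
        opt.update_swa()              # accumulate running averages, as above
opt.swap_swa_sgd()                    # install the averaged weights into the model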
|
||
pyparallel/pyparallel
|
11e8c6072d48c8f13641925d17b147bf36ee0ba3
|
Lib/imaplib.py
|
python
|
IMAP4.setquota
|
(self, root, limits)
|
return self._untagged_response(typ, dat, 'QUOTA')
|
Set the quota root's resource limits.
(typ, [data]) = <instance>.setquota(root, limits)
|
Set the quota root's resource limits.
|
[
"Set",
"the",
"quota",
"root",
"s",
"resource",
"limits",
"."
] |
def setquota(self, root, limits):
"""Set the quota root's resource limits.
(typ, [data]) = <instance>.setquota(root, limits)
"""
typ, dat = self._simple_command('SETQUOTA', root, limits)
return self._untagged_response(typ, dat, 'QUOTA')
|
[
"def",
"setquota",
"(",
"self",
",",
"root",
",",
"limits",
")",
":",
"typ",
",",
"dat",
"=",
"self",
".",
"_simple_command",
"(",
"'SETQUOTA'",
",",
"root",
",",
"limits",
")",
"return",
"self",
".",
"_untagged_response",
"(",
"typ",
",",
"dat",
",",
"'QUOTA'",
")"
] |
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/imaplib.py#L712-L718
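A brief usage sketch for setquota; the host, credentials and limits are placeholders, and the server has to support the IMAP QUOTA extension (RFC 2087).

import imaplib

M = imaplib.IMAP4_SSL("imap.example.com")
M.login("user@example.com", "app-password")
# Cap the default quota root at 1 GiB of storage (limits are in kilobytes).
typ, data = M.setquota('""', "(STORAGE 1048576)")
print(typ, data)
M.logout()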
|
|
Source-Python-Dev-Team/Source.Python
|
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
|
addons/source-python/Python3/dis.py
|
python
|
disassemble
|
(co, lasti=-1, *, file=None)
|
Disassemble a code object.
|
Disassemble a code object.
|
[
"Disassemble",
"a",
"code",
"object",
"."
] |
def disassemble(co, lasti=-1, *, file=None):
"""Disassemble a code object."""
cell_names = co.co_cellvars + co.co_freevars
linestarts = dict(findlinestarts(co))
_disassemble_bytes(co.co_code, lasti, co.co_varnames, co.co_names,
co.co_consts, cell_names, linestarts, file=file)
|
[
"def",
"disassemble",
"(",
"co",
",",
"lasti",
"=",
"-",
"1",
",",
"*",
",",
"file",
"=",
"None",
")",
":",
"cell_names",
"=",
"co",
".",
"co_cellvars",
"+",
"co",
".",
"co_freevars",
"linestarts",
"=",
"dict",
"(",
"findlinestarts",
"(",
"co",
")",
")",
"_disassemble_bytes",
"(",
"co",
".",
"co_code",
",",
"lasti",
",",
"co",
".",
"co_varnames",
",",
"co",
".",
"co_names",
",",
"co",
".",
"co_consts",
",",
"cell_names",
",",
"linestarts",
",",
"file",
"=",
"file",
")"
] |
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/Python3/dis.py#L330-L335
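A quick usage sketch with the standard-library dis module; any code object works, here one taken from a throwaway function.

import dis

def greet(name):
    return "hello, " + name

dis.disassemble(greet.__code__)   # prints the bytecode listing to stdout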
|
||
linxid/Machine_Learning_Study_Path
|
558e82d13237114bbb8152483977806fc0c222af
|
Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/tempfile.py
|
python
|
TemporaryDirectory.__exit__
|
(self, exc, value, tb)
|
[] |
def __exit__(self, exc, value, tb):
self.cleanup()
|
[
"def",
"__exit__",
"(",
"self",
",",
"exc",
",",
"value",
",",
"tb",
")",
":",
"self",
".",
"cleanup",
"(",
")"
] |
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter4-NaiveBayes/venv/Lib/tempfile.py#L806-L807
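The __exit__ above simply triggers cleanup(), which is what makes the class usable as a context manager; typical usage with the standard-library tempfile module looks like this.

import os
import tempfile

with tempfile.TemporaryDirectory() as workdir:
    path = os.path.join(workdir, "scratch.txt")
    with open(path, "w") as fh:
        fh.write("temporary data")
    print(os.listdir(workdir))        # ['scratch.txt']
print(os.path.exists(workdir))        # False: __exit__ already cleaned up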
|
||||
AnonGit90210/RamanujanMachine
|
1f4f8f76e61291f4dc4a81fead4a721f21f5f943
|
cont_fracs.py
|
python
|
ContFrac.iteration_algorithm
|
(self, params, state_mat, i, print_calc)
|
return (params, new_promo_mat)
|
Generates the next iteration, using the "matrix product" of params and promo_matrix of:
| P_{i+1} Q_{i+1} | | a_i b_i | | P_i Q_i | | P_0 Q_0 | | a_0 1 |
| P_i Q_i | = | 1 0 | X | P_{i-1} Q_{i-1} | , | P_{-1} Q_{-1} | = | 1 0 |
contfrac_i = P_i/Q_i
params - {'a_coeffs': ..., 'b_coeffs': ..., 'contfrac_res': ...}
state_mat - [[p_i, p_{i-1}], [q_i, q_{i-1}]], is actually the current state matrix. Name is only for signature
compatibility.
i - iteration index, used to generate the [a_i b_i; 1 0] matrix.
print_calc - whether to print the calculation process. For debugging.
|
Generates the next iteration, using the "matrix product" of params and promo_matrix of:
| P_{i+1} Q_{i+1} | | a_i b_i | | P_i Q_i | | P_0 Q_0 | | a_0 1 |
| P_i Q_i | = | 1 0 | X | P_{i-1} Q_{i-1} | , | P_{-1} Q_{-1} | = | 1 0 |
contfrac_i = P_i/Q_i
|
[
"Generates",
"the",
"next",
"iteration",
"using",
"the",
"matrix",
"product",
"of",
"params",
"and",
"promo_matrix",
"of",
":",
"|",
"P_",
"{",
"i",
"+",
"1",
"}",
"Q_",
"{",
"i",
"+",
"1",
"}",
"|",
"|",
"a_i",
"b_i",
"|",
"|",
"P_i",
"Q_i",
"|",
"|",
"P_0",
"Q_0",
"|",
"|",
"a_0",
"1",
"|",
"|",
"P_i",
"Q_i",
"|",
"=",
"|",
"1",
"0",
"|",
"X",
"|",
"P_",
"{",
"i",
"-",
"1",
"}",
"Q_",
"{",
"i",
"-",
"1",
"}",
"|",
"|",
"P_",
"{",
"-",
"1",
"}",
"Q_",
"{",
"-",
"1",
"}",
"|",
"=",
"|",
"1",
"0",
"|",
"contfrac_i",
"=",
"P_i",
"/",
"Q_i"
] |
def iteration_algorithm(self, params, state_mat, i, print_calc):
"""Generates the next iteration, using the "matrix product" of params and promo_matrix of:
| P_{i+1} Q_{i+1} | | a_i b_i | | P_i Q_i | | P_0 Q_0 | | a_0 1 |
| P_i Q_i | = | 1 0 | X | P_{i-1} Q_{i-1} | , | P_{-1} Q_{-1} | = | 1 0 |
contfrac_i = P_i/Q_i
params - {'a_coeffs': ..., 'b_coeffs': ..., 'contfrac_res': ...}
state_mat - [[p_i, p_{i-1}], [q_i, q_{i-1}]], is actually the current state matrix. Name is only for signature
compatibility.
i - iteration index, used to generate the [a_i b_i; 1 0] matrix.
print_calc - whether to print the calculation process. For debugging."""
i += 1
a_coeffs = params['a_coeffs']
b_coeffs = params['b_coeffs']
# state_mat - the [a_i b_i; 1 0] matrix. Actually, it's just a [a_i, b_i] list
ab_mat = self._iter_promo_matrix_generator(i, a_coeffs, b_coeffs, print_calc)
p_vec, q_vec = state_mat
# TODO: if calculations are slow, once in a while (i % 100 == 0?) check if gcd(p_i,p_{i-1}) != 0
# TODO: and take it out as a factor (add p_gcd, q_gcd params)
new_p_vec = (sum([l*m for l, m in zip(p_vec, ab_mat)]), p_vec[0])
new_q_vec = (sum([l*m for l, m in zip(q_vec, ab_mat)]), q_vec[0])
if dec('inf') in new_p_vec:
raise ValueError('infinity p')
if dec('inf') in new_q_vec:
raise ValueError('infinity q')
if self._logging:
p_i = dec(new_p_vec[0])
q_i = dec(new_q_vec[0])
if not isnormal(q_i):
contfrac_res_i = dec('NaN')
else:
contfrac_res_i = p_i / q_i
params['contfrac_res'] = contfrac_res_i
new_promo_mat = (new_p_vec, new_q_vec)
return (params, new_promo_mat)
|
[
"def",
"iteration_algorithm",
"(",
"self",
",",
"params",
",",
"state_mat",
",",
"i",
",",
"print_calc",
")",
":",
"i",
"+=",
"1",
"a_coeffs",
"=",
"params",
"[",
"'a_coeffs'",
"]",
"b_coeffs",
"=",
"params",
"[",
"'b_coeffs'",
"]",
"# state_mat - the [a_i b_i; 1 0] matrix. Actually, it's just a [a_i, b_i] list",
"ab_mat",
"=",
"self",
".",
"_iter_promo_matrix_generator",
"(",
"i",
",",
"a_coeffs",
",",
"b_coeffs",
",",
"print_calc",
")",
"p_vec",
",",
"q_vec",
"=",
"state_mat",
"# TODO: if calculations are slow, once in a while (i % 100 == 0?) check if gcd(p_i,p_{i-1}) != 0",
"# TODO: and take it out as a factor (add p_gcd, q_gcd params)",
"new_p_vec",
"=",
"(",
"sum",
"(",
"[",
"l",
"*",
"m",
"for",
"l",
",",
"m",
"in",
"zip",
"(",
"p_vec",
",",
"ab_mat",
")",
"]",
")",
",",
"p_vec",
"[",
"0",
"]",
")",
"new_q_vec",
"=",
"(",
"sum",
"(",
"[",
"l",
"*",
"m",
"for",
"l",
",",
"m",
"in",
"zip",
"(",
"q_vec",
",",
"ab_mat",
")",
"]",
")",
",",
"q_vec",
"[",
"0",
"]",
")",
"if",
"dec",
"(",
"'inf'",
")",
"in",
"new_p_vec",
":",
"raise",
"ValueError",
"(",
"'infinity p'",
")",
"if",
"dec",
"(",
"'inf'",
")",
"in",
"new_q_vec",
":",
"raise",
"ValueError",
"(",
"'infinity q'",
")",
"if",
"self",
".",
"_logging",
":",
"p_i",
"=",
"dec",
"(",
"new_p_vec",
"[",
"0",
"]",
")",
"q_i",
"=",
"dec",
"(",
"new_q_vec",
"[",
"0",
"]",
")",
"if",
"not",
"isnormal",
"(",
"q_i",
")",
":",
"contfrac_res_i",
"=",
"dec",
"(",
"'NaN'",
")",
"else",
":",
"contfrac_res_i",
"=",
"p_i",
"/",
"q_i",
"params",
"[",
"'contfrac_res'",
"]",
"=",
"contfrac_res_i",
"new_promo_mat",
"=",
"(",
"new_p_vec",
",",
"new_q_vec",
")",
"return",
"(",
"params",
",",
"new_promo_mat",
")"
] |
https://github.com/AnonGit90210/RamanujanMachine/blob/1f4f8f76e61291f4dc4a81fead4a721f21f5f943/cont_fracs.py#L68-L105
|
|
nlloyd/SubliminalCollaborator
|
5c619e17ddbe8acb9eea8996ec038169ddcd50a1
|
libs/twisted/words/im/basechat.py
|
python
|
ChatUI.registerAccountClient
|
(self, client)
|
return client
|
Notify the user that an account has been signed on to.
@type client: L{IClient<interfaces.IClient>} provider
@param client: The client account for the person who has just signed on.
@rtype client: L{IClient<interfaces.IClient>} provider
@return: The client, so that it may be used in a callback chain.
|
Notify the user that an account has been signed on to.
|
[
"Notify",
"the",
"user",
"that",
"an",
"account",
"has",
"been",
"signed",
"on",
"to",
"."
] |
def registerAccountClient(self, client):
"""
Notify the user that an account has been signed on to.
@type client: L{IClient<interfaces.IClient>} provider
@param client: The client account for the person who has just signed on.
@rtype client: L{IClient<interfaces.IClient>} provider
@return: The client, so that it may be used in a callback chain.
"""
self.onlineClients.append(client)
self.contactsList.registerAccountClient(client)
return client
|
[
"def",
"registerAccountClient",
"(",
"self",
",",
"client",
")",
":",
"self",
".",
"onlineClients",
".",
"append",
"(",
"client",
")",
"self",
".",
"contactsList",
".",
"registerAccountClient",
"(",
"client",
")",
"return",
"client"
] |
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/words/im/basechat.py#L345-L357
|
|
IntelPython/sdc
|
1ebf55c00ef38dfbd401a70b3945e352a5a38b87
|
sdc/extensions/sdc_string_view_ext.py
|
python
|
install_string_view_delegating_methods
|
(nbtype)
|
[] |
def install_string_view_delegating_methods(nbtype):
# TO-DO: generalize?
from numba.core.registry import CPUDispatcher
from numba.core import utils
# need to do refresh, as unicode templates may not be available yet
typingctx = CPUDispatcher.targetdescr.typing_context
typingctx.refresh()
# filter only methods from all attribute templates registered for nbtype
method_templates = list(typingctx._get_attribute_templates(nbtype))
method_templates = [x for x in method_templates if getattr(x, 'is_method', None)]
method_names = [x._attr for x in method_templates]
# for all unicode methods register corresponding StringView overload
# that delegates to it via creating a temporary unicode string
for this_name, this_template in zip(method_names, method_templates):
pysig_str = str(utils.pysignature(this_template._overload_func))
pysig_params = utils.pysignature(this_template._overload_func).parameters.keys()
self_param_name = list(pysig_params)[0]
method_param_names = list(pysig_params)[1:]
inner_call_params = ', '.join([f'{x}={x}' for x in method_param_names])
from textwrap import dedent
func_name = f'string_view_{this_name}'
text = dedent(f"""
@overload_method(StdStringViewType, '{this_name}')
def {func_name}_ovld{pysig_str}:
if not isinstance({self_param_name}, StdStringViewType):
return None
def _impl{pysig_str}:
return str({self_param_name}).{this_name}({inner_call_params})
return _impl
""")
global_vars, local_vars = {'StdStringViewType': StdStringViewType,
'overload_method': overload_method}, {}
exec(text, global_vars, local_vars)
|
[
"def",
"install_string_view_delegating_methods",
"(",
"nbtype",
")",
":",
"# TO-DO: generalize?",
"from",
"numba",
".",
"core",
".",
"registry",
"import",
"CPUDispatcher",
"from",
"numba",
".",
"core",
"import",
"utils",
"# need to do refresh, as unicode templates may not be avaialble yet",
"typingctx",
"=",
"CPUDispatcher",
".",
"targetdescr",
".",
"typing_context",
"typingctx",
".",
"refresh",
"(",
")",
"# filter only methods from all attribute templates registered for nbtype",
"method_templates",
"=",
"list",
"(",
"typingctx",
".",
"_get_attribute_templates",
"(",
"nbtype",
")",
")",
"method_templates",
"=",
"[",
"x",
"for",
"x",
"in",
"method_templates",
"if",
"getattr",
"(",
"x",
",",
"'is_method'",
",",
"None",
")",
"]",
"method_names",
"=",
"[",
"x",
".",
"_attr",
"for",
"x",
"in",
"method_templates",
"]",
"# for all unicode methods register corresponding StringView overload",
"# that delegates to it via creating a temporary unicode string",
"for",
"this_name",
",",
"this_template",
"in",
"zip",
"(",
"method_names",
",",
"method_templates",
")",
":",
"pysig_str",
"=",
"str",
"(",
"utils",
".",
"pysignature",
"(",
"this_template",
".",
"_overload_func",
")",
")",
"pysig_params",
"=",
"utils",
".",
"pysignature",
"(",
"this_template",
".",
"_overload_func",
")",
".",
"parameters",
".",
"keys",
"(",
")",
"self_param_name",
"=",
"list",
"(",
"pysig_params",
")",
"[",
"0",
"]",
"method_param_names",
"=",
"list",
"(",
"pysig_params",
")",
"[",
"1",
":",
"]",
"inner_call_params",
"=",
"', '",
".",
"join",
"(",
"[",
"f'{x}={x}'",
"for",
"x",
"in",
"method_param_names",
"]",
")",
"from",
"textwrap",
"import",
"dedent",
"func_name",
"=",
"f'string_view_{this_name}'",
"text",
"=",
"dedent",
"(",
"f\"\"\"\n @overload_method(StdStringViewType, '{this_name}')\n def {func_name}_ovld{pysig_str}:\n if not isinstance({self_param_name}, StdStringViewType):\n return None\n def _impl{pysig_str}:\n return str({self_param_name}).{this_name}({inner_call_params})\n return _impl\n \"\"\"",
")",
"global_vars",
",",
"local_vars",
"=",
"{",
"'StdStringViewType'",
":",
"StdStringViewType",
",",
"'overload_method'",
":",
"overload_method",
"}",
",",
"{",
"}",
"exec",
"(",
"text",
",",
"global_vars",
",",
"local_vars",
")"
] |
https://github.com/IntelPython/sdc/blob/1ebf55c00ef38dfbd401a70b3945e352a5a38b87/sdc/extensions/sdc_string_view_ext.py#L274-L310
|
||||
mrkipling/maraschino
|
c6be9286937783ae01df2d6d8cebfc8b2734a7d7
|
lib/sqlalchemy/orm/query.py
|
python
|
Query._with_invoke_all_eagers
|
(self, value)
|
Set the 'invoke all eagers' flag which causes joined- and
subquery loaders to traverse into already-loaded related objects
and collections.
Default is that of :attr:`.Query._invoke_all_eagers`.
|
Set the 'invoke all eagers' flag which causes joined- and
subquery loaders to traverse into already-loaded related objects
and collections.
Default is that of :attr:`.Query._invoke_all_eagers`.
|
[
"Set",
"the",
"invoke",
"all",
"eagers",
"flag",
"which",
"causes",
"joined",
"-",
"and",
"subquery",
"loaders",
"to",
"traverse",
"into",
"already",
"-",
"loaded",
"related",
"objects",
"and",
"collections",
".",
"Default",
"is",
"that",
"of",
":",
"attr",
":",
".",
"Query",
".",
"_invoke_all_eagers",
"."
] |
def _with_invoke_all_eagers(self, value):
"""Set the 'invoke all eagers' flag which causes joined- and
subquery loaders to traverse into already-loaded related objects
and collections.
Default is that of :attr:`.Query._invoke_all_eagers`.
"""
self._invoke_all_eagers = value
|
[
"def",
"_with_invoke_all_eagers",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"_invoke_all_eagers",
"=",
"value"
] |
https://github.com/mrkipling/maraschino/blob/c6be9286937783ae01df2d6d8cebfc8b2734a7d7/lib/sqlalchemy/orm/query.py#L777-L785
|
||
pycontribs/pyrax
|
a0c022981f76a4cba96a22ecc19bb52843ac4fbe
|
pyrax/cloudblockstorage.py
|
python
|
CloudBlockStorageVolume.rename
|
(self, name)
|
return self.update(display_name=name)
|
Allows for direct renaming of an existing volume.
|
Allows for direct renaming of an existing volume.
|
[
"Allows",
"for",
"direct",
"renaming",
"of",
"an",
"existing",
"volume",
"."
] |
def rename(self, name):
"""
Allows for direct renaming of an existing volume.
"""
return self.update(display_name=name)
|
[
"def",
"rename",
"(",
"self",
",",
"name",
")",
":",
"return",
"self",
".",
"update",
"(",
"display_name",
"=",
"name",
")"
] |
https://github.com/pycontribs/pyrax/blob/a0c022981f76a4cba96a22ecc19bb52843ac4fbe/pyrax/cloudblockstorage.py#L216-L220
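A hedged usage sketch for rename; the credential file path, volume id and new name are placeholders, and the client is assumed to be reachable through pyrax's module-level cloud_blockstorage handle.

import pyrax

pyrax.set_credential_file("/path/to/pyrax-credentials")   # placeholder path
cbs = pyrax.cloud_blockstorage
vol = cbs.get("volume-id")                                 # placeholder volume id
vol.rename("backups-volume")       # equivalent to vol.update(display_name=...)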
|
|
emeryberger/CSrankings
|
805e55a40e4d3669a51bef2f030492991395bfa9
|
util/scholar.py
|
python
|
ScholarSettings.set_citation_format
|
(self, citform)
|
[] |
def set_citation_format(self, citform):
citform = ScholarUtils.ensure_int(citform)
if citform < 0 or citform > self.CITFORM_BIBTEX:
raise FormatError('citation format invalid, is "%s"' % citform)
self.citform = citform
self._is_configured = True
|
[
"def",
"set_citation_format",
"(",
"self",
",",
"citform",
")",
":",
"citform",
"=",
"ScholarUtils",
".",
"ensure_int",
"(",
"citform",
")",
"if",
"citform",
"<",
"0",
"or",
"citform",
">",
"self",
".",
"CITFORM_BIBTEX",
":",
"raise",
"FormatError",
"(",
"'citation format invalid, is \"%s\"'",
"%",
"citform",
")",
"self",
".",
"citform",
"=",
"citform",
"self",
".",
"_is_configured",
"=",
"True"
] |
https://github.com/emeryberger/CSrankings/blob/805e55a40e4d3669a51bef2f030492991395bfa9/util/scholar.py#L899-L904
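A hedged sketch of how the settings object is typically wired up; class names follow the upstream scholar.py script that CSrankings vendors, and applying the settings is assumed to go through ScholarQuerier.apply_settings.

from scholar import ScholarQuerier, ScholarSettings   # assumes scholar.py is importable

settings = ScholarSettings()
settings.set_citation_format(ScholarSettings.CITFORM_BIBTEX)
querier = ScholarQuerier()
querier.apply_settings(settings)   # subsequent queries request BibTeX citation data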
|
||||
inguma/bokken
|
6109dd0025093a11631cb88cf48cb5c5ed5e617d
|
lib/web/wsgiserver/__init__.py
|
python
|
HTTPServer.bind
|
(self, family, type, proto=0)
|
Create (or recreate) the actual socket object.
|
Create (or recreate) the actual socket object.
|
[
"Create",
"(",
"or",
"recreate",
")",
"the",
"actual",
"socket",
"object",
"."
] |
def bind(self, family, type, proto=0):
"""Create (or recreate) the actual socket object."""
self.socket = socket.socket(family, type, proto)
prevent_socket_inheritance(self.socket)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.nodelay and not isinstance(self.bind_addr, str):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if self.ssl_adapter is not None:
self.socket = self.ssl_adapter.bind(self.socket)
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See http://www.cherrypy.org/ticket/871.
if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6
and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')):
try:
self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
except (AttributeError, socket.error):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
self.socket.bind(self.bind_addr)
|
[
"def",
"bind",
"(",
"self",
",",
"family",
",",
"type",
",",
"proto",
"=",
"0",
")",
":",
"self",
".",
"socket",
"=",
"socket",
".",
"socket",
"(",
"family",
",",
"type",
",",
"proto",
")",
"prevent_socket_inheritance",
"(",
"self",
".",
"socket",
")",
"self",
".",
"socket",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEADDR",
",",
"1",
")",
"if",
"self",
".",
"nodelay",
"and",
"not",
"isinstance",
"(",
"self",
".",
"bind_addr",
",",
"str",
")",
":",
"self",
".",
"socket",
".",
"setsockopt",
"(",
"socket",
".",
"IPPROTO_TCP",
",",
"socket",
".",
"TCP_NODELAY",
",",
"1",
")",
"if",
"self",
".",
"ssl_adapter",
"is",
"not",
"None",
":",
"self",
".",
"socket",
"=",
"self",
".",
"ssl_adapter",
".",
"bind",
"(",
"self",
".",
"socket",
")",
"# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),",
"# activate dual-stack. See http://www.cherrypy.org/ticket/871.",
"if",
"(",
"hasattr",
"(",
"socket",
",",
"'AF_INET6'",
")",
"and",
"family",
"==",
"socket",
".",
"AF_INET6",
"and",
"self",
".",
"bind_addr",
"[",
"0",
"]",
"in",
"(",
"'::'",
",",
"'::0'",
",",
"'::0.0.0.0'",
")",
")",
":",
"try",
":",
"self",
".",
"socket",
".",
"setsockopt",
"(",
"socket",
".",
"IPPROTO_IPV6",
",",
"socket",
".",
"IPV6_V6ONLY",
",",
"0",
")",
"except",
"(",
"AttributeError",
",",
"socket",
".",
"error",
")",
":",
"# Apparently, the socket option is not available in",
"# this machine's TCP stack",
"pass",
"self",
".",
"socket",
".",
"bind",
"(",
"self",
".",
"bind_addr",
")"
] |
https://github.com/inguma/bokken/blob/6109dd0025093a11631cb88cf48cb5c5ed5e617d/lib/web/wsgiserver/__init__.py#L1773-L1795
|
||
datalogue/keras-attention
|
12bbcd1121976e9db9fe87371772bb7f4dcd0302
|
data/reader.py
|
python
|
Data.generator
|
(self, batch_size)
|
Creates a generator that can be used in `model.fit_generator()`
Batches are generated randomly.
:param batch_size: the number of instances to include per batch
|
Creates a generator that can be used in `model.fit_generator()`
Batches are generated randomly.
:param batch_size: the number of instances to include per batch
|
[
"Creates",
"a",
"generator",
"that",
"can",
"be",
"used",
"in",
"model",
".",
"fit_generator",
"()",
"Batches",
"are",
"generated",
"randomly",
".",
":",
"param",
"batch_size",
":",
"the",
"number",
"of",
"instances",
"to",
"include",
"per",
"batch"
] |
def generator(self, batch_size):
"""
Creates a generator that can be used in `model.fit_generator()`
Batches are generated randomly.
:param batch_size: the number of instances to include per batch
"""
instance_id = range(len(self.inputs))
while True:
try:
batch_ids = random.sample(instance_id, batch_size)
yield (np.array(self.inputs[batch_ids], dtype=int),
np.array(self.targets[batch_ids]))
except Exception as e:
print('EXCEPTION OMG')
print(e)
yield None, None
|
[
"def",
"generator",
"(",
"self",
",",
"batch_size",
")",
":",
"instance_id",
"=",
"range",
"(",
"len",
"(",
"self",
".",
"inputs",
")",
")",
"while",
"True",
":",
"try",
":",
"batch_ids",
"=",
"random",
".",
"sample",
"(",
"instance_id",
",",
"batch_size",
")",
"yield",
"(",
"np",
".",
"array",
"(",
"self",
".",
"inputs",
"[",
"batch_ids",
"]",
",",
"dtype",
"=",
"int",
")",
",",
"np",
".",
"array",
"(",
"self",
".",
"targets",
"[",
"batch_ids",
"]",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"'EXCEPTION OMG'",
")",
"print",
"(",
"e",
")",
"yield",
"None",
",",
"None"
] |
https://github.com/datalogue/keras-attention/blob/12bbcd1121976e9db9fe87371772bb7f4dcd0302/data/reader.py#L126-L141
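A hedged sketch of feeding the generator to Keras; the Data instance and compiled model are assumed to be supplied by the caller, and the batch size is illustrative.

def train_with_generator(model, data, batch_size=32, epochs=10):
    # `data` is an instance of the Data class above with inputs/targets loaded;
    # `model` is a compiled Keras model.
    steps = len(data.inputs) // batch_size
    return model.fit_generator(data.generator(batch_size),
                               steps_per_epoch=steps,
                               epochs=epochs)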
|
||
tp4a/teleport
|
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
|
server/www/packages/packages-linux/x64/ldap3/strategy/base.py
|
python
|
BaseStrategy.send
|
(self, message_type, request, controls=None)
|
return message_id
|
Send an LDAP message
Returns the message_id
|
Send an LDAP message
Returns the message_id
|
[
"Send",
"an",
"LDAP",
"message",
"Returns",
"the",
"message_id"
] |
def send(self, message_type, request, controls=None):
"""
Send an LDAP message
Returns the message_id
"""
self.connection.request = None
if self.connection.listening:
if self.connection.sasl_in_progress and message_type not in ['bindRequest']: # as per RFC4511 (4.2.1)
self.connection.last_error = 'cannot send operation requests while SASL bind is in progress'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSASLBindInProgressError(self.connection.last_error)
message_id = self.connection.server.next_message_id()
ldap_message = LDAPMessage()
ldap_message['messageID'] = MessageID(message_id)
ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
message_controls = build_controls_list(controls)
if message_controls is not None:
ldap_message['controls'] = message_controls
self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
self._outstanding[message_id] = self.connection.request
self.sending(ldap_message)
else:
self.connection.last_error = 'unable to send message, socket is not open'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPSocketOpenError(self.connection.last_error)
return message_id
|
[
"def",
"send",
"(",
"self",
",",
"message_type",
",",
"request",
",",
"controls",
"=",
"None",
")",
":",
"self",
".",
"connection",
".",
"request",
"=",
"None",
"if",
"self",
".",
"connection",
".",
"listening",
":",
"if",
"self",
".",
"connection",
".",
"sasl_in_progress",
"and",
"message_type",
"not",
"in",
"[",
"'bindRequest'",
"]",
":",
"# as per RFC4511 (4.2.1)",
"self",
".",
"connection",
".",
"last_error",
"=",
"'cannot send operation requests while SASL bind is in progress'",
"if",
"log_enabled",
"(",
"ERROR",
")",
":",
"log",
"(",
"ERROR",
",",
"'<%s> for <%s>'",
",",
"self",
".",
"connection",
".",
"last_error",
",",
"self",
".",
"connection",
")",
"raise",
"LDAPSASLBindInProgressError",
"(",
"self",
".",
"connection",
".",
"last_error",
")",
"message_id",
"=",
"self",
".",
"connection",
".",
"server",
".",
"next_message_id",
"(",
")",
"ldap_message",
"=",
"LDAPMessage",
"(",
")",
"ldap_message",
"[",
"'messageID'",
"]",
"=",
"MessageID",
"(",
"message_id",
")",
"ldap_message",
"[",
"'protocolOp'",
"]",
"=",
"ProtocolOp",
"(",
")",
".",
"setComponentByName",
"(",
"message_type",
",",
"request",
")",
"message_controls",
"=",
"build_controls_list",
"(",
"controls",
")",
"if",
"message_controls",
"is",
"not",
"None",
":",
"ldap_message",
"[",
"'controls'",
"]",
"=",
"message_controls",
"self",
".",
"connection",
".",
"request",
"=",
"BaseStrategy",
".",
"decode_request",
"(",
"message_type",
",",
"request",
",",
"controls",
")",
"self",
".",
"_outstanding",
"[",
"message_id",
"]",
"=",
"self",
".",
"connection",
".",
"request",
"self",
".",
"sending",
"(",
"ldap_message",
")",
"else",
":",
"self",
".",
"connection",
".",
"last_error",
"=",
"'unable to send message, socket is not open'",
"if",
"log_enabled",
"(",
"ERROR",
")",
":",
"log",
"(",
"ERROR",
",",
"'<%s> for <%s>'",
",",
"self",
".",
"connection",
".",
"last_error",
",",
"self",
".",
"connection",
")",
"raise",
"LDAPSocketOpenError",
"(",
"self",
".",
"connection",
".",
"last_error",
")",
"return",
"message_id"
] |
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/ldap3/strategy/base.py#L308-L336
|
|
tendenci/tendenci
|
0f2c348cc0e7d41bc56f50b00ce05544b083bf1d
|
tendenci/apps/articles/views.py
|
python
|
export_download
|
(request, identifier)
|
return response
|
Download the profiles export.
|
Download the profiles export.
|
[
"Download",
"the",
"profiles",
"export",
"."
] |
def export_download(request, identifier):
"""Download the profiles export."""
if not request.user.profile.is_staff:
raise Http403
file_name = '%s.csv' % identifier
file_path = 'export/articles/%s' % file_name
if not default_storage.exists(file_path):
raise Http404
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="articles_export_%s"' % file_name
response.content = default_storage.open(file_path).read()
return response
|
[
"def",
"export_download",
"(",
"request",
",",
"identifier",
")",
":",
"if",
"not",
"request",
".",
"user",
".",
"profile",
".",
"is_staff",
":",
"raise",
"Http403",
"file_name",
"=",
"'%s.csv'",
"%",
"identifier",
"file_path",
"=",
"'export/articles/%s'",
"%",
"file_name",
"if",
"not",
"default_storage",
".",
"exists",
"(",
"file_path",
")",
":",
"raise",
"Http404",
"response",
"=",
"HttpResponse",
"(",
"content_type",
"=",
"'text/csv'",
")",
"response",
"[",
"'Content-Disposition'",
"]",
"=",
"'attachment; filename=\"articles_export_%s\"'",
"%",
"file_name",
"response",
".",
"content",
"=",
"default_storage",
".",
"open",
"(",
"file_path",
")",
".",
"read",
"(",
")",
"return",
"response"
] |
https://github.com/tendenci/tendenci/blob/0f2c348cc0e7d41bc56f50b00ce05544b083bf1d/tendenci/apps/articles/views.py#L449-L462
|
|
tumblr/pytumblr
|
afe2c4c67068095771a1d3e70eba20fa4e513c08
|
pytumblr/__init__.py
|
python
|
TumblrRestClient.reblog
|
(self, blogname, **kwargs)
|
return self.send_api_request('post', url, kwargs, valid_options)
|
Creates a reblog on the given blogname
:param blogname: a string, the url of the blog you want to reblog to
:param id: an int, the post id that you are reblogging
:param reblog_key: a string, the reblog key of the post
:param comment: a string, a comment added to the reblogged post
:returns: a dict created from the JSON response
|
Creates a reblog on the given blogname
|
[
"Creates",
"a",
"reblog",
"on",
"the",
"given",
"blogname"
] |
def reblog(self, blogname, **kwargs):
"""
Creates a reblog on the given blogname
:param blogname: a string, the url of the blog you want to reblog to
:param id: an int, the post id that you are reblogging
:param reblog_key: a string, the reblog key of the post
:param comment: a string, a comment added to the reblogged post
:returns: a dict created from the JSON response
"""
url = "/v2/blog/{}/post/reblog".format(blogname)
valid_options = ['id', 'reblog_key', 'comment'] + self._post_valid_options(kwargs.get('type', None))
if 'tags' in kwargs and kwargs['tags']:
# Take a list of tags and make them acceptable for upload
kwargs['tags'] = ",".join(kwargs['tags'])
return self.send_api_request('post', url, kwargs, valid_options)
|
[
"def",
"reblog",
"(",
"self",
",",
"blogname",
",",
"*",
"*",
"kwargs",
")",
":",
"url",
"=",
"\"/v2/blog/{}/post/reblog\"",
".",
"format",
"(",
"blogname",
")",
"valid_options",
"=",
"[",
"'id'",
",",
"'reblog_key'",
",",
"'comment'",
"]",
"+",
"self",
".",
"_post_valid_options",
"(",
"kwargs",
".",
"get",
"(",
"'type'",
",",
"None",
")",
")",
"if",
"'tags'",
"in",
"kwargs",
"and",
"kwargs",
"[",
"'tags'",
"]",
":",
"# Take a list of tags and make them acceptable for upload",
"kwargs",
"[",
"'tags'",
"]",
"=",
"\",\"",
".",
"join",
"(",
"kwargs",
"[",
"'tags'",
"]",
")",
"return",
"self",
".",
"send_api_request",
"(",
"'post'",
",",
"url",
",",
"kwargs",
",",
"valid_options",
")"
] |
https://github.com/tumblr/pytumblr/blob/afe2c4c67068095771a1d3e70eba20fa4e513c08/pytumblr/__init__.py#L434-L451
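A short usage sketch for reblog; the OAuth credentials, blog name, post id and reblog key below are placeholders.

import pytumblr

client = pytumblr.TumblrRestClient(
    "consumer_key", "consumer_secret", "oauth_token", "oauth_secret")

response = client.reblog(
    "myblog",                        # blog to reblog onto
    id=123456,                       # post being reblogged
    reblog_key="abcdef",
    comment="worth re-reading",
    tags=["python", "api"])          # joined into a comma-separated string above
print(response)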
|
|
NxRLab/ModernRobotics
|
4a3d25ae0a64d6c9c65d78345452155e400efa8e
|
packages/Python/modern_robotics/core.py
|
python
|
InverseDynamicsTrajectory
|
(thetamat, dthetamat, ddthetamat, g, \
Ftipmat, Mlist, Glist, Slist)
|
return taumat
|
Calculates the joint forces/torques required to move the serial chain
along the given trajectory using inverse dynamics
:param thetamat: An N x n matrix of robot joint variables
:param dthetamat: An N x n matrix of robot joint velocities
:param ddthetamat: An N x n matrix of robot joint accelerations
:param g: Gravity vector g
:param Ftipmat: An N x 6 matrix of spatial forces applied by the end-
effector (If there are no tip forces the user should
input a zero and a zero matrix will be used)
:param Mlist: List of link frames i relative to i-1 at the home position
:param Glist: Spatial inertia matrices Gi of the links
:param Slist: Screw axes Si of the joints in a space frame, in the format
of a matrix with axes as the columns
:return: The N x n matrix of joint forces/torques for the specified
trajectory, where each of the N rows is the vector of joint
forces/torques at each time step
Example Inputs (3 Link Robot):
from __future__ import print_function
import numpy as np
import modern_robotics as mr
# Create a trajectory to follow using functions from Chapter 9
thetastart = np.array([0, 0, 0])
thetaend = np.array([np.pi / 2, np.pi / 2, np.pi / 2])
Tf = 3
N= 1000
method = 5
traj = mr.JointTrajectory(thetastart, thetaend, Tf, N, method)
thetamat = np.array(traj).copy()
dthetamat = np.zeros((1000,3 ))
ddthetamat = np.zeros((1000, 3))
dt = Tf / (N - 1.0)
for i in range(np.array(traj).shape[0] - 1):
dthetamat[i + 1, :] = (thetamat[i + 1, :] - thetamat[i, :]) / dt
ddthetamat[i + 1, :] \
= (dthetamat[i + 1, :] - dthetamat[i, :]) / dt
# Initialize robot description (Example with 3 links)
g = np.array([0, 0, -9.8])
Ftipmat = np.ones((N, 6))
M01 = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0.089159],
[0, 0, 0, 1]])
M12 = np.array([[ 0, 0, 1, 0.28],
[ 0, 1, 0, 0.13585],
[-1, 0, 0, 0],
[ 0, 0, 0, 1]])
M23 = np.array([[1, 0, 0, 0],
[0, 1, 0, -0.1197],
[0, 0, 1, 0.395],
[0, 0, 0, 1]])
M34 = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0.14225],
[0, 0, 0, 1]])
G1 = np.diag([0.010267, 0.010267, 0.00666, 3.7, 3.7, 3.7])
G2 = np.diag([0.22689, 0.22689, 0.0151074, 8.393, 8.393, 8.393])
G3 = np.diag([0.0494433, 0.0494433, 0.004095, 2.275, 2.275, 2.275])
Glist = np.array([G1, G2, G3])
Mlist = np.array([M01, M12, M23, M34])
Slist = np.array([[1, 0, 1, 0, 1, 0],
[0, 1, 0, -0.089, 0, 0],
[0, 1, 0, -0.089, 0, 0.425]]).T
taumat \
= mr.InverseDynamicsTrajectory(thetamat, dthetamat, ddthetamat, g, \
Ftipmat, Mlist, Glist, Slist)
# Output using matplotlib to plot the joint forces/torques
Tau1 = taumat[:, 0]
Tau2 = taumat[:, 1]
Tau3 = taumat[:, 2]
timestamp = np.linspace(0, Tf, N)
try:
import matplotlib.pyplot as plt
except:
print('The result will not be plotted due to a lack of package matplotlib')
else:
plt.plot(timestamp, Tau1, label = "Tau1")
plt.plot(timestamp, Tau2, label = "Tau2")
plt.plot(timestamp, Tau3, label = "Tau3")
plt.ylim (-40, 120)
plt.legend(loc = 'lower right')
plt.xlabel("Time")
plt.ylabel("Torque")
plt.title("Plot of Torque Trajectories")
plt.show()
|
Calculates the joint forces/torques required to move the serial chain
along the given trajectory using inverse dynamics
|
[
"Calculates",
"the",
"joint",
"forces",
"/",
"torques",
"required",
"to",
"move",
"the",
"serial",
"chain",
"along",
"the",
"given",
"trajectory",
"using",
"inverse",
"dynamics"
] |
def InverseDynamicsTrajectory(thetamat, dthetamat, ddthetamat, g, \
Ftipmat, Mlist, Glist, Slist):
"""Calculates the joint forces/torques required to move the serial chain
along the given trajectory using inverse dynamics
:param thetamat: An N x n matrix of robot joint variables
:param dthetamat: An N x n matrix of robot joint velocities
:param ddthetamat: An N x n matrix of robot joint accelerations
:param g: Gravity vector g
:param Ftipmat: An N x 6 matrix of spatial forces applied by the end-
effector (If there are no tip forces the user should
input a zero and a zero matrix will be used)
:param Mlist: List of link frames i relative to i-1 at the home position
:param Glist: Spatial inertia matrices Gi of the links
:param Slist: Screw axes Si of the joints in a space frame, in the format
of a matrix with axes as the columns
:return: The N x n matrix of joint forces/torques for the specified
trajectory, where each of the N rows is the vector of joint
forces/torques at each time step
Example Inputs (3 Link Robot):
from __future__ import print_function
import numpy as np
import modern_robotics as mr
# Create a trajectory to follow using functions from Chapter 9
thetastart = np.array([0, 0, 0])
thetaend = np.array([np.pi / 2, np.pi / 2, np.pi / 2])
Tf = 3
N= 1000
method = 5
traj = mr.JointTrajectory(thetastart, thetaend, Tf, N, method)
thetamat = np.array(traj).copy()
dthetamat = np.zeros((1000,3 ))
ddthetamat = np.zeros((1000, 3))
dt = Tf / (N - 1.0)
for i in range(np.array(traj).shape[0] - 1):
dthetamat[i + 1, :] = (thetamat[i + 1, :] - thetamat[i, :]) / dt
ddthetamat[i + 1, :] \
= (dthetamat[i + 1, :] - dthetamat[i, :]) / dt
# Initialize robot description (Example with 3 links)
g = np.array([0, 0, -9.8])
Ftipmat = np.ones((N, 6))
M01 = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0.089159],
[0, 0, 0, 1]])
M12 = np.array([[ 0, 0, 1, 0.28],
[ 0, 1, 0, 0.13585],
[-1, 0, 0, 0],
[ 0, 0, 0, 1]])
M23 = np.array([[1, 0, 0, 0],
[0, 1, 0, -0.1197],
[0, 0, 1, 0.395],
[0, 0, 0, 1]])
M34 = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0.14225],
[0, 0, 0, 1]])
G1 = np.diag([0.010267, 0.010267, 0.00666, 3.7, 3.7, 3.7])
G2 = np.diag([0.22689, 0.22689, 0.0151074, 8.393, 8.393, 8.393])
G3 = np.diag([0.0494433, 0.0494433, 0.004095, 2.275, 2.275, 2.275])
Glist = np.array([G1, G2, G3])
Mlist = np.array([M01, M12, M23, M34])
Slist = np.array([[1, 0, 1, 0, 1, 0],
[0, 1, 0, -0.089, 0, 0],
[0, 1, 0, -0.089, 0, 0.425]]).T
taumat \
= mr.InverseDynamicsTrajectory(thetamat, dthetamat, ddthetamat, g, \
Ftipmat, Mlist, Glist, Slist)
# Output using matplotlib to plot the joint forces/torques
Tau1 = taumat[:, 0]
Tau2 = taumat[:, 1]
Tau3 = taumat[:, 2]
timestamp = np.linspace(0, Tf, N)
try:
import matplotlib.pyplot as plt
except:
print('The result will not be plotted due to a lack of package matplotlib')
else:
plt.plot(timestamp, Tau1, label = "Tau1")
plt.plot(timestamp, Tau2, label = "Tau2")
plt.plot(timestamp, Tau3, label = "Tau3")
plt.ylim (-40, 120)
plt.legend(loc = 'lower right')
plt.xlabel("Time")
plt.ylabel("Torque")
plt.title("Plot of Torque Trajectories")
plt.show()
"""
thetamat = np.array(thetamat).T
dthetamat = np.array(dthetamat).T
ddthetamat = np.array(ddthetamat).T
Ftipmat = np.array(Ftipmat).T
taumat = np.array(thetamat).copy()
for i in range(np.array(thetamat).shape[1]):
taumat[:, i] \
= InverseDynamics(thetamat[:, i], dthetamat[:, i], \
ddthetamat[:, i], g, Ftipmat[:, i], Mlist, \
Glist, Slist)
taumat = np.array(taumat).T
return taumat
|
[
"def",
"InverseDynamicsTrajectory",
"(",
"thetamat",
",",
"dthetamat",
",",
"ddthetamat",
",",
"g",
",",
"Ftipmat",
",",
"Mlist",
",",
"Glist",
",",
"Slist",
")",
":",
"thetamat",
"=",
"np",
".",
"array",
"(",
"thetamat",
")",
".",
"T",
"dthetamat",
"=",
"np",
".",
"array",
"(",
"dthetamat",
")",
".",
"T",
"ddthetamat",
"=",
"np",
".",
"array",
"(",
"ddthetamat",
")",
".",
"T",
"Ftipmat",
"=",
"np",
".",
"array",
"(",
"Ftipmat",
")",
".",
"T",
"taumat",
"=",
"np",
".",
"array",
"(",
"thetamat",
")",
".",
"copy",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"np",
".",
"array",
"(",
"thetamat",
")",
".",
"shape",
"[",
"1",
"]",
")",
":",
"taumat",
"[",
":",
",",
"i",
"]",
"=",
"InverseDynamics",
"(",
"thetamat",
"[",
":",
",",
"i",
"]",
",",
"dthetamat",
"[",
":",
",",
"i",
"]",
",",
"ddthetamat",
"[",
":",
",",
"i",
"]",
",",
"g",
",",
"Ftipmat",
"[",
":",
",",
"i",
"]",
",",
"Mlist",
",",
"Glist",
",",
"Slist",
")",
"taumat",
"=",
"np",
".",
"array",
"(",
"taumat",
")",
".",
"T",
"return",
"taumat"
] |
https://github.com/NxRLab/ModernRobotics/blob/4a3d25ae0a64d6c9c65d78345452155e400efa8e/packages/Python/modern_robotics/core.py#L1230-L1330
|
|
MushroomRL/mushroom-rl
|
a0eaa2cf8001e433419234a9fc48b64170e3f61c
|
mushroom_rl/environments/mujoco.py
|
python
|
MuJoCo._is_absorbing
|
(self, state)
|
Check whether the given state is an absorbing state or not.
Args:
state (np.array): the state of the system.
Returns:
A boolean flag indicating whether this state is absorbing or not.
|
Check whether the given state is an absorbing state or not.
|
[
"Check",
"whether",
"the",
"given",
"state",
"is",
"an",
"absorbing",
"state",
"or",
"not",
"."
] |
def _is_absorbing(self, state):
"""
Check whether the given state is an absorbing state or not.
Args:
state (np.array): the state of the system.
Returns:
A boolean flag indicating whether this state is absorbing or not.
"""
raise NotImplementedError
|
[
"def",
"_is_absorbing",
"(",
"self",
",",
"state",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/MushroomRL/mushroom-rl/blob/a0eaa2cf8001e433419234a9fc48b64170e3f61c/mushroom_rl/environments/mujoco.py#L391-L402
|
||
oilshell/oil
|
94388e7d44a9ad879b12615f6203b38596b5a2d3
|
core/vm.py
|
python
|
_Executor.Time
|
(self)
|
[] |
def Time(self):
# type: () -> None
pass
|
[
"def",
"Time",
"(",
"self",
")",
":",
"# type: () -> None",
"pass"
] |
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/core/vm.py#L112-L114
|
||||
GoogleCloudPlatform/PerfKitBenchmarker
|
6e3412d7d5e414b8ca30ed5eaf970cef1d919a67
|
perfkitbenchmarker/linux_packages/oldisim_dependencies.py
|
python
|
BinaryPath
|
(name)
|
return os.path.join(OLDISIM_DIR, BINARY_BASE, name)
|
Returns the path of a binary within the package.
|
Returns the path of a binary within the package.
|
[
"Returns",
"the",
"path",
"of",
"a",
"binary",
"within",
"the",
"package",
"."
] |
def BinaryPath(name):
"""Returns the path of a binary within the package."""
return os.path.join(OLDISIM_DIR, BINARY_BASE, name)
|
[
"def",
"BinaryPath",
"(",
"name",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"OLDISIM_DIR",
",",
"BINARY_BASE",
",",
"name",
")"
] |
https://github.com/GoogleCloudPlatform/PerfKitBenchmarker/blob/6e3412d7d5e414b8ca30ed5eaf970cef1d919a67/perfkitbenchmarker/linux_packages/oldisim_dependencies.py#L55-L57
|
|
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/combinat/species/product_species.py
|
python
|
ProductSpeciesStructure.__repr__
|
(self)
|
return "%s*%s" % (left, right)
|
Return the string representation of this object.
EXAMPLES::
sage: S = species.SetSpecies()
sage: (S*S).structures(['a','b','c'])[0]
{}*{'a', 'b', 'c'}
sage: (S*S*S).structures(['a','b','c'])[13]
({'c'}*{'a'})*{'b'}
|
Return the string representation of this object.
|
[
"Return",
"the",
"string",
"representation",
"of",
"this",
"object",
"."
] |
def __repr__(self):
"""
Return the string representation of this object.
EXAMPLES::
sage: S = species.SetSpecies()
sage: (S*S).structures(['a','b','c'])[0]
{}*{'a', 'b', 'c'}
sage: (S*S*S).structures(['a','b','c'])[13]
({'c'}*{'a'})*{'b'}
"""
left, right = map(repr, self._list)
if "*" in left:
left = "(%s)" % left
if "*" in right:
right = "(%s)" % right
return "%s*%s" % (left, right)
|
[
"def",
"__repr__",
"(",
"self",
")",
":",
"left",
",",
"right",
"=",
"map",
"(",
"repr",
",",
"self",
".",
"_list",
")",
"if",
"\"*\"",
"in",
"left",
":",
"left",
"=",
"\"(%s)\"",
"%",
"left",
"if",
"\"*\"",
"in",
"right",
":",
"right",
"=",
"\"(%s)\"",
"%",
"right",
"return",
"\"%s*%s\"",
"%",
"(",
"left",
",",
"right",
")"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/combinat/species/product_species.py#L38-L55
|
|
collinsctk/PyQYT
|
7af3673955f94ff1b2df2f94220cd2dab2e252af
|
ExtentionPackages/paramiko/server.py
|
python
|
ServerInterface.check_auth_interactive
|
(self, username, submethods)
|
return AUTH_FAILED
|
Begin an interactive authentication challenge, if supported. You
should override this method in server mode if you want to support the
``"keyboard-interactive"`` auth type, which requires you to send a
series of questions for the client to answer.
Return `.AUTH_FAILED` if this auth method isn't supported. Otherwise,
you should return an `.InteractiveQuery` object containing the prompts
and instructions for the user. The response will be sent via a call
to `check_auth_interactive_response`.
The default implementation always returns `.AUTH_FAILED`.
:param str username: the username of the authenticating client
:param str submethods:
a comma-separated list of methods preferred by the client (usually
empty)
:return:
`.AUTH_FAILED` if this auth method isn't supported; otherwise an
object containing queries for the user
:rtype: int or `.InteractiveQuery`
|
Begin an interactive authentication challenge, if supported. You
should override this method in server mode if you want to support the
``"keyboard-interactive"`` auth type, which requires you to send a
series of questions for the client to answer.
Return `.AUTH_FAILED` if this auth method isn't supported. Otherwise,
you should return an `.InteractiveQuery` object containing the prompts
and instructions for the user. The response will be sent via a call
to `check_auth_interactive_response`.
The default implementation always returns `.AUTH_FAILED`.
:param str username: the username of the authenticating client
:param str submethods:
a comma-separated list of methods preferred by the client (usually
empty)
:return:
`.AUTH_FAILED` if this auth method isn't supported; otherwise an
object containing queries for the user
:rtype: int or `.InteractiveQuery`
|
[
"Begin",
"an",
"interactive",
"authentication",
"challenge",
"if",
"supported",
".",
"You",
"should",
"override",
"this",
"method",
"in",
"server",
"mode",
"if",
"you",
"want",
"to",
"support",
"the",
"keyboard",
"-",
"interactive",
"auth",
"type",
"which",
"requires",
"you",
"to",
"send",
"a",
"series",
"of",
"questions",
"for",
"the",
"client",
"to",
"answer",
".",
"Return",
".",
"AUTH_FAILED",
"if",
"this",
"auth",
"method",
"isn",
"t",
"supported",
".",
"Otherwise",
"you",
"should",
"return",
"an",
".",
"InteractiveQuery",
"object",
"containing",
"the",
"prompts",
"and",
"instructions",
"for",
"the",
"user",
".",
"The",
"response",
"will",
"be",
"sent",
"via",
"a",
"call",
"to",
"check_auth_interactive_response",
".",
"The",
"default",
"implementation",
"always",
"returns",
".",
"AUTH_FAILED",
".",
":",
"param",
"str",
"username",
":",
"the",
"username",
"of",
"the",
"authenticating",
"client",
":",
"param",
"str",
"submethods",
":",
"a",
"comma",
"-",
"separated",
"list",
"of",
"methods",
"preferred",
"by",
"the",
"client",
"(",
"usually",
"empty",
")",
":",
"return",
":",
".",
"AUTH_FAILED",
"if",
"this",
"auth",
"method",
"isn",
"t",
"supported",
";",
"otherwise",
"an",
"object",
"containing",
"queries",
"for",
"the",
"user",
":",
"rtype",
":",
"int",
"or",
".",
"InteractiveQuery"
] |
def check_auth_interactive(self, username, submethods):
"""
Begin an interactive authentication challenge, if supported. You
should override this method in server mode if you want to support the
``"keyboard-interactive"`` auth type, which requires you to send a
series of questions for the client to answer.
Return `.AUTH_FAILED` if this auth method isn't supported. Otherwise,
you should return an `.InteractiveQuery` object containing the prompts
and instructions for the user. The response will be sent via a call
to `check_auth_interactive_response`.
The default implementation always returns `.AUTH_FAILED`.
:param str username: the username of the authenticating client
:param str submethods:
a comma-separated list of methods preferred by the client (usually
empty)
:return:
`.AUTH_FAILED` if this auth method isn't supported; otherwise an
object containing queries for the user
:rtype: int or `.InteractiveQuery`
"""
return AUTH_FAILED
|
[
"def",
"check_auth_interactive",
"(",
"self",
",",
"username",
",",
"submethods",
")",
":",
"return",
"AUTH_FAILED"
] |
https://github.com/collinsctk/PyQYT/blob/7af3673955f94ff1b2df2f94220cd2dab2e252af/ExtentionPackages/paramiko/server.py#L177-L200
|
|
joschabach/micropsi2
|
74a2642d20da9da1d64acc5e4c11aeabee192a27
|
micropsi_core/nodenet/dict_engine/dict_link.py
|
python
|
DictLink.__init__
|
(self, source_node, source_gate_name, target_node, target_slot_name, weight=1, certainty=1)
|
create a link between the source_node and the target_node, from the source_gate to the target_slot.
Note: you should make sure that no link between source and gate exists.
Attributes:
weight (optional): the weight of the link (default is 1)
|
create a link between the source_node and the target_node, from the source_gate to the target_slot.
Note: you should make sure that no link between source and gate exists.
|
[
"create",
"a",
"link",
"between",
"the",
"source_node",
"and",
"the",
"target_node",
"from",
"the",
"source_gate",
"to",
"the",
"target_slot",
".",
"Note",
":",
"you",
"should",
"make",
"sure",
"that",
"no",
"link",
"between",
"source",
"and",
"gate",
"exists",
"."
] |
def __init__(self, source_node, source_gate_name, target_node, target_slot_name, weight=1, certainty=1):
"""create a link between the source_node and the target_node, from the source_gate to the target_slot.
Note: you should make sure that no link between source and gate exists.
Attributes:
weight (optional): the weight of the link (default is 1)
"""
self.link(source_node, source_gate_name, target_node, target_slot_name, weight, certainty)
|
[
"def",
"__init__",
"(",
"self",
",",
"source_node",
",",
"source_gate_name",
",",
"target_node",
",",
"target_slot_name",
",",
"weight",
"=",
"1",
",",
"certainty",
"=",
"1",
")",
":",
"self",
".",
"link",
"(",
"source_node",
",",
"source_gate_name",
",",
"target_node",
",",
"target_slot_name",
",",
"weight",
",",
"certainty",
")"
] |
https://github.com/joschabach/micropsi2/blob/74a2642d20da9da1d64acc5e4c11aeabee192a27/micropsi_core/nodenet/dict_engine/dict_link.py#L47-L54
|
||
spyder-ide/spyder
|
55da47c032dfcf519600f67f8b30eab467f965e7
|
spyder/plugins/base.py
|
python
|
BasePluginWidgetMixin._on_top_level_changed
|
(self, top_level)
|
Actions to perform when a plugin is undocked to be moved.
|
Actions to perform when a plugin is undocked to be moved.
|
[
"Actions",
"to",
"perform",
"when",
"a",
"plugin",
"is",
"undocked",
"to",
"be",
"moved",
"."
] |
def _on_top_level_changed(self, top_level):
"""Actions to perform when a plugin is undocked to be moved."""
if top_level:
self._undock_action.setDisabled(True)
else:
self._undock_action.setDisabled(False)
|
[
"def",
"_on_top_level_changed",
"(",
"self",
",",
"top_level",
")",
":",
"if",
"top_level",
":",
"self",
".",
"_undock_action",
".",
"setDisabled",
"(",
"True",
")",
"else",
":",
"self",
".",
"_undock_action",
".",
"setDisabled",
"(",
"False",
")"
] |
https://github.com/spyder-ide/spyder/blob/55da47c032dfcf519600f67f8b30eab467f965e7/spyder/plugins/base.py#L389-L394
|
||
scrapy/scrapy
|
b04cfa48328d5d5749dca6f50fa34e0cfc664c89
|
scrapy/utils/iterators.py
|
python
|
_StreamReader.read
|
(self, n=65535)
|
return self.read(n).lstrip()
|
[] |
def read(self, n=65535):
self.read = self._read_unicode if self._is_unicode else self._read_string
return self.read(n).lstrip()
|
[
"def",
"read",
"(",
"self",
",",
"n",
"=",
"65535",
")",
":",
"self",
".",
"read",
"=",
"self",
".",
"_read_unicode",
"if",
"self",
".",
"_is_unicode",
"else",
"self",
".",
"_read_string",
"return",
"self",
".",
"read",
"(",
"n",
")",
".",
"lstrip",
"(",
")"
] |
https://github.com/scrapy/scrapy/blob/b04cfa48328d5d5749dca6f50fa34e0cfc664c89/scrapy/utils/iterators.py#L81-L83
|
|||
Teradata/stacki
|
a8085dce179dbe903f65f136f4b63bcc076cc057
|
common/src/stack/command/stack/commands/__init__.py
|
python
|
DatabaseConnection.count
|
(self, command, args=None )
|
return rows[0][0]
|
Return a count of the number of matching items in the database.
The command query should start with the column in parentheses you
wish to count.
The return value will either be an int or None if something
unexpected happened.
Example: count('(ID) from subnets where name=%s', (name,))
|
Return a count of the number of matching items in the database.
The command query should start with the column in parentheses you
wish to count.
|
[
"Return",
"a",
"count",
"of",
"the",
"number",
"of",
"matching",
"items",
"in",
"the",
"database",
".",
"The",
"command",
"query",
"should",
"start",
"with",
"the",
"column",
"in",
"parentheses",
"you",
"wish",
"to",
"count",
"."
] |
def count(self, command, args=None ):
"""
Return a count of the number of matching items in the database.
The command query should start with the column in parentheses you
wish to count.
The return value will either be an int or None if something
unexpected happened.
Example: count('(ID) from subnets where name=%s', (name,))
"""
# Run our select count
rows = self.select(f'count{command.strip()}', args)
# We should always get a single row back
if len(rows) != 1:
return None
return rows[0][0]
|
[
"def",
"count",
"(",
"self",
",",
"command",
",",
"args",
"=",
"None",
")",
":",
"# Run our select count",
"rows",
"=",
"self",
".",
"select",
"(",
"f'count{command.strip()}'",
",",
"args",
")",
"# We should always get a single row back",
"if",
"len",
"(",
"rows",
")",
"!=",
"1",
":",
"return",
"None",
"return",
"rows",
"[",
"0",
"]",
"[",
"0",
"]"
] |
https://github.com/Teradata/stacki/blob/a8085dce179dbe903f65f136f4b63bcc076cc057/common/src/stack/command/stack/commands/__init__.py#L478-L497
|
|
mobolic/facebook-sdk
|
3fa89fec6a20dd070ccf57968c6f89256f237f54
|
examples/flask/app/views.py
|
python
|
logout
|
()
|
return redirect(url_for("index"))
|
Log out the user from the application.
Log out the user from the application by removing them from the
session. Note: this does not log the user out of Facebook - this is done
by the JavaScript SDK.
|
Log out the user from the application.
|
[
"Log",
"out",
"the",
"user",
"from",
"the",
"application",
"."
] |
def logout():
"""Log out the user from the application.
Log out the user from the application by removing them from the
session. Note: this does not log the user out of Facebook - this is done
by the JavaScript SDK.
"""
session.pop("user", None)
return redirect(url_for("index"))
|
[
"def",
"logout",
"(",
")",
":",
"session",
".",
"pop",
"(",
"\"user\"",
",",
"None",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"index\"",
")",
")"
] |
https://github.com/mobolic/facebook-sdk/blob/3fa89fec6a20dd070ccf57968c6f89256f237f54/examples/flask/app/views.py#L26-L34
|
|
statsmodels/statsmodels
|
debbe7ea6ba28fe5bdb78f09f8cac694bef98722
|
statsmodels/tsa/statespace/mlemodel.py
|
python
|
MLEModel.initialize_statespace
|
(self, **kwargs)
|
Initialize the state space representation
Parameters
----------
**kwargs
Additional keyword arguments to pass to the state space class
constructor.
|
Initialize the state space representation
|
[
"Initialize",
"the",
"state",
"space",
"representation"
] |
def initialize_statespace(self, **kwargs):
"""
Initialize the state space representation
Parameters
----------
**kwargs
Additional keyword arguments to pass to the state space class
constructor.
"""
# (Now self.endog is C-ordered and in long format (nobs x k_endog). To
# get F-ordered and in wide format just need to transpose)
endog = self.endog.T
# Instantiate the state space object
self.ssm = SimulationSmoother(endog.shape[0], self.k_states,
nobs=endog.shape[1], **kwargs)
# Bind the data to the model
self.ssm.bind(endog)
# Other dimensions, now that `ssm` is available
self.k_endog = self.ssm.k_endog
|
[
"def",
"initialize_statespace",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"# (Now self.endog is C-ordered and in long format (nobs x k_endog). To",
"# get F-ordered and in wide format just need to transpose)",
"endog",
"=",
"self",
".",
"endog",
".",
"T",
"# Instantiate the state space object",
"self",
".",
"ssm",
"=",
"SimulationSmoother",
"(",
"endog",
".",
"shape",
"[",
"0",
"]",
",",
"self",
".",
"k_states",
",",
"nobs",
"=",
"endog",
".",
"shape",
"[",
"1",
"]",
",",
"*",
"*",
"kwargs",
")",
"# Bind the data to the model",
"self",
".",
"ssm",
".",
"bind",
"(",
"endog",
")",
"# Other dimensions, now that `ssm` is available",
"self",
".",
"k_endog",
"=",
"self",
".",
"ssm",
".",
"k_endog"
] |
https://github.com/statsmodels/statsmodels/blob/debbe7ea6ba28fe5bdb78f09f8cac694bef98722/statsmodels/tsa/statespace/mlemodel.py#L177-L198
|
||
pysathq/pysat
|
07bf3a5a4428d40eca804e7ebdf4f496aadf4213
|
pysat/solvers.py
|
python
|
MapleCM.set_phases
|
(self, literals=[])
|
Sets polarities of a given list of variables.
|
Sets polarities of a given list of variables.
|
[
"Sets",
"polarities",
"of",
"a",
"given",
"list",
"of",
"variables",
"."
] |
def set_phases(self, literals=[]):
"""
Sets polarities of a given list of variables.
"""
if self.maplesat:
pysolvers.maplecm_setphases(self.maplesat, literals)
|
[
"def",
"set_phases",
"(",
"self",
",",
"literals",
"=",
"[",
"]",
")",
":",
"if",
"self",
".",
"maplesat",
":",
"pysolvers",
".",
"maplecm_setphases",
"(",
"self",
".",
"maplesat",
",",
"literals",
")"
] |
https://github.com/pysathq/pysat/blob/07bf3a5a4428d40eca804e7ebdf4f496aadf4213/pysat/solvers.py#L3429-L3435
|
||
knownsec/Pocsuite
|
877d1b1604629b8dcd6e53b167c3c98249e5e94f
|
pocsuite/thirdparty/pyparsing/pyparsing.py
|
python
|
srange
|
(s)
|
r"""Helper to easily define string ranges for use in Word construction. Borrows
syntax from regexp '[]' string range definitions::
srange("[0-9]") -> "0123456789"
srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
The input string must be enclosed in []'s, and the returned string is the expanded
character set joined into a single string.
The values enclosed in the []'s may be::
a single character
an escaped character with a leading backslash (such as \- or \])
an escaped hex character with a leading '\x' (\x21, which is a '!' character)
(\0x## is also supported for backwards compatibility)
an escaped octal character with a leading '\0' (\041, which is a '!' character)
a range of any of the above, separated by a dash ('a-z', etc.)
any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.)
|
r"""Helper to easily define string ranges for use in Word construction. Borrows
syntax from regexp '[]' string range definitions::
srange("[0-9]") -> "0123456789"
srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
The input string must be enclosed in []'s, and the returned string is the expanded
character set joined into a single string.
The values enclosed in the []'s may be::
a single character
an escaped character with a leading backslash (such as \- or \])
an escaped hex character with a leading '\x' (\x21, which is a '!' character)
(\0x## is also supported for backwards compatibility)
an escaped octal character with a leading '\0' (\041, which is a '!' character)
a range of any of the above, separated by a dash ('a-z', etc.)
any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.)
|
[
"r",
"Helper",
"to",
"easily",
"define",
"string",
"ranges",
"for",
"use",
"in",
"Word",
"construction",
".",
"Borrows",
"syntax",
"from",
"regexp",
"[]",
"string",
"range",
"definitions",
"::",
"srange",
"(",
"[",
"0",
"-",
"9",
"]",
")",
"-",
">",
"0123456789",
"srange",
"(",
"[",
"a",
"-",
"z",
"]",
")",
"-",
">",
"abcdefghijklmnopqrstuvwxyz",
"srange",
"(",
"[",
"a",
"-",
"z$_",
"]",
")",
"-",
">",
"abcdefghijklmnopqrstuvwxyz$_",
"The",
"input",
"string",
"must",
"be",
"enclosed",
"in",
"[]",
"s",
"and",
"the",
"returned",
"string",
"is",
"the",
"expanded",
"character",
"set",
"joined",
"into",
"a",
"single",
"string",
".",
"The",
"values",
"enclosed",
"in",
"the",
"[]",
"s",
"may",
"be",
"::",
"a",
"single",
"character",
"an",
"escaped",
"character",
"with",
"a",
"leading",
"backslash",
"(",
"such",
"as",
"\\",
"-",
"or",
"\\",
"]",
")",
"an",
"escaped",
"hex",
"character",
"with",
"a",
"leading",
"\\",
"x",
"(",
"\\",
"x21",
"which",
"is",
"a",
"!",
"character",
")",
"(",
"\\",
"0x##",
"is",
"also",
"supported",
"for",
"backwards",
"compatibility",
")",
"an",
"escaped",
"octal",
"character",
"with",
"a",
"leading",
"\\",
"0",
"(",
"\\",
"041",
"which",
"is",
"a",
"!",
"character",
")",
"a",
"range",
"of",
"any",
"of",
"the",
"above",
"separated",
"by",
"a",
"dash",
"(",
"a",
"-",
"z",
"etc",
".",
")",
"any",
"combination",
"of",
"the",
"above",
"(",
"aeiouy",
"a",
"-",
"zA",
"-",
"Z0",
"-",
"9_$",
"etc",
".",
")"
] |
def srange(s):
r"""Helper to easily define string ranges for use in Word construction. Borrows
syntax from regexp '[]' string range definitions::
srange("[0-9]") -> "0123456789"
srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
The input string must be enclosed in []'s, and the returned string is the expanded
character set joined into a single string.
The values enclosed in the []'s may be::
a single character
an escaped character with a leading backslash (such as \- or \])
an escaped hex character with a leading '\x' (\x21, which is a '!' character)
(\0x## is also supported for backwards compatibility)
an escaped octal character with a leading '\0' (\041, which is a '!' character)
a range of any of the above, separated by a dash ('a-z', etc.)
any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.)
"""
_expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
try:
return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
except:
return ""
|
[
"def",
"srange",
"(",
"s",
")",
":",
"_expanded",
"=",
"lambda",
"p",
":",
"p",
"if",
"not",
"isinstance",
"(",
"p",
",",
"ParseResults",
")",
"else",
"''",
".",
"join",
"(",
"unichr",
"(",
"c",
")",
"for",
"c",
"in",
"range",
"(",
"ord",
"(",
"p",
"[",
"0",
"]",
")",
",",
"ord",
"(",
"p",
"[",
"1",
"]",
")",
"+",
"1",
")",
")",
"try",
":",
"return",
"\"\"",
".",
"join",
"(",
"_expanded",
"(",
"part",
")",
"for",
"part",
"in",
"_reBracketExpr",
".",
"parseString",
"(",
"s",
")",
".",
"body",
")",
"except",
":",
"return",
"\"\""
] |
https://github.com/knownsec/Pocsuite/blob/877d1b1604629b8dcd6e53b167c3c98249e5e94f/pocsuite/thirdparty/pyparsing/pyparsing.py#L3359-L3380
|
||
plotly/plotly.py
|
cfad7862594b35965c0e000813bd7805e8494a5b
|
packages/python/plotly/plotly/graph_objs/violin/_box.py
|
python
|
Box.__init__
|
(
self, arg=None, fillcolor=None, line=None, visible=None, width=None, **kwargs
)
|
Construct a new Box object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.violin.Box`
fillcolor
Sets the inner box plot fill color.
line
:class:`plotly.graph_objects.violin.box.Line` instance
or dict with compatible properties
visible
Determines if an miniature box plot is drawn inside the
violins.
width
Sets the width of the inner box plots relative to the
violins' width. For example, with 1, the inner box
plots are as wide as the violins.
Returns
-------
Box
|
Construct a new Box object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.violin.Box`
fillcolor
Sets the inner box plot fill color.
line
:class:`plotly.graph_objects.violin.box.Line` instance
or dict with compatible properties
visible
Determines if an miniature box plot is drawn inside the
violins.
width
Sets the width of the inner box plots relative to the
violins' width. For example, with 1, the inner box
plots are as wide as the violins.
|
[
"Construct",
"a",
"new",
"Box",
"object",
"Parameters",
"----------",
"arg",
"dict",
"of",
"properties",
"compatible",
"with",
"this",
"constructor",
"or",
"an",
"instance",
"of",
":",
"class",
":",
"plotly",
".",
"graph_objs",
".",
"violin",
".",
"Box",
"fillcolor",
"Sets",
"the",
"inner",
"box",
"plot",
"fill",
"color",
".",
"line",
":",
"class",
":",
"plotly",
".",
"graph_objects",
".",
"violin",
".",
"box",
".",
"Line",
"instance",
"or",
"dict",
"with",
"compatible",
"properties",
"visible",
"Determines",
"if",
"an",
"miniature",
"box",
"plot",
"is",
"drawn",
"inside",
"the",
"violins",
".",
"width",
"Sets",
"the",
"width",
"of",
"the",
"inner",
"box",
"plots",
"relative",
"to",
"the",
"violins",
"width",
".",
"For",
"example",
"with",
"1",
"the",
"inner",
"box",
"plots",
"are",
"as",
"wide",
"as",
"the",
"violins",
"."
] |
def __init__(
self, arg=None, fillcolor=None, line=None, visible=None, width=None, **kwargs
):
"""
Construct a new Box object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.violin.Box`
fillcolor
Sets the inner box plot fill color.
line
:class:`plotly.graph_objects.violin.box.Line` instance
or dict with compatible properties
visible
Determines if an miniature box plot is drawn inside the
violins.
width
Sets the width of the inner box plots relative to the
violins' width. For example, with 1, the inner box
plots are as wide as the violins.
Returns
-------
Box
"""
super(Box, self).__init__("box")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.violin.Box
constructor must be a dict or
an instance of :class:`plotly.graph_objs.violin.Box`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("fillcolor", None)
_v = fillcolor if fillcolor is not None else _v
if _v is not None:
self["fillcolor"] = _v
_v = arg.pop("line", None)
_v = line if line is not None else _v
if _v is not None:
self["line"] = _v
_v = arg.pop("visible", None)
_v = visible if visible is not None else _v
if _v is not None:
self["visible"] = _v
_v = arg.pop("width", None)
_v = width if width is not None else _v
if _v is not None:
self["width"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
|
[
"def",
"__init__",
"(",
"self",
",",
"arg",
"=",
"None",
",",
"fillcolor",
"=",
"None",
",",
"line",
"=",
"None",
",",
"visible",
"=",
"None",
",",
"width",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"Box",
",",
"self",
")",
".",
"__init__",
"(",
"\"box\"",
")",
"if",
"\"_parent\"",
"in",
"kwargs",
":",
"self",
".",
"_parent",
"=",
"kwargs",
"[",
"\"_parent\"",
"]",
"return",
"# Validate arg",
"# ------------",
"if",
"arg",
"is",
"None",
":",
"arg",
"=",
"{",
"}",
"elif",
"isinstance",
"(",
"arg",
",",
"self",
".",
"__class__",
")",
":",
"arg",
"=",
"arg",
".",
"to_plotly_json",
"(",
")",
"elif",
"isinstance",
"(",
"arg",
",",
"dict",
")",
":",
"arg",
"=",
"_copy",
".",
"copy",
"(",
"arg",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"\"\"\\\nThe first argument to the plotly.graph_objs.violin.Box \nconstructor must be a dict or \nan instance of :class:`plotly.graph_objs.violin.Box`\"\"\"",
")",
"# Handle skip_invalid",
"# -------------------",
"self",
".",
"_skip_invalid",
"=",
"kwargs",
".",
"pop",
"(",
"\"skip_invalid\"",
",",
"False",
")",
"self",
".",
"_validate",
"=",
"kwargs",
".",
"pop",
"(",
"\"_validate\"",
",",
"True",
")",
"# Populate data dict with properties",
"# ----------------------------------",
"_v",
"=",
"arg",
".",
"pop",
"(",
"\"fillcolor\"",
",",
"None",
")",
"_v",
"=",
"fillcolor",
"if",
"fillcolor",
"is",
"not",
"None",
"else",
"_v",
"if",
"_v",
"is",
"not",
"None",
":",
"self",
"[",
"\"fillcolor\"",
"]",
"=",
"_v",
"_v",
"=",
"arg",
".",
"pop",
"(",
"\"line\"",
",",
"None",
")",
"_v",
"=",
"line",
"if",
"line",
"is",
"not",
"None",
"else",
"_v",
"if",
"_v",
"is",
"not",
"None",
":",
"self",
"[",
"\"line\"",
"]",
"=",
"_v",
"_v",
"=",
"arg",
".",
"pop",
"(",
"\"visible\"",
",",
"None",
")",
"_v",
"=",
"visible",
"if",
"visible",
"is",
"not",
"None",
"else",
"_v",
"if",
"_v",
"is",
"not",
"None",
":",
"self",
"[",
"\"visible\"",
"]",
"=",
"_v",
"_v",
"=",
"arg",
".",
"pop",
"(",
"\"width\"",
",",
"None",
")",
"_v",
"=",
"width",
"if",
"width",
"is",
"not",
"None",
"else",
"_v",
"if",
"_v",
"is",
"not",
"None",
":",
"self",
"[",
"\"width\"",
"]",
"=",
"_v",
"# Process unknown kwargs",
"# ----------------------",
"self",
".",
"_process_kwargs",
"(",
"*",
"*",
"dict",
"(",
"arg",
",",
"*",
"*",
"kwargs",
")",
")",
"# Reset skip_invalid",
"# ------------------",
"self",
".",
"_skip_invalid",
"=",
"False"
] |
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/violin/_box.py#L162-L242
|
||
UDST/urbansim
|
0db75668ada0005352b7c7e0a405265f78ccadd7
|
urbansim/models/regression.py
|
python
|
SegmentedRegressionModel.predict_from_cfg
|
(cls, df, cfgname, min_segment_size=None)
|
return price_or_rent, hm
|
Parameters
----------
df : DataFrame
The dataframe which contains the columns to use for the estimation.
cfgname : string
The name of the yaml config file which describes the hedonic model.
min_segment_size : int, optional
Set attribute on the model.
Returns
-------
predicted : pandas.Series
Predicted data in a pandas Series. Will have the index of `data`
after applying filters and minus any groups that do not have
models.
hm : SegmentedRegressionModel which was used to predict
|
Parameters
----------
df : DataFrame
The dataframe which contains the columns to use for the estimation.
cfgname : string
The name of the yaml config file which describes the hedonic model.
min_segment_size : int, optional
Set attribute on the model.
|
[
"Parameters",
"----------",
"df",
":",
"DataFrame",
"The",
"dataframe",
"which",
"contains",
"the",
"columns",
"to",
"use",
"for",
"the",
"estimation",
".",
"cfgname",
":",
"string",
"The",
"name",
"of",
"the",
"yaml",
"config",
"file",
"which",
"describes",
"the",
"hedonic",
"model",
".",
"min_segment_size",
":",
"int",
"optional",
"Set",
"attribute",
"on",
"the",
"model",
"."
] |
def predict_from_cfg(cls, df, cfgname, min_segment_size=None):
"""
Parameters
----------
df : DataFrame
The dataframe which contains the columns to use for the estimation.
cfgname : string
The name of the yaml config file which describes the hedonic model.
min_segment_size : int, optional
Set attribute on the model.
Returns
-------
predicted : pandas.Series
Predicted data in a pandas Series. Will have the index of `data`
after applying filters and minus any groups that do not have
models.
hm : SegmentedRegressionModel which was used to predict
"""
logger.debug('start: predict from configuration {}'.format(cfgname))
hm = cls.from_yaml(str_or_buffer=cfgname)
if min_segment_size:
hm.min_segment_size = min_segment_size
price_or_rent = hm.predict(df)
print(price_or_rent.describe())
logger.debug('finish: predict from configuration {}'.format(cfgname))
return price_or_rent, hm
|
[
"def",
"predict_from_cfg",
"(",
"cls",
",",
"df",
",",
"cfgname",
",",
"min_segment_size",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"'start: predict from configuration {}'",
".",
"format",
"(",
"cfgname",
")",
")",
"hm",
"=",
"cls",
".",
"from_yaml",
"(",
"str_or_buffer",
"=",
"cfgname",
")",
"if",
"min_segment_size",
":",
"hm",
".",
"min_segment_size",
"=",
"min_segment_size",
"price_or_rent",
"=",
"hm",
".",
"predict",
"(",
"df",
")",
"print",
"(",
"price_or_rent",
".",
"describe",
"(",
")",
")",
"logger",
".",
"debug",
"(",
"'finish: predict from configuration {}'",
".",
"format",
"(",
"cfgname",
")",
")",
"return",
"price_or_rent",
",",
"hm"
] |
https://github.com/UDST/urbansim/blob/0db75668ada0005352b7c7e0a405265f78ccadd7/urbansim/models/regression.py#L1003-L1031
|
|
QUANTAXIS/QUANTAXIS
|
d6eccb97c8385854aa596d6ba8d70ec0655519ff
|
QUANTAXIS/QASU/main.py
|
python
|
QA_SU_save_stock_transaction
|
(engine, client=DATABASE)
|
save stock_transaction
Arguments:
engine {[type]} -- [description]
Keyword Arguments:
client {[type]} -- [description] (default: {DATABASE})
|
save stock_transaction
|
[
"save",
"stock_transaction"
] |
def QA_SU_save_stock_transaction(engine, client=DATABASE):
"""save stock_transaction
Arguments:
engine {[type]} -- [description]
Keyword Arguments:
client {[type]} -- [description] (default: {DATABASE})
"""
engine = select_save_engine(engine)
engine.QA_SU_save_stock_transaction(client=client)
|
[
"def",
"QA_SU_save_stock_transaction",
"(",
"engine",
",",
"client",
"=",
"DATABASE",
")",
":",
"engine",
"=",
"select_save_engine",
"(",
"engine",
")",
"engine",
".",
"QA_SU_save_stock_transaction",
"(",
"client",
"=",
"client",
")"
] |
https://github.com/QUANTAXIS/QUANTAXIS/blob/d6eccb97c8385854aa596d6ba8d70ec0655519ff/QUANTAXIS/QASU/main.py#L354-L365
|