nwo
stringlengths 5
106
| sha
stringlengths 40
40
| path
stringlengths 4
174
| language
stringclasses 1
value | identifier
stringlengths 1
140
| parameters
stringlengths 0
87.7k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
426k
| docstring
stringlengths 0
64.3k
| docstring_summary
stringlengths 0
26.3k
| docstring_tokens
list | function
stringlengths 18
4.83M
| function_tokens
list | url
stringlengths 83
304
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
yihui-he/KL-Loss
|
66c0ed9e886a2218f4cf88c0efd4f40199bff54a
|
detectron/roi_data/retinanet.py
|
python
|
add_retinanet_blobs
|
(blobs, im_scales, roidb, image_width, image_height)
|
return True
|
Add RetinaNet blobs.
|
Add RetinaNet blobs.
|
[
"Add",
"RetinaNet",
"blobs",
"."
] |
def add_retinanet_blobs(blobs, im_scales, roidb, image_width, image_height):
    """Add RetinaNet blobs.

    Fills `blobs` in place with the training targets RetinaNet needs:
    per-FPN-level classification labels, bbox regression targets and the
    locations of foreground anchors, plus `im_info` and running counts of
    foreground/background anchors over the minibatch.

    Args:
        blobs: dict mapping blob names to lists/arrays; mutated in place.
        im_scales: per-image scale factors (one per roidb entry).
        roidb: list of roidb entries; each entry must contain at least one
            non-crowd ground-truth box.
        image_width: width of the (scaled, padded) input image.
        image_height: height of the (scaled, padded) input image.

    Returns:
        True on success (all results are communicated via `blobs`).
    """
    # RetinaNet is applied to many feature levels, as in the FPN paper
    k_max, k_min = cfg.FPN.RPN_MAX_LEVEL, cfg.FPN.RPN_MIN_LEVEL
    scales_per_octave = cfg.RETINANET.SCALES_PER_OCTAVE
    num_aspect_ratios = len(cfg.RETINANET.ASPECT_RATIOS)
    aspect_ratios = cfg.RETINANET.ASPECT_RATIOS
    anchor_scale = cfg.RETINANET.ANCHOR_SCALE
    # get anchors from all levels for all scales/aspect ratios
    foas = []
    for lvl in range(k_min, k_max + 1):
        stride = 2. ** lvl
        for octave in range(scales_per_octave):
            octave_scale = 2 ** (octave / float(scales_per_octave))
            for idx in range(num_aspect_ratios):
                anchor_sizes = (stride * octave_scale * anchor_scale, )
                anchor_aspect_ratios = (aspect_ratios[idx], )
                foa = data_utils.get_field_of_anchors(
                    stride, anchor_sizes, anchor_aspect_ratios, octave, idx)
                foas.append(foa)
    all_anchors = np.concatenate([f.field_of_anchors for f in foas])
    # Scalar fg/bg anchor counts, accumulated across all images below.
    blobs['retnet_fg_num'], blobs['retnet_bg_num'] = 0.0, 0.0
    for im_i, entry in enumerate(roidb):
        scale = im_scales[im_i]
        im_height = np.round(entry['height'] * scale)
        im_width = np.round(entry['width'] * scale)
        # Keep only non-crowd foreground ground-truth boxes.
        gt_inds = np.where(
            (entry['gt_classes'] > 0) & (entry['is_crowd'] == 0))[0]
        assert len(gt_inds) > 0, \
            'Empty ground truth empty for image is not allowed. Please check.'
        gt_rois = entry['boxes'][gt_inds, :] * scale
        gt_classes = entry['gt_classes'][gt_inds]
        im_info = np.array([[im_height, im_width, scale]], dtype=np.float32)
        blobs['im_info'].append(im_info)
        retinanet_blobs, fg_num, bg_num = _get_retinanet_blobs(
            foas, all_anchors, gt_rois, gt_classes, image_width, image_height)
        for i, foa in enumerate(foas):
            for k, v in retinanet_blobs[i].items():
                # the way it stacks is:
                # [[anchors for image1] + [anchors for images 2]]
                level = int(np.log2(foa.stride))
                key = '{}_fpn{}'.format(k, level)
                if k == 'retnet_roi_fg_bbox_locs':
                    # Column 0 holds the image index within the minibatch.
                    v[:, 0] = im_i
                    # loc_stride: 80 * 4 if cls_specific else 4
                    loc_stride = 4  # 4 coordinates corresponding to bbox prediction
                    if cfg.RETINANET.CLASS_SPECIFIC_BBOX:
                        loc_stride *= (cfg.MODEL.NUM_CLASSES - 1)
                    anchor_ind = foa.octave * num_aspect_ratios + foa.aspect
                    # v[:, 1] is the class label [range 0-80] if we do
                    # class-specific bbox otherwise it is 0. In case of class
                    # specific, based on the label, the location of current
                    # anchor is class_label * 4 and then we take into account
                    # the anchor_ind of the anchors
                    v[:, 1] *= 4
                    v[:, 1] += loc_stride * anchor_ind
                blobs[key].append(v)
        blobs['retnet_fg_num'] += fg_num
        blobs['retnet_bg_num'] += bg_num
    blobs['retnet_fg_num'] = blobs['retnet_fg_num'].astype(np.float32)
    blobs['retnet_bg_num'] = blobs['retnet_bg_num'].astype(np.float32)
    N = len(roidb)
    for k, v in blobs.items():
        if isinstance(v, list) and len(v) > 0:
            # compute number of anchors
            A = int(len(v) / N)
            # for the cls branch labels [per fpn level],
            # we have blobs['retnet_cls_labels_fpn{}'] as a list until this step
            # and length of this list is N x A where
            # N = num_images, A = num_anchors for example, N = 2, A = 9
            # Each element of the list has the shape 1 x 1 x H x W where H, W are
            # spatial dimension of current fpn lvl. Let a{i} denote the element
            # corresponding to anchor i [9 anchors total] in the list.
            # The elements in the list are in order [[a0, ..., a9], [a0, ..., a9]]
            # however the network will make predictions like 2 x (9 * 80) x H x W
            # so we first concatenate the elements of each image to a numpy array
            # and then concatenate the two images to get the 2 x 9 x H x W
            if k.find('retnet_cls_labels') >= 0:
                tmp = []
                # concat anchors within an image
                for i in range(0, len(v), A):
                    tmp.append(np.concatenate(v[i: i + A], axis=1))
                # concat images
                blobs[k] = np.concatenate(tmp, axis=0)
            else:
                # for the bbox branch elements [per FPN level],
                # we have the targets and the fg boxes locations
                # in the shape: M x 4 where M is the number of fg locations in a
                # given image at the current FPN level. For the given level,
                # the bbox predictions will be. The elements in the list are in
                # order [[a0, ..., a9], [a0, ..., a9]]
                # Concatenate them to form M x 4
                blobs[k] = np.concatenate(v, axis=0)
    return True
|
[
"def",
"add_retinanet_blobs",
"(",
"blobs",
",",
"im_scales",
",",
"roidb",
",",
"image_width",
",",
"image_height",
")",
":",
"# RetinaNet is applied to many feature levels, as in the FPN paper",
"k_max",
",",
"k_min",
"=",
"cfg",
".",
"FPN",
".",
"RPN_MAX_LEVEL",
",",
"cfg",
".",
"FPN",
".",
"RPN_MIN_LEVEL",
"scales_per_octave",
"=",
"cfg",
".",
"RETINANET",
".",
"SCALES_PER_OCTAVE",
"num_aspect_ratios",
"=",
"len",
"(",
"cfg",
".",
"RETINANET",
".",
"ASPECT_RATIOS",
")",
"aspect_ratios",
"=",
"cfg",
".",
"RETINANET",
".",
"ASPECT_RATIOS",
"anchor_scale",
"=",
"cfg",
".",
"RETINANET",
".",
"ANCHOR_SCALE",
"# get anchors from all levels for all scales/aspect ratios",
"foas",
"=",
"[",
"]",
"for",
"lvl",
"in",
"range",
"(",
"k_min",
",",
"k_max",
"+",
"1",
")",
":",
"stride",
"=",
"2.",
"**",
"lvl",
"for",
"octave",
"in",
"range",
"(",
"scales_per_octave",
")",
":",
"octave_scale",
"=",
"2",
"**",
"(",
"octave",
"/",
"float",
"(",
"scales_per_octave",
")",
")",
"for",
"idx",
"in",
"range",
"(",
"num_aspect_ratios",
")",
":",
"anchor_sizes",
"=",
"(",
"stride",
"*",
"octave_scale",
"*",
"anchor_scale",
",",
")",
"anchor_aspect_ratios",
"=",
"(",
"aspect_ratios",
"[",
"idx",
"]",
",",
")",
"foa",
"=",
"data_utils",
".",
"get_field_of_anchors",
"(",
"stride",
",",
"anchor_sizes",
",",
"anchor_aspect_ratios",
",",
"octave",
",",
"idx",
")",
"foas",
".",
"append",
"(",
"foa",
")",
"all_anchors",
"=",
"np",
".",
"concatenate",
"(",
"[",
"f",
".",
"field_of_anchors",
"for",
"f",
"in",
"foas",
"]",
")",
"blobs",
"[",
"'retnet_fg_num'",
"]",
",",
"blobs",
"[",
"'retnet_bg_num'",
"]",
"=",
"0.0",
",",
"0.0",
"for",
"im_i",
",",
"entry",
"in",
"enumerate",
"(",
"roidb",
")",
":",
"scale",
"=",
"im_scales",
"[",
"im_i",
"]",
"im_height",
"=",
"np",
".",
"round",
"(",
"entry",
"[",
"'height'",
"]",
"*",
"scale",
")",
"im_width",
"=",
"np",
".",
"round",
"(",
"entry",
"[",
"'width'",
"]",
"*",
"scale",
")",
"gt_inds",
"=",
"np",
".",
"where",
"(",
"(",
"entry",
"[",
"'gt_classes'",
"]",
">",
"0",
")",
"&",
"(",
"entry",
"[",
"'is_crowd'",
"]",
"==",
"0",
")",
")",
"[",
"0",
"]",
"assert",
"len",
"(",
"gt_inds",
")",
">",
"0",
",",
"'Empty ground truth empty for image is not allowed. Please check.'",
"gt_rois",
"=",
"entry",
"[",
"'boxes'",
"]",
"[",
"gt_inds",
",",
":",
"]",
"*",
"scale",
"gt_classes",
"=",
"entry",
"[",
"'gt_classes'",
"]",
"[",
"gt_inds",
"]",
"im_info",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"im_height",
",",
"im_width",
",",
"scale",
"]",
"]",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
"blobs",
"[",
"'im_info'",
"]",
".",
"append",
"(",
"im_info",
")",
"retinanet_blobs",
",",
"fg_num",
",",
"bg_num",
"=",
"_get_retinanet_blobs",
"(",
"foas",
",",
"all_anchors",
",",
"gt_rois",
",",
"gt_classes",
",",
"image_width",
",",
"image_height",
")",
"for",
"i",
",",
"foa",
"in",
"enumerate",
"(",
"foas",
")",
":",
"for",
"k",
",",
"v",
"in",
"retinanet_blobs",
"[",
"i",
"]",
".",
"items",
"(",
")",
":",
"# the way it stacks is:",
"# [[anchors for image1] + [anchors for images 2]]",
"level",
"=",
"int",
"(",
"np",
".",
"log2",
"(",
"foa",
".",
"stride",
")",
")",
"key",
"=",
"'{}_fpn{}'",
".",
"format",
"(",
"k",
",",
"level",
")",
"if",
"k",
"==",
"'retnet_roi_fg_bbox_locs'",
":",
"v",
"[",
":",
",",
"0",
"]",
"=",
"im_i",
"# loc_stride: 80 * 4 if cls_specific else 4",
"loc_stride",
"=",
"4",
"# 4 coordinate corresponding to bbox prediction",
"if",
"cfg",
".",
"RETINANET",
".",
"CLASS_SPECIFIC_BBOX",
":",
"loc_stride",
"*=",
"(",
"cfg",
".",
"MODEL",
".",
"NUM_CLASSES",
"-",
"1",
")",
"anchor_ind",
"=",
"foa",
".",
"octave",
"*",
"num_aspect_ratios",
"+",
"foa",
".",
"aspect",
"# v[:, 1] is the class label [range 0-80] if we do",
"# class-specfic bbox otherwise it is 0. In case of class",
"# specific, based on the label, the location of current",
"# anchor is class_label * 4 and then we take into account",
"# the anchor_ind if the anchors",
"v",
"[",
":",
",",
"1",
"]",
"*=",
"4",
"v",
"[",
":",
",",
"1",
"]",
"+=",
"loc_stride",
"*",
"anchor_ind",
"blobs",
"[",
"key",
"]",
".",
"append",
"(",
"v",
")",
"blobs",
"[",
"'retnet_fg_num'",
"]",
"+=",
"fg_num",
"blobs",
"[",
"'retnet_bg_num'",
"]",
"+=",
"bg_num",
"blobs",
"[",
"'retnet_fg_num'",
"]",
"=",
"blobs",
"[",
"'retnet_fg_num'",
"]",
".",
"astype",
"(",
"np",
".",
"float32",
")",
"blobs",
"[",
"'retnet_bg_num'",
"]",
"=",
"blobs",
"[",
"'retnet_bg_num'",
"]",
".",
"astype",
"(",
"np",
".",
"float32",
")",
"N",
"=",
"len",
"(",
"roidb",
")",
"for",
"k",
",",
"v",
"in",
"blobs",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"v",
",",
"list",
")",
"and",
"len",
"(",
"v",
")",
">",
"0",
":",
"# compute number of anchors",
"A",
"=",
"int",
"(",
"len",
"(",
"v",
")",
"/",
"N",
")",
"# for the cls branch labels [per fpn level],",
"# we have blobs['retnet_cls_labels_fpn{}'] as a list until this step",
"# and length of this list is N x A where",
"# N = num_images, A = num_anchors for example, N = 2, A = 9",
"# Each element of the list has the shape 1 x 1 x H x W where H, W are",
"# spatial dimension of curret fpn lvl. Let a{i} denote the element",
"# corresponding to anchor i [9 anchors total] in the list.",
"# The elements in the list are in order [[a0, ..., a9], [a0, ..., a9]]",
"# however the network will make predictions like 2 x (9 * 80) x H x W",
"# so we first concatenate the elements of each image to a numpy array",
"# and then concatenate the two images to get the 2 x 9 x H x W",
"if",
"k",
".",
"find",
"(",
"'retnet_cls_labels'",
")",
">=",
"0",
":",
"tmp",
"=",
"[",
"]",
"# concat anchors within an image",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"v",
")",
",",
"A",
")",
":",
"tmp",
".",
"append",
"(",
"np",
".",
"concatenate",
"(",
"v",
"[",
"i",
":",
"i",
"+",
"A",
"]",
",",
"axis",
"=",
"1",
")",
")",
"# concat images",
"blobs",
"[",
"k",
"]",
"=",
"np",
".",
"concatenate",
"(",
"tmp",
",",
"axis",
"=",
"0",
")",
"else",
":",
"# for the bbox branch elements [per FPN level],",
"# we have the targets and the fg boxes locations",
"# in the shape: M x 4 where M is the number of fg locations in a",
"# given image at the current FPN level. For the given level,",
"# the bbox predictions will be. The elements in the list are in",
"# order [[a0, ..., a9], [a0, ..., a9]]",
"# Concatenate them to form M x 4",
"blobs",
"[",
"k",
"]",
"=",
"np",
".",
"concatenate",
"(",
"v",
",",
"axis",
"=",
"0",
")",
"return",
"True"
] |
https://github.com/yihui-he/KL-Loss/blob/66c0ed9e886a2218f4cf88c0efd4f40199bff54a/detectron/roi_data/retinanet.py#L78-L179
|
|
HeinleinSupport/check_mk_extensions
|
aa7d7389b812ed00f91dad61d66fb676284897d8
|
msexch_database_size/lib/check_mk/base/cee/plugins/bakery/msexch_database_size.py
|
python
|
get_msexch_database_size_files
|
(conf: Dict[str, Any])
|
[] |
def get_msexch_database_size_files(conf: Dict[str, Any]) -> FileGenerator:
    """Yield the agent plugin file to deploy: a Windows-only PowerShell script."""
    script = Path("msexch_database_size.ps1")
    yield Plugin(base_os=OS.WINDOWS, source=script)
|
[
"def",
"get_msexch_database_size_files",
"(",
"conf",
":",
"Dict",
"[",
"str",
",",
"Any",
"]",
")",
"->",
"FileGenerator",
":",
"yield",
"Plugin",
"(",
"base_os",
"=",
"OS",
".",
"WINDOWS",
",",
"source",
"=",
"Path",
"(",
"\"msexch_database_size.ps1\"",
")",
")"
] |
https://github.com/HeinleinSupport/check_mk_extensions/blob/aa7d7389b812ed00f91dad61d66fb676284897d8/msexch_database_size/lib/check_mk/base/cee/plugins/bakery/msexch_database_size.py#L23-L25
|
||||
mcfletch/pyopengl
|
02d11dad9ff18e50db10e975c4756e17bf198464
|
OpenGL/GL/EXT/paletted_texture.py
|
python
|
glInitPalettedTextureEXT
|
()
|
return extensions.hasGLExtension( _EXTENSION_NAME )
|
Return boolean indicating whether this extension is available
|
Return boolean indicating whether this extension is available
|
[
"Return",
"boolean",
"indicating",
"whether",
"this",
"extension",
"is",
"available"
] |
def glInitPalettedTextureEXT():
    """Return boolean indicating whether this extension is available"""
    # Imported lazily so merely importing this module never touches the
    # extensions machinery.
    from OpenGL import extensions
    available = extensions.hasGLExtension(_EXTENSION_NAME)
    return available
|
[
"def",
"glInitPalettedTextureEXT",
"(",
")",
":",
"from",
"OpenGL",
"import",
"extensions",
"return",
"extensions",
".",
"hasGLExtension",
"(",
"_EXTENSION_NAME",
")"
] |
https://github.com/mcfletch/pyopengl/blob/02d11dad9ff18e50db10e975c4756e17bf198464/OpenGL/GL/EXT/paletted_texture.py#L58-L61
|
|
openedx/edx-platform
|
68dd185a0ab45862a2a61e0f803d7e03d2be71b5
|
openedx/core/djangoapps/xblock/utils.py
|
python
|
get_xblock_id_for_anonymous_user
|
(user)
|
Get a unique string that identifies the current anonymous (not logged in)
user. (This is different than the "anonymous user ID", which is an
anonymized identifier for a logged in user.)
Note that this ID is a string, not an int. It is guaranteed to be in a
unique namespace that won't collide with "normal" user IDs, even when
they are converted to a string.
|
Get a unique string that identifies the current anonymous (not logged in)
user. (This is different than the "anonymous user ID", which is an
anonymized identifier for a logged in user.)
|
[
"Get",
"a",
"unique",
"string",
"that",
"identifies",
"the",
"current",
"anonymous",
"(",
"not",
"logged",
"in",
")",
"user",
".",
"(",
"This",
"is",
"different",
"than",
"the",
"anonymous",
"user",
"ID",
"which",
"is",
"an",
"anonymized",
"identifier",
"for",
"a",
"logged",
"in",
"user",
".",
")"
] |
def get_xblock_id_for_anonymous_user(user):
    """
    Return a string that uniquely identifies the current anonymous
    (not logged in) user.

    This is different than the "anonymous user ID", which is an anonymized
    identifier for a *logged in* user. The value returned here is a string
    (not an int) in its own "anon..." namespace, so it can never collide
    with a normal user ID, even after that ID is converted to a string.
    """
    if not user or not user.is_anonymous:
        raise TypeError("get_xblock_id_for_anonymous_user() is only for anonymous (not logged in) users.")
    # An ID stored directly on the AnonymousUser object (e.g. by the
    # xblock_handler API endpoint) supersedes everything else:
    if hasattr(user, 'xblock_id_for_anonymous_user'):
        return user.xblock_id_for_anonymous_user
    # Otherwise the session tracks (and creates if needed) the unique ID.
    current_request = crum.get_current_request()
    if not (current_request and current_request.session):
        raise RuntimeError("Cannot get a user ID for an anonymous user outside of an HTTP request context.")
    session = current_request.session
    if "xblock_id_for_anonymous_user" not in session:
        session["xblock_id_for_anonymous_user"] = f"anon{uuid4().hex[:20]}"
    return session["xblock_id_for_anonymous_user"]
|
[
"def",
"get_xblock_id_for_anonymous_user",
"(",
"user",
")",
":",
"if",
"not",
"user",
"or",
"not",
"user",
".",
"is_anonymous",
":",
"raise",
"TypeError",
"(",
"\"get_xblock_id_for_anonymous_user() is only for anonymous (not logged in) users.\"",
")",
"if",
"hasattr",
"(",
"user",
",",
"'xblock_id_for_anonymous_user'",
")",
":",
"# If code elsewhere (like the xblock_handler API endpoint) has stored",
"# the key on the AnonymousUser object, just return that - it supersedes",
"# everything else:",
"return",
"user",
".",
"xblock_id_for_anonymous_user",
"# We use the session to track (and create if needed) a unique ID for this anonymous user:",
"current_request",
"=",
"crum",
".",
"get_current_request",
"(",
")",
"if",
"current_request",
"and",
"current_request",
".",
"session",
":",
"# Make sure we have a key for this user:",
"if",
"\"xblock_id_for_anonymous_user\"",
"not",
"in",
"current_request",
".",
"session",
":",
"new_id",
"=",
"f\"anon{uuid4().hex[:20]}\"",
"current_request",
".",
"session",
"[",
"\"xblock_id_for_anonymous_user\"",
"]",
"=",
"new_id",
"return",
"current_request",
".",
"session",
"[",
"\"xblock_id_for_anonymous_user\"",
"]",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"Cannot get a user ID for an anonymous user outside of an HTTP request context.\"",
")"
] |
https://github.com/openedx/edx-platform/blob/68dd185a0ab45862a2a61e0f803d7e03d2be71b5/openedx/core/djangoapps/xblock/utils.py#L143-L169
|
||
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/lib-python/3/idlelib/Debugger.py
|
python
|
NamespaceViewer.__init__
|
(self, master, title, dict=None)
|
[] |
def __init__(self, master, title, dict=None):
    """Build a scrollable Tk panel for displaying a namespace mapping.

    Args:
        master: parent Tk widget.
        title: text shown in the label above the canvas.
        dict: mapping of names to values to display, or None.
            (NOTE: shadows the `dict` builtin -- kept for API compatibility.)
    """
    width = 0
    height = 40
    if dict:
        height = 20*len(dict) # XXX 20 == observed height of Entry widget
    self.master = master
    self.title = title
    import reprlib
    # Truncated repr keeps each displayed value to a single short line.
    self.repr = reprlib.Repr()
    self.repr.maxstring = 60
    self.repr.maxother = 60
    self.frame = frame = Frame(master)
    self.frame.pack(expand=1, fill="both")
    self.label = Label(frame, text=title, borderwidth=2, relief="groove")
    self.label.pack(fill="x")
    self.vbar = vbar = Scrollbar(frame, name="vbar")
    vbar.pack(side="right", fill="y")
    # Canvas height is clamped to [40, 300] px; the scrollbar exposes the
    # rest of the (0, 0, width, height) scroll region.
    self.canvas = canvas = Canvas(frame,
                                  height=min(300, max(40, height)),
                                  scrollregion=(0, 0, width, height))
    canvas.pack(side="left", fill="both", expand=1)
    vbar["command"] = canvas.yview
    canvas["yscrollcommand"] = vbar.set
    # Entry widgets live in a sub-frame embedded in the canvas so they
    # scroll as one unit.
    self.subframe = subframe = Frame(canvas)
    self.sfid = canvas.create_window(0, 0, window=subframe, anchor="nw")
    self.load_dict(dict)
|
[
"def",
"__init__",
"(",
"self",
",",
"master",
",",
"title",
",",
"dict",
"=",
"None",
")",
":",
"width",
"=",
"0",
"height",
"=",
"40",
"if",
"dict",
":",
"height",
"=",
"20",
"*",
"len",
"(",
"dict",
")",
"# XXX 20 == observed height of Entry widget",
"self",
".",
"master",
"=",
"master",
"self",
".",
"title",
"=",
"title",
"import",
"reprlib",
"self",
".",
"repr",
"=",
"reprlib",
".",
"Repr",
"(",
")",
"self",
".",
"repr",
".",
"maxstring",
"=",
"60",
"self",
".",
"repr",
".",
"maxother",
"=",
"60",
"self",
".",
"frame",
"=",
"frame",
"=",
"Frame",
"(",
"master",
")",
"self",
".",
"frame",
".",
"pack",
"(",
"expand",
"=",
"1",
",",
"fill",
"=",
"\"both\"",
")",
"self",
".",
"label",
"=",
"Label",
"(",
"frame",
",",
"text",
"=",
"title",
",",
"borderwidth",
"=",
"2",
",",
"relief",
"=",
"\"groove\"",
")",
"self",
".",
"label",
".",
"pack",
"(",
"fill",
"=",
"\"x\"",
")",
"self",
".",
"vbar",
"=",
"vbar",
"=",
"Scrollbar",
"(",
"frame",
",",
"name",
"=",
"\"vbar\"",
")",
"vbar",
".",
"pack",
"(",
"side",
"=",
"\"right\"",
",",
"fill",
"=",
"\"y\"",
")",
"self",
".",
"canvas",
"=",
"canvas",
"=",
"Canvas",
"(",
"frame",
",",
"height",
"=",
"min",
"(",
"300",
",",
"max",
"(",
"40",
",",
"height",
")",
")",
",",
"scrollregion",
"=",
"(",
"0",
",",
"0",
",",
"width",
",",
"height",
")",
")",
"canvas",
".",
"pack",
"(",
"side",
"=",
"\"left\"",
",",
"fill",
"=",
"\"both\"",
",",
"expand",
"=",
"1",
")",
"vbar",
"[",
"\"command\"",
"]",
"=",
"canvas",
".",
"yview",
"canvas",
"[",
"\"yscrollcommand\"",
"]",
"=",
"vbar",
".",
"set",
"self",
".",
"subframe",
"=",
"subframe",
"=",
"Frame",
"(",
"canvas",
")",
"self",
".",
"sfid",
"=",
"canvas",
".",
"create_window",
"(",
"0",
",",
"0",
",",
"window",
"=",
"subframe",
",",
"anchor",
"=",
"\"nw\"",
")",
"self",
".",
"load_dict",
"(",
"dict",
")"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/lib-python/3/idlelib/Debugger.py#L408-L433
|
||||
atomistic-machine-learning/schnetpack
|
dacf6076d43509dfd8b6694a846ac8453ae39b5e
|
src/schnetpack/md/simulation_hooks/barostats.py
|
python
|
NHCBarostatIsotropic._update_box_velocities
|
(self, time_step)
|
Update the velocities of the additional degree of freedom associated with the simulation cells.
Args:
time_step (float): Current timestep considering YS and multi-timestep integration.
|
Update the velocities of the additional degree of freedom associated with the simulation cells.
|
[
"Update",
"the",
"velocities",
"of",
"the",
"additional",
"degree",
"of",
"freedom",
"associated",
"with",
"the",
"simulation",
"cells",
"."
] |
def _update_box_velocities(self, time_step):
    """
    Update the velocities of the additional degree of freedom associated
    with the simulation cells.

    Args:
        time_step (float): Current timestep considering YS and
            multi-timestep integration.
    """
    # Thermostat damping factor exp(-dt/8 * xi); applied twice to the old
    # velocity and once to the force contribution.
    damping = torch.exp(-0.125 * time_step * self.t_velocities_cell[..., 0])
    decayed_velocity = damping * damping * self.b_velocities_cell
    force_term = 0.25 * time_step * self.b_forces_cell * damping
    self.b_velocities_cell = decayed_velocity + force_term
|
[
"def",
"_update_box_velocities",
"(",
"self",
",",
"time_step",
")",
":",
"b_factor",
"=",
"torch",
".",
"exp",
"(",
"-",
"0.125",
"*",
"time_step",
"*",
"self",
".",
"t_velocities_cell",
"[",
"...",
",",
"0",
"]",
")",
"self",
".",
"b_velocities_cell",
"=",
"(",
"b_factor",
"**",
"2",
"*",
"self",
".",
"b_velocities_cell",
"+",
"0.25",
"*",
"time_step",
"*",
"self",
".",
"b_forces_cell",
"*",
"b_factor",
")"
] |
https://github.com/atomistic-machine-learning/schnetpack/blob/dacf6076d43509dfd8b6694a846ac8453ae39b5e/src/schnetpack/md/simulation_hooks/barostats.py#L525-L536
|
||
ahmetcemturan/SFACT
|
7576e29ba72b33e5058049b77b7b558875542747
|
skeinforge_application/skeinforge_plugins/craft_plugins/oozebane.py
|
python
|
OozebaneSkein.getActiveFeedRateRatio
|
(self)
|
return 1.0
|
Get the feed rate of the first active move over the operating feed rate.
|
Get the feed rate of the first active move over the operating feed rate.
|
[
"Get",
"the",
"feed",
"rate",
"of",
"the",
"first",
"active",
"move",
"over",
"the",
"operating",
"feed",
"rate",
"."
] |
def getActiveFeedRateRatio(self):
    "Get the feed rate of the first active move over the operating feed rate."
    extruderOn = self.isExtruderActive
    for lineNumber in xrange(self.lineIndex, len(self.lines)):
        gcodeLine = self.lines[lineNumber]
        splitLine = gcodec.getSplitLineBeforeBracketSemicolon(gcodeLine)
        firstWord = gcodec.getFirstWord(splitLine)
        if firstWord == 'M101':
            # Extruder-on command: the next G1 is the first active move.
            extruderOn = True
        elif firstWord == 'G1' and extruderOn:
            return gcodec.getFeedRateMinute(self.feedRateMinute, splitLine) / self.operatingFeedRateMinute
    print('active feed rate ratio was not found in oozebane.')
    return 1.0
|
[
"def",
"getActiveFeedRateRatio",
"(",
"self",
")",
":",
"isSearchExtruderActive",
"=",
"self",
".",
"isExtruderActive",
"for",
"afterIndex",
"in",
"xrange",
"(",
"self",
".",
"lineIndex",
",",
"len",
"(",
"self",
".",
"lines",
")",
")",
":",
"line",
"=",
"self",
".",
"lines",
"[",
"afterIndex",
"]",
"splitLine",
"=",
"gcodec",
".",
"getSplitLineBeforeBracketSemicolon",
"(",
"line",
")",
"firstWord",
"=",
"gcodec",
".",
"getFirstWord",
"(",
"splitLine",
")",
"if",
"firstWord",
"==",
"'G1'",
":",
"if",
"isSearchExtruderActive",
":",
"return",
"gcodec",
".",
"getFeedRateMinute",
"(",
"self",
".",
"feedRateMinute",
",",
"splitLine",
")",
"/",
"self",
".",
"operatingFeedRateMinute",
"elif",
"firstWord",
"==",
"'M101'",
":",
"isSearchExtruderActive",
"=",
"True",
"print",
"(",
"'active feed rate ratio was not found in oozebane.'",
")",
"return",
"1.0"
] |
https://github.com/ahmetcemturan/SFACT/blob/7576e29ba72b33e5058049b77b7b558875542747/skeinforge_application/skeinforge_plugins/craft_plugins/oozebane.py#L176-L189
|
|
sametmax/Django--an-app-at-a-time
|
99eddf12ead76e6dfbeb09ce0bae61e282e22f8a
|
ignore_this_directory/django/views/generic/dates.py
|
python
|
BaseDateListView.get_date_list
|
(self, queryset, date_type=None, ordering='ASC')
|
return date_list
|
Get a date list by calling `queryset.dates/datetimes()`, checking
along the way for empty lists that aren't allowed.
|
Get a date list by calling `queryset.dates/datetimes()`, checking
along the way for empty lists that aren't allowed.
|
[
"Get",
"a",
"date",
"list",
"by",
"calling",
"queryset",
".",
"dates",
"/",
"datetimes",
"()",
"checking",
"along",
"the",
"way",
"for",
"empty",
"lists",
"that",
"aren",
"t",
"allowed",
"."
] |
def get_date_list(self, queryset, date_type=None, ordering='ASC'):
    """
    Get a date list by calling `queryset.dates/datetimes()`, checking
    along the way for empty lists that aren't allowed.
    """
    date_field = self.get_date_field()
    allow_empty = self.get_allow_empty()
    if date_type is None:
        date_type = self.get_date_list_period()
    # datetime fields use datetimes() (timezone-aware); plain date fields
    # use dates().
    if self.uses_datetime_field:
        fetch = queryset.datetimes
    else:
        fetch = queryset.dates
    date_list = fetch(date_field, date_type, ordering)
    if date_list is not None and not date_list and not allow_empty:
        raise Http404(
            _("No %(verbose_name_plural)s available") % {
                'verbose_name_plural': queryset.model._meta.verbose_name_plural,
            }
        )
    return date_list
|
[
"def",
"get_date_list",
"(",
"self",
",",
"queryset",
",",
"date_type",
"=",
"None",
",",
"ordering",
"=",
"'ASC'",
")",
":",
"date_field",
"=",
"self",
".",
"get_date_field",
"(",
")",
"allow_empty",
"=",
"self",
".",
"get_allow_empty",
"(",
")",
"if",
"date_type",
"is",
"None",
":",
"date_type",
"=",
"self",
".",
"get_date_list_period",
"(",
")",
"if",
"self",
".",
"uses_datetime_field",
":",
"date_list",
"=",
"queryset",
".",
"datetimes",
"(",
"date_field",
",",
"date_type",
",",
"ordering",
")",
"else",
":",
"date_list",
"=",
"queryset",
".",
"dates",
"(",
"date_field",
",",
"date_type",
",",
"ordering",
")",
"if",
"date_list",
"is",
"not",
"None",
"and",
"not",
"date_list",
"and",
"not",
"allow_empty",
":",
"raise",
"Http404",
"(",
"_",
"(",
"\"No %(verbose_name_plural)s available\"",
")",
"%",
"{",
"'verbose_name_plural'",
":",
"queryset",
".",
"model",
".",
"_meta",
".",
"verbose_name_plural",
",",
"}",
")",
"return",
"date_list"
] |
https://github.com/sametmax/Django--an-app-at-a-time/blob/99eddf12ead76e6dfbeb09ce0bae61e282e22f8a/ignore_this_directory/django/views/generic/dates.py#L351-L372
|
|
VirtueSecurity/aws-extender
|
d123b7e1a845847709ba3a481f11996bddc68a1c
|
BappModules/docutils/parsers/rst/directives/__init__.py
|
python
|
single_char_or_whitespace_or_unicode
|
(argument)
|
return char
|
As with `single_char_or_unicode`, but "tab" and "space" are also supported.
(Directive option conversion function.)
|
As with `single_char_or_unicode`, but "tab" and "space" are also supported.
(Directive option conversion function.)
|
[
"As",
"with",
"single_char_or_unicode",
"but",
"tab",
"and",
"space",
"are",
"also",
"supported",
".",
"(",
"Directive",
"option",
"conversion",
"function",
".",
")"
] |
def single_char_or_whitespace_or_unicode(argument):
    """
    As with `single_char_or_unicode`, but "tab" and "space" are also supported.
    (Directive option conversion function.)
    """
    # Named whitespace shortcuts map straight to their character; anything
    # else is delegated to the single-character converter.
    whitespace_names = {'tab': '\t', 'space': ' '}
    if argument in whitespace_names:
        return whitespace_names[argument]
    return single_char_or_unicode(argument)
|
[
"def",
"single_char_or_whitespace_or_unicode",
"(",
"argument",
")",
":",
"if",
"argument",
"==",
"'tab'",
":",
"char",
"=",
"'\\t'",
"elif",
"argument",
"==",
"'space'",
":",
"char",
"=",
"' '",
"else",
":",
"char",
"=",
"single_char_or_unicode",
"(",
"argument",
")",
"return",
"char"
] |
https://github.com/VirtueSecurity/aws-extender/blob/d123b7e1a845847709ba3a481f11996bddc68a1c/BappModules/docutils/parsers/rst/directives/__init__.py#L328-L339
|
|
mlcommons/inference
|
078e21f2bc0a37c7fd0e435d64f5a49760dca823
|
vision/medical_imaging/3d-unet-kits19/preprocess.py
|
python
|
Preprocessor.preprocess_dataset
|
(self)
|
Performs preprocess of all the cases collected and then prints summary stats of them
|
Performs preprocess of all the cases collected and then prints summary stats of them
|
[
"Performs",
"preprocess",
"of",
"all",
"the",
"cases",
"collected",
"and",
"then",
"prints",
"summary",
"stats",
"of",
"them"
] |
def preprocess_dataset(self):
    """Preprocess every collected case, then print summary statistics."""
    for current_case in self.collect_cases():
        self.preprocess_case(current_case)
    self.print_stats()
|
[
"def",
"preprocess_dataset",
"(",
"self",
")",
":",
"for",
"case",
"in",
"self",
".",
"collect_cases",
"(",
")",
":",
"self",
".",
"preprocess_case",
"(",
"case",
")",
"self",
".",
"print_stats",
"(",
")"
] |
https://github.com/mlcommons/inference/blob/078e21f2bc0a37c7fd0e435d64f5a49760dca823/vision/medical_imaging/3d-unet-kits19/preprocess.py#L259-L265
|
||
saltstack/salt
|
fae5bc757ad0f1716483ce7ae180b451545c2058
|
salt/utils/win_chcp.py
|
python
|
chcp
|
(page_id, raise_error=False)
|
Gets or sets the codepage of the shell.
Args:
page_id (str, int):
A number representing the codepage.
raise_error (bool):
``True`` will raise an error if the codepage fails to change.
``False`` will suppress the error
Returns:
int: A number representing the codepage
Raises:
CodePageError: On unsuccessful codepage change
|
Gets or sets the codepage of the shell.
|
[
"Gets",
"or",
"sets",
"the",
"codepage",
"of",
"the",
"shell",
"."
] |
def chcp(page_id, raise_error=False):
    """
    Gets or sets the codepage of the shell.

    NOTE(review): this function contains a ``yield``, so it is a generator --
    presumably used as a context manager (wrapped by
    ``contextlib.contextmanager`` at the decoration/call site); confirm.
    On the invalid-``page_id`` path it returns ``-1`` *without* yielding,
    which would make ``contextmanager`` raise "generator didn't yield" --
    verify callers handle that case.

    Args:
        page_id (str, int):
            A number representing the codepage.
        raise_error (bool):
            ``True`` will raise an error if the codepage fails to change.
            ``False`` will suppress the error
    Returns:
        int: A number representing the codepage
    Raises:
        CodePageError: On unsuccessful codepage change
    """
    # Coerce page_id to int; on failure either raise or log and bail out.
    if not isinstance(page_id, int):
        try:
            page_id = int(page_id)
        except ValueError:
            error = "The `page_id` needs to be an integer, not {}".format(type(page_id))
            if raise_error:
                raise CodePageError(error)
            log.error(error)
            return -1
    previous_page_id = get_codepage_id(raise_error=raise_error)
    # Only switch the codepage when both values are known and they differ.
    if page_id and previous_page_id and page_id != previous_page_id:
        set_code_page = True
    else:
        set_code_page = False
    try:
        if set_code_page:
            set_codepage_id(page_id, raise_error=raise_error)
        # Subprocesses started from now will use the set code page id
        yield
    finally:
        if set_code_page:
            # Reset to the old code page
            set_codepage_id(previous_page_id, raise_error=raise_error)
|
[
"def",
"chcp",
"(",
"page_id",
",",
"raise_error",
"=",
"False",
")",
":",
"if",
"not",
"isinstance",
"(",
"page_id",
",",
"int",
")",
":",
"try",
":",
"page_id",
"=",
"int",
"(",
"page_id",
")",
"except",
"ValueError",
":",
"error",
"=",
"\"The `page_id` needs to be an integer, not {}\"",
".",
"format",
"(",
"type",
"(",
"page_id",
")",
")",
"if",
"raise_error",
":",
"raise",
"CodePageError",
"(",
"error",
")",
"log",
".",
"error",
"(",
"error",
")",
"return",
"-",
"1",
"previous_page_id",
"=",
"get_codepage_id",
"(",
"raise_error",
"=",
"raise_error",
")",
"if",
"page_id",
"and",
"previous_page_id",
"and",
"page_id",
"!=",
"previous_page_id",
":",
"set_code_page",
"=",
"True",
"else",
":",
"set_code_page",
"=",
"False",
"try",
":",
"if",
"set_code_page",
":",
"set_codepage_id",
"(",
"page_id",
",",
"raise_error",
"=",
"raise_error",
")",
"# Subprocesses started from now will use the set code page id",
"yield",
"finally",
":",
"if",
"set_code_page",
":",
"# Reset to the old code page",
"set_codepage_id",
"(",
"previous_page_id",
",",
"raise_error",
"=",
"raise_error",
")"
] |
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/utils/win_chcp.py#L33-L78
|
||
dimagi/commcare-hq
|
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
|
corehq/apps/app_manager/views/releases.py
|
python
|
short_odk_url
|
(request, domain, app_id, with_media=False)
|
return HttpResponse(short_url)
|
[] |
def short_odk_url(request, domain, app_id, with_media=False):
build_profile_id = request.GET.get('profile')
short_url = get_app(domain, app_id).get_short_odk_url(with_media=with_media, build_profile_id=build_profile_id)
return HttpResponse(short_url)
|
[
"def",
"short_odk_url",
"(",
"request",
",",
"domain",
",",
"app_id",
",",
"with_media",
"=",
"False",
")",
":",
"build_profile_id",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'profile'",
")",
"short_url",
"=",
"get_app",
"(",
"domain",
",",
"app_id",
")",
".",
"get_short_odk_url",
"(",
"with_media",
"=",
"with_media",
",",
"build_profile_id",
"=",
"build_profile_id",
")",
"return",
"HttpResponse",
"(",
"short_url",
")"
] |
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/app_manager/views/releases.py#L459-L462
|
|||
lovelylain/pyctp
|
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
|
example/ctp/lts/ApiStruct.py
|
python
|
QrySuperUserFunction.__init__
|
(self, UserID='')
|
[] |
def __init__(self, UserID=''):
self.UserID = ''
|
[
"def",
"__init__",
"(",
"self",
",",
"UserID",
"=",
"''",
")",
":",
"self",
".",
"UserID",
"=",
"''"
] |
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/example/ctp/lts/ApiStruct.py#L1233-L1234
|
||||
david8862/keras-YOLOv3-model-set
|
e9f0f94109430973525219e66eeafe8a2f51363d
|
yolo3/models/yolo3_mobilenet.py
|
python
|
yolo3lite_spp_mobilenet_body
|
(inputs, num_anchors, num_classes, alpha=1.0)
|
return Model(inputs = inputs, outputs=[y1,y2,y3])
|
Create YOLO_v3 Lite SPP MobileNet model CNN body in keras.
|
Create YOLO_v3 Lite SPP MobileNet model CNN body in keras.
|
[
"Create",
"YOLO_v3",
"Lite",
"SPP",
"MobileNet",
"model",
"CNN",
"body",
"in",
"keras",
"."
] |
def yolo3lite_spp_mobilenet_body(inputs, num_anchors, num_classes, alpha=1.0):
'''Create YOLO_v3 Lite SPP MobileNet model CNN body in keras.'''
mobilenet = MobileNet(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
print('backbone layers number: {}'.format(len(mobilenet.layers)))
# input: 416 x 416 x 3
# conv_pw_13_relu :13 x 13 x (1024*alpha)
# conv_pw_11_relu :26 x 26 x (512*alpha)
# conv_pw_5_relu : 52 x 52 x (256*alpha)
# f1: 13 x 13 x (1024*alpha)
f1 = mobilenet.get_layer('conv_pw_13_relu').output
# f2: 26 x 26 x (512*alpha)
f2 = mobilenet.get_layer('conv_pw_11_relu').output
# f3: 52 x 52 x (256*alpha)
f3 = mobilenet.get_layer('conv_pw_5_relu').output
f1_channel_num = int(1024*alpha)
f2_channel_num = int(512*alpha)
f3_channel_num = int(256*alpha)
y1, y2, y3 = yolo3lite_predictions((f1, f2, f3), (f1_channel_num, f2_channel_num, f3_channel_num), num_anchors, num_classes, use_spp=True)
return Model(inputs = inputs, outputs=[y1,y2,y3])
|
[
"def",
"yolo3lite_spp_mobilenet_body",
"(",
"inputs",
",",
"num_anchors",
",",
"num_classes",
",",
"alpha",
"=",
"1.0",
")",
":",
"mobilenet",
"=",
"MobileNet",
"(",
"input_tensor",
"=",
"inputs",
",",
"weights",
"=",
"'imagenet'",
",",
"include_top",
"=",
"False",
",",
"alpha",
"=",
"alpha",
")",
"print",
"(",
"'backbone layers number: {}'",
".",
"format",
"(",
"len",
"(",
"mobilenet",
".",
"layers",
")",
")",
")",
"# input: 416 x 416 x 3",
"# conv_pw_13_relu :13 x 13 x (1024*alpha)",
"# conv_pw_11_relu :26 x 26 x (512*alpha)",
"# conv_pw_5_relu : 52 x 52 x (256*alpha)",
"# f1: 13 x 13 x (1024*alpha)",
"f1",
"=",
"mobilenet",
".",
"get_layer",
"(",
"'conv_pw_13_relu'",
")",
".",
"output",
"# f2: 26 x 26 x (512*alpha)",
"f2",
"=",
"mobilenet",
".",
"get_layer",
"(",
"'conv_pw_11_relu'",
")",
".",
"output",
"# f3: 52 x 52 x (256*alpha)",
"f3",
"=",
"mobilenet",
".",
"get_layer",
"(",
"'conv_pw_5_relu'",
")",
".",
"output",
"f1_channel_num",
"=",
"int",
"(",
"1024",
"*",
"alpha",
")",
"f2_channel_num",
"=",
"int",
"(",
"512",
"*",
"alpha",
")",
"f3_channel_num",
"=",
"int",
"(",
"256",
"*",
"alpha",
")",
"y1",
",",
"y2",
",",
"y3",
"=",
"yolo3lite_predictions",
"(",
"(",
"f1",
",",
"f2",
",",
"f3",
")",
",",
"(",
"f1_channel_num",
",",
"f2_channel_num",
",",
"f3_channel_num",
")",
",",
"num_anchors",
",",
"num_classes",
",",
"use_spp",
"=",
"True",
")",
"return",
"Model",
"(",
"inputs",
"=",
"inputs",
",",
"outputs",
"=",
"[",
"y1",
",",
"y2",
",",
"y3",
"]",
")"
] |
https://github.com/david8862/keras-YOLOv3-model-set/blob/e9f0f94109430973525219e66eeafe8a2f51363d/yolo3/models/yolo3_mobilenet.py#L66-L89
|
|
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/IPython/utils/version.py
|
python
|
check_version
|
(v, check)
|
check version string v >= check
If dev/prerelease tags result in TypeError for string-number comparison,
it is assumed that the dependency is satisfied.
Users on dev branches are responsible for keeping their own packages up to date.
|
check version string v >= check
|
[
"check",
"version",
"string",
"v",
">",
"=",
"check"
] |
def check_version(v, check):
"""check version string v >= check
If dev/prerelease tags result in TypeError for string-number comparison,
it is assumed that the dependency is satisfied.
Users on dev branches are responsible for keeping their own packages up to date.
"""
try:
return LooseVersion(v) >= LooseVersion(check)
except TypeError:
return True
|
[
"def",
"check_version",
"(",
"v",
",",
"check",
")",
":",
"try",
":",
"return",
"LooseVersion",
"(",
"v",
")",
">=",
"LooseVersion",
"(",
"check",
")",
"except",
"TypeError",
":",
"return",
"True"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/IPython/utils/version.py#L25-L35
|
||
oracle/graalpython
|
577e02da9755d916056184ec441c26e00b70145c
|
graalpython/lib-python/3/_pydecimal.py
|
python
|
Decimal.is_qnan
|
(self)
|
return self._exp == 'n'
|
Return True if self is a quiet NaN; otherwise return False.
|
Return True if self is a quiet NaN; otherwise return False.
|
[
"Return",
"True",
"if",
"self",
"is",
"a",
"quiet",
"NaN",
";",
"otherwise",
"return",
"False",
"."
] |
def is_qnan(self):
"""Return True if self is a quiet NaN; otherwise return False."""
return self._exp == 'n'
|
[
"def",
"is_qnan",
"(",
"self",
")",
":",
"return",
"self",
".",
"_exp",
"==",
"'n'"
] |
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/_pydecimal.py#L3143-L3145
|
|
edisonlz/fastor
|
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
|
base/site-packages/tornado/httpserver.py
|
python
|
HTTPConnection.write
|
(self, chunk, callback=None)
|
Writes a chunk of output to the stream.
|
Writes a chunk of output to the stream.
|
[
"Writes",
"a",
"chunk",
"of",
"output",
"to",
"the",
"stream",
"."
] |
def write(self, chunk, callback=None):
"""Writes a chunk of output to the stream."""
assert self._request, "Request closed"
if not self.stream.closed():
self._write_callback = stack_context.wrap(callback)
self.stream.write(chunk, self._on_write_complete)
|
[
"def",
"write",
"(",
"self",
",",
"chunk",
",",
"callback",
"=",
"None",
")",
":",
"assert",
"self",
".",
"_request",
",",
"\"Request closed\"",
"if",
"not",
"self",
".",
"stream",
".",
"closed",
"(",
")",
":",
"self",
".",
"_write_callback",
"=",
"stack_context",
".",
"wrap",
"(",
"callback",
")",
"self",
".",
"stream",
".",
"write",
"(",
"chunk",
",",
"self",
".",
"_on_write_complete",
")"
] |
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/tornado/httpserver.py#L181-L186
|
||
pventuzelo/octopus
|
e8b8c5a9d5f6d9c63605afe9ef1528ab481ec983
|
octopus/engine/ssa.py
|
python
|
SSAEngine.emulate_one_instruction
|
(self, instr, state, depth)
|
TODO
|
TODO
|
[
"TODO"
] |
def emulate_one_instruction(self, instr, state, depth):
""" TODO """
raise NotImplementedError
|
[
"def",
"emulate_one_instruction",
"(",
"self",
",",
"instr",
",",
"state",
",",
"depth",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/pventuzelo/octopus/blob/e8b8c5a9d5f6d9c63605afe9ef1528ab481ec983/octopus/engine/ssa.py#L16-L18
|
||
DataDog/datadog-serverless-functions
|
2b28937da7cb8e2fdfa007db45e9fb0c7bd472e9
|
aws/logs_monitoring/enhanced_lambda_metrics.py
|
python
|
LambdaTagsCache.get
|
(self, resource_arn)
|
return function_tags
|
Get the tags for the Lambda function from the cache
Will refetch the tags if they are out of date, or a lambda arn is encountered
which isn't in the tag list
Note: the ARNs in the cache have been lowercased, so resource_arn must be lowercased
Args:
resource_arn (str): the arn we're getting tags from the cache for
Returns:
lambda_tags (str[]): the list of "key:value" Datadog tag strings
|
Get the tags for the Lambda function from the cache
|
[
"Get",
"the",
"tags",
"for",
"the",
"Lambda",
"function",
"from",
"the",
"cache"
] |
def get(self, resource_arn):
"""Get the tags for the Lambda function from the cache
Will refetch the tags if they are out of date, or a lambda arn is encountered
which isn't in the tag list
Note: the ARNs in the cache have been lowercased, so resource_arn must be lowercased
Args:
resource_arn (str): the arn we're getting tags from the cache for
Returns:
lambda_tags (str[]): the list of "key:value" Datadog tag strings
"""
if self._is_expired():
send_forwarder_internal_metrics("local_cache_expired")
logger.debug("Local cache expired, fetching cache from S3")
self._refresh()
function_tags = self.tags_by_arn.get(resource_arn, [])
return function_tags
|
[
"def",
"get",
"(",
"self",
",",
"resource_arn",
")",
":",
"if",
"self",
".",
"_is_expired",
"(",
")",
":",
"send_forwarder_internal_metrics",
"(",
"\"local_cache_expired\"",
")",
"logger",
".",
"debug",
"(",
"\"Local cache expired, fetching cache from S3\"",
")",
"self",
".",
"_refresh",
"(",
")",
"function_tags",
"=",
"self",
".",
"tags_by_arn",
".",
"get",
"(",
"resource_arn",
",",
"[",
"]",
")",
"return",
"function_tags"
] |
https://github.com/DataDog/datadog-serverless-functions/blob/2b28937da7cb8e2fdfa007db45e9fb0c7bd472e9/aws/logs_monitoring/enhanced_lambda_metrics.py#L167-L187
|
|
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_flaskbb/lib/python2.7/site-packages/setuptools/_vendor/six.py
|
python
|
_SixMetaPathImporter.is_package
|
(self, fullname)
|
return hasattr(self.__get_module(fullname), "__path__")
|
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
|
Return true, if the named module is a package.
|
[
"Return",
"true",
"if",
"the",
"named",
"module",
"is",
"a",
"package",
"."
] |
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
|
[
"def",
"is_package",
"(",
"self",
",",
"fullname",
")",
":",
"return",
"hasattr",
"(",
"self",
".",
"__get_module",
"(",
"fullname",
")",
",",
"\"__path__\"",
")"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/setuptools/_vendor/six.py#L209-L216
|
|
tensorflow/models
|
6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3
|
research/object_detection/utils/vrd_evaluation.py
|
python
|
VRDDetectionEvaluator.add_single_detected_image_info
|
(self, image_id, detections_dict)
|
Adds detections for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
detections_dict: A dictionary containing -
standard_fields.DetectionResultFields.detection_boxes: A numpy array of
structures with shape [N, 1], representing N tuples, each tuple
containing the same number of named bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max] (as an example
see datatype vrd_box_data_type, single_box_data_type above).
standard_fields.DetectionResultFields.detection_scores: float32 numpy
array of shape [N] containing detection scores for the boxes.
standard_fields.DetectionResultFields.detection_classes: A numpy array
of structures shape [N, 1], representing the class labels of the
corresponding bounding boxes and possibly additional classes (see
datatype label_data_type above).
|
Adds detections for a single image to be used for evaluation.
|
[
"Adds",
"detections",
"for",
"a",
"single",
"image",
"to",
"be",
"used",
"for",
"evaluation",
"."
] |
def add_single_detected_image_info(self, image_id, detections_dict):
"""Adds detections for a single image to be used for evaluation.
Args:
image_id: A unique string/integer identifier for the image.
detections_dict: A dictionary containing -
standard_fields.DetectionResultFields.detection_boxes: A numpy array of
structures with shape [N, 1], representing N tuples, each tuple
containing the same number of named bounding boxes.
Each box is of the format [y_min, x_min, y_max, x_max] (as an example
see datatype vrd_box_data_type, single_box_data_type above).
standard_fields.DetectionResultFields.detection_scores: float32 numpy
array of shape [N] containing detection scores for the boxes.
standard_fields.DetectionResultFields.detection_classes: A numpy array
of structures shape [N, 1], representing the class labels of the
corresponding bounding boxes and possibly additional classes (see
datatype label_data_type above).
"""
if image_id not in self._image_ids:
logging.warning('No groundtruth for the image with id %s.', image_id)
# Since for the correct work of evaluator it is assumed that groundtruth
# is inserted first we make sure to break the code if is it not the case.
self._image_ids.update([image_id])
self._negative_labels[image_id] = np.array([])
self._evaluatable_labels[image_id] = np.array([])
num_detections = detections_dict[
standard_fields.DetectionResultFields.detection_boxes].shape[0]
detection_class_tuples = detections_dict[
standard_fields.DetectionResultFields.detection_classes]
detection_box_tuples = detections_dict[
standard_fields.DetectionResultFields.detection_boxes]
negative_selector = np.zeros(num_detections, dtype=bool)
selector = np.ones(num_detections, dtype=bool)
# Only check boxable labels
for field in detection_box_tuples.dtype.fields:
# Verify if one of the labels is negative (this is sure FP)
negative_selector |= np.isin(detection_class_tuples[field],
self._negative_labels[image_id])
# Verify if all labels are verified
selector &= np.isin(detection_class_tuples[field],
self._evaluatable_labels[image_id])
selector |= negative_selector
self._evaluation.add_single_detected_image_info(
image_key=image_id,
detected_box_tuples=self._process_detection_boxes(
detection_box_tuples[selector]),
detected_scores=detections_dict[
standard_fields.DetectionResultFields.detection_scores][selector],
detected_class_tuples=detection_class_tuples[selector])
|
[
"def",
"add_single_detected_image_info",
"(",
"self",
",",
"image_id",
",",
"detections_dict",
")",
":",
"if",
"image_id",
"not",
"in",
"self",
".",
"_image_ids",
":",
"logging",
".",
"warning",
"(",
"'No groundtruth for the image with id %s.'",
",",
"image_id",
")",
"# Since for the correct work of evaluator it is assumed that groundtruth",
"# is inserted first we make sure to break the code if is it not the case.",
"self",
".",
"_image_ids",
".",
"update",
"(",
"[",
"image_id",
"]",
")",
"self",
".",
"_negative_labels",
"[",
"image_id",
"]",
"=",
"np",
".",
"array",
"(",
"[",
"]",
")",
"self",
".",
"_evaluatable_labels",
"[",
"image_id",
"]",
"=",
"np",
".",
"array",
"(",
"[",
"]",
")",
"num_detections",
"=",
"detections_dict",
"[",
"standard_fields",
".",
"DetectionResultFields",
".",
"detection_boxes",
"]",
".",
"shape",
"[",
"0",
"]",
"detection_class_tuples",
"=",
"detections_dict",
"[",
"standard_fields",
".",
"DetectionResultFields",
".",
"detection_classes",
"]",
"detection_box_tuples",
"=",
"detections_dict",
"[",
"standard_fields",
".",
"DetectionResultFields",
".",
"detection_boxes",
"]",
"negative_selector",
"=",
"np",
".",
"zeros",
"(",
"num_detections",
",",
"dtype",
"=",
"bool",
")",
"selector",
"=",
"np",
".",
"ones",
"(",
"num_detections",
",",
"dtype",
"=",
"bool",
")",
"# Only check boxable labels",
"for",
"field",
"in",
"detection_box_tuples",
".",
"dtype",
".",
"fields",
":",
"# Verify if one of the labels is negative (this is sure FP)",
"negative_selector",
"|=",
"np",
".",
"isin",
"(",
"detection_class_tuples",
"[",
"field",
"]",
",",
"self",
".",
"_negative_labels",
"[",
"image_id",
"]",
")",
"# Verify if all labels are verified",
"selector",
"&=",
"np",
".",
"isin",
"(",
"detection_class_tuples",
"[",
"field",
"]",
",",
"self",
".",
"_evaluatable_labels",
"[",
"image_id",
"]",
")",
"selector",
"|=",
"negative_selector",
"self",
".",
"_evaluation",
".",
"add_single_detected_image_info",
"(",
"image_key",
"=",
"image_id",
",",
"detected_box_tuples",
"=",
"self",
".",
"_process_detection_boxes",
"(",
"detection_box_tuples",
"[",
"selector",
"]",
")",
",",
"detected_scores",
"=",
"detections_dict",
"[",
"standard_fields",
".",
"DetectionResultFields",
".",
"detection_scores",
"]",
"[",
"selector",
"]",
",",
"detected_class_tuples",
"=",
"detection_class_tuples",
"[",
"selector",
"]",
")"
] |
https://github.com/tensorflow/models/blob/6b8bb0cbeb3e10415c7a87448f08adc3c484c1d3/research/object_detection/utils/vrd_evaluation.py#L169-L218
|
||
Kozea/WeasyPrint
|
6cce2978165134e37683cb5b3d156cac6a11a7f9
|
weasyprint/svg/bounding_box.py
|
python
|
bounding_box_circle
|
(svg, node, font_size)
|
return cx - r, cy - r, 2 * r, 2 * r
|
Bounding box for circle node.
|
Bounding box for circle node.
|
[
"Bounding",
"box",
"for",
"circle",
"node",
"."
] |
def bounding_box_circle(svg, node, font_size):
"""Bounding box for circle node."""
cx, cy = svg.point(node.get('cx'), node.get('cy'), font_size)
r = svg.length(node.get('r'), font_size)
return cx - r, cy - r, 2 * r, 2 * r
|
[
"def",
"bounding_box_circle",
"(",
"svg",
",",
"node",
",",
"font_size",
")",
":",
"cx",
",",
"cy",
"=",
"svg",
".",
"point",
"(",
"node",
".",
"get",
"(",
"'cx'",
")",
",",
"node",
".",
"get",
"(",
"'cy'",
")",
",",
"font_size",
")",
"r",
"=",
"svg",
".",
"length",
"(",
"node",
".",
"get",
"(",
"'r'",
")",
",",
"font_size",
")",
"return",
"cx",
"-",
"r",
",",
"cy",
"-",
"r",
",",
"2",
"*",
"r",
",",
"2",
"*",
"r"
] |
https://github.com/Kozea/WeasyPrint/blob/6cce2978165134e37683cb5b3d156cac6a11a7f9/weasyprint/svg/bounding_box.py#L40-L44
|
|
sergiocorreia/panflute
|
b9546cf7b88fdc9f00117fca395c4d3590f45769
|
examples/pandocfilters/gabc.py
|
python
|
png
|
(contents, latex_command)
|
return src
|
Creates a png if needed.
|
Creates a png if needed.
|
[
"Creates",
"a",
"png",
"if",
"needed",
"."
] |
def png(contents, latex_command):
"""Creates a png if needed."""
outfile = sha(contents + latex_command)
src = os.path.join(IMAGEDIR, outfile + '.png')
if not os.path.isfile(src):
try:
os.mkdir(IMAGEDIR)
stderr.write('Created directory ' + IMAGEDIR + '\n')
except OSError:
pass
latex2png(latex_command + "{" + contents + "}", outfile)
stderr.write('Created image ' + src + '\n')
return src
|
[
"def",
"png",
"(",
"contents",
",",
"latex_command",
")",
":",
"outfile",
"=",
"sha",
"(",
"contents",
"+",
"latex_command",
")",
"src",
"=",
"os",
".",
"path",
".",
"join",
"(",
"IMAGEDIR",
",",
"outfile",
"+",
"'.png'",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"src",
")",
":",
"try",
":",
"os",
".",
"mkdir",
"(",
"IMAGEDIR",
")",
"stderr",
".",
"write",
"(",
"'Created directory '",
"+",
"IMAGEDIR",
"+",
"'\\n'",
")",
"except",
"OSError",
":",
"pass",
"latex2png",
"(",
"latex_command",
"+",
"\"{\"",
"+",
"contents",
"+",
"\"}\"",
",",
"outfile",
")",
"stderr",
".",
"write",
"(",
"'Created image '",
"+",
"src",
"+",
"'\\n'",
")",
"return",
"src"
] |
https://github.com/sergiocorreia/panflute/blob/b9546cf7b88fdc9f00117fca395c4d3590f45769/examples/pandocfilters/gabc.py#L105-L117
|
|
CedricGuillemet/Imogen
|
ee417b42747ed5b46cb11b02ef0c3630000085b3
|
bin/Lib/codecs.py
|
python
|
iterdecode
|
(iterator, encoding, errors='strict', **kwargs)
|
Decoding iterator.
Decodes the input strings from the iterator using an IncrementalDecoder.
errors and kwargs are passed through to the IncrementalDecoder
constructor.
|
Decoding iterator.
|
[
"Decoding",
"iterator",
"."
] |
def iterdecode(iterator, encoding, errors='strict', **kwargs):
"""
Decoding iterator.
Decodes the input strings from the iterator using an IncrementalDecoder.
errors and kwargs are passed through to the IncrementalDecoder
constructor.
"""
decoder = getincrementaldecoder(encoding)(errors, **kwargs)
for input in iterator:
output = decoder.decode(input)
if output:
yield output
output = decoder.decode(b"", True)
if output:
yield output
|
[
"def",
"iterdecode",
"(",
"iterator",
",",
"encoding",
",",
"errors",
"=",
"'strict'",
",",
"*",
"*",
"kwargs",
")",
":",
"decoder",
"=",
"getincrementaldecoder",
"(",
"encoding",
")",
"(",
"errors",
",",
"*",
"*",
"kwargs",
")",
"for",
"input",
"in",
"iterator",
":",
"output",
"=",
"decoder",
".",
"decode",
"(",
"input",
")",
"if",
"output",
":",
"yield",
"output",
"output",
"=",
"decoder",
".",
"decode",
"(",
"b\"\"",
",",
"True",
")",
"if",
"output",
":",
"yield",
"output"
] |
https://github.com/CedricGuillemet/Imogen/blob/ee417b42747ed5b46cb11b02ef0c3630000085b3/bin/Lib/codecs.py#L1031-L1047
|
||
OpenCobolIDE/OpenCobolIDE
|
c78d0d335378e5fe0a5e74f53c19b68b55e85388
|
open_cobol_ide/extlibs/pyqode/core/managers/file.py
|
python
|
FileManager._get_text
|
(self, encoding)
|
return text.encode(encoding)
|
[] |
def _get_text(self, encoding):
lines = self.editor.toPlainText().splitlines()
if self.clean_trailing_whitespaces:
lines = [l.rstrip() for l in lines]
# remove emtpy ending lines
try:
last_line = lines[-1]
except IndexError:
pass # empty file
else:
while last_line == '':
try:
lines.pop()
last_line = lines[-1]
except IndexError:
last_line = None
text = self._eol.join(lines) + self._eol
return text.encode(encoding)
|
[
"def",
"_get_text",
"(",
"self",
",",
"encoding",
")",
":",
"lines",
"=",
"self",
".",
"editor",
".",
"toPlainText",
"(",
")",
".",
"splitlines",
"(",
")",
"if",
"self",
".",
"clean_trailing_whitespaces",
":",
"lines",
"=",
"[",
"l",
".",
"rstrip",
"(",
")",
"for",
"l",
"in",
"lines",
"]",
"# remove emtpy ending lines",
"try",
":",
"last_line",
"=",
"lines",
"[",
"-",
"1",
"]",
"except",
"IndexError",
":",
"pass",
"# empty file",
"else",
":",
"while",
"last_line",
"==",
"''",
":",
"try",
":",
"lines",
".",
"pop",
"(",
")",
"last_line",
"=",
"lines",
"[",
"-",
"1",
"]",
"except",
"IndexError",
":",
"last_line",
"=",
"None",
"text",
"=",
"self",
".",
"_eol",
".",
"join",
"(",
"lines",
")",
"+",
"self",
".",
"_eol",
"return",
"text",
".",
"encode",
"(",
"encoding",
")"
] |
https://github.com/OpenCobolIDE/OpenCobolIDE/blob/c78d0d335378e5fe0a5e74f53c19b68b55e85388/open_cobol_ide/extlibs/pyqode/core/managers/file.py#L323-L340
|
|||
Miserlou/Zappa
|
5a11c17f5ecf0568bdb73b4baf6fb08ff0184f39
|
zappa/cli.py
|
python
|
ZappaCLI.schedule
|
(self)
|
Given a a list of functions and a schedule to execute them,
setup up regular execution.
|
Given a a list of functions and a schedule to execute them,
setup up regular execution.
|
[
"Given",
"a",
"a",
"list",
"of",
"functions",
"and",
"a",
"schedule",
"to",
"execute",
"them",
"setup",
"up",
"regular",
"execution",
"."
] |
def schedule(self):
"""
Given a a list of functions and a schedule to execute them,
setup up regular execution.
"""
events = self.stage_config.get('events', [])
if events:
if not isinstance(events, list): # pragma: no cover
print("Events must be supplied as a list.")
return
for event in events:
self.collision_warning(event.get('function'))
if self.stage_config.get('keep_warm', True):
if not events:
events = []
keep_warm_rate = self.stage_config.get('keep_warm_expression', "rate(4 minutes)")
events.append({'name': 'zappa-keep-warm',
'function': 'handler.keep_warm_callback',
'expression': keep_warm_rate,
'description': 'Zappa Keep Warm - {}'.format(self.lambda_name)})
if events:
try:
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
except botocore.exceptions.ClientError as e: # pragma: no cover
click.echo(click.style("Function does not exist", fg="yellow") + ", please " +
click.style("deploy", bold=True) + "first. Ex:" +
click.style("zappa deploy {}.".format(self.api_stage), bold=True))
sys.exit(-1)
print("Scheduling..")
self.zappa.schedule_events(
lambda_arn=function_response['Configuration']['FunctionArn'],
lambda_name=self.lambda_name,
events=events
)
# Add async tasks SNS
if self.stage_config.get('async_source', None) == 'sns' \
and self.stage_config.get('async_resources', True):
self.lambda_arn = self.zappa.get_lambda_function(
function_name=self.lambda_name)
topic_arn = self.zappa.create_async_sns_topic(
lambda_name=self.lambda_name,
lambda_arn=self.lambda_arn
)
click.echo('SNS Topic created: %s' % topic_arn)
# Add async tasks DynamoDB
table_name = self.stage_config.get('async_response_table', False)
read_capacity = self.stage_config.get('async_response_table_read_capacity', 1)
write_capacity = self.stage_config.get('async_response_table_write_capacity', 1)
if table_name and self.stage_config.get('async_resources', True):
created, response_table = self.zappa.create_async_dynamodb_table(
table_name, read_capacity, write_capacity)
if created:
click.echo('DynamoDB table created: %s' % table_name)
else:
click.echo('DynamoDB table exists: %s' % table_name)
provisioned_throughput = response_table['Table']['ProvisionedThroughput']
if provisioned_throughput['ReadCapacityUnits'] != read_capacity or \
provisioned_throughput['WriteCapacityUnits'] != write_capacity:
click.echo(click.style(
"\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\n".format(table_name),
fg='red'
))
|
[
"def",
"schedule",
"(",
"self",
")",
":",
"events",
"=",
"self",
".",
"stage_config",
".",
"get",
"(",
"'events'",
",",
"[",
"]",
")",
"if",
"events",
":",
"if",
"not",
"isinstance",
"(",
"events",
",",
"list",
")",
":",
"# pragma: no cover",
"print",
"(",
"\"Events must be supplied as a list.\"",
")",
"return",
"for",
"event",
"in",
"events",
":",
"self",
".",
"collision_warning",
"(",
"event",
".",
"get",
"(",
"'function'",
")",
")",
"if",
"self",
".",
"stage_config",
".",
"get",
"(",
"'keep_warm'",
",",
"True",
")",
":",
"if",
"not",
"events",
":",
"events",
"=",
"[",
"]",
"keep_warm_rate",
"=",
"self",
".",
"stage_config",
".",
"get",
"(",
"'keep_warm_expression'",
",",
"\"rate(4 minutes)\"",
")",
"events",
".",
"append",
"(",
"{",
"'name'",
":",
"'zappa-keep-warm'",
",",
"'function'",
":",
"'handler.keep_warm_callback'",
",",
"'expression'",
":",
"keep_warm_rate",
",",
"'description'",
":",
"'Zappa Keep Warm - {}'",
".",
"format",
"(",
"self",
".",
"lambda_name",
")",
"}",
")",
"if",
"events",
":",
"try",
":",
"function_response",
"=",
"self",
".",
"zappa",
".",
"lambda_client",
".",
"get_function",
"(",
"FunctionName",
"=",
"self",
".",
"lambda_name",
")",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"e",
":",
"# pragma: no cover",
"click",
".",
"echo",
"(",
"click",
".",
"style",
"(",
"\"Function does not exist\"",
",",
"fg",
"=",
"\"yellow\"",
")",
"+",
"\", please \"",
"+",
"click",
".",
"style",
"(",
"\"deploy\"",
",",
"bold",
"=",
"True",
")",
"+",
"\"first. Ex:\"",
"+",
"click",
".",
"style",
"(",
"\"zappa deploy {}.\"",
".",
"format",
"(",
"self",
".",
"api_stage",
")",
",",
"bold",
"=",
"True",
")",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"print",
"(",
"\"Scheduling..\"",
")",
"self",
".",
"zappa",
".",
"schedule_events",
"(",
"lambda_arn",
"=",
"function_response",
"[",
"'Configuration'",
"]",
"[",
"'FunctionArn'",
"]",
",",
"lambda_name",
"=",
"self",
".",
"lambda_name",
",",
"events",
"=",
"events",
")",
"# Add async tasks SNS",
"if",
"self",
".",
"stage_config",
".",
"get",
"(",
"'async_source'",
",",
"None",
")",
"==",
"'sns'",
"and",
"self",
".",
"stage_config",
".",
"get",
"(",
"'async_resources'",
",",
"True",
")",
":",
"self",
".",
"lambda_arn",
"=",
"self",
".",
"zappa",
".",
"get_lambda_function",
"(",
"function_name",
"=",
"self",
".",
"lambda_name",
")",
"topic_arn",
"=",
"self",
".",
"zappa",
".",
"create_async_sns_topic",
"(",
"lambda_name",
"=",
"self",
".",
"lambda_name",
",",
"lambda_arn",
"=",
"self",
".",
"lambda_arn",
")",
"click",
".",
"echo",
"(",
"'SNS Topic created: %s'",
"%",
"topic_arn",
")",
"# Add async tasks DynamoDB",
"table_name",
"=",
"self",
".",
"stage_config",
".",
"get",
"(",
"'async_response_table'",
",",
"False",
")",
"read_capacity",
"=",
"self",
".",
"stage_config",
".",
"get",
"(",
"'async_response_table_read_capacity'",
",",
"1",
")",
"write_capacity",
"=",
"self",
".",
"stage_config",
".",
"get",
"(",
"'async_response_table_write_capacity'",
",",
"1",
")",
"if",
"table_name",
"and",
"self",
".",
"stage_config",
".",
"get",
"(",
"'async_resources'",
",",
"True",
")",
":",
"created",
",",
"response_table",
"=",
"self",
".",
"zappa",
".",
"create_async_dynamodb_table",
"(",
"table_name",
",",
"read_capacity",
",",
"write_capacity",
")",
"if",
"created",
":",
"click",
".",
"echo",
"(",
"'DynamoDB table created: %s'",
"%",
"table_name",
")",
"else",
":",
"click",
".",
"echo",
"(",
"'DynamoDB table exists: %s'",
"%",
"table_name",
")",
"provisioned_throughput",
"=",
"response_table",
"[",
"'Table'",
"]",
"[",
"'ProvisionedThroughput'",
"]",
"if",
"provisioned_throughput",
"[",
"'ReadCapacityUnits'",
"]",
"!=",
"read_capacity",
"or",
"provisioned_throughput",
"[",
"'WriteCapacityUnits'",
"]",
"!=",
"write_capacity",
":",
"click",
".",
"echo",
"(",
"click",
".",
"style",
"(",
"\"\\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\\n\"",
".",
"format",
"(",
"table_name",
")",
",",
"fg",
"=",
"'red'",
")",
")"
] |
https://github.com/Miserlou/Zappa/blob/5a11c17f5ecf0568bdb73b4baf6fb08ff0184f39/zappa/cli.py#L1162-L1232
|
||
espressif/esptool
|
41ceca0a72266960252717b3181389902fd89d55
|
esptool.py
|
python
|
ESP8266ROMFirmwareImage.save
|
(self, basename)
|
Save a set of V1 images for flashing. Parameter is a base filename.
|
Save a set of V1 images for flashing. Parameter is a base filename.
|
[
"Save",
"a",
"set",
"of",
"V1",
"images",
"for",
"flashing",
".",
"Parameter",
"is",
"a",
"base",
"filename",
"."
] |
def save(self, basename):
""" Save a set of V1 images for flashing. Parameter is a base filename. """
# IROM data goes in its own plain binary file
irom_segment = self.get_irom_segment()
if irom_segment is not None:
with open("%s0x%05x.bin" % (basename, irom_segment.addr - ESP8266ROM.IROM_MAP_START), "wb") as f:
f.write(irom_segment.data)
# everything but IROM goes at 0x00000 in an image file
normal_segments = self.get_non_irom_segments()
with open("%s0x00000.bin" % basename, 'wb') as f:
self.write_common_header(f, normal_segments)
checksum = ESPLoader.ESP_CHECKSUM_MAGIC
for segment in normal_segments:
checksum = self.save_segment(f, segment, checksum)
self.append_checksum(f, checksum)
|
[
"def",
"save",
"(",
"self",
",",
"basename",
")",
":",
"# IROM data goes in its own plain binary file",
"irom_segment",
"=",
"self",
".",
"get_irom_segment",
"(",
")",
"if",
"irom_segment",
"is",
"not",
"None",
":",
"with",
"open",
"(",
"\"%s0x%05x.bin\"",
"%",
"(",
"basename",
",",
"irom_segment",
".",
"addr",
"-",
"ESP8266ROM",
".",
"IROM_MAP_START",
")",
",",
"\"wb\"",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"irom_segment",
".",
"data",
")",
"# everything but IROM goes at 0x00000 in an image file",
"normal_segments",
"=",
"self",
".",
"get_non_irom_segments",
"(",
")",
"with",
"open",
"(",
"\"%s0x00000.bin\"",
"%",
"basename",
",",
"'wb'",
")",
"as",
"f",
":",
"self",
".",
"write_common_header",
"(",
"f",
",",
"normal_segments",
")",
"checksum",
"=",
"ESPLoader",
".",
"ESP_CHECKSUM_MAGIC",
"for",
"segment",
"in",
"normal_segments",
":",
"checksum",
"=",
"self",
".",
"save_segment",
"(",
"f",
",",
"segment",
",",
"checksum",
")",
"self",
".",
"append_checksum",
"(",
"f",
",",
"checksum",
")"
] |
https://github.com/espressif/esptool/blob/41ceca0a72266960252717b3181389902fd89d55/esptool.py#L2841-L2856
|
||
biopython/biopython
|
2dd97e71762af7b046d7f7f8a4f1e38db6b06c86
|
Bio/Blast/Record.py
|
python
|
PSIBlast.__init__
|
(self)
|
Initialize the class.
|
Initialize the class.
|
[
"Initialize",
"the",
"class",
"."
] |
def __init__(self):
"""Initialize the class."""
Header.__init__(self)
DatabaseReport.__init__(self)
Parameters.__init__(self)
self.rounds = []
self.converged = 0
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"Header",
".",
"__init__",
"(",
"self",
")",
"DatabaseReport",
".",
"__init__",
"(",
"self",
")",
"Parameters",
".",
"__init__",
"(",
"self",
")",
"self",
".",
"rounds",
"=",
"[",
"]",
"self",
".",
"converged",
"=",
"0"
] |
https://github.com/biopython/biopython/blob/2dd97e71762af7b046d7f7f8a4f1e38db6b06c86/Bio/Blast/Record.py#L454-L460
|
||
aliyun/aliyun-openapi-python-sdk
|
bda53176cc9cf07605b1cf769f0df444cca626a0
|
aliyun-python-sdk-core/aliyunsdkcore/vendored/requests/packages/urllib3/fields.py
|
python
|
RequestField.make_multipart
|
(self, content_disposition=None, content_type=None,
content_location=None)
|
Makes this request field into a multipart request field.
This method overrides "Content-Disposition", "Content-Type" and
"Content-Location" headers to the request parameter.
:param content_type:
The 'Content-Type' of the request body.
:param content_location:
The 'Content-Location' of the request body.
|
Makes this request field into a multipart request field.
|
[
"Makes",
"this",
"request",
"field",
"into",
"a",
"multipart",
"request",
"field",
"."
] |
def make_multipart(self, content_disposition=None, content_type=None,
content_location=None):
"""
Makes this request field into a multipart request field.
This method overrides "Content-Disposition", "Content-Type" and
"Content-Location" headers to the request parameter.
:param content_type:
The 'Content-Type' of the request body.
:param content_location:
The 'Content-Location' of the request body.
"""
self.headers['Content-Disposition'] = content_disposition or 'form-data'
self.headers['Content-Disposition'] += '; '.join([
'', self._render_parts(
(('name', self._name), ('filename', self._filename))
)
])
self.headers['Content-Type'] = content_type
self.headers['Content-Location'] = content_location
|
[
"def",
"make_multipart",
"(",
"self",
",",
"content_disposition",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"content_location",
"=",
"None",
")",
":",
"self",
".",
"headers",
"[",
"'Content-Disposition'",
"]",
"=",
"content_disposition",
"or",
"'form-data'",
"self",
".",
"headers",
"[",
"'Content-Disposition'",
"]",
"+=",
"'; '",
".",
"join",
"(",
"[",
"''",
",",
"self",
".",
"_render_parts",
"(",
"(",
"(",
"'name'",
",",
"self",
".",
"_name",
")",
",",
"(",
"'filename'",
",",
"self",
".",
"_filename",
")",
")",
")",
"]",
")",
"self",
".",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"content_type",
"self",
".",
"headers",
"[",
"'Content-Location'",
"]",
"=",
"content_location"
] |
https://github.com/aliyun/aliyun-openapi-python-sdk/blob/bda53176cc9cf07605b1cf769f0df444cca626a0/aliyun-python-sdk-core/aliyunsdkcore/vendored/requests/packages/urllib3/fields.py#L157-L178
|
||
ales-tsurko/cells
|
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
|
packaging/macos/python/lib/python3.7/idlelib/pyparse.py
|
python
|
Parser.is_block_opener
|
(self)
|
return self.lastch == ':'
|
Return True if the last interesting statement opens a block.
|
Return True if the last interesting statement opens a block.
|
[
"Return",
"True",
"if",
"the",
"last",
"interesting",
"statement",
"opens",
"a",
"block",
"."
] |
def is_block_opener(self):
"Return True if the last interesting statement opens a block."
self._study2()
return self.lastch == ':'
|
[
"def",
"is_block_opener",
"(",
"self",
")",
":",
"self",
".",
"_study2",
"(",
")",
"return",
"self",
".",
"lastch",
"==",
"':'"
] |
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/idlelib/pyparse.py#L577-L580
|
|
Tencent/bk-bcs-saas
|
2b437bf2f5fd5ce2078f7787c3a12df609f7679d
|
bcs-app/backend/packages/blue_krill/data_types/enum.py
|
python
|
FeatureFlag.__new__
|
(cls, value)
|
return cls._missing_(value)
|
Cast a string into a predefined feature flag.
|
Cast a string into a predefined feature flag.
|
[
"Cast",
"a",
"string",
"into",
"a",
"predefined",
"feature",
"flag",
"."
] |
def __new__(cls, value):
"""Cast a string into a predefined feature flag."""
for field in cls._get_feature_fields_().values():
if field.name == value:
return value
return cls._missing_(value)
|
[
"def",
"__new__",
"(",
"cls",
",",
"value",
")",
":",
"for",
"field",
"in",
"cls",
".",
"_get_feature_fields_",
"(",
")",
".",
"values",
"(",
")",
":",
"if",
"field",
".",
"name",
"==",
"value",
":",
"return",
"value",
"return",
"cls",
".",
"_missing_",
"(",
"value",
")"
] |
https://github.com/Tencent/bk-bcs-saas/blob/2b437bf2f5fd5ce2078f7787c3a12df609f7679d/bcs-app/backend/packages/blue_krill/data_types/enum.py#L90-L95
|
|
Phype/telnet-iot-honeypot
|
f1d4b75245d72990d339668f37a1670fc85c0c9b
|
backend/webcontroller.py
|
python
|
WebController.get_asn
|
(self, asn)
|
[] |
def get_asn(self, asn):
asn_obj = self.session.query(ASN).filter(ASN.asn == asn).first()
if asn_obj:
return asn_obj.json(depth=1)
else:
return None
|
[
"def",
"get_asn",
"(",
"self",
",",
"asn",
")",
":",
"asn_obj",
"=",
"self",
".",
"session",
".",
"query",
"(",
"ASN",
")",
".",
"filter",
"(",
"ASN",
".",
"asn",
"==",
"asn",
")",
".",
"first",
"(",
")",
"if",
"asn_obj",
":",
"return",
"asn_obj",
".",
"json",
"(",
"depth",
"=",
"1",
")",
"else",
":",
"return",
"None"
] |
https://github.com/Phype/telnet-iot-honeypot/blob/f1d4b75245d72990d339668f37a1670fc85c0c9b/backend/webcontroller.py#L240-L246
|
||||
ClusterHQ/flocker
|
eaa586248986d7cd681c99c948546c2b507e44de
|
admin/packaging.py
|
python
|
BuildOptions.__init__
|
(self, distributions)
|
:param distributions: An iterable of the names of distributions which
are acceptable as values for the ``--distribution`` parameter.
|
:param distributions: An iterable of the names of distributions which
are acceptable as values for the ``--distribution`` parameter.
|
[
":",
"param",
"distributions",
":",
"An",
"iterable",
"of",
"the",
"names",
"of",
"distributions",
"which",
"are",
"acceptable",
"as",
"values",
"for",
"the",
"--",
"distribution",
"parameter",
"."
] |
def __init__(self, distributions):
"""
:param distributions: An iterable of the names of distributions which
are acceptable as values for the ``--distribution`` parameter.
"""
usage.Options.__init__(self)
self.docs["distribution"] = self.docs["distribution"].format(
', '.join(sorted(distributions))
)
|
[
"def",
"__init__",
"(",
"self",
",",
"distributions",
")",
":",
"usage",
".",
"Options",
".",
"__init__",
"(",
"self",
")",
"self",
".",
"docs",
"[",
"\"distribution\"",
"]",
"=",
"self",
".",
"docs",
"[",
"\"distribution\"",
"]",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"sorted",
"(",
"distributions",
")",
")",
")"
] |
https://github.com/ClusterHQ/flocker/blob/eaa586248986d7cd681c99c948546c2b507e44de/admin/packaging.py#L1328-L1336
|
||
tweecode/twine
|
45e6350a2b813ec4282440b78f901cf377ead1a9
|
passagewidget.py
|
python
|
PassageWidget.getConnectedWidgets
|
(self, displayArrows, imageArrows)
|
return ret
|
Returns a list of titles of all widgets that will have lines drawn to them.
|
Returns a list of titles of all widgets that will have lines drawn to them.
|
[
"Returns",
"a",
"list",
"of",
"titles",
"of",
"all",
"widgets",
"that",
"will",
"have",
"lines",
"drawn",
"to",
"them",
"."
] |
def getConnectedWidgets(self, displayArrows, imageArrows):
"""
Returns a list of titles of all widgets that will have lines drawn to them.
"""
ret = []
for link in self.linksAndDisplays():
if link in self.passage.links or displayArrows:
widget = self.parent.findWidget(link)
if widget:
ret.append(widget)
if imageArrows:
for link in self.passage.images:
widget = self.parent.findWidget(link)
if widget:
ret.append(widget)
if self.passage.isStylesheet():
for t in self.passage.tags:
if t not in tiddlywiki.TiddlyWiki.INFO_TAGS:
for otherWidget in self.parent.taggedWidgets(t):
if not otherWidget.dimmed and not otherWidget.passage.isStylesheet():
ret.append(otherWidget)
return ret
|
[
"def",
"getConnectedWidgets",
"(",
"self",
",",
"displayArrows",
",",
"imageArrows",
")",
":",
"ret",
"=",
"[",
"]",
"for",
"link",
"in",
"self",
".",
"linksAndDisplays",
"(",
")",
":",
"if",
"link",
"in",
"self",
".",
"passage",
".",
"links",
"or",
"displayArrows",
":",
"widget",
"=",
"self",
".",
"parent",
".",
"findWidget",
"(",
"link",
")",
"if",
"widget",
":",
"ret",
".",
"append",
"(",
"widget",
")",
"if",
"imageArrows",
":",
"for",
"link",
"in",
"self",
".",
"passage",
".",
"images",
":",
"widget",
"=",
"self",
".",
"parent",
".",
"findWidget",
"(",
"link",
")",
"if",
"widget",
":",
"ret",
".",
"append",
"(",
"widget",
")",
"if",
"self",
".",
"passage",
".",
"isStylesheet",
"(",
")",
":",
"for",
"t",
"in",
"self",
".",
"passage",
".",
"tags",
":",
"if",
"t",
"not",
"in",
"tiddlywiki",
".",
"TiddlyWiki",
".",
"INFO_TAGS",
":",
"for",
"otherWidget",
"in",
"self",
".",
"parent",
".",
"taggedWidgets",
"(",
"t",
")",
":",
"if",
"not",
"otherWidget",
".",
"dimmed",
"and",
"not",
"otherWidget",
".",
"passage",
".",
"isStylesheet",
"(",
")",
":",
"ret",
".",
"append",
"(",
"otherWidget",
")",
"return",
"ret"
] |
https://github.com/tweecode/twine/blob/45e6350a2b813ec4282440b78f901cf377ead1a9/passagewidget.py#L339-L363
|
|
IronLanguages/ironpython2
|
51fdedeeda15727717fb8268a805f71b06c0b9f1
|
Src/StdLib/Lib/site-packages/win32/lib/win32timezone.py
|
python
|
TimeZoneInfo.local
|
(class_)
|
return class_(info, fix_standard_time)
|
Returns the local time zone as defined by the operating system in the
registry.
>>> localTZ = TimeZoneInfo.local()
>>> now_local = datetime.datetime.now(localTZ)
>>> now_UTC = datetime.datetime.utcnow()
>>> (now_UTC - now_local) < datetime.timedelta(seconds = 5)
Traceback (most recent call last):
...
TypeError: can't subtract offset-naive and offset-aware datetimes
>>> now_UTC = now_UTC.replace(tzinfo = TimeZoneInfo('GMT Standard Time', True))
Now one can compare the results of the two offset aware values
>>> (now_UTC - now_local) < datetime.timedelta(seconds = 5)
True
|
Returns the local time zone as defined by the operating system in the
registry.
>>> localTZ = TimeZoneInfo.local()
>>> now_local = datetime.datetime.now(localTZ)
>>> now_UTC = datetime.datetime.utcnow()
>>> (now_UTC - now_local) < datetime.timedelta(seconds = 5)
Traceback (most recent call last):
...
TypeError: can't subtract offset-naive and offset-aware datetimes
|
[
"Returns",
"the",
"local",
"time",
"zone",
"as",
"defined",
"by",
"the",
"operating",
"system",
"in",
"the",
"registry",
".",
">>>",
"localTZ",
"=",
"TimeZoneInfo",
".",
"local",
"()",
">>>",
"now_local",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
"localTZ",
")",
">>>",
"now_UTC",
"=",
"datetime",
".",
"datetime",
".",
"utcnow",
"()",
">>>",
"(",
"now_UTC",
"-",
"now_local",
")",
"<",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"5",
")",
"Traceback",
"(",
"most",
"recent",
"call",
"last",
")",
":",
"...",
"TypeError",
":",
"can",
"t",
"subtract",
"offset",
"-",
"naive",
"and",
"offset",
"-",
"aware",
"datetimes"
] |
def local(class_):
"""Returns the local time zone as defined by the operating system in the
registry.
>>> localTZ = TimeZoneInfo.local()
>>> now_local = datetime.datetime.now(localTZ)
>>> now_UTC = datetime.datetime.utcnow()
>>> (now_UTC - now_local) < datetime.timedelta(seconds = 5)
Traceback (most recent call last):
...
TypeError: can't subtract offset-naive and offset-aware datetimes
>>> now_UTC = now_UTC.replace(tzinfo = TimeZoneInfo('GMT Standard Time', True))
Now one can compare the results of the two offset aware values
>>> (now_UTC - now_local) < datetime.timedelta(seconds = 5)
True
"""
code, info = TimeZoneDefinition.current()
# code is 0 if daylight savings is disabled or not defined
# code is 1 or 2 if daylight savings is enabled, 2 if currently active
fix_standard_time = not code
# note that although the given information is sufficient to construct a WinTZI object, it's
# not sufficient to represent the time zone in which the current user is operating due
# to dynamic time zones.
return class_(info, fix_standard_time)
|
[
"def",
"local",
"(",
"class_",
")",
":",
"code",
",",
"info",
"=",
"TimeZoneDefinition",
".",
"current",
"(",
")",
"# code is 0 if daylight savings is disabled or not defined",
"# code is 1 or 2 if daylight savings is enabled, 2 if currently active",
"fix_standard_time",
"=",
"not",
"code",
"# note that although the given information is sufficient to construct a WinTZI object, it's",
"# not sufficient to represent the time zone in which the current user is operating due",
"# to dynamic time zones.",
"return",
"class_",
"(",
"info",
",",
"fix_standard_time",
")"
] |
https://github.com/IronLanguages/ironpython2/blob/51fdedeeda15727717fb8268a805f71b06c0b9f1/Src/StdLib/Lib/site-packages/win32/lib/win32timezone.py#L519-L543
|
|
intel/virtual-storage-manager
|
00706ab9701acbd0d5e04b19cc80c6b66a2973b8
|
source/python-vsmclient/vsmclient/v1/shell.py
|
python
|
do_server_remove
|
(cs, args)
|
Removes a server.
|
Removes a server.
|
[
"Removes",
"a",
"server",
"."
] |
def do_server_remove(cs, args):
"""Removes a server."""
if not args.id:
raise exceptions.CommandError("you need to specify Server ID")
remove_storage = True
remove_monitor = True
cluster_id = 1
servers = []
for id in args.id:
servers.append({
'id': id,
'cluster_id': cluster_id,
'remove_monitor': remove_monitor,
'remove_storage': remove_storage
})
try:
cs.servers.remove(servers)
print("Succeed to remove servers.")
except:
raise exceptions.CommandError("Failed to remove servers.")
|
[
"def",
"do_server_remove",
"(",
"cs",
",",
"args",
")",
":",
"if",
"not",
"args",
".",
"id",
":",
"raise",
"exceptions",
".",
"CommandError",
"(",
"\"you need to specify Server ID\"",
")",
"remove_storage",
"=",
"True",
"remove_monitor",
"=",
"True",
"cluster_id",
"=",
"1",
"servers",
"=",
"[",
"]",
"for",
"id",
"in",
"args",
".",
"id",
":",
"servers",
".",
"append",
"(",
"{",
"'id'",
":",
"id",
",",
"'cluster_id'",
":",
"cluster_id",
",",
"'remove_monitor'",
":",
"remove_monitor",
",",
"'remove_storage'",
":",
"remove_storage",
"}",
")",
"try",
":",
"cs",
".",
"servers",
".",
"remove",
"(",
"servers",
")",
"print",
"(",
"\"Succeed to remove servers.\"",
")",
"except",
":",
"raise",
"exceptions",
".",
"CommandError",
"(",
"\"Failed to remove servers.\"",
")"
] |
https://github.com/intel/virtual-storage-manager/blob/00706ab9701acbd0d5e04b19cc80c6b66a2973b8/source/python-vsmclient/vsmclient/v1/shell.py#L886-L905
|
||
capitalone/giraffez
|
e5e69cb8728f51fc3b3b45f0c140b39bd6d6f752
|
giraffez/secret.py
|
python
|
Secret.set
|
(self, key, value)
|
Set a decrypted value by key in a giraffez configuration file.
:param str key: The key used to lookup the encrypted value
:param value: Value to set at the given key, can be any value that is
YAML serializeable.
|
Set a decrypted value by key in a giraffez configuration file.
|
[
"Set",
"a",
"decrypted",
"value",
"by",
"key",
"in",
"a",
"giraffez",
"configuration",
"file",
"."
] |
def set(self, key, value):
"""
Set a decrypted value by key in a giraffez configuration file.
:param str key: The key used to lookup the encrypted value
:param value: Value to set at the given key, can be any value that is
YAML serializeable.
"""
if not key.startswith("secure."):
key = "secure.{0}".format(key)
self.config.set_value(key, value)
self.config.write()
|
[
"def",
"set",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"if",
"not",
"key",
".",
"startswith",
"(",
"\"secure.\"",
")",
":",
"key",
"=",
"\"secure.{0}\"",
".",
"format",
"(",
"key",
")",
"self",
".",
"config",
".",
"set_value",
"(",
"key",
",",
"value",
")",
"self",
".",
"config",
".",
"write",
"(",
")"
] |
https://github.com/capitalone/giraffez/blob/e5e69cb8728f51fc3b3b45f0c140b39bd6d6f752/giraffez/secret.py#L53-L64
|
||
nianticlabs/footprints
|
dd660dc5922d05cc9e795953791848df5f4e8801
|
footprints/preprocessing/ground_truth_generation/data_loader.py
|
python
|
MatterportLoader.load_scan_data
|
(self)
|
load depths, poses, footprints for all frames in a scan
|
load depths, poses, footprints for all frames in a scan
|
[
"load",
"depths",
"poses",
"footprints",
"for",
"all",
"frames",
"in",
"a",
"scan"
] |
def load_scan_data(self):
""" load depths, poses, footprints for all frames in a scan"""
ground_segs = []
depths = []
poses = []
intrinsics = []
inv_intrinsics = []
files = sorted(os.listdir(os.path.join(self.training_data_path, 'ground_seg',
self.current_scan, 'data')))
for idx, file in enumerate(files):
if idx % 50 == 0:
print('loaded {} of {}'.format(idx, len(files)))
if file[-4:] == '.npy' and file[0] != '.':
pos, height, direction = file.split('_')
direction = direction[0] # direction contains .npy extension
ground_seg, depth, pose, K = self.load_frame_data(self.current_scan, pos,
height, direction)
ground_segs.append(ground_seg)
depths.append(depth)
poses.append(pose)
intrinsics.append(K)
inv_intrinsics.append(np.linalg.pinv(K))
# store poses to change from absolute pose to relative pose
self.pose_tracker[(pos, height, direction)] = pose
depths = torch.from_numpy(np.stack(depths)).float()
ground_segs = torch.from_numpy(np.stack(ground_segs)).float()
poses = torch.from_numpy(np.stack(poses)).float()
intrinsics = torch.from_numpy(np.stack(intrinsics)).float()
inv_intrinsics = torch.from_numpy(np.stack(inv_intrinsics)).float()
self.scan_data = {'depths': depths,
'ground_segs': ground_segs,
'poses': poses,
'intrinsics': intrinsics,
'inv_intrinsics': inv_intrinsics}
|
[
"def",
"load_scan_data",
"(",
"self",
")",
":",
"ground_segs",
"=",
"[",
"]",
"depths",
"=",
"[",
"]",
"poses",
"=",
"[",
"]",
"intrinsics",
"=",
"[",
"]",
"inv_intrinsics",
"=",
"[",
"]",
"files",
"=",
"sorted",
"(",
"os",
".",
"listdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"training_data_path",
",",
"'ground_seg'",
",",
"self",
".",
"current_scan",
",",
"'data'",
")",
")",
")",
"for",
"idx",
",",
"file",
"in",
"enumerate",
"(",
"files",
")",
":",
"if",
"idx",
"%",
"50",
"==",
"0",
":",
"print",
"(",
"'loaded {} of {}'",
".",
"format",
"(",
"idx",
",",
"len",
"(",
"files",
")",
")",
")",
"if",
"file",
"[",
"-",
"4",
":",
"]",
"==",
"'.npy'",
"and",
"file",
"[",
"0",
"]",
"!=",
"'.'",
":",
"pos",
",",
"height",
",",
"direction",
"=",
"file",
".",
"split",
"(",
"'_'",
")",
"direction",
"=",
"direction",
"[",
"0",
"]",
"# direction contains .npy extension",
"ground_seg",
",",
"depth",
",",
"pose",
",",
"K",
"=",
"self",
".",
"load_frame_data",
"(",
"self",
".",
"current_scan",
",",
"pos",
",",
"height",
",",
"direction",
")",
"ground_segs",
".",
"append",
"(",
"ground_seg",
")",
"depths",
".",
"append",
"(",
"depth",
")",
"poses",
".",
"append",
"(",
"pose",
")",
"intrinsics",
".",
"append",
"(",
"K",
")",
"inv_intrinsics",
".",
"append",
"(",
"np",
".",
"linalg",
".",
"pinv",
"(",
"K",
")",
")",
"# store poses to change from absolute pose to relative pose",
"self",
".",
"pose_tracker",
"[",
"(",
"pos",
",",
"height",
",",
"direction",
")",
"]",
"=",
"pose",
"depths",
"=",
"torch",
".",
"from_numpy",
"(",
"np",
".",
"stack",
"(",
"depths",
")",
")",
".",
"float",
"(",
")",
"ground_segs",
"=",
"torch",
".",
"from_numpy",
"(",
"np",
".",
"stack",
"(",
"ground_segs",
")",
")",
".",
"float",
"(",
")",
"poses",
"=",
"torch",
".",
"from_numpy",
"(",
"np",
".",
"stack",
"(",
"poses",
")",
")",
".",
"float",
"(",
")",
"intrinsics",
"=",
"torch",
".",
"from_numpy",
"(",
"np",
".",
"stack",
"(",
"intrinsics",
")",
")",
".",
"float",
"(",
")",
"inv_intrinsics",
"=",
"torch",
".",
"from_numpy",
"(",
"np",
".",
"stack",
"(",
"inv_intrinsics",
")",
")",
".",
"float",
"(",
")",
"self",
".",
"scan_data",
"=",
"{",
"'depths'",
":",
"depths",
",",
"'ground_segs'",
":",
"ground_segs",
",",
"'poses'",
":",
"poses",
",",
"'intrinsics'",
":",
"intrinsics",
",",
"'inv_intrinsics'",
":",
"inv_intrinsics",
"}"
] |
https://github.com/nianticlabs/footprints/blob/dd660dc5922d05cc9e795953791848df5f4e8801/footprints/preprocessing/ground_truth_generation/data_loader.py#L248-L289
|
||
selinon/selinon
|
3613153566d454022a138639f0375c63f490c4cb
|
selinon/config.py
|
python
|
Config._should_config
|
(node_name, dst_node_name, configuration)
|
return False
|
Syntax sugar for configuration entries that accept lists/booleans.
:param node_name: node name
:param dst_node_name: destination node to which configuration should be propagated
:param configuration: configuration that should be checked
:return: true if node_name satisfies configuration
|
Syntax sugar for configuration entries that accept lists/booleans.
|
[
"Syntax",
"sugar",
"for",
"configuration",
"entries",
"that",
"accept",
"lists",
"/",
"booleans",
"."
] |
def _should_config(node_name, dst_node_name, configuration):
"""Syntax sugar for configuration entries that accept lists/booleans.
:param node_name: node name
:param dst_node_name: destination node to which configuration should be propagated
:param configuration: configuration that should be checked
:return: true if node_name satisfies configuration
"""
if configuration[node_name] is True:
return True
if isinstance(configuration[node_name], list):
return dst_node_name in configuration[node_name]
return False
|
[
"def",
"_should_config",
"(",
"node_name",
",",
"dst_node_name",
",",
"configuration",
")",
":",
"if",
"configuration",
"[",
"node_name",
"]",
"is",
"True",
":",
"return",
"True",
"if",
"isinstance",
"(",
"configuration",
"[",
"node_name",
"]",
",",
"list",
")",
":",
"return",
"dst_node_name",
"in",
"configuration",
"[",
"node_name",
"]",
"return",
"False"
] |
https://github.com/selinon/selinon/blob/3613153566d454022a138639f0375c63f490c4cb/selinon/config.py#L232-L246
|
|
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/type1font.py
|
python
|
Type1Font._read
|
(self, file)
|
return data
|
Read the font from a file, decoding into usable parts.
|
Read the font from a file, decoding into usable parts.
|
[
"Read",
"the",
"font",
"from",
"a",
"file",
"decoding",
"into",
"usable",
"parts",
"."
] |
def _read(self, file):
"""
Read the font from a file, decoding into usable parts.
"""
rawdata = file.read()
if not rawdata.startswith(b'\x80'):
return rawdata
data = b''
while rawdata:
if not rawdata.startswith(b'\x80'):
raise RuntimeError('Broken pfb file (expected byte 128, '
'got %d)' % rawdata[0])
type = rawdata[1]
if type in (1, 2):
length, = struct.unpack('<i', rawdata[2:6])
segment = rawdata[6:6 + length]
rawdata = rawdata[6 + length:]
if type == 1: # ASCII text: include verbatim
data += segment
elif type == 2: # binary data: encode in hexadecimal
data += binascii.hexlify(segment)
elif type == 3: # end of file
break
else:
raise RuntimeError('Unknown segment type %d in pfb file' %
type)
return data
|
[
"def",
"_read",
"(",
"self",
",",
"file",
")",
":",
"rawdata",
"=",
"file",
".",
"read",
"(",
")",
"if",
"not",
"rawdata",
".",
"startswith",
"(",
"b'\\x80'",
")",
":",
"return",
"rawdata",
"data",
"=",
"b''",
"while",
"rawdata",
":",
"if",
"not",
"rawdata",
".",
"startswith",
"(",
"b'\\x80'",
")",
":",
"raise",
"RuntimeError",
"(",
"'Broken pfb file (expected byte 128, '",
"'got %d)'",
"%",
"rawdata",
"[",
"0",
"]",
")",
"type",
"=",
"rawdata",
"[",
"1",
"]",
"if",
"type",
"in",
"(",
"1",
",",
"2",
")",
":",
"length",
",",
"=",
"struct",
".",
"unpack",
"(",
"'<i'",
",",
"rawdata",
"[",
"2",
":",
"6",
"]",
")",
"segment",
"=",
"rawdata",
"[",
"6",
":",
"6",
"+",
"length",
"]",
"rawdata",
"=",
"rawdata",
"[",
"6",
"+",
"length",
":",
"]",
"if",
"type",
"==",
"1",
":",
"# ASCII text: include verbatim",
"data",
"+=",
"segment",
"elif",
"type",
"==",
"2",
":",
"# binary data: encode in hexadecimal",
"data",
"+=",
"binascii",
".",
"hexlify",
"(",
"segment",
")",
"elif",
"type",
"==",
"3",
":",
"# end of file",
"break",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Unknown segment type %d in pfb file'",
"%",
"type",
")",
"return",
"data"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/type1font.py#L69-L98
|
|
googleads/google-ads-python
|
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
|
google/ads/googleads/v8/services/services/bidding_seasonality_adjustment_service/client.py
|
python
|
BiddingSeasonalityAdjustmentServiceClientMeta.get_transport_class
|
(
cls, label: str = None,
)
|
return next(iter(cls._transport_registry.values()))
|
Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
|
Return an appropriate transport class.
|
[
"Return",
"an",
"appropriate",
"transport",
"class",
"."
] |
def get_transport_class(
cls, label: str = None,
) -> Type[BiddingSeasonalityAdjustmentServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
|
[
"def",
"get_transport_class",
"(",
"cls",
",",
"label",
":",
"str",
"=",
"None",
",",
")",
"->",
"Type",
"[",
"BiddingSeasonalityAdjustmentServiceTransport",
"]",
":",
"# If a specific transport is requested, return that one.",
"if",
"label",
":",
"return",
"cls",
".",
"_transport_registry",
"[",
"label",
"]",
"# No transport is requested; return the default (that is, the first one",
"# in the dictionary).",
"return",
"next",
"(",
"iter",
"(",
"cls",
".",
"_transport_registry",
".",
"values",
"(",
")",
")",
")"
] |
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v8/services/services/bidding_seasonality_adjustment_service/client.py#L61-L79
|
|
Calysto/calysto_scheme
|
15bf81987870bcae1264e5a0a06feb9a8ee12b8b
|
calysto_scheme/scheme.py
|
python
|
b_proc_129_d
|
()
|
[] |
def b_proc_129_d():
if (False if ((not(length_one_q(args_reg))) is False) else True):
GLOBALS['msg_reg'] = "incorrect number of arguments to vector->list"
GLOBALS['pc'] = runtime_error
else:
if (False if ((not(vector_q((args_reg).car))) is False) else True):
GLOBALS['msg_reg'] = format("vector->list called on incorrect vector structure ~s", (args_reg).car)
GLOBALS['pc'] = runtime_error
else:
GLOBALS['value2_reg'] = fail_reg
GLOBALS['value1_reg'] = Apply(vector_to_list, args_reg)
GLOBALS['k_reg'] = k2_reg
GLOBALS['pc'] = apply_cont2
|
[
"def",
"b_proc_129_d",
"(",
")",
":",
"if",
"(",
"False",
"if",
"(",
"(",
"not",
"(",
"length_one_q",
"(",
"args_reg",
")",
")",
")",
"is",
"False",
")",
"else",
"True",
")",
":",
"GLOBALS",
"[",
"'msg_reg'",
"]",
"=",
"\"incorrect number of arguments to vector->list\"",
"GLOBALS",
"[",
"'pc'",
"]",
"=",
"runtime_error",
"else",
":",
"if",
"(",
"False",
"if",
"(",
"(",
"not",
"(",
"vector_q",
"(",
"(",
"args_reg",
")",
".",
"car",
")",
")",
")",
"is",
"False",
")",
"else",
"True",
")",
":",
"GLOBALS",
"[",
"'msg_reg'",
"]",
"=",
"format",
"(",
"\"vector->list called on incorrect vector structure ~s\"",
",",
"(",
"args_reg",
")",
".",
"car",
")",
"GLOBALS",
"[",
"'pc'",
"]",
"=",
"runtime_error",
"else",
":",
"GLOBALS",
"[",
"'value2_reg'",
"]",
"=",
"fail_reg",
"GLOBALS",
"[",
"'value1_reg'",
"]",
"=",
"Apply",
"(",
"vector_to_list",
",",
"args_reg",
")",
"GLOBALS",
"[",
"'k_reg'",
"]",
"=",
"k2_reg",
"GLOBALS",
"[",
"'pc'",
"]",
"=",
"apply_cont2"
] |
https://github.com/Calysto/calysto_scheme/blob/15bf81987870bcae1264e5a0a06feb9a8ee12b8b/calysto_scheme/scheme.py#L4995-L5007
|
||||
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_flaskbb/lib/python2.7/site-packages/amqp/serialization.py
|
python
|
_read_item
|
(buf, offset=0, unpack_from=unpack_from, ftype_t=ftype_t)
|
return val, offset
|
[] |
def _read_item(buf, offset=0, unpack_from=unpack_from, ftype_t=ftype_t):
ftype = ftype_t(buf[offset]) if ftype_t else buf[offset]
offset += 1
# 'S': long string
if ftype == 'S':
slen, = unpack_from('>I', buf, offset)
offset += 4
val = pstr_t(buf[offset:offset + slen])
offset += slen
# 's': short string
elif ftype == 's':
slen, = unpack_from('>B', buf, offset)
offset += 1
val = pstr_t(buf[offset:offset + slen])
offset += slen
# 'b': short-short int
elif ftype == 'b':
val, = unpack_from('>B', buf, offset)
offset += 1
# 'B': short-short unsigned int
elif ftype == 'B':
val, = unpack_from('>b', buf, offset)
offset += 1
# 'U': short int
elif ftype == 'U':
val, = unpack_from('>h', buf, offset)
offset += 2
# 'u': short unsigned int
elif ftype == 'u':
val, = unpack_from('>H', buf, offset)
offset += 2
# 'I': long int
elif ftype == 'I':
val, = unpack_from('>i', buf, offset)
offset += 4
# 'i': long unsigned int
elif ftype == 'i':
val, = unpack_from('>I', buf, offset)
offset += 4
# 'L': long long int
elif ftype == 'L':
val, = unpack_from('>q', buf, offset)
offset += 8
# 'l': long long unsigned int
elif ftype == 'l':
val, = unpack_from('>Q', buf, offset)
offset += 8
# 'f': float
elif ftype == 'f':
val, = unpack_from('>f', buf, offset)
offset += 4
# 'd': double
elif ftype == 'd':
val, = unpack_from('>d', buf, offset)
offset += 8
# 'D': decimal
elif ftype == 'D':
d, = unpack_from('>B', buf, offset)
offset += 1
n, = unpack_from('>i', buf, offset)
offset += 4
val = Decimal(n) / Decimal(10 ** d)
# 'F': table
elif ftype == 'F':
tlen, = unpack_from('>I', buf, offset)
offset += 4
limit = offset + tlen
val = {}
while offset < limit:
keylen, = unpack_from('>B', buf, offset)
offset += 1
key = pstr_t(buf[offset:offset + keylen])
offset += keylen
val[key], offset = _read_item(buf, offset)
# 'A': array
elif ftype == 'A':
alen, = unpack_from('>I', buf, offset)
offset += 4
limit = offset + alen
val = []
while offset < limit:
v, offset = _read_item(buf, offset)
val.append(v)
# 't' (bool)
elif ftype == 't':
val, = unpack_from('>B', buf, offset)
val = bool(val)
offset += 1
# 'T': timestamp
elif ftype == 'T':
val, = unpack_from('>Q', buf, offset)
offset += 8
val = datetime.utcfromtimestamp(val)
# 'V': void
elif ftype == 'V':
val = None
else:
raise FrameSyntaxError(
'Unknown value in table: {0!r} ({1!r})'.format(
ftype, type(ftype)))
return val, offset
|
[
"def",
"_read_item",
"(",
"buf",
",",
"offset",
"=",
"0",
",",
"unpack_from",
"=",
"unpack_from",
",",
"ftype_t",
"=",
"ftype_t",
")",
":",
"ftype",
"=",
"ftype_t",
"(",
"buf",
"[",
"offset",
"]",
")",
"if",
"ftype_t",
"else",
"buf",
"[",
"offset",
"]",
"offset",
"+=",
"1",
"# 'S': long string",
"if",
"ftype",
"==",
"'S'",
":",
"slen",
",",
"=",
"unpack_from",
"(",
"'>I'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"4",
"val",
"=",
"pstr_t",
"(",
"buf",
"[",
"offset",
":",
"offset",
"+",
"slen",
"]",
")",
"offset",
"+=",
"slen",
"# 's': short string",
"elif",
"ftype",
"==",
"'s'",
":",
"slen",
",",
"=",
"unpack_from",
"(",
"'>B'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"1",
"val",
"=",
"pstr_t",
"(",
"buf",
"[",
"offset",
":",
"offset",
"+",
"slen",
"]",
")",
"offset",
"+=",
"slen",
"# 'b': short-short int",
"elif",
"ftype",
"==",
"'b'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>B'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"1",
"# 'B': short-short unsigned int",
"elif",
"ftype",
"==",
"'B'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>b'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"1",
"# 'U': short int",
"elif",
"ftype",
"==",
"'U'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>h'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"2",
"# 'u': short unsigned int",
"elif",
"ftype",
"==",
"'u'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>H'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"2",
"# 'I': long int",
"elif",
"ftype",
"==",
"'I'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>i'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"4",
"# 'i': long unsigned int",
"elif",
"ftype",
"==",
"'i'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>I'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"4",
"# 'L': long long int",
"elif",
"ftype",
"==",
"'L'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>q'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"8",
"# 'l': long long unsigned int",
"elif",
"ftype",
"==",
"'l'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>Q'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"8",
"# 'f': float",
"elif",
"ftype",
"==",
"'f'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>f'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"4",
"# 'd': double",
"elif",
"ftype",
"==",
"'d'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>d'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"8",
"# 'D': decimal",
"elif",
"ftype",
"==",
"'D'",
":",
"d",
",",
"=",
"unpack_from",
"(",
"'>B'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"1",
"n",
",",
"=",
"unpack_from",
"(",
"'>i'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"4",
"val",
"=",
"Decimal",
"(",
"n",
")",
"/",
"Decimal",
"(",
"10",
"**",
"d",
")",
"# 'F': table",
"elif",
"ftype",
"==",
"'F'",
":",
"tlen",
",",
"=",
"unpack_from",
"(",
"'>I'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"4",
"limit",
"=",
"offset",
"+",
"tlen",
"val",
"=",
"{",
"}",
"while",
"offset",
"<",
"limit",
":",
"keylen",
",",
"=",
"unpack_from",
"(",
"'>B'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"1",
"key",
"=",
"pstr_t",
"(",
"buf",
"[",
"offset",
":",
"offset",
"+",
"keylen",
"]",
")",
"offset",
"+=",
"keylen",
"val",
"[",
"key",
"]",
",",
"offset",
"=",
"_read_item",
"(",
"buf",
",",
"offset",
")",
"# 'A': array",
"elif",
"ftype",
"==",
"'A'",
":",
"alen",
",",
"=",
"unpack_from",
"(",
"'>I'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"4",
"limit",
"=",
"offset",
"+",
"alen",
"val",
"=",
"[",
"]",
"while",
"offset",
"<",
"limit",
":",
"v",
",",
"offset",
"=",
"_read_item",
"(",
"buf",
",",
"offset",
")",
"val",
".",
"append",
"(",
"v",
")",
"# 't' (bool)",
"elif",
"ftype",
"==",
"'t'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>B'",
",",
"buf",
",",
"offset",
")",
"val",
"=",
"bool",
"(",
"val",
")",
"offset",
"+=",
"1",
"# 'T': timestamp",
"elif",
"ftype",
"==",
"'T'",
":",
"val",
",",
"=",
"unpack_from",
"(",
"'>Q'",
",",
"buf",
",",
"offset",
")",
"offset",
"+=",
"8",
"val",
"=",
"datetime",
".",
"utcfromtimestamp",
"(",
"val",
")",
"# 'V': void",
"elif",
"ftype",
"==",
"'V'",
":",
"val",
"=",
"None",
"else",
":",
"raise",
"FrameSyntaxError",
"(",
"'Unknown value in table: {0!r} ({1!r})'",
".",
"format",
"(",
"ftype",
",",
"type",
"(",
"ftype",
")",
")",
")",
"return",
"val",
",",
"offset"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/amqp/serialization.py#L51-L152
|
|||
sentinel-hub/eo-learn
|
cf964eaf173668d6a374675dbd7c1d244264c11d
|
core/eolearn/core/utilities.py
|
python
|
FeatureParser._get_features
|
(self, eopatch=None)
|
A generator of parsed features.
:param eopatch: A given EOPatch
:type eopatch: EOPatch or None
:return: One by one feature
:rtype: tuple(FeatureType, str) or tuple(FeatureType, str, str)
|
A generator of parsed features.
|
[
"A",
"generator",
"of",
"parsed",
"features",
"."
] |
def _get_features(self, eopatch=None):
"""A generator of parsed features.
:param eopatch: A given EOPatch
:type eopatch: EOPatch or None
:return: One by one feature
:rtype: tuple(FeatureType, str) or tuple(FeatureType, str, str)
"""
for feature_type, feature_dict in self.feature_collection.items():
if feature_type is None and self.default_feature_type is not None:
feature_type = self.default_feature_type
if feature_type is None:
for feature_name, new_feature_name in feature_dict.items():
if eopatch is None:
yield self._return_feature(..., feature_name, new_feature_name)
else:
found_feature_type = self._find_feature_type(feature_name, eopatch)
if found_feature_type:
yield self._return_feature(found_feature_type, feature_name, new_feature_name)
else:
raise ValueError(
f"Feature with name '{feature_name}' does not exist among features of allowed feature "
f"types in given EOPatch. Allowed feature types are {self.allowed_feature_types}"
)
elif feature_dict is ...:
if not feature_type.has_dict() or eopatch is None:
yield self._return_feature(feature_type, ...)
else:
for feature_name in eopatch[feature_type]:
yield self._return_feature(feature_type, feature_name)
else:
for feature_name, new_feature_name in feature_dict.items():
if eopatch is not None and feature_name not in eopatch[feature_type]:
raise ValueError(f'Feature {feature_name} of type {feature_type} was not found in EOPatch')
yield self._return_feature(feature_type, feature_name, new_feature_name)
|
[
"def",
"_get_features",
"(",
"self",
",",
"eopatch",
"=",
"None",
")",
":",
"for",
"feature_type",
",",
"feature_dict",
"in",
"self",
".",
"feature_collection",
".",
"items",
"(",
")",
":",
"if",
"feature_type",
"is",
"None",
"and",
"self",
".",
"default_feature_type",
"is",
"not",
"None",
":",
"feature_type",
"=",
"self",
".",
"default_feature_type",
"if",
"feature_type",
"is",
"None",
":",
"for",
"feature_name",
",",
"new_feature_name",
"in",
"feature_dict",
".",
"items",
"(",
")",
":",
"if",
"eopatch",
"is",
"None",
":",
"yield",
"self",
".",
"_return_feature",
"(",
"...",
",",
"feature_name",
",",
"new_feature_name",
")",
"else",
":",
"found_feature_type",
"=",
"self",
".",
"_find_feature_type",
"(",
"feature_name",
",",
"eopatch",
")",
"if",
"found_feature_type",
":",
"yield",
"self",
".",
"_return_feature",
"(",
"found_feature_type",
",",
"feature_name",
",",
"new_feature_name",
")",
"else",
":",
"raise",
"ValueError",
"(",
"f\"Feature with name '{feature_name}' does not exist among features of allowed feature \"",
"f\"types in given EOPatch. Allowed feature types are {self.allowed_feature_types}\"",
")",
"elif",
"feature_dict",
"is",
"...",
":",
"if",
"not",
"feature_type",
".",
"has_dict",
"(",
")",
"or",
"eopatch",
"is",
"None",
":",
"yield",
"self",
".",
"_return_feature",
"(",
"feature_type",
",",
"...",
")",
"else",
":",
"for",
"feature_name",
"in",
"eopatch",
"[",
"feature_type",
"]",
":",
"yield",
"self",
".",
"_return_feature",
"(",
"feature_type",
",",
"feature_name",
")",
"else",
":",
"for",
"feature_name",
",",
"new_feature_name",
"in",
"feature_dict",
".",
"items",
"(",
")",
":",
"if",
"eopatch",
"is",
"not",
"None",
"and",
"feature_name",
"not",
"in",
"eopatch",
"[",
"feature_type",
"]",
":",
"raise",
"ValueError",
"(",
"f'Feature {feature_name} of type {feature_type} was not found in EOPatch'",
")",
"yield",
"self",
".",
"_return_feature",
"(",
"feature_type",
",",
"feature_name",
",",
"new_feature_name",
")"
] |
https://github.com/sentinel-hub/eo-learn/blob/cf964eaf173668d6a374675dbd7c1d244264c11d/core/eolearn/core/utilities.py#L309-L344
|
||
ansible/ansible-modules-core
|
00911a75ad6635834b6d28eef41f197b2f73c381
|
utilities/logic/wait_for.py
|
python
|
_create_connection
|
(host, port, connect_timeout)
|
return connect_socket
|
Connect to a 2-tuple (host, port) and return
the socket object.
Args:
2-tuple (host, port) and connection timeout
Returns:
Socket object
|
Connect to a 2-tuple (host, port) and return
the socket object.
|
[
"Connect",
"to",
"a",
"2",
"-",
"tuple",
"(",
"host",
"port",
")",
"and",
"return",
"the",
"socket",
"object",
"."
] |
def _create_connection(host, port, connect_timeout):
"""
Connect to a 2-tuple (host, port) and return
the socket object.
Args:
2-tuple (host, port) and connection timeout
Returns:
Socket object
"""
if sys.version_info < (2, 6):
(family, _) = (_convert_host_to_ip(host))[0]
connect_socket = socket.socket(family, socket.SOCK_STREAM)
connect_socket.settimeout(connect_timeout)
connect_socket.connect( (host, port) )
else:
connect_socket = socket.create_connection( (host, port), connect_timeout)
return connect_socket
|
[
"def",
"_create_connection",
"(",
"host",
",",
"port",
",",
"connect_timeout",
")",
":",
"if",
"sys",
".",
"version_info",
"<",
"(",
"2",
",",
"6",
")",
":",
"(",
"family",
",",
"_",
")",
"=",
"(",
"_convert_host_to_ip",
"(",
"host",
")",
")",
"[",
"0",
"]",
"connect_socket",
"=",
"socket",
".",
"socket",
"(",
"family",
",",
"socket",
".",
"SOCK_STREAM",
")",
"connect_socket",
".",
"settimeout",
"(",
"connect_timeout",
")",
"connect_socket",
".",
"connect",
"(",
"(",
"host",
",",
"port",
")",
")",
"else",
":",
"connect_socket",
"=",
"socket",
".",
"create_connection",
"(",
"(",
"host",
",",
"port",
")",
",",
"connect_timeout",
")",
"return",
"connect_socket"
] |
https://github.com/ansible/ansible-modules-core/blob/00911a75ad6635834b6d28eef41f197b2f73c381/utilities/logic/wait_for.py#L366-L383
|
|
smicallef/spiderfoot
|
fd4bf9394c9ab3ecc90adc3115c56349fb23165b
|
sfcli.py
|
python
|
SpiderFootCli.completedefault
|
(self, text, line, begidx, endidx)
|
return []
|
[] |
def completedefault(self, text, line, begidx, endidx):
return []
|
[
"def",
"completedefault",
"(",
"self",
",",
"text",
",",
"line",
",",
"begidx",
",",
"endidx",
")",
":",
"return",
"[",
"]"
] |
https://github.com/smicallef/spiderfoot/blob/fd4bf9394c9ab3ecc90adc3115c56349fb23165b/sfcli.py#L406-L407
|
|||
OpenEndedGroup/Field
|
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
|
Contents/lib/python/dbexts.py
|
python
|
dbexts.typeinfo
|
(self, sqltype=None)
|
display the types available for the database
|
display the types available for the database
|
[
"display",
"the",
"types",
"available",
"for",
"the",
"database"
] |
def typeinfo(self, sqltype=None):
""" display the types available for the database """
cur = self.begin()
cur.gettypeinfo(sqltype)
self.commit(cur)
self.display()
|
[
"def",
"typeinfo",
"(",
"self",
",",
"sqltype",
"=",
"None",
")",
":",
"cur",
"=",
"self",
".",
"begin",
"(",
")",
"cur",
".",
"gettypeinfo",
"(",
"sqltype",
")",
"self",
".",
"commit",
"(",
"cur",
")",
"self",
".",
"display",
"(",
")"
] |
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/dbexts.py#L412-L417
|
||
kvazis/homeassistant
|
aca227a780f806d861342e3611025a52a3bb4366
|
custom_components/hacs/helpers/functions/information.py
|
python
|
get_info_md_content
|
(repository)
|
return ""
|
Get the content of info.md
|
Get the content of info.md
|
[
"Get",
"the",
"content",
"of",
"info",
".",
"md"
] |
async def get_info_md_content(repository):
"""Get the content of info.md"""
filename = info_file(repository)
if not filename:
return ""
try:
info = await repository.repository_object.get_contents(filename, repository.ref)
if info is None:
return ""
info = info.content.replace("<svg", "<disabled").replace("</svg", "</disabled")
return render_template(info, repository)
except (
ValueError,
AIOGitHubAPIException,
Exception, # pylint: disable=broad-except
):
if repository.hacs.system.action:
raise HacsException("::error:: No info file found")
return ""
|
[
"async",
"def",
"get_info_md_content",
"(",
"repository",
")",
":",
"filename",
"=",
"info_file",
"(",
"repository",
")",
"if",
"not",
"filename",
":",
"return",
"\"\"",
"try",
":",
"info",
"=",
"await",
"repository",
".",
"repository_object",
".",
"get_contents",
"(",
"filename",
",",
"repository",
".",
"ref",
")",
"if",
"info",
"is",
"None",
":",
"return",
"\"\"",
"info",
"=",
"info",
".",
"content",
".",
"replace",
"(",
"\"<svg\"",
",",
"\"<disabled\"",
")",
".",
"replace",
"(",
"\"</svg\"",
",",
"\"</disabled\"",
")",
"return",
"render_template",
"(",
"info",
",",
"repository",
")",
"except",
"(",
"ValueError",
",",
"AIOGitHubAPIException",
",",
"Exception",
",",
"# pylint: disable=broad-except",
")",
":",
"if",
"repository",
".",
"hacs",
".",
"system",
".",
"action",
":",
"raise",
"HacsException",
"(",
"\"::error:: No info file found\"",
")",
"return",
"\"\""
] |
https://github.com/kvazis/homeassistant/blob/aca227a780f806d861342e3611025a52a3bb4366/custom_components/hacs/helpers/functions/information.py#L27-L45
|
|
nneonneo/eqgrp-free-file
|
49e457d7ac870d5e00f5a247b94476cd5643f3ba
|
Firewall/EXPLOITS/EXBA/scapy/packet.py
|
python
|
Packet.psdump
|
(self, filename=None, **kargs)
|
psdump(filename=None, layer_shift=0, rebuild=1)
Creates an EPS file describing a packet. If filename is not provided a temporary file is created and gs is called.
|
psdump(filename=None, layer_shift=0, rebuild=1)
Creates an EPS file describing a packet. If filename is not provided a temporary file is created and gs is called.
|
[
"psdump",
"(",
"filename",
"=",
"None",
"layer_shift",
"=",
"0",
"rebuild",
"=",
"1",
")",
"Creates",
"an",
"EPS",
"file",
"describing",
"a",
"packet",
".",
"If",
"filename",
"is",
"not",
"provided",
"a",
"temporary",
"file",
"is",
"created",
"and",
"gs",
"is",
"called",
"."
] |
def psdump(self, filename=None, **kargs):
"""psdump(filename=None, layer_shift=0, rebuild=1)
Creates an EPS file describing a packet. If filename is not provided a temporary file is created and gs is called."""
canvas = self.canvas_dump(**kargs)
if filename is None:
fname = get_temp_file(autoext=".eps")
canvas.writeEPSfile(fname)
subprocess.Popen([conf.prog.psreader, fname+".eps"])
else:
canvas.writeEPSfile(filename)
|
[
"def",
"psdump",
"(",
"self",
",",
"filename",
"=",
"None",
",",
"*",
"*",
"kargs",
")",
":",
"canvas",
"=",
"self",
".",
"canvas_dump",
"(",
"*",
"*",
"kargs",
")",
"if",
"filename",
"is",
"None",
":",
"fname",
"=",
"get_temp_file",
"(",
"autoext",
"=",
"\".eps\"",
")",
"canvas",
".",
"writeEPSfile",
"(",
"fname",
")",
"subprocess",
".",
"Popen",
"(",
"[",
"conf",
".",
"prog",
".",
"psreader",
",",
"fname",
"+",
"\".eps\"",
"]",
")",
"else",
":",
"canvas",
".",
"writeEPSfile",
"(",
"filename",
")"
] |
https://github.com/nneonneo/eqgrp-free-file/blob/49e457d7ac870d5e00f5a247b94476cd5643f3ba/Firewall/EXPLOITS/EXBA/scapy/packet.py#L350-L359
|
||
NUAA-AL/ALiPy
|
bc69062c7129d597a9e54b9eb409c6fcb1f36a3c
|
alipy/index/index_collections.py
|
python
|
IndexCollection.index
|
(self)
|
return copy.deepcopy(self._innercontainer)
|
Get the index of data.
|
Get the index of data.
|
[
"Get",
"the",
"index",
"of",
"data",
"."
] |
def index(self):
"""
Get the index of data.
"""
return copy.deepcopy(self._innercontainer)
|
[
"def",
"index",
"(",
"self",
")",
":",
"return",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"_innercontainer",
")"
] |
https://github.com/NUAA-AL/ALiPy/blob/bc69062c7129d597a9e54b9eb409c6fcb1f36a3c/alipy/index/index_collections.py#L91-L95
|
|
fossasia/x-mario-center
|
fe67afe28d995dcf4e2498e305825a4859566172
|
build/lib.linux-i686-2.7/softwarecenter/utils.py
|
python
|
pnormaldist
|
(qn)
|
Inverse normal distribution, based on the Ruby statistics2.pnormaldist
|
Inverse normal distribution, based on the Ruby statistics2.pnormaldist
|
[
"Inverse",
"normal",
"distribution",
"based",
"on",
"the",
"Ruby",
"statistics2",
".",
"pnormaldist"
] |
def pnormaldist(qn):
"""
Inverse normal distribution, based on the Ruby statistics2.pnormaldist
"""
b = [1.570796288, 0.03706987906, -0.8364353589e-3,
-0.2250947176e-3, 0.6841218299e-5, 0.5824238515e-5,
-0.104527497e-5, 0.8360937017e-7, -0.3231081277e-8,
0.3657763036e-10, 0.6936233982e-12]
if qn < 0 or qn > 1:
raise ValueError("qn must be between 0.0 and 1.0")
if qn == 0.5:
return 0.0
w1 = qn
if qn > 0.5:
w1 = 1.0 - w1
w3 = -math.log(4.0 * w1 * (1.0 - w1))
w1 = b[0]
for i in range(1, 11):
w1 = w1 + (b[i] * math.pow(w3, i))
if qn > 0.5:
return math.sqrt(w1 * w3)
else:
return -math.sqrt(w1 * w3)
|
[
"def",
"pnormaldist",
"(",
"qn",
")",
":",
"b",
"=",
"[",
"1.570796288",
",",
"0.03706987906",
",",
"-",
"0.8364353589e-3",
",",
"-",
"0.2250947176e-3",
",",
"0.6841218299e-5",
",",
"0.5824238515e-5",
",",
"-",
"0.104527497e-5",
",",
"0.8360937017e-7",
",",
"-",
"0.3231081277e-8",
",",
"0.3657763036e-10",
",",
"0.6936233982e-12",
"]",
"if",
"qn",
"<",
"0",
"or",
"qn",
">",
"1",
":",
"raise",
"ValueError",
"(",
"\"qn must be between 0.0 and 1.0\"",
")",
"if",
"qn",
"==",
"0.5",
":",
"return",
"0.0",
"w1",
"=",
"qn",
"if",
"qn",
">",
"0.5",
":",
"w1",
"=",
"1.0",
"-",
"w1",
"w3",
"=",
"-",
"math",
".",
"log",
"(",
"4.0",
"*",
"w1",
"*",
"(",
"1.0",
"-",
"w1",
")",
")",
"w1",
"=",
"b",
"[",
"0",
"]",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"11",
")",
":",
"w1",
"=",
"w1",
"+",
"(",
"b",
"[",
"i",
"]",
"*",
"math",
".",
"pow",
"(",
"w3",
",",
"i",
")",
")",
"if",
"qn",
">",
"0.5",
":",
"return",
"math",
".",
"sqrt",
"(",
"w1",
"*",
"w3",
")",
"else",
":",
"return",
"-",
"math",
".",
"sqrt",
"(",
"w1",
"*",
"w3",
")"
] |
https://github.com/fossasia/x-mario-center/blob/fe67afe28d995dcf4e2498e305825a4859566172/build/lib.linux-i686-2.7/softwarecenter/utils.py#L560-L585
|
||
numba/numba
|
bf480b9e0da858a65508c2b17759a72ee6a44c51
|
numba/core/types/abstract.py
|
python
|
_autoincr
|
()
|
return n
|
[] |
def _autoincr():
n = next(_typecodes)
# 4 billion types should be enough, right?
assert n < 2 ** 32, "Limited to 4 billion types"
return n
|
[
"def",
"_autoincr",
"(",
")",
":",
"n",
"=",
"next",
"(",
"_typecodes",
")",
"# 4 billion types should be enough, right?",
"assert",
"n",
"<",
"2",
"**",
"32",
",",
"\"Limited to 4 billion types\"",
"return",
"n"
] |
https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/core/types/abstract.py#L19-L23
|
|||
python/cpython
|
e13cdca0f5224ec4e23bdd04bb3120506964bc8b
|
Lib/tkinter/__init__.py
|
python
|
Misc.winfo_server
|
(self)
|
return self.tk.call('winfo', 'server', self._w)
|
Return information of the X-Server of the screen of this widget in
the form "XmajorRminor vendor vendorVersion".
|
Return information of the X-Server of the screen of this widget in
the form "XmajorRminor vendor vendorVersion".
|
[
"Return",
"information",
"of",
"the",
"X",
"-",
"Server",
"of",
"the",
"screen",
"of",
"this",
"widget",
"in",
"the",
"form",
"XmajorRminor",
"vendor",
"vendorVersion",
"."
] |
def winfo_server(self):
"""Return information of the X-Server of the screen of this widget in
the form "XmajorRminor vendor vendorVersion"."""
return self.tk.call('winfo', 'server', self._w)
|
[
"def",
"winfo_server",
"(",
"self",
")",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"'winfo'",
",",
"'server'",
",",
"self",
".",
"_w",
")"
] |
https://github.com/python/cpython/blob/e13cdca0f5224ec4e23bdd04bb3120506964bc8b/Lib/tkinter/__init__.py#L1253-L1256
|
|
robotframework/RIDE
|
6e8a50774ff33dead3a2757a11b0b4418ab205c0
|
src/robotide/editor/contentassist.py
|
python
|
ContentAssistTextEditor.__init__
|
(self, parent, suggestion_source, pos, size=wx.DefaultSize)
|
self.SetBackgroundColour(Colour(200, 222, 40))
self.SetOwnBackgroundColour(Colour(200, 222, 40))
self.SetForegroundColour(Colour(7, 0, 70))
self.SetOwnForegroundColour(Colour(7, 0, 70))
|
self.SetBackgroundColour(Colour(200, 222, 40))
self.SetOwnBackgroundColour(Colour(200, 222, 40))
self.SetForegroundColour(Colour(7, 0, 70))
self.SetOwnForegroundColour(Colour(7, 0, 70))
|
[
"self",
".",
"SetBackgroundColour",
"(",
"Colour",
"(",
"200",
"222",
"40",
"))",
"self",
".",
"SetOwnBackgroundColour",
"(",
"Colour",
"(",
"200",
"222",
"40",
"))",
"self",
".",
"SetForegroundColour",
"(",
"Colour",
"(",
"7",
"0",
"70",
"))",
"self",
".",
"SetOwnForegroundColour",
"(",
"Colour",
"(",
"7",
"0",
"70",
"))"
] |
def __init__(self, parent, suggestion_source, pos, size=wx.DefaultSize):
wx.TextCtrl.__init__(self, parent, -1, "", pos, size=size, style=wx.WANTS_CHARS|wx.BORDER_NONE|wx.WS_EX_TRANSIENT|wx.TE_PROCESS_ENTER|wx.TE_NOHIDESEL)
_ContentAssistTextCtrlBase.__init__(self, suggestion_source)
self.SetBackgroundColour(Colour(self.color_background_help))
self.SetOwnBackgroundColour(Colour(self.color_background_help))
self.SetForegroundColour(Colour(self.color_foreground_text))
self.SetOwnForegroundColour(Colour(self.color_foreground_text))
"""
self.SetBackgroundColour(Colour(200, 222, 40))
self.SetOwnBackgroundColour(Colour(200, 222, 40))
self.SetForegroundColour(Colour(7, 0, 70))
self.SetOwnForegroundColour(Colour(7, 0, 70))
"""
|
[
"def",
"__init__",
"(",
"self",
",",
"parent",
",",
"suggestion_source",
",",
"pos",
",",
"size",
"=",
"wx",
".",
"DefaultSize",
")",
":",
"wx",
".",
"TextCtrl",
".",
"__init__",
"(",
"self",
",",
"parent",
",",
"-",
"1",
",",
"\"\"",
",",
"pos",
",",
"size",
"=",
"size",
",",
"style",
"=",
"wx",
".",
"WANTS_CHARS",
"|",
"wx",
".",
"BORDER_NONE",
"|",
"wx",
".",
"WS_EX_TRANSIENT",
"|",
"wx",
".",
"TE_PROCESS_ENTER",
"|",
"wx",
".",
"TE_NOHIDESEL",
")",
"_ContentAssistTextCtrlBase",
".",
"__init__",
"(",
"self",
",",
"suggestion_source",
")",
"self",
".",
"SetBackgroundColour",
"(",
"Colour",
"(",
"self",
".",
"color_background_help",
")",
")",
"self",
".",
"SetOwnBackgroundColour",
"(",
"Colour",
"(",
"self",
".",
"color_background_help",
")",
")",
"self",
".",
"SetForegroundColour",
"(",
"Colour",
"(",
"self",
".",
"color_foreground_text",
")",
")",
"self",
".",
"SetOwnForegroundColour",
"(",
"Colour",
"(",
"self",
".",
"color_foreground_text",
")",
")"
] |
https://github.com/robotframework/RIDE/blob/6e8a50774ff33dead3a2757a11b0b4418ab205c0/src/robotide/editor/contentassist.py#L228-L240
|
||
bruderstein/PythonScript
|
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
|
PythonLib/full/email/_header_value_parser.py
|
python
|
TokenList.fold
|
(self, *, policy)
|
return _refold_parse_tree(self, policy=policy)
|
[] |
def fold(self, *, policy):
return _refold_parse_tree(self, policy=policy)
|
[
"def",
"fold",
"(",
"self",
",",
"*",
",",
"policy",
")",
":",
"return",
"_refold_parse_tree",
"(",
"self",
",",
"policy",
"=",
"policy",
")"
] |
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/email/_header_value_parser.py#L155-L156
|
|||
huuuuusy/Mask-RCNN-Shiny
|
b59944ae08fda8dfc19d27a22acd59f94d8beb4f
|
mrcnn/visualize.py
|
python
|
display_instances
|
(image, boxes, masks, class_ids, class_names,
scores=None, title="",
figsize=(16, 16), ax=None,
show_mask=True, show_bbox=True,
colors=None, captions=None)
|
boxes: [num_instance, (y1, x1, y2, x2, class_id)] in image coordinates.
masks: [height, width, num_instances]
class_ids: [num_instances]
class_names: list of class names of the dataset
scores: (optional) confidence scores for each box
title: (optional) Figure title
show_mask, show_bbox: To show masks and bounding boxes or not
figsize: (optional) the size of the image
colors: (optional) An array or colors to use with each object
captions: (optional) A list of strings to use as captions for each object
|
boxes: [num_instance, (y1, x1, y2, x2, class_id)] in image coordinates.
masks: [height, width, num_instances]
class_ids: [num_instances]
class_names: list of class names of the dataset
scores: (optional) confidence scores for each box
title: (optional) Figure title
show_mask, show_bbox: To show masks and bounding boxes or not
figsize: (optional) the size of the image
colors: (optional) An array or colors to use with each object
captions: (optional) A list of strings to use as captions for each object
|
[
"boxes",
":",
"[",
"num_instance",
"(",
"y1",
"x1",
"y2",
"x2",
"class_id",
")",
"]",
"in",
"image",
"coordinates",
".",
"masks",
":",
"[",
"height",
"width",
"num_instances",
"]",
"class_ids",
":",
"[",
"num_instances",
"]",
"class_names",
":",
"list",
"of",
"class",
"names",
"of",
"the",
"dataset",
"scores",
":",
"(",
"optional",
")",
"confidence",
"scores",
"for",
"each",
"box",
"title",
":",
"(",
"optional",
")",
"Figure",
"title",
"show_mask",
"show_bbox",
":",
"To",
"show",
"masks",
"and",
"bounding",
"boxes",
"or",
"not",
"figsize",
":",
"(",
"optional",
")",
"the",
"size",
"of",
"the",
"image",
"colors",
":",
"(",
"optional",
")",
"An",
"array",
"or",
"colors",
"to",
"use",
"with",
"each",
"object",
"captions",
":",
"(",
"optional",
")",
"A",
"list",
"of",
"strings",
"to",
"use",
"as",
"captions",
"for",
"each",
"object"
] |
def display_instances(image, boxes, masks, class_ids, class_names,
scores=None, title="",
figsize=(16, 16), ax=None,
show_mask=True, show_bbox=True,
colors=None, captions=None):
"""
boxes: [num_instance, (y1, x1, y2, x2, class_id)] in image coordinates.
masks: [height, width, num_instances]
class_ids: [num_instances]
class_names: list of class names of the dataset
scores: (optional) confidence scores for each box
title: (optional) Figure title
show_mask, show_bbox: To show masks and bounding boxes or not
figsize: (optional) the size of the image
colors: (optional) An array or colors to use with each object
captions: (optional) A list of strings to use as captions for each object
"""
# Number of instances
N = boxes.shape[0]
if not N:
print("\n*** No instances to display *** \n")
else:
assert boxes.shape[0] == masks.shape[-1] == class_ids.shape[0]
# If no axis is passed, create one and automatically call show()
auto_show = False
if not ax:
_, ax = plt.subplots(1, figsize=figsize)
auto_show = True
# Generate random colors
colors = colors or random_colors(N)
# Show area outside image boundaries.
height, width = image.shape[:2]
ax.set_ylim(height + 10, -10)
ax.set_xlim(-10, width + 10)
ax.axis('off')
ax.set_title(title)
masked_image = image.astype(np.uint32).copy()
for i in range(N):
color = colors[i]
# Bounding box
if not np.any(boxes[i]):
# Skip this instance. Has no bbox. Likely lost in image cropping.
continue
y1, x1, y2, x2 = boxes[i]
if show_bbox:
p = patches.Rectangle((x1, y1), x2 - x1, y2 - y1, linewidth=2,
alpha=0.7, linestyle="dashed",
edgecolor=color, facecolor='none')
ax.add_patch(p)
# Label
if not captions:
class_id = class_ids[i]
score = scores[i] if scores is not None else None
label = class_names[class_id]
x = random.randint(x1, (x1 + x2) // 2)
caption = "{} {:.3f}".format(label, score) if score else label
else:
caption = captions[i]
ax.text(x1, y1 + 8, caption,
color='w', size=11, backgroundcolor="none")
# Mask
mask = masks[:, :, i]
if show_mask:
masked_image = apply_mask(masked_image, mask, color)
# Mask Polygon
# Pad to ensure proper polygons for masks that touch image edges.
padded_mask = np.zeros(
(mask.shape[0] + 2, mask.shape[1] + 2), dtype=np.uint8)
padded_mask[1:-1, 1:-1] = mask
contours = find_contours(padded_mask, 0.5)
for verts in contours:
# Subtract the padding and flip (y, x) to (x, y)
verts = np.fliplr(verts) - 1
p = Polygon(verts, facecolor="none", edgecolor=color)
ax.add_patch(p)
ax.imshow(masked_image.astype(np.uint8))
if auto_show:
plt.show()
|
[
"def",
"display_instances",
"(",
"image",
",",
"boxes",
",",
"masks",
",",
"class_ids",
",",
"class_names",
",",
"scores",
"=",
"None",
",",
"title",
"=",
"\"\"",
",",
"figsize",
"=",
"(",
"16",
",",
"16",
")",
",",
"ax",
"=",
"None",
",",
"show_mask",
"=",
"True",
",",
"show_bbox",
"=",
"True",
",",
"colors",
"=",
"None",
",",
"captions",
"=",
"None",
")",
":",
"# Number of instances",
"N",
"=",
"boxes",
".",
"shape",
"[",
"0",
"]",
"if",
"not",
"N",
":",
"print",
"(",
"\"\\n*** No instances to display *** \\n\"",
")",
"else",
":",
"assert",
"boxes",
".",
"shape",
"[",
"0",
"]",
"==",
"masks",
".",
"shape",
"[",
"-",
"1",
"]",
"==",
"class_ids",
".",
"shape",
"[",
"0",
"]",
"# If no axis is passed, create one and automatically call show()",
"auto_show",
"=",
"False",
"if",
"not",
"ax",
":",
"_",
",",
"ax",
"=",
"plt",
".",
"subplots",
"(",
"1",
",",
"figsize",
"=",
"figsize",
")",
"auto_show",
"=",
"True",
"# Generate random colors",
"colors",
"=",
"colors",
"or",
"random_colors",
"(",
"N",
")",
"# Show area outside image boundaries.",
"height",
",",
"width",
"=",
"image",
".",
"shape",
"[",
":",
"2",
"]",
"ax",
".",
"set_ylim",
"(",
"height",
"+",
"10",
",",
"-",
"10",
")",
"ax",
".",
"set_xlim",
"(",
"-",
"10",
",",
"width",
"+",
"10",
")",
"ax",
".",
"axis",
"(",
"'off'",
")",
"ax",
".",
"set_title",
"(",
"title",
")",
"masked_image",
"=",
"image",
".",
"astype",
"(",
"np",
".",
"uint32",
")",
".",
"copy",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"N",
")",
":",
"color",
"=",
"colors",
"[",
"i",
"]",
"# Bounding box",
"if",
"not",
"np",
".",
"any",
"(",
"boxes",
"[",
"i",
"]",
")",
":",
"# Skip this instance. Has no bbox. Likely lost in image cropping.",
"continue",
"y1",
",",
"x1",
",",
"y2",
",",
"x2",
"=",
"boxes",
"[",
"i",
"]",
"if",
"show_bbox",
":",
"p",
"=",
"patches",
".",
"Rectangle",
"(",
"(",
"x1",
",",
"y1",
")",
",",
"x2",
"-",
"x1",
",",
"y2",
"-",
"y1",
",",
"linewidth",
"=",
"2",
",",
"alpha",
"=",
"0.7",
",",
"linestyle",
"=",
"\"dashed\"",
",",
"edgecolor",
"=",
"color",
",",
"facecolor",
"=",
"'none'",
")",
"ax",
".",
"add_patch",
"(",
"p",
")",
"# Label",
"if",
"not",
"captions",
":",
"class_id",
"=",
"class_ids",
"[",
"i",
"]",
"score",
"=",
"scores",
"[",
"i",
"]",
"if",
"scores",
"is",
"not",
"None",
"else",
"None",
"label",
"=",
"class_names",
"[",
"class_id",
"]",
"x",
"=",
"random",
".",
"randint",
"(",
"x1",
",",
"(",
"x1",
"+",
"x2",
")",
"//",
"2",
")",
"caption",
"=",
"\"{} {:.3f}\"",
".",
"format",
"(",
"label",
",",
"score",
")",
"if",
"score",
"else",
"label",
"else",
":",
"caption",
"=",
"captions",
"[",
"i",
"]",
"ax",
".",
"text",
"(",
"x1",
",",
"y1",
"+",
"8",
",",
"caption",
",",
"color",
"=",
"'w'",
",",
"size",
"=",
"11",
",",
"backgroundcolor",
"=",
"\"none\"",
")",
"# Mask",
"mask",
"=",
"masks",
"[",
":",
",",
":",
",",
"i",
"]",
"if",
"show_mask",
":",
"masked_image",
"=",
"apply_mask",
"(",
"masked_image",
",",
"mask",
",",
"color",
")",
"# Mask Polygon",
"# Pad to ensure proper polygons for masks that touch image edges.",
"padded_mask",
"=",
"np",
".",
"zeros",
"(",
"(",
"mask",
".",
"shape",
"[",
"0",
"]",
"+",
"2",
",",
"mask",
".",
"shape",
"[",
"1",
"]",
"+",
"2",
")",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
"padded_mask",
"[",
"1",
":",
"-",
"1",
",",
"1",
":",
"-",
"1",
"]",
"=",
"mask",
"contours",
"=",
"find_contours",
"(",
"padded_mask",
",",
"0.5",
")",
"for",
"verts",
"in",
"contours",
":",
"# Subtract the padding and flip (y, x) to (x, y)",
"verts",
"=",
"np",
".",
"fliplr",
"(",
"verts",
")",
"-",
"1",
"p",
"=",
"Polygon",
"(",
"verts",
",",
"facecolor",
"=",
"\"none\"",
",",
"edgecolor",
"=",
"color",
")",
"ax",
".",
"add_patch",
"(",
"p",
")",
"ax",
".",
"imshow",
"(",
"masked_image",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
")",
"if",
"auto_show",
":",
"plt",
".",
"show",
"(",
")"
] |
https://github.com/huuuuusy/Mask-RCNN-Shiny/blob/b59944ae08fda8dfc19d27a22acd59f94d8beb4f/mrcnn/visualize.py#L84-L169
|
||
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/core/function.py
|
python
|
FunctionClass.__repr__
|
(cls)
|
return cls.__name__
|
[] |
def __repr__(cls):
return cls.__name__
|
[
"def",
"__repr__",
"(",
"cls",
")",
":",
"return",
"cls",
".",
"__name__"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/core/function.py#L170-L171
|
|||
pyparallel/pyparallel
|
11e8c6072d48c8f13641925d17b147bf36ee0ba3
|
Lib/site-packages/Cython-0.23.4-py3.3-win-amd64.egg/Cython/Compiler/Optimize.py
|
python
|
EarlyReplaceBuiltinCalls._handle_simple_function_all
|
(self, node, pos_args)
|
return self._transform_any_all(node, pos_args, False)
|
Transform
_result = all(x for L in LL for x in L)
into
for L in LL:
for x in L:
if not x:
_result = False
break
else:
continue
break
else:
_result = True
|
Transform
|
[
"Transform"
] |
def _handle_simple_function_all(self, node, pos_args):
"""Transform
_result = all(x for L in LL for x in L)
into
for L in LL:
for x in L:
if not x:
_result = False
break
else:
continue
break
else:
_result = True
"""
return self._transform_any_all(node, pos_args, False)
|
[
"def",
"_handle_simple_function_all",
"(",
"self",
",",
"node",
",",
"pos_args",
")",
":",
"return",
"self",
".",
"_transform_any_all",
"(",
"node",
",",
"pos_args",
",",
"False",
")"
] |
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/site-packages/Cython-0.23.4-py3.3-win-amd64.egg/Cython/Compiler/Optimize.py#L1503-L1521
|
|
catalyst-cooperative/pudl
|
40d176313e60dfa9d2481f63842ed23f08f1ad5f
|
src/pudl/analysis/state_demand.py
|
python
|
melt_ferc714_hourly_demand_matrix
|
(
df: pd.DataFrame, tz: pd.DataFrame
)
|
return df
|
Melt FERC 714 hourly demand matrix to long format.
Args:
df: FERC 714 hourly demand matrix,
as described in :func:`load_ferc714_hourly_demand_matrix`.
tz: FERC 714 respondent time zones,
as described in :func:`load_ferc714_hourly_demand_matrix`.
Returns:
Long-format hourly demand with columns
`respondent_id_ferc714`, report `year` (int), `utc_datetime`, and `demand_mwh`.
|
Melt FERC 714 hourly demand matrix to long format.
|
[
"Melt",
"FERC",
"714",
"hourly",
"demand",
"matrix",
"to",
"long",
"format",
"."
] |
def melt_ferc714_hourly_demand_matrix(
df: pd.DataFrame, tz: pd.DataFrame
) -> pd.DataFrame:
"""
Melt FERC 714 hourly demand matrix to long format.
Args:
df: FERC 714 hourly demand matrix,
as described in :func:`load_ferc714_hourly_demand_matrix`.
tz: FERC 714 respondent time zones,
as described in :func:`load_ferc714_hourly_demand_matrix`.
Returns:
Long-format hourly demand with columns
`respondent_id_ferc714`, report `year` (int), `utc_datetime`, and `demand_mwh`.
"""
# Melt demand matrix to long format
df = df.melt(value_name='demand_mwh', ignore_index=False)
df.reset_index(inplace=True)
# Convert local times to UTC
df['year'] = df['datetime'].dt.year
df = df.merge(tz, on=['respondent_id_ferc714', 'year'])
df['utc_datetime'] = local_to_utc(df['datetime'], df['utc_offset'])
df.drop(columns=['utc_offset', 'datetime'], inplace=True)
return df
|
[
"def",
"melt_ferc714_hourly_demand_matrix",
"(",
"df",
":",
"pd",
".",
"DataFrame",
",",
"tz",
":",
"pd",
".",
"DataFrame",
")",
"->",
"pd",
".",
"DataFrame",
":",
"# Melt demand matrix to long format",
"df",
"=",
"df",
".",
"melt",
"(",
"value_name",
"=",
"'demand_mwh'",
",",
"ignore_index",
"=",
"False",
")",
"df",
".",
"reset_index",
"(",
"inplace",
"=",
"True",
")",
"# Convert local times to UTC",
"df",
"[",
"'year'",
"]",
"=",
"df",
"[",
"'datetime'",
"]",
".",
"dt",
".",
"year",
"df",
"=",
"df",
".",
"merge",
"(",
"tz",
",",
"on",
"=",
"[",
"'respondent_id_ferc714'",
",",
"'year'",
"]",
")",
"df",
"[",
"'utc_datetime'",
"]",
"=",
"local_to_utc",
"(",
"df",
"[",
"'datetime'",
"]",
",",
"df",
"[",
"'utc_offset'",
"]",
")",
"df",
".",
"drop",
"(",
"columns",
"=",
"[",
"'utc_offset'",
",",
"'datetime'",
"]",
",",
"inplace",
"=",
"True",
")",
"return",
"df"
] |
https://github.com/catalyst-cooperative/pudl/blob/40d176313e60dfa9d2481f63842ed23f08f1ad5f/src/pudl/analysis/state_demand.py#L464-L488
|
|
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_flaskbb/Python-2.7.9/Doc/tools/rstlint.py
|
python
|
checker
|
(*suffixes, **kwds)
|
return deco
|
Decorator to register a function as a checker.
|
Decorator to register a function as a checker.
|
[
"Decorator",
"to",
"register",
"a",
"function",
"as",
"a",
"checker",
"."
] |
def checker(*suffixes, **kwds):
"""Decorator to register a function as a checker."""
def deco(func):
for suffix in suffixes:
checkers.setdefault(suffix, []).append(func)
for prop in checker_props:
setattr(func, prop, kwds.get(prop, checker_props[prop]))
return func
return deco
|
[
"def",
"checker",
"(",
"*",
"suffixes",
",",
"*",
"*",
"kwds",
")",
":",
"def",
"deco",
"(",
"func",
")",
":",
"for",
"suffix",
"in",
"suffixes",
":",
"checkers",
".",
"setdefault",
"(",
"suffix",
",",
"[",
"]",
")",
".",
"append",
"(",
"func",
")",
"for",
"prop",
"in",
"checker_props",
":",
"setattr",
"(",
"func",
",",
"prop",
",",
"kwds",
".",
"get",
"(",
"prop",
",",
"checker_props",
"[",
"prop",
"]",
")",
")",
"return",
"func",
"return",
"deco"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/Python-2.7.9/Doc/tools/rstlint.py#L54-L62
|
|
TencentCloud/tencentcloud-sdk-python
|
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
|
tencentcloud/vpc/v20170312/models.py
|
python
|
ModifyPrivateIpAddressesAttributeResponse.__init__
|
(self)
|
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
|
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
|
[
"r",
":",
"param",
"RequestId",
":",
"唯一请求",
"ID,每次请求都会返回。定位问题时需要提供该次请求的",
"RequestId。",
":",
"type",
"RequestId",
":",
"str"
] |
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"RequestId",
"=",
"None"
] |
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/vpc/v20170312/models.py#L15311-L15316
|
||
MaybeShewill-CV/attentive-gan-derainnet
|
4ec79993cf3d757741c9c88c1036d87bd46e982d
|
attentive_gan_model/cnn_basenet.py
|
python
|
CNNBaseModel.dropout
|
(inputdata, keep_prob, noise_shape=None, name=None)
|
return tf.nn.dropout(inputdata, keep_prob=keep_prob, noise_shape=noise_shape, name=name)
|
:param name:
:param inputdata:
:param keep_prob:
:param noise_shape:
:return:
|
[] |
def dropout(inputdata, keep_prob, noise_shape=None, name=None):
"""
:param name:
:param inputdata:
:param keep_prob:
:param noise_shape:
:return:
"""
return tf.nn.dropout(inputdata, keep_prob=keep_prob, noise_shape=noise_shape, name=name)
|
[
"def",
"dropout",
"(",
"inputdata",
",",
"keep_prob",
",",
"noise_shape",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"return",
"tf",
".",
"nn",
".",
"dropout",
"(",
"inputdata",
",",
"keep_prob",
"=",
"keep_prob",
",",
"noise_shape",
"=",
"noise_shape",
",",
"name",
"=",
"name",
")"
] |
https://github.com/MaybeShewill-CV/attentive-gan-derainnet/blob/4ec79993cf3d757741c9c88c1036d87bd46e982d/attentive_gan_model/cnn_basenet.py#L264-L273
|
||
mrlesmithjr/Ansible
|
d44f0dc0d942bdf3bf7334b307e6048f0ee16e36
|
roles/ansible-nginx-load-balancer/library/openssl_csr.py
|
python
|
CertificateSigningRequest.dump
|
(self)
|
return result
|
Serialize the object into a dictionary.
|
Serialize the object into a dictionary.
|
[
"Serialize",
"the",
"object",
"into",
"a",
"dictionary",
"."
] |
def dump(self):
'''Serialize the object into a dictionary.'''
result = {
'csr': self.path,
'subject': self.subject,
'subjectAltName': self.subjectAltName,
'changed': self.changed
}
return result
|
[
"def",
"dump",
"(",
"self",
")",
":",
"result",
"=",
"{",
"'csr'",
":",
"self",
".",
"path",
",",
"'subject'",
":",
"self",
".",
"subject",
",",
"'subjectAltName'",
":",
"self",
".",
"subjectAltName",
",",
"'changed'",
":",
"self",
".",
"changed",
"}",
"return",
"result"
] |
https://github.com/mrlesmithjr/Ansible/blob/d44f0dc0d942bdf3bf7334b307e6048f0ee16e36/roles/ansible-nginx-load-balancer/library/openssl_csr.py#L251-L261
|
|
twilio/twilio-python
|
6e1e811ea57a1edfadd5161ace87397c563f6915
|
twilio/rest/studio/v2/flow/execution/__init__.py
|
python
|
ExecutionContext.__repr__
|
(self)
|
return '<Twilio.Studio.V2.ExecutionContext {}>'.format(context)
|
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
|
Provide a friendly representation
|
[
"Provide",
"a",
"friendly",
"representation"
] |
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Studio.V2.ExecutionContext {}>'.format(context)
|
[
"def",
"__repr__",
"(",
"self",
")",
":",
"context",
"=",
"' '",
".",
"join",
"(",
"'{}={}'",
".",
"format",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"self",
".",
"_solution",
".",
"items",
"(",
")",
")",
"return",
"'<Twilio.Studio.V2.ExecutionContext {}>'",
".",
"format",
"(",
"context",
")"
] |
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/studio/v2/flow/execution/__init__.py#L327-L335
|
|
rucio/rucio
|
6d0d358e04f5431f0b9a98ae40f31af0ddff4833
|
lib/rucio/rse/protocols/storm.py
|
python
|
Default.delete
|
(self, pfn)
|
Deletes a file from the connected RSE.
:param pfn Physical file name
:raises ServiceUnavailable, SourceNotFound
|
Deletes a file from the connected RSE.
|
[
"Deletes",
"a",
"file",
"from",
"the",
"connected",
"RSE",
"."
] |
def delete(self, pfn):
""" Deletes a file from the connected RSE.
:param pfn Physical file name
:raises ServiceUnavailable, SourceNotFound
"""
raise NotImplementedError
|
[
"def",
"delete",
"(",
"self",
",",
"pfn",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/rucio/rucio/blob/6d0d358e04f5431f0b9a98ae40f31af0ddff4833/lib/rucio/rse/protocols/storm.py#L198-L205
|
||
holoviz/panel
|
5e25cb09447d8edf0b316f130ee1318a2aeb880f
|
panel/io/state.py
|
python
|
_state.log
|
(self, msg, level='info')
|
Logs user messages to the Panel logger.
Arguments
---------
msg: str
Log message
level: int or str
Log level as a string, i.e. 'debug', 'info', 'warning' or 'error'.
|
Logs user messages to the Panel logger.
|
[
"Logs",
"user",
"messages",
"to",
"the",
"Panel",
"logger",
"."
] |
def log(self, msg, level='info'):
"""
Logs user messages to the Panel logger.
Arguments
---------
msg: str
Log message
level: int or str
Log level as a string, i.e. 'debug', 'info', 'warning' or 'error'.
"""
args = ()
if self.curdoc:
args = (id(self.curdoc),)
msg = LOG_USER_MSG.format(msg=msg)
getattr(_state_logger, level.lower())(msg, *args)
|
[
"def",
"log",
"(",
"self",
",",
"msg",
",",
"level",
"=",
"'info'",
")",
":",
"args",
"=",
"(",
")",
"if",
"self",
".",
"curdoc",
":",
"args",
"=",
"(",
"id",
"(",
"self",
".",
"curdoc",
")",
",",
")",
"msg",
"=",
"LOG_USER_MSG",
".",
"format",
"(",
"msg",
"=",
"msg",
")",
"getattr",
"(",
"_state_logger",
",",
"level",
".",
"lower",
"(",
")",
")",
"(",
"msg",
",",
"*",
"args",
")"
] |
https://github.com/holoviz/panel/blob/5e25cb09447d8edf0b316f130ee1318a2aeb880f/panel/io/state.py#L328-L343
|
||
veusz/veusz
|
5a1e2af5f24df0eb2a2842be51f2997c4999c7fb
|
veusz/widgets/polygon.py
|
python
|
Polygon.addSettings
|
(klass, s)
|
Construct list of settings.
|
Construct list of settings.
|
[
"Construct",
"list",
"of",
"settings",
"."
] |
def addSettings(klass, s):
"""Construct list of settings."""
plotters.FreePlotter.addSettings(s)
s.add( setting.Line(
'Line',
descr=_('Line around polygon'),
usertext=_('Line')),
pixmap='settings_plotline' )
s.add( setting.BrushExtended(
'Fill',
descr=_('Fill within polygon'),
usertext=_('Fill')),
pixmap='settings_plotfillbelow' )
|
[
"def",
"addSettings",
"(",
"klass",
",",
"s",
")",
":",
"plotters",
".",
"FreePlotter",
".",
"addSettings",
"(",
"s",
")",
"s",
".",
"add",
"(",
"setting",
".",
"Line",
"(",
"'Line'",
",",
"descr",
"=",
"_",
"(",
"'Line around polygon'",
")",
",",
"usertext",
"=",
"_",
"(",
"'Line'",
")",
")",
",",
"pixmap",
"=",
"'settings_plotline'",
")",
"s",
".",
"add",
"(",
"setting",
".",
"BrushExtended",
"(",
"'Fill'",
",",
"descr",
"=",
"_",
"(",
"'Fill within polygon'",
")",
",",
"usertext",
"=",
"_",
"(",
"'Fill'",
")",
")",
",",
"pixmap",
"=",
"'settings_plotfillbelow'",
")"
] |
https://github.com/veusz/veusz/blob/5a1e2af5f24df0eb2a2842be51f2997c4999c7fb/veusz/widgets/polygon.py#L43-L56
|
||
astanway/crucible
|
c79a3c25f10465387af69d758973825fe3f244f5
|
src/crucible.py
|
python
|
Crucible.run
|
(self)
|
Called when the process intializes.
|
Called when the process intializes.
|
[
"Called",
"when",
"the",
"process",
"intializes",
"."
] |
def run(self):
"""
Called when the process intializes.
"""
__data__ = abspath(join(dirname( __file__ ), '..', 'data'))
files = [ f for f in listdir(__data__)
if isfile(join(__data__,f)) ]
# Spawn processes
pids = []
for index, ts_name in enumerate(files):
if ts_name == ".DS_Store":
continue
__data__ = abspath(join(dirname( __file__ ), '..', 'data'))
with open(join(__data__ + "/" + ts_name), 'r') as f:
timeseries = json.loads(f.read())
p = Process(target=run_algorithms, args=(timeseries, ts_name))
pids.append(p)
p.start()
# Send wait signal to zombie processes
for p in pids:
p.join()
|
[
"def",
"run",
"(",
"self",
")",
":",
"__data__",
"=",
"abspath",
"(",
"join",
"(",
"dirname",
"(",
"__file__",
")",
",",
"'..'",
",",
"'data'",
")",
")",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"listdir",
"(",
"__data__",
")",
"if",
"isfile",
"(",
"join",
"(",
"__data__",
",",
"f",
")",
")",
"]",
"# Spawn processes",
"pids",
"=",
"[",
"]",
"for",
"index",
",",
"ts_name",
"in",
"enumerate",
"(",
"files",
")",
":",
"if",
"ts_name",
"==",
"\".DS_Store\"",
":",
"continue",
"__data__",
"=",
"abspath",
"(",
"join",
"(",
"dirname",
"(",
"__file__",
")",
",",
"'..'",
",",
"'data'",
")",
")",
"with",
"open",
"(",
"join",
"(",
"__data__",
"+",
"\"/\"",
"+",
"ts_name",
")",
",",
"'r'",
")",
"as",
"f",
":",
"timeseries",
"=",
"json",
".",
"loads",
"(",
"f",
".",
"read",
"(",
")",
")",
"p",
"=",
"Process",
"(",
"target",
"=",
"run_algorithms",
",",
"args",
"=",
"(",
"timeseries",
",",
"ts_name",
")",
")",
"pids",
".",
"append",
"(",
"p",
")",
"p",
".",
"start",
"(",
")",
"# Send wait signal to zombie processes",
"for",
"p",
"in",
"pids",
":",
"p",
".",
"join",
"(",
")"
] |
https://github.com/astanway/crucible/blob/c79a3c25f10465387af69d758973825fe3f244f5/src/crucible.py#L17-L40
|
||
misterch0c/shadowbroker
|
e3a069bea47a2c1009697941ac214adc6f90aa8d
|
windows/Resources/Python/Core/Lib/lib-tk/Tkinter.py
|
python
|
Spinbox.__init__
|
(self, master=None, cnf={}, **kw)
|
Construct a spinbox widget with the parent MASTER.
STANDARD OPTIONS
activebackground, background, borderwidth,
cursor, exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, justify, relief,
repeatdelay, repeatinterval,
selectbackground, selectborderwidth
selectforeground, takefocus, textvariable
xscrollcommand.
WIDGET-SPECIFIC OPTIONS
buttonbackground, buttoncursor,
buttondownrelief, buttonuprelief,
command, disabledbackground,
disabledforeground, format, from,
invalidcommand, increment,
readonlybackground, state, to,
validate, validatecommand values,
width, wrap,
|
Construct a spinbox widget with the parent MASTER.
STANDARD OPTIONS
activebackground, background, borderwidth,
cursor, exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, justify, relief,
repeatdelay, repeatinterval,
selectbackground, selectborderwidth
selectforeground, takefocus, textvariable
xscrollcommand.
WIDGET-SPECIFIC OPTIONS
buttonbackground, buttoncursor,
buttondownrelief, buttonuprelief,
command, disabledbackground,
disabledforeground, format, from,
invalidcommand, increment,
readonlybackground, state, to,
validate, validatecommand values,
width, wrap,
|
[
"Construct",
"a",
"spinbox",
"widget",
"with",
"the",
"parent",
"MASTER",
".",
"STANDARD",
"OPTIONS",
"activebackground",
"background",
"borderwidth",
"cursor",
"exportselection",
"font",
"foreground",
"highlightbackground",
"highlightcolor",
"highlightthickness",
"insertbackground",
"insertborderwidth",
"insertofftime",
"insertontime",
"insertwidth",
"justify",
"relief",
"repeatdelay",
"repeatinterval",
"selectbackground",
"selectborderwidth",
"selectforeground",
"takefocus",
"textvariable",
"xscrollcommand",
".",
"WIDGET",
"-",
"SPECIFIC",
"OPTIONS",
"buttonbackground",
"buttoncursor",
"buttondownrelief",
"buttonuprelief",
"command",
"disabledbackground",
"disabledforeground",
"format",
"from",
"invalidcommand",
"increment",
"readonlybackground",
"state",
"to",
"validate",
"validatecommand",
"values",
"width",
"wrap"
] |
def __init__(self, master=None, cnf={}, **kw):
"""Construct a spinbox widget with the parent MASTER.
STANDARD OPTIONS
activebackground, background, borderwidth,
cursor, exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, justify, relief,
repeatdelay, repeatinterval,
selectbackground, selectborderwidth
selectforeground, takefocus, textvariable
xscrollcommand.
WIDGET-SPECIFIC OPTIONS
buttonbackground, buttoncursor,
buttondownrelief, buttonuprelief,
command, disabledbackground,
disabledforeground, format, from,
invalidcommand, increment,
readonlybackground, state, to,
validate, validatecommand values,
width, wrap,
"""
Widget.__init__(self, master, 'spinbox', cnf, kw)
|
[
"def",
"__init__",
"(",
"self",
",",
"master",
"=",
"None",
",",
"cnf",
"=",
"{",
"}",
",",
"*",
"*",
"kw",
")",
":",
"Widget",
".",
"__init__",
"(",
"self",
",",
"master",
",",
"'spinbox'",
",",
"cnf",
",",
"kw",
")"
] |
https://github.com/misterch0c/shadowbroker/blob/e3a069bea47a2c1009697941ac214adc6f90aa8d/windows/Resources/Python/Core/Lib/lib-tk/Tkinter.py#L3761-L3788
|
||
qutip/qutip
|
52d01da181a21b810c3407812c670f35fdc647e8
|
qutip/bloch.py
|
python
|
Bloch.render
|
(self)
|
Render the Bloch sphere and its data sets in on given figure and axes.
|
Render the Bloch sphere and its data sets in on given figure and axes.
|
[
"Render",
"the",
"Bloch",
"sphere",
"and",
"its",
"data",
"sets",
"in",
"on",
"given",
"figure",
"and",
"axes",
"."
] |
def render(self):
"""
Render the Bloch sphere and its data sets in on given figure and axes.
"""
if not self._ext_fig and not self._is_inline_backend():
# If no external figure was supplied, we check to see if the
# figure we created in a previous call to .render() has been
# closed, and re-create if has been. This has the unfortunate
# side effect of losing any modifications made to the axes or
# figure, but the alternative is to crash the matplotlib backend.
#
# The inline backend used by, e.g. jupyter notebooks, is happy to
# use closed figures so we leave those figures intact.
if (
self.fig is not None and
not plt.fignum_exists(self.fig.number)
):
self.fig = None
self.axes = None
if self.fig is None:
self.fig = plt.figure(figsize=self.figsize)
if self._is_inline_backend():
# We immediately close the inline figure do avoid displaying
# the figure twice when .show() calls display.
plt.close(self.fig)
if self.axes is None:
self.axes = _axes3D(self.fig, azim=self.view[0], elev=self.view[1])
# Clearing the axes is horrifically slow and loses a lot of the
# axes state, but matplotlib doesn't seem to provide a better way
# to redraw Axes3D. :/
self.axes.clear()
self.axes.grid(False)
if self.background:
self.axes.set_xlim3d(-1.3, 1.3)
self.axes.set_ylim3d(-1.3, 1.3)
self.axes.set_zlim3d(-1.3, 1.3)
else:
self.axes.set_axis_off()
self.axes.set_xlim3d(-0.7, 0.7)
self.axes.set_ylim3d(-0.7, 0.7)
self.axes.set_zlim3d(-0.7, 0.7)
# Manually set aspect ratio to fit a square bounding box.
# Matplotlib did this stretching for < 3.3.0, but not above.
if parse_version(matplotlib.__version__) >= parse_version('3.3'):
self.axes.set_box_aspect((1, 1, 1))
if not self.background:
self.plot_axes()
self.plot_back()
self.plot_points()
self.plot_vectors()
self.plot_front()
self.plot_axes_labels()
self.plot_annotations()
# Trigger an update of the Bloch sphere if it is already shown:
self.fig.canvas.draw()
|
[
"def",
"render",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_ext_fig",
"and",
"not",
"self",
".",
"_is_inline_backend",
"(",
")",
":",
"# If no external figure was supplied, we check to see if the",
"# figure we created in a previous call to .render() has been",
"# closed, and re-create if has been. This has the unfortunate",
"# side effect of losing any modifications made to the axes or",
"# figure, but the alternative is to crash the matplotlib backend.",
"#",
"# The inline backend used by, e.g. jupyter notebooks, is happy to",
"# use closed figures so we leave those figures intact.",
"if",
"(",
"self",
".",
"fig",
"is",
"not",
"None",
"and",
"not",
"plt",
".",
"fignum_exists",
"(",
"self",
".",
"fig",
".",
"number",
")",
")",
":",
"self",
".",
"fig",
"=",
"None",
"self",
".",
"axes",
"=",
"None",
"if",
"self",
".",
"fig",
"is",
"None",
":",
"self",
".",
"fig",
"=",
"plt",
".",
"figure",
"(",
"figsize",
"=",
"self",
".",
"figsize",
")",
"if",
"self",
".",
"_is_inline_backend",
"(",
")",
":",
"# We immediately close the inline figure do avoid displaying",
"# the figure twice when .show() calls display.",
"plt",
".",
"close",
"(",
"self",
".",
"fig",
")",
"if",
"self",
".",
"axes",
"is",
"None",
":",
"self",
".",
"axes",
"=",
"_axes3D",
"(",
"self",
".",
"fig",
",",
"azim",
"=",
"self",
".",
"view",
"[",
"0",
"]",
",",
"elev",
"=",
"self",
".",
"view",
"[",
"1",
"]",
")",
"# Clearing the axes is horrifically slow and loses a lot of the",
"# axes state, but matplotlib doesn't seem to provide a better way",
"# to redraw Axes3D. :/",
"self",
".",
"axes",
".",
"clear",
"(",
")",
"self",
".",
"axes",
".",
"grid",
"(",
"False",
")",
"if",
"self",
".",
"background",
":",
"self",
".",
"axes",
".",
"set_xlim3d",
"(",
"-",
"1.3",
",",
"1.3",
")",
"self",
".",
"axes",
".",
"set_ylim3d",
"(",
"-",
"1.3",
",",
"1.3",
")",
"self",
".",
"axes",
".",
"set_zlim3d",
"(",
"-",
"1.3",
",",
"1.3",
")",
"else",
":",
"self",
".",
"axes",
".",
"set_axis_off",
"(",
")",
"self",
".",
"axes",
".",
"set_xlim3d",
"(",
"-",
"0.7",
",",
"0.7",
")",
"self",
".",
"axes",
".",
"set_ylim3d",
"(",
"-",
"0.7",
",",
"0.7",
")",
"self",
".",
"axes",
".",
"set_zlim3d",
"(",
"-",
"0.7",
",",
"0.7",
")",
"# Manually set aspect ratio to fit a square bounding box.",
"# Matplotlib did this stretching for < 3.3.0, but not above.",
"if",
"parse_version",
"(",
"matplotlib",
".",
"__version__",
")",
">=",
"parse_version",
"(",
"'3.3'",
")",
":",
"self",
".",
"axes",
".",
"set_box_aspect",
"(",
"(",
"1",
",",
"1",
",",
"1",
")",
")",
"if",
"not",
"self",
".",
"background",
":",
"self",
".",
"plot_axes",
"(",
")",
"self",
".",
"plot_back",
"(",
")",
"self",
".",
"plot_points",
"(",
")",
"self",
".",
"plot_vectors",
"(",
")",
"self",
".",
"plot_front",
"(",
")",
"self",
".",
"plot_axes_labels",
"(",
")",
"self",
".",
"plot_annotations",
"(",
")",
"# Trigger an update of the Bloch sphere if it is already shown:",
"self",
".",
"fig",
".",
"canvas",
".",
"draw",
"(",
")"
] |
https://github.com/qutip/qutip/blob/52d01da181a21b810c3407812c670f35fdc647e8/qutip/bloch.py#L414-L472
|
||
sphinx-doc/sphinx
|
e79681c76843c1339863b365747079b2d662d0c1
|
sphinx/util/docutils.py
|
python
|
is_directive_registered
|
(name: str)
|
return name in directives._directives
|
Check the *name* directive is already registered.
|
Check the *name* directive is already registered.
|
[
"Check",
"the",
"*",
"name",
"*",
"directive",
"is",
"already",
"registered",
"."
] |
def is_directive_registered(name: str) -> bool:
"""Check the *name* directive is already registered."""
return name in directives._directives
|
[
"def",
"is_directive_registered",
"(",
"name",
":",
"str",
")",
"->",
"bool",
":",
"return",
"name",
"in",
"directives",
".",
"_directives"
] |
https://github.com/sphinx-doc/sphinx/blob/e79681c76843c1339863b365747079b2d662d0c1/sphinx/util/docutils.py#L65-L67
|
|
AppScale/gts
|
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
|
AppServer/lib/django-1.5/django/contrib/auth/handlers/modwsgi.py
|
python
|
check_password
|
(environ, username, password)
|
Authenticates against Django's auth database
mod_wsgi docs specify None, True, False as return value depending
on whether the user exists and authenticates.
|
Authenticates against Django's auth database
|
[
"Authenticates",
"against",
"Django",
"s",
"auth",
"database"
] |
def check_password(environ, username, password):
"""
Authenticates against Django's auth database
mod_wsgi docs specify None, True, False as return value depending
on whether the user exists and authenticates.
"""
UserModel = auth.get_user_model()
# db connection state is managed similarly to the wsgi handler
# as mod_wsgi may call these functions outside of a request/response cycle
db.reset_queries()
try:
try:
user = UserModel._default_manager.get_by_natural_key(username)
except UserModel.DoesNotExist:
return None
if not user.is_active:
return None
return user.check_password(password)
finally:
db.close_connection()
|
[
"def",
"check_password",
"(",
"environ",
",",
"username",
",",
"password",
")",
":",
"UserModel",
"=",
"auth",
".",
"get_user_model",
"(",
")",
"# db connection state is managed similarly to the wsgi handler",
"# as mod_wsgi may call these functions outside of a request/response cycle",
"db",
".",
"reset_queries",
"(",
")",
"try",
":",
"try",
":",
"user",
"=",
"UserModel",
".",
"_default_manager",
".",
"get_by_natural_key",
"(",
"username",
")",
"except",
"UserModel",
".",
"DoesNotExist",
":",
"return",
"None",
"if",
"not",
"user",
".",
"is_active",
":",
"return",
"None",
"return",
"user",
".",
"check_password",
"(",
"password",
")",
"finally",
":",
"db",
".",
"close_connection",
"(",
")"
] |
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.5/django/contrib/auth/handlers/modwsgi.py#L6-L28
|
||
kubernetes-client/python
|
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
|
kubernetes/client/models/v1_csi_driver_spec.py
|
python
|
V1CSIDriverSpec.__ne__
|
(self, other)
|
return self.to_dict() != other.to_dict()
|
Returns true if both objects are not equal
|
Returns true if both objects are not equal
|
[
"Returns",
"true",
"if",
"both",
"objects",
"are",
"not",
"equal"
] |
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1CSIDriverSpec):
return True
return self.to_dict() != other.to_dict()
|
[
"def",
"__ne__",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"V1CSIDriverSpec",
")",
":",
"return",
"True",
"return",
"self",
".",
"to_dict",
"(",
")",
"!=",
"other",
".",
"to_dict",
"(",
")"
] |
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/models/v1_csi_driver_spec.py#L285-L290
|
|
smart-mobile-software/gitstack
|
d9fee8f414f202143eb6e620529e8e5539a2af56
|
python/Lib/lib-tk/Tkinter.py
|
python
|
PanedWindow.panes
|
(self)
|
return self.tk.call(self._w, 'panes')
|
Returns an ordered list of the child panes.
|
Returns an ordered list of the child panes.
|
[
"Returns",
"an",
"ordered",
"list",
"of",
"the",
"child",
"panes",
"."
] |
def panes(self):
"""Returns an ordered list of the child panes."""
return self.tk.call(self._w, 'panes')
|
[
"def",
"panes",
"(",
"self",
")",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'panes'",
")"
] |
https://github.com/smart-mobile-software/gitstack/blob/d9fee8f414f202143eb6e620529e8e5539a2af56/python/Lib/lib-tk/Tkinter.py#L3679-L3681
|
|
phonopy/phonopy
|
816586d0ba8177482ecf40e52f20cbdee2260d51
|
phonopy/structure/tetrahedron_method.py
|
python
|
TetrahedronMethod._n_4
|
(self)
|
return 1.0
|
n4.
omega4 < omega
|
n4.
|
[
"n4",
"."
] |
def _n_4(self):
"""n4.
omega4 < omega
"""
return 1.0
|
[
"def",
"_n_4",
"(",
"self",
")",
":",
"return",
"1.0"
] |
https://github.com/phonopy/phonopy/blob/816586d0ba8177482ecf40e52f20cbdee2260d51/phonopy/structure/tetrahedron_method.py#L461-L467
|
|
eth-brownie/brownie
|
754bda9f0a294b2beb86453d5eca4ff769a877c8
|
brownie/network/contract.py
|
python
|
ContractContainer.get_verification_info
|
(self)
|
Return a dict with flattened source code for this contract
and further information needed for verification
|
Return a dict with flattened source code for this contract
and further information needed for verification
|
[
"Return",
"a",
"dict",
"with",
"flattened",
"source",
"code",
"for",
"this",
"contract",
"and",
"further",
"information",
"needed",
"for",
"verification"
] |
def get_verification_info(self) -> Dict:
"""
Return a dict with flattened source code for this contract
and further information needed for verification
"""
language = self._build["language"]
if language == "Vyper":
raise TypeError(
"Etherscan does not support API verification of source code "
"for vyper contracts. You need to verify the source manually"
)
elif language == "Solidity":
if self._flattener is None:
source_fp = (
Path(self._project._path)
.joinpath(self._build["sourcePath"])
.resolve()
.as_posix()
)
config = self._project._compiler_config
remaps = dict(
map(
lambda s: s.split("=", 1),
compiler._get_solc_remappings(config["solc"]["remappings"]),
)
)
libs = {lib.strip("_") for lib in re.findall("_{1,}[^_]*_{1,}", self.bytecode)}
compiler_settings = {
"evmVersion": self._build["compiler"]["evm_version"],
"optimizer": config["solc"]["optimizer"],
"libraries": {
Path(source_fp).name: {lib: self._project[lib][-1].address for lib in libs}
},
}
self._flattener = Flattener(source_fp, self._name, remaps, compiler_settings)
build_json = self._build
return {
"standard_json_input": self._flattener.standard_input_json,
"contract_name": build_json["contractName"],
"compiler_version": build_json["compiler"]["version"],
"optimizer_enabled": build_json["compiler"]["optimizer"]["enabled"],
"optimizer_runs": build_json["compiler"]["optimizer"]["runs"],
"license_identifier": self._flattener.license,
"bytecode_len": len(build_json["bytecode"]),
}
else:
raise TypeError(f"Unsupported language for source verification: {language}")
|
[
"def",
"get_verification_info",
"(",
"self",
")",
"->",
"Dict",
":",
"language",
"=",
"self",
".",
"_build",
"[",
"\"language\"",
"]",
"if",
"language",
"==",
"\"Vyper\"",
":",
"raise",
"TypeError",
"(",
"\"Etherscan does not support API verification of source code \"",
"\"for vyper contracts. You need to verify the source manually\"",
")",
"elif",
"language",
"==",
"\"Solidity\"",
":",
"if",
"self",
".",
"_flattener",
"is",
"None",
":",
"source_fp",
"=",
"(",
"Path",
"(",
"self",
".",
"_project",
".",
"_path",
")",
".",
"joinpath",
"(",
"self",
".",
"_build",
"[",
"\"sourcePath\"",
"]",
")",
".",
"resolve",
"(",
")",
".",
"as_posix",
"(",
")",
")",
"config",
"=",
"self",
".",
"_project",
".",
"_compiler_config",
"remaps",
"=",
"dict",
"(",
"map",
"(",
"lambda",
"s",
":",
"s",
".",
"split",
"(",
"\"=\"",
",",
"1",
")",
",",
"compiler",
".",
"_get_solc_remappings",
"(",
"config",
"[",
"\"solc\"",
"]",
"[",
"\"remappings\"",
"]",
")",
",",
")",
")",
"libs",
"=",
"{",
"lib",
".",
"strip",
"(",
"\"_\"",
")",
"for",
"lib",
"in",
"re",
".",
"findall",
"(",
"\"_{1,}[^_]*_{1,}\"",
",",
"self",
".",
"bytecode",
")",
"}",
"compiler_settings",
"=",
"{",
"\"evmVersion\"",
":",
"self",
".",
"_build",
"[",
"\"compiler\"",
"]",
"[",
"\"evm_version\"",
"]",
",",
"\"optimizer\"",
":",
"config",
"[",
"\"solc\"",
"]",
"[",
"\"optimizer\"",
"]",
",",
"\"libraries\"",
":",
"{",
"Path",
"(",
"source_fp",
")",
".",
"name",
":",
"{",
"lib",
":",
"self",
".",
"_project",
"[",
"lib",
"]",
"[",
"-",
"1",
"]",
".",
"address",
"for",
"lib",
"in",
"libs",
"}",
"}",
",",
"}",
"self",
".",
"_flattener",
"=",
"Flattener",
"(",
"source_fp",
",",
"self",
".",
"_name",
",",
"remaps",
",",
"compiler_settings",
")",
"build_json",
"=",
"self",
".",
"_build",
"return",
"{",
"\"standard_json_input\"",
":",
"self",
".",
"_flattener",
".",
"standard_input_json",
",",
"\"contract_name\"",
":",
"build_json",
"[",
"\"contractName\"",
"]",
",",
"\"compiler_version\"",
":",
"build_json",
"[",
"\"compiler\"",
"]",
"[",
"\"version\"",
"]",
",",
"\"optimizer_enabled\"",
":",
"build_json",
"[",
"\"compiler\"",
"]",
"[",
"\"optimizer\"",
"]",
"[",
"\"enabled\"",
"]",
",",
"\"optimizer_runs\"",
":",
"build_json",
"[",
"\"compiler\"",
"]",
"[",
"\"optimizer\"",
"]",
"[",
"\"runs\"",
"]",
",",
"\"license_identifier\"",
":",
"self",
".",
"_flattener",
".",
"license",
",",
"\"bytecode_len\"",
":",
"len",
"(",
"build_json",
"[",
"\"bytecode\"",
"]",
")",
",",
"}",
"else",
":",
"raise",
"TypeError",
"(",
"f\"Unsupported language for source verification: {language}\"",
")"
] |
https://github.com/eth-brownie/brownie/blob/754bda9f0a294b2beb86453d5eca4ff769a877c8/brownie/network/contract.py#L261-L309
|
||
tomplus/kubernetes_asyncio
|
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
|
kubernetes_asyncio/client/models/v1_pod_security_context.py
|
python
|
V1PodSecurityContext.windows_options
|
(self)
|
return self._windows_options
|
Gets the windows_options of this V1PodSecurityContext. # noqa: E501
:return: The windows_options of this V1PodSecurityContext. # noqa: E501
:rtype: V1WindowsSecurityContextOptions
|
Gets the windows_options of this V1PodSecurityContext. # noqa: E501
|
[
"Gets",
"the",
"windows_options",
"of",
"this",
"V1PodSecurityContext",
".",
"#",
"noqa",
":",
"E501"
] |
def windows_options(self):
"""Gets the windows_options of this V1PodSecurityContext. # noqa: E501
:return: The windows_options of this V1PodSecurityContext. # noqa: E501
:rtype: V1WindowsSecurityContextOptions
"""
return self._windows_options
|
[
"def",
"windows_options",
"(",
"self",
")",
":",
"return",
"self",
".",
"_windows_options"
] |
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/models/v1_pod_security_context.py#L304-L311
|
|
OpenEndedGroup/Field
|
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
|
Contents/lib/python/poplib.py
|
python
|
POP3.list
|
(self, which=None)
|
return self._longcmd('LIST')
|
Request listing, return result.
Result without a message number argument is in form
['response', ['mesg_num octets', ...], octets].
Result when a message number argument is given is a
single response: the "scan listing" for that message.
|
Request listing, return result.
|
[
"Request",
"listing",
"return",
"result",
"."
] |
def list(self, which=None):
"""Request listing, return result.
Result without a message number argument is in form
['response', ['mesg_num octets', ...], octets].
Result when a message number argument is given is a
single response: the "scan listing" for that message.
"""
if which is not None:
return self._shortcmd('LIST %s' % which)
return self._longcmd('LIST')
|
[
"def",
"list",
"(",
"self",
",",
"which",
"=",
"None",
")",
":",
"if",
"which",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_shortcmd",
"(",
"'LIST %s'",
"%",
"which",
")",
"return",
"self",
".",
"_longcmd",
"(",
"'LIST'",
")"
] |
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/poplib.py#L218-L229
|
|
adobe/antialiased-cnns
|
b27a34a26f3ab039113d44d83c54d0428598ac9c
|
antialiased_cnns/alexnet.py
|
python
|
alexnet
|
(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs)
|
return model
|
AlexNet model architecture from the
`"One weird trick..." <https://arxiv.org/abs/1404.5997>`_ paper.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
filter_size (int): [4] Antialiasing filter size
|
AlexNet model architecture from the
`"One weird trick..." <https://arxiv.org/abs/1404.5997>`_ paper.
|
[
"AlexNet",
"model",
"architecture",
"from",
"the",
"One",
"weird",
"trick",
"...",
"<https",
":",
"//",
"arxiv",
".",
"org",
"/",
"abs",
"/",
"1404",
".",
"5997",
">",
"_",
"paper",
"."
] |
def alexnet(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs):
"""AlexNet model architecture from the
`"One weird trick..." <https://arxiv.org/abs/1404.5997>`_ paper.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
filter_size (int): [4] Antialiasing filter size
"""
model = AlexNet(filter_size=filter_size, **kwargs)
if pretrained:
if(filter_size==4 and not _force_nonfinetuned):
model.load_state_dict(model_zoo.load_url(model_urls['alexnet_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict'])
else:
model.load_state_dict(model_zoo.load_url(model_urls['alexnet_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict'])
return model
|
[
"def",
"alexnet",
"(",
"pretrained",
"=",
"False",
",",
"filter_size",
"=",
"4",
",",
"_force_nonfinetuned",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"model",
"=",
"AlexNet",
"(",
"filter_size",
"=",
"filter_size",
",",
"*",
"*",
"kwargs",
")",
"if",
"pretrained",
":",
"if",
"(",
"filter_size",
"==",
"4",
"and",
"not",
"_force_nonfinetuned",
")",
":",
"model",
".",
"load_state_dict",
"(",
"model_zoo",
".",
"load_url",
"(",
"model_urls",
"[",
"'alexnet_lpf4_finetune'",
"]",
",",
"map_location",
"=",
"'cpu'",
",",
"check_hash",
"=",
"True",
")",
"[",
"'state_dict'",
"]",
")",
"else",
":",
"model",
".",
"load_state_dict",
"(",
"model_zoo",
".",
"load_url",
"(",
"model_urls",
"[",
"'alexnet_lpf%i'",
"%",
"filter_size",
"]",
",",
"map_location",
"=",
"'cpu'",
",",
"check_hash",
"=",
"True",
")",
"[",
"'state_dict'",
"]",
")",
"return",
"model"
] |
https://github.com/adobe/antialiased-cnns/blob/b27a34a26f3ab039113d44d83c54d0428598ac9c/antialiased_cnns/alexnet.py#L110-L124
|
|
JaniceWuo/MovieRecommend
|
4c86db64ca45598917d304f535413df3bc9fea65
|
movierecommend/venv1/Lib/site-packages/pytz/__init__.py
|
python
|
_FixedOffset.localize
|
(self, dt, is_dst=False)
|
return dt.replace(tzinfo=self)
|
Convert naive time to local time
|
Convert naive time to local time
|
[
"Convert",
"naive",
"time",
"to",
"local",
"time"
] |
def localize(self, dt, is_dst=False):
'''Convert naive time to local time'''
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
|
[
"def",
"localize",
"(",
"self",
",",
"dt",
",",
"is_dst",
"=",
"False",
")",
":",
"if",
"dt",
".",
"tzinfo",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"'Not naive datetime (tzinfo is already set)'",
")",
"return",
"dt",
".",
"replace",
"(",
"tzinfo",
"=",
"self",
")"
] |
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/pytz/__init__.py#L395-L399
|
|
haiwen/seahub
|
e92fcd44e3e46260597d8faa9347cb8222b8b10d
|
seahub/base/models.py
|
python
|
UserStarredFilesManager.add_starred_item
|
(self, email, repo_id, path, is_dir, org_id=-1)
|
return starred_item
|
[] |
def add_starred_item(self, email, repo_id, path, is_dir, org_id=-1):
starred_item = UserStarredFiles.objects.create(email=email,
repo_id=repo_id, path=path, is_dir=is_dir, org_id=org_id)
return starred_item
|
[
"def",
"add_starred_item",
"(",
"self",
",",
"email",
",",
"repo_id",
",",
"path",
",",
"is_dir",
",",
"org_id",
"=",
"-",
"1",
")",
":",
"starred_item",
"=",
"UserStarredFiles",
".",
"objects",
".",
"create",
"(",
"email",
"=",
"email",
",",
"repo_id",
"=",
"repo_id",
",",
"path",
"=",
"path",
",",
"is_dir",
"=",
"is_dir",
",",
"org_id",
"=",
"org_id",
")",
"return",
"starred_item"
] |
https://github.com/haiwen/seahub/blob/e92fcd44e3e46260597d8faa9347cb8222b8b10d/seahub/base/models.py#L147-L152
|
|||
feisuzhu/thbattle
|
ac0dee1b2d86de7664289cf432b157ef25427ba1
|
src/pyglet/image/__init__.py
|
python
|
create
|
(width, height, pattern=None)
|
return pattern.create_image(width, height)
|
Create an image optionally filled with the given pattern.
:note: You can make no assumptions about the return type; usually it will
be ImageData or CompressedImageData, but patterns are free to return
any subclass of AbstractImage.
:Parameters:
`width` : int
Width of image to create
`height` : int
Height of image to create
`pattern` : ImagePattern or None
Pattern to fill image with. If unspecified, the image will
initially be transparent.
:rtype: AbstractImage
|
Create an image optionally filled with the given pattern.
|
[
"Create",
"an",
"image",
"optionally",
"filled",
"with",
"the",
"given",
"pattern",
"."
] |
def create(width, height, pattern=None):
'''Create an image optionally filled with the given pattern.
:note: You can make no assumptions about the return type; usually it will
be ImageData or CompressedImageData, but patterns are free to return
any subclass of AbstractImage.
:Parameters:
`width` : int
Width of image to create
`height` : int
Height of image to create
`pattern` : ImagePattern or None
Pattern to fill image with. If unspecified, the image will
initially be transparent.
:rtype: AbstractImage
'''
if not pattern:
pattern = SolidColorImagePattern()
return pattern.create_image(width, height)
|
[
"def",
"create",
"(",
"width",
",",
"height",
",",
"pattern",
"=",
"None",
")",
":",
"if",
"not",
"pattern",
":",
"pattern",
"=",
"SolidColorImagePattern",
"(",
")",
"return",
"pattern",
".",
"create_image",
"(",
"width",
",",
"height",
")"
] |
https://github.com/feisuzhu/thbattle/blob/ac0dee1b2d86de7664289cf432b157ef25427ba1/src/pyglet/image/__init__.py#L208-L228
|
|
itsjohncs/superzippy
|
b62c3ef5d7e065b85e0024a9acf77b5d067650f9
|
superzippy/bootstrapper/zipsite.py
|
python
|
split_zip_path
|
(path)
|
return None, path
|
Takes a path that includes at most a single zip file as a directory and
splits the path between what's outside of the zip file and what's inside.
:param path: The path.
:returns: ``(first_path, second_part)``
>>> zipsite.split_zip_path("/tmp/testing/stuff.zip/hi/bar")
('/tmp/testing/stuff.zip', 'hi/bar')
>>> zipsite.split_zip_path("/tmp/testing/stuff.zip")
('/tmp/testing/stuff.zip', '')
>>> zipsite.split_zip_path("/tmp/testing/stuff.zip/")
('/tmp/testing/stuff.zip', '')
|
Takes a path that includes at most a single zip file as a directory and
splits the path between what's outside of the zip file and what's inside.
|
[
"Takes",
"a",
"path",
"that",
"includes",
"at",
"most",
"a",
"single",
"zip",
"file",
"as",
"a",
"directory",
"and",
"splits",
"the",
"path",
"between",
"what",
"s",
"outside",
"of",
"the",
"zip",
"file",
"and",
"what",
"s",
"inside",
"."
] |
def split_zip_path(path):
"""
Takes a path that includes at most a single zip file as a directory and
splits the path between what's outside of the zip file and what's inside.
:param path: The path.
:returns: ``(first_path, second_part)``
>>> zipsite.split_zip_path("/tmp/testing/stuff.zip/hi/bar")
('/tmp/testing/stuff.zip', 'hi/bar')
>>> zipsite.split_zip_path("/tmp/testing/stuff.zip")
('/tmp/testing/stuff.zip', '')
>>> zipsite.split_zip_path("/tmp/testing/stuff.zip/")
('/tmp/testing/stuff.zip', '')
"""
drive, path = os.path.splitdrive(path)
path_parts = get_path_parts(path)
for i in range(len(path_parts)):
front = os.path.join(drive, *path_parts[:i + 1])
if path_parts[i + 1:]:
tail = os.path.join(*path_parts[i + 1:])
else:
tail = ""
if zipfile.is_zipfile(front):
return front, tail
return None, path
|
[
"def",
"split_zip_path",
"(",
"path",
")",
":",
"drive",
",",
"path",
"=",
"os",
".",
"path",
".",
"splitdrive",
"(",
"path",
")",
"path_parts",
"=",
"get_path_parts",
"(",
"path",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"path_parts",
")",
")",
":",
"front",
"=",
"os",
".",
"path",
".",
"join",
"(",
"drive",
",",
"*",
"path_parts",
"[",
":",
"i",
"+",
"1",
"]",
")",
"if",
"path_parts",
"[",
"i",
"+",
"1",
":",
"]",
":",
"tail",
"=",
"os",
".",
"path",
".",
"join",
"(",
"*",
"path_parts",
"[",
"i",
"+",
"1",
":",
"]",
")",
"else",
":",
"tail",
"=",
"\"\"",
"if",
"zipfile",
".",
"is_zipfile",
"(",
"front",
")",
":",
"return",
"front",
",",
"tail",
"return",
"None",
",",
"path"
] |
https://github.com/itsjohncs/superzippy/blob/b62c3ef5d7e065b85e0024a9acf77b5d067650f9/superzippy/bootstrapper/zipsite.py#L76-L107
|
|
brendano/tweetmotif
|
1b0b1e3a941745cd5a26eba01f554688b7c4b27e
|
everything_else/djfrontend/django-1.0.2/contrib/gis/sitemaps/views.py
|
python
|
kml
|
(request, label, model, field_name=None, compress=False)
|
return render('gis/kml/placemarks.kml', {'places' : placemarks})
|
This view generates KML for the given app label, model, and field name.
The model's default manager must be GeoManager, and the field name
must be that of a geographic field.
|
This view generates KML for the given app label, model, and field name.
|
[
"This",
"view",
"generates",
"KML",
"for",
"the",
"given",
"app",
"label",
"model",
"and",
"field",
"name",
"."
] |
def kml(request, label, model, field_name=None, compress=False):
"""
This view generates KML for the given app label, model, and field name.
The model's default manager must be GeoManager, and the field name
must be that of a geographic field.
"""
placemarks = []
klass = get_model(label, model)
if not klass:
raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model))
if field_name:
try:
info = klass._meta.get_field_by_name(field_name)
if not isinstance(info[0], GeometryField):
raise Exception
except:
raise Http404('Invalid geometry field.')
if SpatialBackend.postgis:
# PostGIS will take care of transformation.
placemarks = klass._default_manager.kml(field_name=field_name)
else:
# There's no KML method on Oracle or MySQL, so we use the `kml`
# attribute of the lazy geometry instead.
placemarks = []
if SpatialBackend.oracle:
qs = klass._default_manager.transform(4326, field_name=field_name)
else:
qs = klass._default_manager.all()
for mod in qs:
setattr(mod, 'kml', getattr(mod, field_name).kml)
placemarks.append(mod)
# Getting the render function and rendering to the correct.
if compress:
render = render_to_kmz
else:
render = render_to_kml
return render('gis/kml/placemarks.kml', {'places' : placemarks})
|
[
"def",
"kml",
"(",
"request",
",",
"label",
",",
"model",
",",
"field_name",
"=",
"None",
",",
"compress",
"=",
"False",
")",
":",
"placemarks",
"=",
"[",
"]",
"klass",
"=",
"get_model",
"(",
"label",
",",
"model",
")",
"if",
"not",
"klass",
":",
"raise",
"Http404",
"(",
"'You must supply a valid app label and module name. Got \"%s.%s\"'",
"%",
"(",
"label",
",",
"model",
")",
")",
"if",
"field_name",
":",
"try",
":",
"info",
"=",
"klass",
".",
"_meta",
".",
"get_field_by_name",
"(",
"field_name",
")",
"if",
"not",
"isinstance",
"(",
"info",
"[",
"0",
"]",
",",
"GeometryField",
")",
":",
"raise",
"Exception",
"except",
":",
"raise",
"Http404",
"(",
"'Invalid geometry field.'",
")",
"if",
"SpatialBackend",
".",
"postgis",
":",
"# PostGIS will take care of transformation.",
"placemarks",
"=",
"klass",
".",
"_default_manager",
".",
"kml",
"(",
"field_name",
"=",
"field_name",
")",
"else",
":",
"# There's no KML method on Oracle or MySQL, so we use the `kml`",
"# attribute of the lazy geometry instead.",
"placemarks",
"=",
"[",
"]",
"if",
"SpatialBackend",
".",
"oracle",
":",
"qs",
"=",
"klass",
".",
"_default_manager",
".",
"transform",
"(",
"4326",
",",
"field_name",
"=",
"field_name",
")",
"else",
":",
"qs",
"=",
"klass",
".",
"_default_manager",
".",
"all",
"(",
")",
"for",
"mod",
"in",
"qs",
":",
"setattr",
"(",
"mod",
",",
"'kml'",
",",
"getattr",
"(",
"mod",
",",
"field_name",
")",
".",
"kml",
")",
"placemarks",
".",
"append",
"(",
"mod",
")",
"# Getting the render function and rendering to the correct.",
"if",
"compress",
":",
"render",
"=",
"render_to_kmz",
"else",
":",
"render",
"=",
"render_to_kml",
"return",
"render",
"(",
"'gis/kml/placemarks.kml'",
",",
"{",
"'places'",
":",
"placemarks",
"}",
")"
] |
https://github.com/brendano/tweetmotif/blob/1b0b1e3a941745cd5a26eba01f554688b7c4b27e/everything_else/djfrontend/django-1.0.2/contrib/gis/sitemaps/views.py#L62-L102
|
|
apple/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
calendarserver/tools/dashboard.py
|
python
|
JobsWindow.updateRowCount
|
(self)
|
[] |
def updateRowCount(self):
self.rowCount = defaultIfNone(self.readItem("jobcount"), 0)
|
[
"def",
"updateRowCount",
"(",
"self",
")",
":",
"self",
".",
"rowCount",
"=",
"defaultIfNone",
"(",
"self",
".",
"readItem",
"(",
"\"jobcount\"",
")",
",",
"0",
")"
] |
https://github.com/apple/ccs-calendarserver/blob/13c706b985fb728b9aab42dc0fef85aae21921c3/calendarserver/tools/dashboard.py#L1019-L1020
|
||||
pixelogik/NearPy
|
1b534b864d320d875508e95cd2b76b6d8c07a90b
|
nearpy/storage/storage_redis.py
|
python
|
RedisStorage.clean_all_buckets
|
(self)
|
Removes all buckets from all hashes and their content.
|
Removes all buckets from all hashes and their content.
|
[
"Removes",
"all",
"buckets",
"from",
"all",
"hashes",
"and",
"their",
"content",
"."
] |
def clean_all_buckets(self):
"""
Removes all buckets from all hashes and their content.
"""
bucket_keys = self.redis_object.keys(pattern='nearpy_*')
if len(bucket_keys) > 0:
self.redis_object.delete(*bucket_keys)
|
[
"def",
"clean_all_buckets",
"(",
"self",
")",
":",
"bucket_keys",
"=",
"self",
".",
"redis_object",
".",
"keys",
"(",
"pattern",
"=",
"'nearpy_*'",
")",
"if",
"len",
"(",
"bucket_keys",
")",
">",
"0",
":",
"self",
".",
"redis_object",
".",
"delete",
"(",
"*",
"bucket_keys",
")"
] |
https://github.com/pixelogik/NearPy/blob/1b534b864d320d875508e95cd2b76b6d8c07a90b/nearpy/storage/storage_redis.py#L191-L197
|
||
edgewall/trac
|
beb3e4eaf1e0a456d801a50a8614ecab06de29fc
|
trac/ticket/query.py
|
python
|
Query.get_sql
|
(self, req=None, cached_ids=None, authname=None)
|
return "".join(sql), args
|
Return a (sql, params) tuple for the query.
|
Return a (sql, params) tuple for the query.
|
[
"Return",
"a",
"(",
"sql",
"params",
")",
"tuple",
"for",
"the",
"query",
"."
] |
def get_sql(self, req=None, cached_ids=None, authname=None):
"""Return a (sql, params) tuple for the query.
"""
if req is not None:
authname = req.authname
self.get_columns()
# Build the list of actual columns to query
cols = []
def add_cols(*args):
for col in args:
if col not in cols:
cols.append(col)
add_cols(*self.cols) # remove duplicated cols
if self.group and self.group not in cols:
add_cols(self.group)
if self.rows:
add_cols('reporter', *self.rows)
add_cols('status', 'priority', 'time', 'changetime', self.order)
add_cols(*sorted(self.constraint_cols))
custom_fields = {f['name'] for f in self.fields if f.get('custom')}
list_fields = {f['name'] for f in self.fields
if f['type'] == 'text' and
f.get('format') == 'list'}
cols_custom = [k for k in cols if k in custom_fields]
use_joins = len(cols_custom) <= 1
enum_columns = [col for col in ('resolution', 'priority', 'severity',
'type')
if col not in custom_fields and
col in ('priority', self.order, self.group)]
joined_columns = [col for col in ('milestone', 'version')
if col not in custom_fields and
col in (self.order, self.group)]
sql = []
sql.append("SELECT " + ",".join('t.%s AS %s' % (c, c) for c in cols
if c not in custom_fields))
if 'priority' in enum_columns:
sql.append(",priority.value AS _priority_value")
with self.env.db_query as db:
if use_joins:
# Use LEFT OUTER JOIN for ticket_custom table
sql.extend(",%(qk)s.value AS %(qk)s" % {'qk': db.quote(k)}
for k in cols_custom)
sql.append("\nFROM ticket AS t")
sql.extend("\n LEFT OUTER JOIN ticket_custom AS %(qk)s ON "
"(%(qk)s.ticket=t.id AND %(qk)s.name='%(k)s')"
% {'qk': db.quote(k), 'k': k} for k in cols_custom)
else:
# Use MAX(CASE ... END) ... GROUP BY ... for ticket_custom
# table
sql.extend(",c.%(qk)s AS %(qk)s" % {'qk': db.quote(k)}
for k in cols_custom)
sql.append("\nFROM ticket AS t"
"\n LEFT OUTER JOIN (SELECT\n ticket AS id")
sql.extend(",\n MAX(CASE WHEN name='%s' THEN value END) "
"AS %s" % (k, db.quote(k)) for k in cols_custom)
sql.append("\n FROM ticket_custom AS tc")
sql.append("\n WHERE name IN (%s)" %
','.join("'%s'" % k for k in cols_custom))
sql.append("\n GROUP BY tc.ticket) AS c ON c.id=t.id")
# Join with the enum table for proper sorting
sql.extend("\n LEFT OUTER JOIN enum AS %(col)s ON "
"(%(col)s.type='%(type)s' AND %(col)s.name=t.%(col)s)" %
{'col': col,
'type': 'ticket_type' if col == 'type' else col}
for col in enum_columns)
# Join with the version/milestone tables for proper sorting
sql.extend("\n LEFT OUTER JOIN %(col)s ON (%(col)s.name=%(col)s)"
% {'col': col} for col in joined_columns)
def user_parse_date(value):
if value:
try:
return user_time(req, parse_date, value)
except TracError as e:
errors.append(str(e))
return None
def get_constraint_sql(name, value, mode, neg):
is_custom_field = name in custom_fields
if not is_custom_field:
col = 't.' + name
elif use_joins:
col = db.quote(name) + '.value'
else:
col = 'c.' + db.quote(name)
value = value[len(mode) + neg:]
if name in self.time_fields:
if not value:
clause = "COALESCE({0},''){1}=%s" \
.format(col, '!' if neg else '')
args = ['']
return clause, args
if '..' in value:
(start, end) = [each.strip() for each in
value.split('..', 1)]
else:
(start, end) = (value.strip(), '')
start = user_parse_date(start)
end = user_parse_date(end)
clause = args = None
if start is not None and end is not None:
clause = "{0}({1}>=%s AND {1}<%s)" \
.format('NOT ' if neg else '', col)
args = [start, end]
elif start is not None:
clause = "{0}{1}>=%s" \
.format('NOT ' if neg else '', col)
args = [start]
elif end is not None:
clause = "{0}{1}<%s" \
.format('NOT ' if neg else '', col)
args = [end]
else:
return None
if is_custom_field:
args = [_datetime_to_db_str(arg, True) for arg in args]
else:
args = [to_utimestamp(arg) for arg in args]
return clause, args
def split_words(splittable):
return [w.strip() for wl in
([x[1:-1]] if x[0] == x[-1] == '"' else x.split()
for x in re.split('("[^"]+")', splittable) if x)
for w in wl]
if mode == '~' and name in list_fields:
words = split_words(value)
clauses, args = [], []
for word in words:
cneg = ''
if word.startswith('-'):
cneg = 'NOT '
word = word[1:]
if not word:
continue
clauses.append("COALESCE(%s,'') %s%s" % (col, cneg,
db.like()))
args.append('%' + db.like_escape(word) + '%')
if not clauses:
return None
return (('NOT ' if neg else '')
+ '(' + ' AND '.join(clauses) + ')', args)
if mode == '':
return ("COALESCE(%s,'')%s=%%s"
% (col, '!' if neg else ''), (value, ))
if not value:
return None
value = db.like_escape(value)
if mode == '~':
value = '%' + value + '%'
elif mode == '^':
value += '%'
elif mode == '$':
value = '%' + value
return ("COALESCE(%s,'') %s%s" % (col, 'NOT ' if neg else '',
db.like()),
(value, ))
def get_clause_sql(constraints):
clauses = []
for k in sorted(constraints):
v = constraints[k]
if authname is not None:
v = [val.replace('$USER', authname) for val in v]
# Determine the match mode of the constraint (contains,
# starts-with, negation, etc.)
neg = v[0].startswith('!')
mode = ''
if len(v[0]) > neg and v[0][neg] in ('~', '^', '$'):
mode = v[0][neg]
# Special case id ranges
if k == 'id':
ranges = Ranges()
for r in v:
r = r.replace('!', '')
try:
ranges.appendrange(r)
except Exception:
errors.append(_("Invalid ticket id list: "
"%(value)s", value=r))
ids = []
id_clauses = []
for a, b in ranges.pairs:
if a == b:
ids.append(str(a))
else:
id_clauses.append('t.id BETWEEN %s AND %s')
args.append(a)
args.append(b)
if ids:
id_clauses.append('t.id IN (%s)' % (','.join(ids)))
if id_clauses:
clauses.append('%s(%s)'
% ('NOT 'if neg else '',
' OR '.join(id_clauses)))
# Special case for exact matches on multiple values
elif not mode and len(v) > 1 and k not in self.time_fields:
if k not in custom_fields:
col = 't.' + k
elif use_joins:
col = db.quote(k) + '.value'
else:
col = 'c.' + db.quote(k)
clauses.append("COALESCE(%s,'') %sIN (%s)"
% (col, 'NOT ' if neg else '',
','.join('%s' for val in v)))
args.extend([val[neg:] for val in v])
elif v:
constraint_sql = [get_constraint_sql(k, val, mode, neg)
for val in v]
constraint_sql = list(filter(None, constraint_sql))
if not constraint_sql:
continue
if neg:
clauses.append("(" + " AND ".join(
[item[0] for item in constraint_sql]) + ")")
else:
clauses.append("(" + " OR ".join(
[item[0] for item in constraint_sql]) + ")")
for item in constraint_sql:
args.extend(item[1])
return " AND ".join(clauses)
args = []
errors = []
clauses = list(filter(None, map(get_clause_sql, self.constraints)))
if clauses:
sql.append("\nWHERE ")
sql.append(" OR ".join('(%s)' % c for c in clauses))
if cached_ids:
sql.append(" OR ")
sql.append("t.id in (%s)" %
(','.join(str(id) for id in cached_ids)))
sql.append("\nORDER BY ")
order_cols = [(self.order, self.desc)]
if self.group and self.group != self.order:
order_cols.insert(0, (self.group, self.groupdesc))
for name, desc in order_cols:
if name in enum_columns:
col = name + '.value'
elif name not in custom_fields:
col = 't.' + name
elif use_joins:
col = db.quote(name) + '.value'
else:
col = 'c.' + db.quote(name)
desc = ' DESC' if desc else ''
# FIXME: This is a somewhat ugly hack. Can we also have the
# column type for this? If it's an integer, we do
# first one, if text, we do 'else'
if name in custom_fields:
coalesce_arg = "''"
elif name == 'id' or name in self.time_fields:
coalesce_arg = '0'
else:
coalesce_arg = "''"
sql.append("COALESCE(%(col)s,%(arg)s)=%(arg)s%(desc)s," %
{'col': col, 'arg': coalesce_arg, 'desc': desc})
if name in enum_columns:
# These values must be compared as ints, not as strings
sql.append(db.cast(col, 'int') + desc)
elif name == 'milestone' and name not in custom_fields:
sql.append("COALESCE(milestone.completed,0)=0%s,"
"milestone.completed%s,"
"COALESCE(milestone.due,0)=0%s,"
"milestone.due%s,%s%s"
% (desc, desc, desc, desc, col, desc))
elif name == 'version' and name not in custom_fields:
sql.append("COALESCE(version.time,0)=0%s,"
"version.time%s,%s%s"
% (desc, desc, col, desc))
else:
sql.append("%s%s" % (col, desc))
if name == self.group and not name == self.order:
sql.append(",")
if self.order != 'id':
sql.append(",t.id")
if errors:
raise QueryValueError(errors)
return "".join(sql), args
|
[
"def",
"get_sql",
"(",
"self",
",",
"req",
"=",
"None",
",",
"cached_ids",
"=",
"None",
",",
"authname",
"=",
"None",
")",
":",
"if",
"req",
"is",
"not",
"None",
":",
"authname",
"=",
"req",
".",
"authname",
"self",
".",
"get_columns",
"(",
")",
"# Build the list of actual columns to query",
"cols",
"=",
"[",
"]",
"def",
"add_cols",
"(",
"*",
"args",
")",
":",
"for",
"col",
"in",
"args",
":",
"if",
"col",
"not",
"in",
"cols",
":",
"cols",
".",
"append",
"(",
"col",
")",
"add_cols",
"(",
"*",
"self",
".",
"cols",
")",
"# remove duplicated cols",
"if",
"self",
".",
"group",
"and",
"self",
".",
"group",
"not",
"in",
"cols",
":",
"add_cols",
"(",
"self",
".",
"group",
")",
"if",
"self",
".",
"rows",
":",
"add_cols",
"(",
"'reporter'",
",",
"*",
"self",
".",
"rows",
")",
"add_cols",
"(",
"'status'",
",",
"'priority'",
",",
"'time'",
",",
"'changetime'",
",",
"self",
".",
"order",
")",
"add_cols",
"(",
"*",
"sorted",
"(",
"self",
".",
"constraint_cols",
")",
")",
"custom_fields",
"=",
"{",
"f",
"[",
"'name'",
"]",
"for",
"f",
"in",
"self",
".",
"fields",
"if",
"f",
".",
"get",
"(",
"'custom'",
")",
"}",
"list_fields",
"=",
"{",
"f",
"[",
"'name'",
"]",
"for",
"f",
"in",
"self",
".",
"fields",
"if",
"f",
"[",
"'type'",
"]",
"==",
"'text'",
"and",
"f",
".",
"get",
"(",
"'format'",
")",
"==",
"'list'",
"}",
"cols_custom",
"=",
"[",
"k",
"for",
"k",
"in",
"cols",
"if",
"k",
"in",
"custom_fields",
"]",
"use_joins",
"=",
"len",
"(",
"cols_custom",
")",
"<=",
"1",
"enum_columns",
"=",
"[",
"col",
"for",
"col",
"in",
"(",
"'resolution'",
",",
"'priority'",
",",
"'severity'",
",",
"'type'",
")",
"if",
"col",
"not",
"in",
"custom_fields",
"and",
"col",
"in",
"(",
"'priority'",
",",
"self",
".",
"order",
",",
"self",
".",
"group",
")",
"]",
"joined_columns",
"=",
"[",
"col",
"for",
"col",
"in",
"(",
"'milestone'",
",",
"'version'",
")",
"if",
"col",
"not",
"in",
"custom_fields",
"and",
"col",
"in",
"(",
"self",
".",
"order",
",",
"self",
".",
"group",
")",
"]",
"sql",
"=",
"[",
"]",
"sql",
".",
"append",
"(",
"\"SELECT \"",
"+",
"\",\"",
".",
"join",
"(",
"'t.%s AS %s'",
"%",
"(",
"c",
",",
"c",
")",
"for",
"c",
"in",
"cols",
"if",
"c",
"not",
"in",
"custom_fields",
")",
")",
"if",
"'priority'",
"in",
"enum_columns",
":",
"sql",
".",
"append",
"(",
"\",priority.value AS _priority_value\"",
")",
"with",
"self",
".",
"env",
".",
"db_query",
"as",
"db",
":",
"if",
"use_joins",
":",
"# Use LEFT OUTER JOIN for ticket_custom table",
"sql",
".",
"extend",
"(",
"\",%(qk)s.value AS %(qk)s\"",
"%",
"{",
"'qk'",
":",
"db",
".",
"quote",
"(",
"k",
")",
"}",
"for",
"k",
"in",
"cols_custom",
")",
"sql",
".",
"append",
"(",
"\"\\nFROM ticket AS t\"",
")",
"sql",
".",
"extend",
"(",
"\"\\n LEFT OUTER JOIN ticket_custom AS %(qk)s ON \"",
"\"(%(qk)s.ticket=t.id AND %(qk)s.name='%(k)s')\"",
"%",
"{",
"'qk'",
":",
"db",
".",
"quote",
"(",
"k",
")",
",",
"'k'",
":",
"k",
"}",
"for",
"k",
"in",
"cols_custom",
")",
"else",
":",
"# Use MAX(CASE ... END) ... GROUP BY ... for ticket_custom",
"# table",
"sql",
".",
"extend",
"(",
"\",c.%(qk)s AS %(qk)s\"",
"%",
"{",
"'qk'",
":",
"db",
".",
"quote",
"(",
"k",
")",
"}",
"for",
"k",
"in",
"cols_custom",
")",
"sql",
".",
"append",
"(",
"\"\\nFROM ticket AS t\"",
"\"\\n LEFT OUTER JOIN (SELECT\\n ticket AS id\"",
")",
"sql",
".",
"extend",
"(",
"\",\\n MAX(CASE WHEN name='%s' THEN value END) \"",
"\"AS %s\"",
"%",
"(",
"k",
",",
"db",
".",
"quote",
"(",
"k",
")",
")",
"for",
"k",
"in",
"cols_custom",
")",
"sql",
".",
"append",
"(",
"\"\\n FROM ticket_custom AS tc\"",
")",
"sql",
".",
"append",
"(",
"\"\\n WHERE name IN (%s)\"",
"%",
"','",
".",
"join",
"(",
"\"'%s'\"",
"%",
"k",
"for",
"k",
"in",
"cols_custom",
")",
")",
"sql",
".",
"append",
"(",
"\"\\n GROUP BY tc.ticket) AS c ON c.id=t.id\"",
")",
"# Join with the enum table for proper sorting",
"sql",
".",
"extend",
"(",
"\"\\n LEFT OUTER JOIN enum AS %(col)s ON \"",
"\"(%(col)s.type='%(type)s' AND %(col)s.name=t.%(col)s)\"",
"%",
"{",
"'col'",
":",
"col",
",",
"'type'",
":",
"'ticket_type'",
"if",
"col",
"==",
"'type'",
"else",
"col",
"}",
"for",
"col",
"in",
"enum_columns",
")",
"# Join with the version/milestone tables for proper sorting",
"sql",
".",
"extend",
"(",
"\"\\n LEFT OUTER JOIN %(col)s ON (%(col)s.name=%(col)s)\"",
"%",
"{",
"'col'",
":",
"col",
"}",
"for",
"col",
"in",
"joined_columns",
")",
"def",
"user_parse_date",
"(",
"value",
")",
":",
"if",
"value",
":",
"try",
":",
"return",
"user_time",
"(",
"req",
",",
"parse_date",
",",
"value",
")",
"except",
"TracError",
"as",
"e",
":",
"errors",
".",
"append",
"(",
"str",
"(",
"e",
")",
")",
"return",
"None",
"def",
"get_constraint_sql",
"(",
"name",
",",
"value",
",",
"mode",
",",
"neg",
")",
":",
"is_custom_field",
"=",
"name",
"in",
"custom_fields",
"if",
"not",
"is_custom_field",
":",
"col",
"=",
"'t.'",
"+",
"name",
"elif",
"use_joins",
":",
"col",
"=",
"db",
".",
"quote",
"(",
"name",
")",
"+",
"'.value'",
"else",
":",
"col",
"=",
"'c.'",
"+",
"db",
".",
"quote",
"(",
"name",
")",
"value",
"=",
"value",
"[",
"len",
"(",
"mode",
")",
"+",
"neg",
":",
"]",
"if",
"name",
"in",
"self",
".",
"time_fields",
":",
"if",
"not",
"value",
":",
"clause",
"=",
"\"COALESCE({0},''){1}=%s\"",
".",
"format",
"(",
"col",
",",
"'!'",
"if",
"neg",
"else",
"''",
")",
"args",
"=",
"[",
"''",
"]",
"return",
"clause",
",",
"args",
"if",
"'..'",
"in",
"value",
":",
"(",
"start",
",",
"end",
")",
"=",
"[",
"each",
".",
"strip",
"(",
")",
"for",
"each",
"in",
"value",
".",
"split",
"(",
"'..'",
",",
"1",
")",
"]",
"else",
":",
"(",
"start",
",",
"end",
")",
"=",
"(",
"value",
".",
"strip",
"(",
")",
",",
"''",
")",
"start",
"=",
"user_parse_date",
"(",
"start",
")",
"end",
"=",
"user_parse_date",
"(",
"end",
")",
"clause",
"=",
"args",
"=",
"None",
"if",
"start",
"is",
"not",
"None",
"and",
"end",
"is",
"not",
"None",
":",
"clause",
"=",
"\"{0}({1}>=%s AND {1}<%s)\"",
".",
"format",
"(",
"'NOT '",
"if",
"neg",
"else",
"''",
",",
"col",
")",
"args",
"=",
"[",
"start",
",",
"end",
"]",
"elif",
"start",
"is",
"not",
"None",
":",
"clause",
"=",
"\"{0}{1}>=%s\"",
".",
"format",
"(",
"'NOT '",
"if",
"neg",
"else",
"''",
",",
"col",
")",
"args",
"=",
"[",
"start",
"]",
"elif",
"end",
"is",
"not",
"None",
":",
"clause",
"=",
"\"{0}{1}<%s\"",
".",
"format",
"(",
"'NOT '",
"if",
"neg",
"else",
"''",
",",
"col",
")",
"args",
"=",
"[",
"end",
"]",
"else",
":",
"return",
"None",
"if",
"is_custom_field",
":",
"args",
"=",
"[",
"_datetime_to_db_str",
"(",
"arg",
",",
"True",
")",
"for",
"arg",
"in",
"args",
"]",
"else",
":",
"args",
"=",
"[",
"to_utimestamp",
"(",
"arg",
")",
"for",
"arg",
"in",
"args",
"]",
"return",
"clause",
",",
"args",
"def",
"split_words",
"(",
"splittable",
")",
":",
"return",
"[",
"w",
".",
"strip",
"(",
")",
"for",
"wl",
"in",
"(",
"[",
"x",
"[",
"1",
":",
"-",
"1",
"]",
"]",
"if",
"x",
"[",
"0",
"]",
"==",
"x",
"[",
"-",
"1",
"]",
"==",
"'\"'",
"else",
"x",
".",
"split",
"(",
")",
"for",
"x",
"in",
"re",
".",
"split",
"(",
"'(\"[^\"]+\")'",
",",
"splittable",
")",
"if",
"x",
")",
"for",
"w",
"in",
"wl",
"]",
"if",
"mode",
"==",
"'~'",
"and",
"name",
"in",
"list_fields",
":",
"words",
"=",
"split_words",
"(",
"value",
")",
"clauses",
",",
"args",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"word",
"in",
"words",
":",
"cneg",
"=",
"''",
"if",
"word",
".",
"startswith",
"(",
"'-'",
")",
":",
"cneg",
"=",
"'NOT '",
"word",
"=",
"word",
"[",
"1",
":",
"]",
"if",
"not",
"word",
":",
"continue",
"clauses",
".",
"append",
"(",
"\"COALESCE(%s,'') %s%s\"",
"%",
"(",
"col",
",",
"cneg",
",",
"db",
".",
"like",
"(",
")",
")",
")",
"args",
".",
"append",
"(",
"'%'",
"+",
"db",
".",
"like_escape",
"(",
"word",
")",
"+",
"'%'",
")",
"if",
"not",
"clauses",
":",
"return",
"None",
"return",
"(",
"(",
"'NOT '",
"if",
"neg",
"else",
"''",
")",
"+",
"'('",
"+",
"' AND '",
".",
"join",
"(",
"clauses",
")",
"+",
"')'",
",",
"args",
")",
"if",
"mode",
"==",
"''",
":",
"return",
"(",
"\"COALESCE(%s,'')%s=%%s\"",
"%",
"(",
"col",
",",
"'!'",
"if",
"neg",
"else",
"''",
")",
",",
"(",
"value",
",",
")",
")",
"if",
"not",
"value",
":",
"return",
"None",
"value",
"=",
"db",
".",
"like_escape",
"(",
"value",
")",
"if",
"mode",
"==",
"'~'",
":",
"value",
"=",
"'%'",
"+",
"value",
"+",
"'%'",
"elif",
"mode",
"==",
"'^'",
":",
"value",
"+=",
"'%'",
"elif",
"mode",
"==",
"'$'",
":",
"value",
"=",
"'%'",
"+",
"value",
"return",
"(",
"\"COALESCE(%s,'') %s%s\"",
"%",
"(",
"col",
",",
"'NOT '",
"if",
"neg",
"else",
"''",
",",
"db",
".",
"like",
"(",
")",
")",
",",
"(",
"value",
",",
")",
")",
"def",
"get_clause_sql",
"(",
"constraints",
")",
":",
"clauses",
"=",
"[",
"]",
"for",
"k",
"in",
"sorted",
"(",
"constraints",
")",
":",
"v",
"=",
"constraints",
"[",
"k",
"]",
"if",
"authname",
"is",
"not",
"None",
":",
"v",
"=",
"[",
"val",
".",
"replace",
"(",
"'$USER'",
",",
"authname",
")",
"for",
"val",
"in",
"v",
"]",
"# Determine the match mode of the constraint (contains,",
"# starts-with, negation, etc.)",
"neg",
"=",
"v",
"[",
"0",
"]",
".",
"startswith",
"(",
"'!'",
")",
"mode",
"=",
"''",
"if",
"len",
"(",
"v",
"[",
"0",
"]",
")",
">",
"neg",
"and",
"v",
"[",
"0",
"]",
"[",
"neg",
"]",
"in",
"(",
"'~'",
",",
"'^'",
",",
"'$'",
")",
":",
"mode",
"=",
"v",
"[",
"0",
"]",
"[",
"neg",
"]",
"# Special case id ranges",
"if",
"k",
"==",
"'id'",
":",
"ranges",
"=",
"Ranges",
"(",
")",
"for",
"r",
"in",
"v",
":",
"r",
"=",
"r",
".",
"replace",
"(",
"'!'",
",",
"''",
")",
"try",
":",
"ranges",
".",
"appendrange",
"(",
"r",
")",
"except",
"Exception",
":",
"errors",
".",
"append",
"(",
"_",
"(",
"\"Invalid ticket id list: \"",
"\"%(value)s\"",
",",
"value",
"=",
"r",
")",
")",
"ids",
"=",
"[",
"]",
"id_clauses",
"=",
"[",
"]",
"for",
"a",
",",
"b",
"in",
"ranges",
".",
"pairs",
":",
"if",
"a",
"==",
"b",
":",
"ids",
".",
"append",
"(",
"str",
"(",
"a",
")",
")",
"else",
":",
"id_clauses",
".",
"append",
"(",
"'t.id BETWEEN %s AND %s'",
")",
"args",
".",
"append",
"(",
"a",
")",
"args",
".",
"append",
"(",
"b",
")",
"if",
"ids",
":",
"id_clauses",
".",
"append",
"(",
"'t.id IN (%s)'",
"%",
"(",
"','",
".",
"join",
"(",
"ids",
")",
")",
")",
"if",
"id_clauses",
":",
"clauses",
".",
"append",
"(",
"'%s(%s)'",
"%",
"(",
"'NOT '",
"if",
"neg",
"else",
"''",
",",
"' OR '",
".",
"join",
"(",
"id_clauses",
")",
")",
")",
"# Special case for exact matches on multiple values",
"elif",
"not",
"mode",
"and",
"len",
"(",
"v",
")",
">",
"1",
"and",
"k",
"not",
"in",
"self",
".",
"time_fields",
":",
"if",
"k",
"not",
"in",
"custom_fields",
":",
"col",
"=",
"'t.'",
"+",
"k",
"elif",
"use_joins",
":",
"col",
"=",
"db",
".",
"quote",
"(",
"k",
")",
"+",
"'.value'",
"else",
":",
"col",
"=",
"'c.'",
"+",
"db",
".",
"quote",
"(",
"k",
")",
"clauses",
".",
"append",
"(",
"\"COALESCE(%s,'') %sIN (%s)\"",
"%",
"(",
"col",
",",
"'NOT '",
"if",
"neg",
"else",
"''",
",",
"','",
".",
"join",
"(",
"'%s'",
"for",
"val",
"in",
"v",
")",
")",
")",
"args",
".",
"extend",
"(",
"[",
"val",
"[",
"neg",
":",
"]",
"for",
"val",
"in",
"v",
"]",
")",
"elif",
"v",
":",
"constraint_sql",
"=",
"[",
"get_constraint_sql",
"(",
"k",
",",
"val",
",",
"mode",
",",
"neg",
")",
"for",
"val",
"in",
"v",
"]",
"constraint_sql",
"=",
"list",
"(",
"filter",
"(",
"None",
",",
"constraint_sql",
")",
")",
"if",
"not",
"constraint_sql",
":",
"continue",
"if",
"neg",
":",
"clauses",
".",
"append",
"(",
"\"(\"",
"+",
"\" AND \"",
".",
"join",
"(",
"[",
"item",
"[",
"0",
"]",
"for",
"item",
"in",
"constraint_sql",
"]",
")",
"+",
"\")\"",
")",
"else",
":",
"clauses",
".",
"append",
"(",
"\"(\"",
"+",
"\" OR \"",
".",
"join",
"(",
"[",
"item",
"[",
"0",
"]",
"for",
"item",
"in",
"constraint_sql",
"]",
")",
"+",
"\")\"",
")",
"for",
"item",
"in",
"constraint_sql",
":",
"args",
".",
"extend",
"(",
"item",
"[",
"1",
"]",
")",
"return",
"\" AND \"",
".",
"join",
"(",
"clauses",
")",
"args",
"=",
"[",
"]",
"errors",
"=",
"[",
"]",
"clauses",
"=",
"list",
"(",
"filter",
"(",
"None",
",",
"map",
"(",
"get_clause_sql",
",",
"self",
".",
"constraints",
")",
")",
")",
"if",
"clauses",
":",
"sql",
".",
"append",
"(",
"\"\\nWHERE \"",
")",
"sql",
".",
"append",
"(",
"\" OR \"",
".",
"join",
"(",
"'(%s)'",
"%",
"c",
"for",
"c",
"in",
"clauses",
")",
")",
"if",
"cached_ids",
":",
"sql",
".",
"append",
"(",
"\" OR \"",
")",
"sql",
".",
"append",
"(",
"\"t.id in (%s)\"",
"%",
"(",
"','",
".",
"join",
"(",
"str",
"(",
"id",
")",
"for",
"id",
"in",
"cached_ids",
")",
")",
")",
"sql",
".",
"append",
"(",
"\"\\nORDER BY \"",
")",
"order_cols",
"=",
"[",
"(",
"self",
".",
"order",
",",
"self",
".",
"desc",
")",
"]",
"if",
"self",
".",
"group",
"and",
"self",
".",
"group",
"!=",
"self",
".",
"order",
":",
"order_cols",
".",
"insert",
"(",
"0",
",",
"(",
"self",
".",
"group",
",",
"self",
".",
"groupdesc",
")",
")",
"for",
"name",
",",
"desc",
"in",
"order_cols",
":",
"if",
"name",
"in",
"enum_columns",
":",
"col",
"=",
"name",
"+",
"'.value'",
"elif",
"name",
"not",
"in",
"custom_fields",
":",
"col",
"=",
"'t.'",
"+",
"name",
"elif",
"use_joins",
":",
"col",
"=",
"db",
".",
"quote",
"(",
"name",
")",
"+",
"'.value'",
"else",
":",
"col",
"=",
"'c.'",
"+",
"db",
".",
"quote",
"(",
"name",
")",
"desc",
"=",
"' DESC'",
"if",
"desc",
"else",
"''",
"# FIXME: This is a somewhat ugly hack. Can we also have the",
"# column type for this? If it's an integer, we do",
"# first one, if text, we do 'else'",
"if",
"name",
"in",
"custom_fields",
":",
"coalesce_arg",
"=",
"\"''\"",
"elif",
"name",
"==",
"'id'",
"or",
"name",
"in",
"self",
".",
"time_fields",
":",
"coalesce_arg",
"=",
"'0'",
"else",
":",
"coalesce_arg",
"=",
"\"''\"",
"sql",
".",
"append",
"(",
"\"COALESCE(%(col)s,%(arg)s)=%(arg)s%(desc)s,\"",
"%",
"{",
"'col'",
":",
"col",
",",
"'arg'",
":",
"coalesce_arg",
",",
"'desc'",
":",
"desc",
"}",
")",
"if",
"name",
"in",
"enum_columns",
":",
"# These values must be compared as ints, not as strings",
"sql",
".",
"append",
"(",
"db",
".",
"cast",
"(",
"col",
",",
"'int'",
")",
"+",
"desc",
")",
"elif",
"name",
"==",
"'milestone'",
"and",
"name",
"not",
"in",
"custom_fields",
":",
"sql",
".",
"append",
"(",
"\"COALESCE(milestone.completed,0)=0%s,\"",
"\"milestone.completed%s,\"",
"\"COALESCE(milestone.due,0)=0%s,\"",
"\"milestone.due%s,%s%s\"",
"%",
"(",
"desc",
",",
"desc",
",",
"desc",
",",
"desc",
",",
"col",
",",
"desc",
")",
")",
"elif",
"name",
"==",
"'version'",
"and",
"name",
"not",
"in",
"custom_fields",
":",
"sql",
".",
"append",
"(",
"\"COALESCE(version.time,0)=0%s,\"",
"\"version.time%s,%s%s\"",
"%",
"(",
"desc",
",",
"desc",
",",
"col",
",",
"desc",
")",
")",
"else",
":",
"sql",
".",
"append",
"(",
"\"%s%s\"",
"%",
"(",
"col",
",",
"desc",
")",
")",
"if",
"name",
"==",
"self",
".",
"group",
"and",
"not",
"name",
"==",
"self",
".",
"order",
":",
"sql",
".",
"append",
"(",
"\",\"",
")",
"if",
"self",
".",
"order",
"!=",
"'id'",
":",
"sql",
".",
"append",
"(",
"\",t.id\"",
")",
"if",
"errors",
":",
"raise",
"QueryValueError",
"(",
"errors",
")",
"return",
"\"\"",
".",
"join",
"(",
"sql",
")",
",",
"args"
] |
https://github.com/edgewall/trac/blob/beb3e4eaf1e0a456d801a50a8614ecab06de29fc/trac/ticket/query.py#L401-L694
|
|
dimagi/commcare-hq
|
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
|
corehq/motech/dhis2/models.py
|
python
|
get_date_params
|
(slug, date_span)
|
return {
slug: "{}+to+{}".format(startdate, enddate),
slug + '-start': startdate,
slug + '-end': enddate,
}
|
Mimics date filter request parameters
|
Mimics date filter request parameters
|
[
"Mimics",
"date",
"filter",
"request",
"parameters"
] |
def get_date_params(slug, date_span):
"""
Mimics date filter request parameters
"""
startdate = date_span.startdate.strftime('%Y-%m-%d')
enddate = date_span.enddate.strftime('%Y-%m-%d')
return {
slug: "{}+to+{}".format(startdate, enddate),
slug + '-start': startdate,
slug + '-end': enddate,
}
|
[
"def",
"get_date_params",
"(",
"slug",
",",
"date_span",
")",
":",
"startdate",
"=",
"date_span",
".",
"startdate",
".",
"strftime",
"(",
"'%Y-%m-%d'",
")",
"enddate",
"=",
"date_span",
".",
"enddate",
".",
"strftime",
"(",
"'%Y-%m-%d'",
")",
"return",
"{",
"slug",
":",
"\"{}+to+{}\"",
".",
"format",
"(",
"startdate",
",",
"enddate",
")",
",",
"slug",
"+",
"'-start'",
":",
"startdate",
",",
"slug",
"+",
"'-end'",
":",
"enddate",
",",
"}"
] |
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/motech/dhis2/models.py#L493-L503
|
|
khanhnamle1994/natural-language-processing
|
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
|
assignment1/.env/lib/python2.7/site-packages/scipy/optimize/_trustregion.py
|
python
|
BaseQuadraticSubproblem.get_boundaries_intersections
|
(self, z, d, trust_radius)
|
return ta, tb
|
Solve the scalar quadratic equation ||z + t d|| == trust_radius.
This is like a line-sphere intersection.
Return the two values of t, sorted from low to high.
|
Solve the scalar quadratic equation ||z + t d|| == trust_radius.
This is like a line-sphere intersection.
Return the two values of t, sorted from low to high.
|
[
"Solve",
"the",
"scalar",
"quadratic",
"equation",
"||z",
"+",
"t",
"d||",
"==",
"trust_radius",
".",
"This",
"is",
"like",
"a",
"line",
"-",
"sphere",
"intersection",
".",
"Return",
"the",
"two",
"values",
"of",
"t",
"sorted",
"from",
"low",
"to",
"high",
"."
] |
def get_boundaries_intersections(self, z, d, trust_radius):
"""
Solve the scalar quadratic equation ||z + t d|| == trust_radius.
This is like a line-sphere intersection.
Return the two values of t, sorted from low to high.
"""
a = np.dot(d, d)
b = 2 * np.dot(z, d)
c = np.dot(z, z) - trust_radius**2
sqrt_discriminant = math.sqrt(b*b - 4*a*c)
ta = (-b - sqrt_discriminant) / (2*a)
tb = (-b + sqrt_discriminant) / (2*a)
return ta, tb
|
[
"def",
"get_boundaries_intersections",
"(",
"self",
",",
"z",
",",
"d",
",",
"trust_radius",
")",
":",
"a",
"=",
"np",
".",
"dot",
"(",
"d",
",",
"d",
")",
"b",
"=",
"2",
"*",
"np",
".",
"dot",
"(",
"z",
",",
"d",
")",
"c",
"=",
"np",
".",
"dot",
"(",
"z",
",",
"z",
")",
"-",
"trust_radius",
"**",
"2",
"sqrt_discriminant",
"=",
"math",
".",
"sqrt",
"(",
"b",
"*",
"b",
"-",
"4",
"*",
"a",
"*",
"c",
")",
"ta",
"=",
"(",
"-",
"b",
"-",
"sqrt_discriminant",
")",
"/",
"(",
"2",
"*",
"a",
")",
"tb",
"=",
"(",
"-",
"b",
"+",
"sqrt_discriminant",
")",
"/",
"(",
"2",
"*",
"a",
")",
"return",
"ta",
",",
"tb"
] |
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment1/.env/lib/python2.7/site-packages/scipy/optimize/_trustregion.py#L74-L86
|
|
AstarLight/Satellite-Segmentation
|
28626b6f2351ea8e50da32807d1fa29784eb0e1d
|
deprecated/unet.py
|
python
|
load_img
|
(path, grayscale=False)
|
return img
|
[] |
def load_img(path, grayscale=False):
if grayscale:
img = cv2.imread(path,cv2.IMREAD_GRAYSCALE)
else:
img = cv2.imread(path)
img = np.array(img,dtype="float") / 255.0
return img
|
[
"def",
"load_img",
"(",
"path",
",",
"grayscale",
"=",
"False",
")",
":",
"if",
"grayscale",
":",
"img",
"=",
"cv2",
".",
"imread",
"(",
"path",
",",
"cv2",
".",
"IMREAD_GRAYSCALE",
")",
"else",
":",
"img",
"=",
"cv2",
".",
"imread",
"(",
"path",
")",
"img",
"=",
"np",
".",
"array",
"(",
"img",
",",
"dtype",
"=",
"\"float\"",
")",
"/",
"255.0",
"return",
"img"
] |
https://github.com/AstarLight/Satellite-Segmentation/blob/28626b6f2351ea8e50da32807d1fa29784eb0e1d/deprecated/unet.py#L33-L39
|
|||
SeldomQA/poium
|
b95b6d49f31084d9a213de2d51e35803733ca136
|
poium/wda/__init__.py
|
python
|
Page.native_resolution
|
(self)
|
return multiple * w, multiple * h
|
获取屏幕原始分辨率
|
获取屏幕原始分辨率
|
[
"获取屏幕原始分辨率"
] |
def native_resolution(self):
"""
获取屏幕原始分辨率
"""
multiple = self.driver.scale
w, h = self.driver.window_size()
return multiple * w, multiple * h
|
[
"def",
"native_resolution",
"(",
"self",
")",
":",
"multiple",
"=",
"self",
".",
"driver",
".",
"scale",
"w",
",",
"h",
"=",
"self",
".",
"driver",
".",
"window_size",
"(",
")",
"return",
"multiple",
"*",
"w",
",",
"multiple",
"*",
"h"
] |
https://github.com/SeldomQA/poium/blob/b95b6d49f31084d9a213de2d51e35803733ca136/poium/wda/__init__.py#L27-L33
|
|
enthought/traitsui
|
b7c38c7a47bf6ae7971f9ddab70c8a358647dd25
|
traitsui/qt4/tabular_editor.py
|
python
|
TabularEditor._on_click
|
(self, index)
|
Handle a cell being clicked.
|
Handle a cell being clicked.
|
[
"Handle",
"a",
"cell",
"being",
"clicked",
"."
] |
def _on_click(self, index):
"""Handle a cell being clicked."""
self._mouse_click(index, "clicked")
|
[
"def",
"_on_click",
"(",
"self",
",",
"index",
")",
":",
"self",
".",
"_mouse_click",
"(",
"index",
",",
"\"clicked\"",
")"
] |
https://github.com/enthought/traitsui/blob/b7c38c7a47bf6ae7971f9ddab70c8a358647dd25/traitsui/qt4/tabular_editor.py#L531-L533
|
||
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
ansible/roles/lib_zabbix/library/zbx_action.py
|
python
|
get_host_id_by_name
|
(zapi, host_name)
|
return content['result'][0]['hostid']
|
Get host id by name
|
Get host id by name
|
[
"Get",
"host",
"id",
"by",
"name"
] |
def get_host_id_by_name(zapi, host_name):
'''Get host id by name'''
content = zapi.get_content('host',
'get',
{'filter': {'name': host_name}})
return content['result'][0]['hostid']
|
[
"def",
"get_host_id_by_name",
"(",
"zapi",
",",
"host_name",
")",
":",
"content",
"=",
"zapi",
".",
"get_content",
"(",
"'host'",
",",
"'get'",
",",
"{",
"'filter'",
":",
"{",
"'name'",
":",
"host_name",
"}",
"}",
")",
"return",
"content",
"[",
"'result'",
"]",
"[",
"0",
"]",
"[",
"'hostid'",
"]"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/ansible/roles/lib_zabbix/library/zbx_action.py#L298-L304
|
|
safe-graph/DGFraud
|
4d017a5ae9c44287215f52470e1aef5ee99a5d9d
|
algorithms/GraphConsis/utils.py
|
python
|
load_mat_full
|
(prefix='./example_data/', file_name = 'YelpChi.mat', relations=['net_rur'], train_size=0.8)
|
return adj_mat, features, truelabels, train_idx, test_idx
|
[] |
def load_mat_full(prefix='./example_data/', file_name = 'YelpChi.mat', relations=['net_rur'], train_size=0.8):
data = sio.loadmat(prefix + file_name)
truelabels, features = data['label'], data['features'].astype(float)
truelabels = truelabels.tolist()[0]
features = features.todense()
N = features.shape[0]
adj_mat = [data[relation] for relation in relations]
index = range(len(truelabels))
train_num = int(len(truelabels) * 0.8)
train_idx = set(np.random.choice(index, train_num, replace=False))
test_idx = set(index).difference(train_idx)
train_num = int(len(truelabels) * train_size)
train_idx = set(list(train_idx)[:train_num])
return adj_mat, features, truelabels, train_idx, test_idx
|
[
"def",
"load_mat_full",
"(",
"prefix",
"=",
"'./example_data/'",
",",
"file_name",
"=",
"'YelpChi.mat'",
",",
"relations",
"=",
"[",
"'net_rur'",
"]",
",",
"train_size",
"=",
"0.8",
")",
":",
"data",
"=",
"sio",
".",
"loadmat",
"(",
"prefix",
"+",
"file_name",
")",
"truelabels",
",",
"features",
"=",
"data",
"[",
"'label'",
"]",
",",
"data",
"[",
"'features'",
"]",
".",
"astype",
"(",
"float",
")",
"truelabels",
"=",
"truelabels",
".",
"tolist",
"(",
")",
"[",
"0",
"]",
"features",
"=",
"features",
".",
"todense",
"(",
")",
"N",
"=",
"features",
".",
"shape",
"[",
"0",
"]",
"adj_mat",
"=",
"[",
"data",
"[",
"relation",
"]",
"for",
"relation",
"in",
"relations",
"]",
"index",
"=",
"range",
"(",
"len",
"(",
"truelabels",
")",
")",
"train_num",
"=",
"int",
"(",
"len",
"(",
"truelabels",
")",
"*",
"0.8",
")",
"train_idx",
"=",
"set",
"(",
"np",
".",
"random",
".",
"choice",
"(",
"index",
",",
"train_num",
",",
"replace",
"=",
"False",
")",
")",
"test_idx",
"=",
"set",
"(",
"index",
")",
".",
"difference",
"(",
"train_idx",
")",
"train_num",
"=",
"int",
"(",
"len",
"(",
"truelabels",
")",
"*",
"train_size",
")",
"train_idx",
"=",
"set",
"(",
"list",
"(",
"train_idx",
")",
"[",
":",
"train_num",
"]",
")",
"return",
"adj_mat",
",",
"features",
",",
"truelabels",
",",
"train_idx",
",",
"test_idx"
] |
https://github.com/safe-graph/DGFraud/blob/4d017a5ae9c44287215f52470e1aef5ee99a5d9d/algorithms/GraphConsis/utils.py#L21-L34
|
|||
kuri65536/python-for-android
|
26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891
|
python-modules/twisted/twisted/protocols/sip.py
|
python
|
parseAddress
|
(address, host=None, port=None, clean=0)
|
return name, url, params
|
Return (name, uri, params) for From/To/Contact header.
@param clean: remove unnecessary info, usually for From and To headers.
|
Return (name, uri, params) for From/To/Contact header.
|
[
"Return",
"(",
"name",
"uri",
"params",
")",
"for",
"From",
"/",
"To",
"/",
"Contact",
"header",
"."
] |
def parseAddress(address, host=None, port=None, clean=0):
"""Return (name, uri, params) for From/To/Contact header.
@param clean: remove unnecessary info, usually for From and To headers.
"""
address = address.strip()
# simple 'sip:foo' case
if address.startswith("sip:"):
return "", parseURL(address, host=host, port=port), {}
params = {}
name, url = address.split("<", 1)
name = name.strip()
if name.startswith('"'):
name = name[1:]
if name.endswith('"'):
name = name[:-1]
url, paramstring = url.split(">", 1)
url = parseURL(url, host=host, port=port)
paramstring = paramstring.strip()
if paramstring:
for l in paramstring.split(";"):
if not l:
continue
k, v = l.split("=")
params[k] = v
if clean:
# rfc 2543 6.21
url.ttl = None
url.headers = {}
url.transport = None
url.maddr = None
return name, url, params
|
[
"def",
"parseAddress",
"(",
"address",
",",
"host",
"=",
"None",
",",
"port",
"=",
"None",
",",
"clean",
"=",
"0",
")",
":",
"address",
"=",
"address",
".",
"strip",
"(",
")",
"# simple 'sip:foo' case",
"if",
"address",
".",
"startswith",
"(",
"\"sip:\"",
")",
":",
"return",
"\"\"",
",",
"parseURL",
"(",
"address",
",",
"host",
"=",
"host",
",",
"port",
"=",
"port",
")",
",",
"{",
"}",
"params",
"=",
"{",
"}",
"name",
",",
"url",
"=",
"address",
".",
"split",
"(",
"\"<\"",
",",
"1",
")",
"name",
"=",
"name",
".",
"strip",
"(",
")",
"if",
"name",
".",
"startswith",
"(",
"'\"'",
")",
":",
"name",
"=",
"name",
"[",
"1",
":",
"]",
"if",
"name",
".",
"endswith",
"(",
"'\"'",
")",
":",
"name",
"=",
"name",
"[",
":",
"-",
"1",
"]",
"url",
",",
"paramstring",
"=",
"url",
".",
"split",
"(",
"\">\"",
",",
"1",
")",
"url",
"=",
"parseURL",
"(",
"url",
",",
"host",
"=",
"host",
",",
"port",
"=",
"port",
")",
"paramstring",
"=",
"paramstring",
".",
"strip",
"(",
")",
"if",
"paramstring",
":",
"for",
"l",
"in",
"paramstring",
".",
"split",
"(",
"\";\"",
")",
":",
"if",
"not",
"l",
":",
"continue",
"k",
",",
"v",
"=",
"l",
".",
"split",
"(",
"\"=\"",
")",
"params",
"[",
"k",
"]",
"=",
"v",
"if",
"clean",
":",
"# rfc 2543 6.21",
"url",
".",
"ttl",
"=",
"None",
"url",
".",
"headers",
"=",
"{",
"}",
"url",
".",
"transport",
"=",
"None",
"url",
".",
"maddr",
"=",
"None",
"return",
"name",
",",
"url",
",",
"params"
] |
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/protocols/sip.py#L490-L521
|
|
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
|
cb692f527e4e819b6c228187c5702d990a180043
|
external/Scripting Engine/Xenotix Python Scripting Engine/bin/x86/Debug/Lib/numbers.py
|
python
|
Complex.imag
|
(self)
|
Retrieve the imaginary component of this number.
This should subclass Real.
|
Retrieve the imaginary component of this number.
|
[
"Retrieve",
"the",
"imaginary",
"component",
"of",
"this",
"number",
"."
] |
def imag(self):
"""Retrieve the imaginary component of this number.
This should subclass Real.
"""
raise NotImplementedError
|
[
"def",
"imag",
"(",
"self",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/bin/x86/Debug/Lib/numbers.py#L65-L70
|
||
bruderstein/PythonScript
|
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
|
PythonLib/full/xml/sax/xmlreader.py
|
python
|
InputSource.getEncoding
|
(self)
|
return self.__encoding
|
Get the character encoding of this InputSource.
|
Get the character encoding of this InputSource.
|
[
"Get",
"the",
"character",
"encoding",
"of",
"this",
"InputSource",
"."
] |
def getEncoding(self):
"Get the character encoding of this InputSource."
return self.__encoding
|
[
"def",
"getEncoding",
"(",
"self",
")",
":",
"return",
"self",
".",
"__encoding"
] |
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/xml/sax/xmlreader.py#L238-L240
|
|
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/polys/agca/ideals.py
|
python
|
Ideal.reduce_element
|
(self, x)
|
return x
|
Reduce the element ``x`` of our ring modulo the ideal ``self``.
Here "reduce" has no specific meaning: it could return a unique normal
form, simplify the expression a bit, or just do nothing.
|
Reduce the element ``x`` of our ring modulo the ideal ``self``.
|
[
"Reduce",
"the",
"element",
"x",
"of",
"our",
"ring",
"modulo",
"the",
"ideal",
"self",
"."
] |
def reduce_element(self, x):
"""
Reduce the element ``x`` of our ring modulo the ideal ``self``.
Here "reduce" has no specific meaning: it could return a unique normal
form, simplify the expression a bit, or just do nothing.
"""
return x
|
[
"def",
"reduce_element",
"(",
"self",
",",
"x",
")",
":",
"return",
"x"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/polys/agca/ideals.py#L217-L224
|
|
PrefectHQ/prefect
|
67bdc94e2211726d99561f6f52614bec8970e981
|
src/prefect/utilities/graphql.py
|
python
|
multiline_indent
|
(string: str, spaces: int)
|
return string.replace("\n", "\n" + " " * spaces)
|
Utility to indent all but the first line in a string to the specified level
|
Utility to indent all but the first line in a string to the specified level
|
[
"Utility",
"to",
"indent",
"all",
"but",
"the",
"first",
"line",
"in",
"a",
"string",
"to",
"the",
"specified",
"level"
] |
def multiline_indent(string: str, spaces: int) -> str:
"""
Utility to indent all but the first line in a string to the specified level
"""
return string.replace("\n", "\n" + " " * spaces)
|
[
"def",
"multiline_indent",
"(",
"string",
":",
"str",
",",
"spaces",
":",
"int",
")",
"->",
"str",
":",
"return",
"string",
".",
"replace",
"(",
"\"\\n\"",
",",
"\"\\n\"",
"+",
"\" \"",
"*",
"spaces",
")"
] |
https://github.com/PrefectHQ/prefect/blob/67bdc94e2211726d99561f6f52614bec8970e981/src/prefect/utilities/graphql.py#L24-L28
|
|
Chaffelson/nipyapi
|
d3b186fd701ce308c2812746d98af9120955e810
|
nipyapi/nifi/models/documented_type_dto.py
|
python
|
DocumentedTypeDTO.usage_restriction
|
(self, usage_restriction)
|
Sets the usage_restriction of this DocumentedTypeDTO.
The optional description of why the usage of this component is restricted.
:param usage_restriction: The usage_restriction of this DocumentedTypeDTO.
:type: str
|
Sets the usage_restriction of this DocumentedTypeDTO.
The optional description of why the usage of this component is restricted.
|
[
"Sets",
"the",
"usage_restriction",
"of",
"this",
"DocumentedTypeDTO",
".",
"The",
"optional",
"description",
"of",
"why",
"the",
"usage",
"of",
"this",
"component",
"is",
"restricted",
"."
] |
def usage_restriction(self, usage_restriction):
"""
Sets the usage_restriction of this DocumentedTypeDTO.
The optional description of why the usage of this component is restricted.
:param usage_restriction: The usage_restriction of this DocumentedTypeDTO.
:type: str
"""
self._usage_restriction = usage_restriction
|
[
"def",
"usage_restriction",
"(",
"self",
",",
"usage_restriction",
")",
":",
"self",
".",
"_usage_restriction",
"=",
"usage_restriction"
] |
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/documented_type_dto.py#L218-L227
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.