text_prompt
stringlengths 100
17.7k
⌀ | code_prompt
stringlengths 7
9.86k
⌀ |
---|---|
<SYSTEM_TASK:>
Assign a clipper that is inherited from a parent node.
<END_TASK>
<USER_TASK:>
Description:
def _set_clipper(self, node, clipper):
    """Assign a clipper that is inherited from a parent node.
    If *clipper* is None, then remove any clippers for *node*.
    """ |
    # Detach and discard whatever clipper was previously registered
    # for this node before installing the new one.
    if node in self._clippers:
        self.detach(self._clippers.pop(node))
    # A None clipper means "no clipping": nothing is attached or recorded.
    if clipper is not None:
        self.attach(clipper)
        self._clippers[node] = clipper |
<SYSTEM_TASK:>
Convert CFNumber to python int or float.
<END_TASK>
<USER_TASK:>
Description:
def cfnumber_to_number(cfnumber):
    """Convert CFNumber to python int or float.""" |
    # Ask CoreFoundation which concrete numeric representation is stored.
    numeric_type = cf.CFNumberGetType(cfnumber)
    # Map each CFNumber type constant to the ctypes type of matching width
    # so the value can be extracted into a C variable and then unboxed.
    cfnum_to_ctype = {kCFNumberSInt8Type: c_int8, kCFNumberSInt16Type: c_int16,
                      kCFNumberSInt32Type: c_int32,
                      kCFNumberSInt64Type: c_int64,
                      kCFNumberFloat32Type: c_float,
                      kCFNumberFloat64Type: c_double,
                      kCFNumberCharType: c_byte, kCFNumberShortType: c_short,
                      kCFNumberIntType: c_int, kCFNumberLongType: c_long,
                      kCFNumberLongLongType: c_longlong,
                      kCFNumberFloatType: c_float,
                      kCFNumberDoubleType: c_double,
                      kCFNumberCFIndexType: CFIndex,
                      kCFNumberCGFloatType: CGFloat}
    if numeric_type in cfnum_to_ctype:
        t = cfnum_to_ctype[numeric_type]
        result = t()
        # CFNumberGetValue reports success as a boolean; ``.value``
        # converts the ctypes object into a plain Python int/float.
        if cf.CFNumberGetValue(cfnumber, numeric_type, byref(result)):
            return result.value
    else:
        raise Exception(
            'cfnumber_to_number: unhandled CFNumber type %d' % numeric_type) |
<SYSTEM_TASK:>
Convert a CFType into an equivalent python type.
<END_TASK>
<USER_TASK:>
Description:
def cftype_to_value(cftype):
    """Convert a CFType into an equivalent python type.
    The convertible CFTypes are taken from the known_cftypes
    dictionary, which may be added to if another library implements
    its own conversion methods."""
    # A NULL CFTypeRef has no Python equivalent.
    if not cftype:
        return None
    type_id = cf.CFGetTypeID(cftype)
    # Dispatch to the converter registered for this CFTypeID; types with
    # no registered converter pass through unchanged so the caller can
    # deal with the raw CFTypeRef itself.
    try:
        converter = known_cftypes[type_id]
    except KeyError:
        return cftype
    return converter(cftype)
<SYSTEM_TASK:>
Return ctypes type for an encoded Objective-C type.
<END_TASK>
<USER_TASK:>
Description:
def ctype_for_encoding(self, encoding):
    """Return ctypes type for an encoded Objective-C type.""" |
    # Direct lookup for plain type codes.
    if encoding in self.typecodes:
        return self.typecodes[encoding]
    # b'^x': pointer to a known type code x.
    elif encoding[0:1] == b'^' and encoding[1:] in self.typecodes:
        return POINTER(self.typecodes[encoding[1:]])
    # Pointers to opaque CGImage/NSZone structures are treated as void*.
    elif encoding[0:1] == b'^' and encoding[1:] in [CGImageEncoding,
                                                    NSZoneEncoding]:
        return c_void_p
    # b'r'-prefixed codes map to the same type as the unprefixed code
    # (the prefix is simply dropped, since ctypes has no equivalent).
    elif encoding[0:1] == b'r' and encoding[1:] in self.typecodes:
        return self.typecodes[encoding[1:]]
    # b'r^x': prefixed pointer to a known type code x.
    elif encoding[0:2] == b'r^' and encoding[2:] in self.typecodes:
        return POINTER(self.typecodes[encoding[2:]])
    else:
        raise Exception('unknown encoding for %s: %s'
                        % (self.name, encoding)) |
<SYSTEM_TASK:>
Function decorator for class methods.
<END_TASK>
<USER_TASK:>
Description:
def classmethod(self, encoding):
    """Function decorator for class methods.""" |
    # NOTE: intentionally shadows the builtin `classmethod` within this
    # class's decorator API.
    # Add encodings for hidden self and cmd arguments.
    encoding = ensure_bytes(encoding)
    typecodes = parse_type_encoding(encoding)
    typecodes.insert(1, b'@:')
    encoding = b''.join(typecodes)
    def decorator(f):
        def objc_class_method(objc_cls, objc_cmd, *args):
            # Wrap the raw class pointer and stash the selector so the
            # Python implementation can reach both.
            py_cls = ObjCClass(objc_cls)
            py_cls.objc_cmd = objc_cmd
            args = convert_method_arguments(encoding, args)
            result = f(py_cls, *args)
            # Unwrap returned wrapper objects back into raw pointer
            # values for the Objective-C runtime.
            if isinstance(result, ObjCClass):
                result = result.ptr.value
            elif isinstance(result, ObjCInstance):
                result = result.ptr.value
            return result
        # Underscores in the Python name become colons in the selector.
        name = f.__name__.replace('_', ':')
        self.add_class_method(objc_class_method, name, encoding)
        return objc_class_method
    return decorator |
<SYSTEM_TASK:>
Get the fragment shader code - we use the shader_program object to determine
<END_TASK>
<USER_TASK:>
Description:
def get_frag_shader(volumes, clipped=False, n_volume_max=5):
    """
    Get the fragment shader code - we use the shader_program object to determine
    which layers are enabled and therefore what to include in the shader code.
    """ |
    # GLSL snippets accumulated for the four template sections of the
    # FRAG_SHADER template.
    declarations = ""
    before_loop = ""
    in_loop = ""
    after_loop = ""
    # Declare every texture sampler up front (even unused ones) and touch
    # each one once so the compiler does not optimize the uniforms away.
    for index in range(n_volume_max):
        declarations += "uniform $sampler_type u_volumetex_{0:d};\n".format(index)
        before_loop += "dummy = $sample(u_volumetex_{0:d}, loc).g;\n".format(index)
    declarations += "uniform $sampler_type dummy1;\n"
    declarations += "float dummy;\n"
    for label in sorted(volumes):
        index = volumes[label]['index']
        # Global declarations
        declarations += "uniform float u_weight_{0:d};\n".format(index)
        declarations += "uniform int u_enabled_{0:d};\n".format(index)
        # Declarations before the raytracing loop
        before_loop += "float max_val_{0:d} = 0;\n".format(index)
        # Calculation inside the main raytracing loop
        in_loop += "if(u_enabled_{0:d} == 1) {{\n\n".format(index)
        # When clipping is on, only sample inside the clip box.
        if clipped:
            in_loop += ("if(loc.r > u_clip_min.r && loc.r < u_clip_max.r &&\n"
                        " loc.g > u_clip_min.g && loc.g < u_clip_max.g &&\n"
                        " loc.b > u_clip_min.b && loc.b < u_clip_max.b) {\n\n")
        in_loop += "// Sample texture for layer {0}\n".format(label)
        in_loop += "val = $sample(u_volumetex_{0:d}, loc).g;\n".format(index)
        # Optional multiplicative masking by another layer's texture.
        if volumes[label].get('multiply') is not None:
            index_other = volumes[volumes[label]['multiply']]['index']
            in_loop += ("if (val != 0) {{ val *= $sample(u_volumetex_{0:d}, loc).g; }}\n"
                        .format(index_other))
        in_loop += "max_val_{0:d} = max(val, max_val_{0:d});\n\n".format(index)
        if clipped:
            in_loop += "}\n\n"
        in_loop += "}\n\n"
        # Calculation after the main loop
        after_loop += "// Compute final color for layer {0}\n".format(label)
        after_loop += ("color = $cmap{0:d}(max_val_{0:d});\n"
                       "color.a *= u_weight_{0:d};\n"
                       "total_color += color.a * color;\n"
                       "max_alpha = max(color.a, max_alpha);\n"
                       "count += color.a;\n\n").format(index)
    # Keep the clip uniforms referenced even when clipping is disabled.
    if not clipped:
        before_loop += "\nfloat val3 = u_clip_min.g + u_clip_max.g;\n\n"
    # Code esthetics
    before_loop = indent(before_loop, " " * 4).strip()
    in_loop = indent(in_loop, " " * 16).strip()
    after_loop = indent(after_loop, " " * 4).strip()
    return FRAG_SHADER.format(declarations=declarations,
                              before_loop=before_loop,
                              in_loop=in_loop,
                              after_loop=after_loop) |
<SYSTEM_TASK:>
Query string from display
<END_TASK>
<USER_TASK:>
Description:
def eglQueryString(display, name):
    """ Query string from display
    """ |
    out = _lib.eglQueryString(display, name)
    # The EGL call returns NULL on failure; surface that as an exception.
    if not out:
        raise RuntimeError('Could not query %s' % name)
    return out |
<SYSTEM_TASK:>
Set the faces
<END_TASK>
<USER_TASK:>
Description:
def set_faces(self, faces):
    """Set the faces
    Parameters
    ----------
    faces : ndarray
        (Nf, 3) array of faces. Each row in the array contains
        three indices into the vertex array, specifying the three corners
        of a triangular face.
    """ |
    self._faces = faces
    # Invalidate every cached structure derived from the old faces so it
    # is lazily recomputed on next access.
    self._edges = None
    self._edges_indexed_by_faces = None
    self._vertex_faces = None
    self._vertices_indexed_by_faces = None
    self.reset_normals()
    self._vertex_colors_indexed_by_faces = None
    self._face_colors_indexed_by_faces = None |
<SYSTEM_TASK:>
Get the vertices
<END_TASK>
<USER_TASK:>
Description:
def get_vertices(self, indexed=None):
    """Get the vertices
    Parameters
    ----------
    indexed : str | None
        If None, return an array (N,3) of the positions of vertices in
        the mesh. By default, each unique vertex appears only once.
        If indexed is 'faces', then the array will instead contain three
        vertices per face in the mesh (and a single vertex may appear more
        than once in the array).
    Returns
    -------
    vertices : ndarray
        The vertices.
    """ |
    if indexed is None:
        # Derive the unique-vertex array from face-indexed data if that
        # is the only representation currently stored.
        if (self._vertices is None and
                self._vertices_indexed_by_faces is not None):
            self._compute_unindexed_vertices()
        return self._vertices
    elif indexed == 'faces':
        # Lazily build (and cache) the face-indexed view.
        if (self._vertices_indexed_by_faces is None and
                self._vertices is not None):
            self._vertices_indexed_by_faces = \
                self._vertices[self.get_faces()]
        return self._vertices_indexed_by_faces
    else:
        raise Exception("Invalid indexing mode. Accepts: None, 'faces'") |
<SYSTEM_TASK:>
Get the mesh bounds
<END_TASK>
<USER_TASK:>
Description:
def get_bounds(self):
    """Get the mesh bounds
    Returns
    -------
    bounds : list
        A list of tuples of mesh bounds.
    """
    # Prefer face-indexed vertex data; fall back to the unique-vertex
    # array; with neither available there are no bounds to report.
    if self._vertices_indexed_by_faces is not None:
        verts = self._vertices_indexed_by_faces
    elif self._vertices is not None:
        verts = self._vertices
    else:
        return None
    # One (min, max) tuple per coordinate axis.
    return [(verts[:, axis].min(), verts[:, axis].max())
            for axis in range(verts.shape[1])]
<SYSTEM_TASK:>
Set the mesh vertices
<END_TASK>
<USER_TASK:>
Description:
def set_vertices(self, verts=None, indexed=None, reset_normals=True):
    """Set the mesh vertices
    Parameters
    ----------
    verts : ndarray | None
        The array (Nv, 3) of vertex coordinates.
    indexed : str | None
        If indexed=='faces', then the data must have shape (Nf, 3, 3) and
        is assumed to be already indexed as a list of faces. This will
        cause any pre-existing normal vectors to be cleared unless
        reset_normals=False.
    reset_normals : bool
        If True, reset the normals.
    """
    if indexed == 'faces':
        # Face-indexed storage supersedes any unique-vertex array.
        self._vertices = None
        if verts is not None:
            self._vertices_indexed_by_faces = verts
    elif indexed is None:
        if verts is not None:
            self._vertices = verts
            self._vertices_indexed_by_faces = None
    else:
        raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
    if reset_normals:
        self.reset_normals()
<SYSTEM_TASK:>
Return True if this data set has vertex color information
<END_TASK>
<USER_TASK:>
Description:
def has_vertex_color(self):
    """Return True if this data set has vertex color information"""
    # Any one of the three storage layouts counts as having color data.
    return any(store is not None
               for store in (self._vertex_colors,
                             self._vertex_colors_indexed_by_faces,
                             self._vertex_colors_indexed_by_edges))
<SYSTEM_TASK:>
Return True if this data set has face color information
<END_TASK>
<USER_TASK:>
Description:
def has_face_color(self):
    """Return True if this data set has face color information"""
    # Any one of the three storage layouts counts as having color data.
    return any(store is not None
               for store in (self._face_colors,
                             self._face_colors_indexed_by_faces,
                             self._face_colors_indexed_by_edges))
<SYSTEM_TASK:>
Get face normals
<END_TASK>
<USER_TASK:>
Description:
def get_face_normals(self, indexed=None):
    """Get face normals
    Parameters
    ----------
    indexed : str | None
        If None, return an array (Nf, 3) of normal vectors for each face.
        If 'faces', then instead return an indexed array (Nf, 3, 3)
        (this is just the same array with each vector copied three times).
    Returns
    -------
    normals : ndarray
        The normals.
    """ |
    if self._face_normals is None:
        # Normal of each face = cross product of two of its edge vectors.
        v = self.get_vertices(indexed='faces')
        self._face_normals = np.cross(v[:, 1] - v[:, 0],
                                      v[:, 2] - v[:, 0])
    if indexed is None:
        return self._face_normals
    elif indexed == 'faces':
        # Lazily build (and cache) the per-corner copy of each normal.
        if self._face_normals_indexed_by_faces is None:
            norms = np.empty((self._face_normals.shape[0], 3, 3),
                             dtype=np.float32)
            norms[:] = self._face_normals[:, np.newaxis, :]
            self._face_normals_indexed_by_faces = norms
        return self._face_normals_indexed_by_faces
    else:
        raise Exception("Invalid indexing mode. Accepts: None, 'faces'") |
<SYSTEM_TASK:>
Get vertex normals
<END_TASK>
<USER_TASK:>
Description:
def get_vertex_normals(self, indexed=None):
    """Get vertex normals
    Parameters
    ----------
    indexed : str | None
        If None, return an (N, 3) array of normal vectors with one entry
        per unique vertex in the mesh. If indexed is 'faces', then the
        array will contain three normal vectors per face (and some
        vertices may be repeated).
    Returns
    -------
    normals : ndarray
        The normals.
    """
    if self._vertex_normals is None:
        faceNorms = self.get_face_normals()
        vertFaces = self.get_vertex_faces()
        self._vertex_normals = np.empty(self._vertices.shape,
                                        dtype=np.float32)
        # FIX: `xrange` does not exist on Python 3; `range` is the
        # portable equivalent (lazy on py3, equivalent list on py2).
        for vindex in range(self._vertices.shape[0]):
            faces = vertFaces[vindex]
            if len(faces) == 0:
                # Isolated vertex: no adjacent face, no meaningful normal.
                self._vertex_normals[vindex] = (0, 0, 0)
                continue
            norms = faceNorms[faces]  # get all adjacent face normals
            norm = norms.sum(axis=0)  # sum normals
            # Normalize unless the sum is degenerate (zero length).
            renorm = (norm**2).sum()**0.5
            if renorm > 0:
                norm /= renorm
            self._vertex_normals[vindex] = norm
    if indexed is None:
        return self._vertex_normals
    elif indexed == 'faces':
        return self._vertex_normals[self.get_faces()]
    else:
        raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
<SYSTEM_TASK:>
Set the vertex color array
<END_TASK>
<USER_TASK:>
Description:
def set_vertex_colors(self, colors, indexed=None):
    """Set the vertex color array
    Parameters
    ----------
    colors : array
        Array of colors. Must have shape (Nv, 4) (indexing by vertex)
        or shape (Nf, 3, 4) (vertices indexed by face).
    indexed : str | None
        Should be 'faces' if colors are indexed by faces.
    """ |
    # Normalize the input into the expected RGBA float layout.
    colors = _fix_colors(np.asarray(colors))
    if indexed is None:
        if colors.ndim != 2:
            raise ValueError('colors must be 2D if indexed is None')
        if colors.shape[0] != self.n_vertices:
            raise ValueError('incorrect number of colors %s, expected %s'
                             % (colors.shape[0], self.n_vertices))
        # Only one representation is stored at a time; drop the other.
        self._vertex_colors = colors
        self._vertex_colors_indexed_by_faces = None
    elif indexed == 'faces':
        if colors.ndim != 3:
            raise ValueError('colors must be 3D if indexed is "faces"')
        if colors.shape[0] != self.n_faces:
            raise ValueError('incorrect number of faces')
        self._vertex_colors = None
        self._vertex_colors_indexed_by_faces = colors
    else:
        raise ValueError('indexed must be None or "faces"') |
<SYSTEM_TASK:>
Get the face colors
<END_TASK>
<USER_TASK:>
Description:
def get_face_colors(self, indexed=None):
    """Get the face colors
    Parameters
    ----------
    indexed : str | None
        If indexed is None, return (Nf, 4) array of face colors.
        If indexed=='faces', then instead return an indexed array
        (Nf, 3, 4) (note this is just the same array with each color
        repeated three times).
    Returns
    -------
    colors : ndarray
        The colors.
    """
    if indexed is None:
        return self._face_colors
    if indexed != 'faces':
        raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
    # Lazily build (and cache) the per-corner copy of each face color.
    if (self._face_colors_indexed_by_faces is None and
            self._face_colors is not None):
        n_faces = self._face_colors.shape[0]
        expanded = np.empty((n_faces, 3, 4), dtype=self._face_colors.dtype)
        expanded[:] = self._face_colors.reshape(n_faces, 1, 4)
        self._face_colors_indexed_by_faces = expanded
    return self._face_colors_indexed_by_faces
<SYSTEM_TASK:>
Set the face color array
<END_TASK>
<USER_TASK:>
Description:
def set_face_colors(self, colors, indexed=None):
    """Set the face color array
    Parameters
    ----------
    colors : array
        Array of colors. Must have shape (Nf, 4) (indexed by face),
        or shape (Nf, 3, 4) (face colors indexed by faces).
    indexed : str | None
        Should be 'faces' if colors are indexed by faces.
    """ |
    # Normalize the input into the expected RGBA float layout.
    colors = _fix_colors(colors)
    if colors.shape[0] != self.n_faces:
        raise ValueError('incorrect number of colors %s, expected %s'
                         % (colors.shape[0], self.n_faces))
    if indexed is None:
        if colors.ndim != 2:
            raise ValueError('colors must be 2D if indexed is None')
        # Only one representation is stored at a time; drop the other.
        self._face_colors = colors
        self._face_colors_indexed_by_faces = None
    elif indexed == 'faces':
        if colors.ndim != 3:
            raise ValueError('colors must be 3D if indexed is "faces"')
        self._face_colors = None
        self._face_colors_indexed_by_faces = colors
    else:
        raise ValueError('indexed must be None or "faces"') |
<SYSTEM_TASK:>
List mapping each vertex index to a list of face indices that use it.
<END_TASK>
<USER_TASK:>
Description:
def get_vertex_faces(self):
    """
    List mapping each vertex index to a list of face indices that use it.
    """
    if self._vertex_faces is None:
        # One (initially empty) list per unique vertex.
        # FIX: `xrange` does not exist on Python 3; `range` is the
        # portable equivalent.
        self._vertex_faces = [[] for _ in range(len(self.get_vertices()))]
        for i in range(self._faces.shape[0]):
            face = self._faces[i]
            for ind in face:
                self._vertex_faces[ind].append(i)
    return self._vertex_faces
<SYSTEM_TASK:>
Serialize this mesh to a string appropriate for disk storage
<END_TASK>
<USER_TASK:>
Description:
def save(self):
    """Serialize this mesh to a string appropriate for disk storage
    Returns
    -------
    state : dict
        The state.
    """ |
    import pickle
    # Pick whichever representation of each attribute is currently
    # populated (unique-vertex vs. face-indexed storage).
    if self._faces is not None:
        names = ['_vertices', '_faces']
    else:
        names = ['_vertices_indexed_by_faces']
    if self._vertex_colors is not None:
        names.append('_vertex_colors')
    elif self._vertex_colors_indexed_by_faces is not None:
        names.append('_vertex_colors_indexed_by_faces')
    if self._face_colors is not None:
        names.append('_face_colors')
    elif self._face_colors_indexed_by_faces is not None:
        names.append('_face_colors_indexed_by_faces')
    state = dict([(n, getattr(self, n)) for n in names])
    return pickle.dumps(state) |
<SYSTEM_TASK:>
A full implementation of Dave Green's "cubehelix" for Matplotlib.
<END_TASK>
<USER_TASK:>
Description:
def cubehelix(start=0.5, rot=1, gamma=1.0, reverse=True, nlev=256.,
              minSat=1.2, maxSat=1.2, minLight=0., maxLight=1.,
              **kwargs):
    """
    A full implementation of Dave Green's "cubehelix" for Matplotlib.
    Based on the FORTRAN 77 code provided in
    D.A. Green, 2011, BASI, 39, 289.
    http://adsabs.harvard.edu/abs/2011arXiv1108.5083G
    User can adjust all parameters of the cubehelix algorithm.
    This enables much greater flexibility in choosing color maps, while
    always ensuring the color map scales in intensity from black
    to white. A few simple examples:
    Default color map settings produce the standard "cubehelix".
    Create color map in only blues by setting rot=0 and start=0.
    Create reverse (white to black) backwards through the rainbow once
    by setting rot=1 and reverse=True.
    Parameters
    ----------
    start : scalar, optional
        Sets the starting position in the color space. 0=blue, 1=red,
        2=green. Defaults to 0.5.
    rot : scalar, optional
        The number of rotations through the rainbow. Can be positive
        or negative, indicating direction of rainbow. Negative values
        correspond to Blue->Red direction. Defaults to 1.
    gamma : scalar, optional
        The gamma correction for intensity. Defaults to 1.0
    reverse : boolean, optional
        Set to True to reverse the color map. Will go from black to
        white. Good for density plots where shade~density. Defaults to True
    nlev : scalar, optional
        Defines the number of discrete levels to render colors at.
        Defaults to 256.
    sat : scalar, optional
        The saturation intensity factor. Defaults to 1.2
        NOTE: this was formerly known as "hue" parameter
    minSat : scalar, optional
        Sets the minimum-level saturation. Defaults to 1.2
    maxSat : scalar, optional
        Sets the maximum-level saturation. Defaults to 1.2
    startHue : scalar, optional
        Sets the starting color, ranging from [0, 360], as in
        D3 version by @mbostock
        NOTE: overrides values in start parameter
    endHue : scalar, optional
        Sets the ending color, ranging from [0, 360], as in
        D3 version by @mbostock
        NOTE: overrides values in rot parameter
    minLight : scalar, optional
        Sets the minimum lightness value. Defaults to 0.
    maxLight : scalar, optional
        Sets the maximum lightness value. Defaults to 1.
    Returns
    -------
    data : ndarray, shape (N, 3)
        Control points.
    """
    # FIX: modern NumPy requires an integer `num` for linspace; the
    # historical default of 256. (float) is accepted via this cast.
    nlev = int(nlev)
    # startHue/endHue (D3-style parameters) override start/rot; 'sat'
    # overrides both saturation limits.  NOTE: `kwargs` is always a dict
    # (never None), so the former `if kwargs is not None` guard was dead.
    if 'startHue' in kwargs:
        start = (kwargs.get('startHue') / 360. - 1.) * 3.
    if 'endHue' in kwargs:
        rot = kwargs.get('endHue') / 360. - start / 3. - 1.
    if 'sat' in kwargs:
        minSat = kwargs.get('sat')
        maxSat = kwargs.get('sat')
    # set up the parameters
    fract = np.linspace(minLight, maxLight, nlev)
    angle = 2.0 * np.pi * (start / 3.0 + rot * fract + 1.)
    fract = fract**gamma
    satar = np.linspace(minSat, maxSat, nlev)
    amp = satar * fract * (1. - fract) / 2.
    # compute the RGB vectors according to main equations
    red = fract + amp * (-0.14861 * np.cos(angle) + 1.78277 * np.sin(angle))
    grn = fract + amp * (-0.29227 * np.cos(angle) - 0.90649 * np.sin(angle))
    blu = fract + amp * (1.97294 * np.cos(angle))
    # clip RGB values that fall outside the valid [0, 1] range
    red = np.clip(red, 0., 1.)
    grn = np.clip(grn, 0., 1.)
    blu = np.clip(blu, 0., 1.)
    # optional color reverse
    if reverse is True:
        red = red[::-1]
        blu = blu[::-1]
        grn = grn[::-1]
    return np.array((red, grn, blu)).T
<SYSTEM_TASK:>
Convert a many-to-one mapping to a one-to-one mapping
<END_TASK>
<USER_TASK:>
Description:
def _many_to_one(input_dict):
"""Convert a many-to-one mapping to a one-to-one mapping""" |
return dict((key, val)
for keys, val in input_dict.items()
for key in keys) |
<SYSTEM_TASK:>
Get an SVG dash array for the given matplotlib linestyle
<END_TASK>
<USER_TASK:>
Description:
def get_dasharray(obj):
    """Get an SVG dash array for the given matplotlib linestyle
    Parameters
    ----------
    obj : matplotlib object
        The matplotlib line or path object, which must have a get_linestyle()
        method which returns a valid matplotlib line code
    Returns
    -------
    dasharray : string
        The HTML/SVG dasharray code associated with the object.
    """ |
    # An explicit dash sequence (private matplotlib attribute) takes
    # precedence over the named linestyle.
    if obj.__dict__.get('_dashSeq', None) is not None:
        return ','.join(map(str, obj._dashSeq))
    else:
        # Translate the named linestyle via the LINESTYLES table,
        # falling back to solid with a warning for unknown styles.
        ls = obj.get_linestyle()
        dasharray = LINESTYLES.get(ls, 'not found')
        if dasharray == 'not found':
            warnings.warn("line style '{0}' not understood: "
                          "defaulting to solid line.".format(ls))
            dasharray = LINESTYLES['solid']
        return dasharray |
<SYSTEM_TASK:>
Construct the vertices and SVG codes for the path
<END_TASK>
<USER_TASK:>
Description:
def SVG_path(path, transform=None, simplify=False):
    """Construct the vertices and SVG codes for the path
    Parameters
    ----------
    path : matplotlib.Path object
    transform : matplotlib transform (optional)
        if specified, the path will be transformed before computing the output.
    Returns
    -------
    vertices : array
        The shape (M, 2) array of vertices of the Path. Note that some Path
        codes require multiple vertices, so the length of these vertices may
        be longer than the list of path codes.
    path_codes : list
        A length N list of single-character path codes, N <= M. Each code is
        a single character, in ['L','M','S','C','Z']. See the standard SVG
        path specification for a description of these.
    """ |
    if transform is not None:
        path = path.transformed(transform)
    # CLOSEPOLY segments carry no meaningful vertices, so emit an empty
    # vertex list for them; all codes are mapped through PATH_DICT.
    vc_tuples = [(vertices if path_code != Path.CLOSEPOLY else [],
                  PATH_DICT[path_code])
                 for (vertices, path_code)
                 in path.iter_segments(simplify=simplify)]
    if not vc_tuples:
        # empty path is a special case
        return np.zeros((0, 2)), []
    else:
        # Flatten per-segment vertex lists into a single (M, 2) array.
        vertices, codes = zip(*vc_tuples)
        vertices = np.array(list(itertools.chain(*vertices))).reshape(-1, 2)
        return vertices, list(codes) |
<SYSTEM_TASK:>
Get the style dictionary for matplotlib path objects
<END_TASK>
<USER_TASK:>
Description:
def get_path_style(path, fill=True):
    """Get the style dictionary for matplotlib path objects""" |
    style = {}
    # matplotlib reports None for the default alpha; normalize to opaque.
    style['alpha'] = path.get_alpha()
    if style['alpha'] is None:
        style['alpha'] = 1
    style['edgecolor'] = color_to_hex(path.get_edgecolor())
    if fill:
        style['facecolor'] = color_to_hex(path.get_facecolor())
    else:
        style['facecolor'] = 'none'
    style['edgewidth'] = path.get_linewidth()
    style['dasharray'] = get_dasharray(path)
    style['zorder'] = path.get_zorder()
    return style |
<SYSTEM_TASK:>
Get the style dictionary for matplotlib line objects
<END_TASK>
<USER_TASK:>
Description:
def get_line_style(line):
    """Get the style dictionary for matplotlib line objects""" |
    style = {}
    # matplotlib reports None for the default alpha; normalize to opaque.
    style['alpha'] = line.get_alpha()
    if style['alpha'] is None:
        style['alpha'] = 1
    style['color'] = color_to_hex(line.get_color())
    style['linewidth'] = line.get_linewidth()
    style['dasharray'] = get_dasharray(line)
    style['zorder'] = line.get_zorder()
    return style |
<SYSTEM_TASK:>
Get the style dictionary for matplotlib marker objects
<END_TASK>
<USER_TASK:>
Description:
def get_marker_style(line):
    """Get the style dictionary for matplotlib marker objects""" |
    style = {}
    # matplotlib reports None for the default alpha; normalize to opaque.
    style['alpha'] = line.get_alpha()
    if style['alpha'] is None:
        style['alpha'] = 1
    style['facecolor'] = color_to_hex(line.get_markerfacecolor())
    style['edgecolor'] = color_to_hex(line.get_markeredgecolor())
    style['edgewidth'] = line.get_markeredgewidth()
    style['marker'] = line.get_marker()
    markerstyle = MarkerStyle(line.get_marker())
    markersize = line.get_markersize()
    # Scale the marker path to its size; the negative y-scale flips the
    # path into SVG's y-down coordinate convention.
    markertransform = (markerstyle.get_transform() +
                       Affine2D().scale(markersize, -markersize))
    style['markerpath'] = SVG_path(markerstyle.get_path(),
                                   markertransform)
    style['markersize'] = markersize
    style['zorder'] = line.get_zorder()
    return style |
<SYSTEM_TASK:>
Return the property dictionary for a matplotlib.Axis instance
<END_TASK>
<USER_TASK:>
Description:
def get_axis_properties(axis):
    """Return the property dictionary for a matplotlib.Axis instance""" |
    props = {}
    # label1On marks the "primary" side (bottom/left) as labeled.
    label1On = axis._major_tick_kw.get('label1On', True)
    if isinstance(axis, matplotlib.axis.XAxis):
        if label1On:
            props['position'] = "bottom"
        else:
            props['position'] = "top"
    elif isinstance(axis, matplotlib.axis.YAxis):
        if label1On:
            props['position'] = "left"
        else:
            props['position'] = "right"
    else:
        raise ValueError("{0} should be an Axis instance".format(axis))
    # Use tick values if appropriate
    locator = axis.get_major_locator()
    props['nticks'] = len(locator())
    if isinstance(locator, ticker.FixedLocator):
        props['tickvalues'] = list(locator())
    else:
        # Non-fixed locators are resolved dynamically on the JS side.
        props['tickvalues'] = None
    # Find tick formats
    formatter = axis.get_major_formatter()
    if isinstance(formatter, ticker.NullFormatter):
        props['tickformat'] = ""
    elif isinstance(formatter, ticker.FixedFormatter):
        props['tickformat'] = list(formatter.seq)
    elif not any(label.get_visible() for label in axis.get_ticklabels()):
        # All labels hidden is equivalent to an empty format.
        props['tickformat'] = ""
    else:
        props['tickformat'] = None
    # Get axis scale
    props['scale'] = axis.get_scale()
    # Get major tick label size (assumes that's all we really care about!)
    labels = axis.get_ticklabels()
    if labels:
        props['fontsize'] = labels[0].get_fontsize()
    else:
        props['fontsize'] = None
    # Get associated grid
    props['grid'] = get_grid_style(axis)
    return props |
<SYSTEM_TASK:>
Convert a matplotlib image to a base64 png representation
<END_TASK>
<USER_TASK:>
Description:
def image_to_base64(image):
    """
    Convert a matplotlib image to a base64 png representation
    Parameters
    ----------
    image : matplotlib image object
        The image to be converted.
    Returns
    -------
    image_base64 : string
        The UTF8-encoded base64 string representation of the png image.
    """ |
    ax = image.axes
    binary_buffer = io.BytesIO()
    # image is saved in axes coordinates: we need to temporarily
    # set the correct limits to get the correct image
    lim = ax.axis()
    ax.axis(image.get_extent())
    image.write_png(binary_buffer)
    # Restore the original axis limits before returning.
    ax.axis(lim)
    binary_buffer.seek(0)
    return base64.b64encode(binary_buffer.read()).decode('utf-8') |
<SYSTEM_TASK:>
Activate the IPython hook for VisPy. If the app is not specified, the
<END_TASK>
<USER_TASK:>
Description:
def set_interactive(enabled=True, app=None):
    """Activate the IPython hook for VisPy. If the app is not specified, the
    default is used.
    """ |
    if enabled:
        # Register VisPy's input hook with IPython's GUI integration.
        inputhook_manager.enable_gui('vispy', app)
    else:
        inputhook_manager.disable_gui() |
<SYSTEM_TASK:>
Resize buffers only if necessary
<END_TASK>
<USER_TASK:>
Description:
def _resize_buffers(self, font_scale):
    """Resize buffers only if necessary""" |
    new_sizes = (font_scale,) + self.size
    if new_sizes == self._current_sizes:  # don't need resize
        return
    # Number of character rows/columns that fit at this font scale.
    self._n_rows = int(max(self.size[1] /
                           (self._char_height * font_scale), 1))
    self._n_cols = int(max(self.size[0] /
                           (self._char_width * font_scale), 1))
    # Per-cell glyph byte storage (two RGB planes per character cell).
    self._bytes_012 = np.zeros((self._n_rows, self._n_cols, 3), np.float32)
    self._bytes_345 = np.zeros((self._n_rows, self._n_cols, 3), np.float32)
    # Screen position of every character cell.
    pos = np.empty((self._n_rows, self._n_cols, 2), np.float32)
    C, R = np.meshgrid(np.arange(self._n_cols), np.arange(self._n_rows))
    # We are in left, top orientation
    x_off = 4.
    y_off = 4 - self.size[1] / font_scale
    pos[..., 0] = x_off + self._char_width * C
    pos[..., 1] = y_off + self._char_height * R
    self._position = VertexBuffer(pos)
    # Restore lines
    for ii, line in enumerate(self._text_lines[:self._n_rows]):
        self._insert_text_buf(line, ii)
    self._current_sizes = new_sizes |
<SYSTEM_TASK:>
Write text and scroll
<END_TASK>
<USER_TASK:>
Description:
def write(self, text='', wrap=True):
    """Write text and scroll
    Parameters
    ----------
    text : str
        Text to write. ``''`` can be used for a blank line, as a newline
        is automatically added to the end of each line.
    wrap : bool
        If True, long messages will be wrapped to span multiple lines.
    """ |
    # Clear line
    if not isinstance(text, string_types):  # py2/py3-compatible str check
        raise TypeError('text must be a string')
    # ensure we only have ASCII chars
    text = text.encode('utf-8').decode('ascii', errors='replace')
    # Defer the actual rendering to the next update/draw cycle.
    self._pending_writes.append((text, wrap))
    self.update() |
<SYSTEM_TASK:>
Do any pending text writes
<END_TASK>
<USER_TASK:>
Description:
def _do_pending_writes(self):
    """Do any pending text writes""" |
    for text, wrap in self._pending_writes:
        # truncate in case of *really* long messages
        text = text[-self._n_cols*self._n_rows:]
        text = text.split('\n')
        text = [t if len(t) > 0 else '' for t in text]
        nr, nc = self._n_rows, self._n_cols
        for para in text:
            # Without wrapping, anything past one row width is dropped.
            para = para[:nc] if not wrap else para
            lines = [para[ii:(ii+nc)] for ii in range(0, len(para), nc)]
            lines = [''] if len(lines) == 0 else lines
            for line in lines:
                # Update row and scroll if necessary
                self._text_lines.insert(0, line)
                self._text_lines = self._text_lines[:nr]
                # Shift the glyph buffers down one row to scroll.
                self._bytes_012[1:] = self._bytes_012[:-1]
                self._bytes_345[1:] = self._bytes_345[:-1]
                self._insert_text_buf(line, 0)
    self._pending_writes = [] |
<SYSTEM_TASK:>
find all template variables in self._code, excluding the
<END_TASK>
<USER_TASK:>
Description:
def _parse_template_vars(self):
    """ find all template variables in self._code, excluding the
    function name.
    """ |
    template_vars = set()
    for var in parsing.find_template_variables(self._code):
        var = var.lstrip('$')
        # The function's own name is substituted separately, not treated
        # as a template variable.
        if var == self.name:
            continue
        # $pre/$post are injected automatically by the compiler.
        if var in ('pre', 'post'):
            raise ValueError('GLSL uses reserved template variable $%s' %
                             var)
        template_vars.add(var)
    return template_vars |
<SYSTEM_TASK:>
Return code, with new name, expressions, and replacements applied.
<END_TASK>
<USER_TASK:>
Description:
def _get_replaced_code(self, names):
""" Return code, with new name, expressions, and replacements applied.
""" |
code = self._code
# Modify name
fname = names[self]
code = code.replace(" " + self.name + "(", " " + fname + "(")
# Apply string replacements first -- these may contain $placeholders
for key, val in self._replacements.items():
code = code.replace(key, val)
# Apply assignments to the end of the function
# Collect post lines
post_lines = []
for key, val in self._assignments.items():
if isinstance(key, Variable):
key = names[key]
if isinstance(val, ShaderObject):
val = val.expression(names)
line = ' %s = %s;' % (key, val)
post_lines.append(line)
# Add a default $post placeholder if needed
if 'post' in self._expressions:
post_lines.append(' $post')
# Apply placeholders for hooks
post_text = '\n'.join(post_lines)
if post_text:
post_text = '\n' + post_text + '\n'
code = code.rpartition('}')
code = code[0] + post_text + code[1] + code[2]
# Add a default $pre placeholder if needed
if 'pre' in self._expressions:
m = re.search(fname + r'\s*\([^{]*\)\s*{', code)
if m is None:
raise RuntimeError("Cound not find beginning of function '%s'"
% fname)
ind = m.span()[1]
code = code[:ind] + "\n $pre\n" + code[ind:]
# Apply template variables
for key, val in self._expressions.items():
val = val.expression(names)
search = r'\$' + key + r'($|[^a-zA-Z0-9_])'
code = re.sub(search, val+r'\1', code)
# Done
if '$' in code:
v = parsing.find_template_variables(code)
logger.warning('Unsubstituted placeholders in code: %s\n'
' replacements made: %s',
v, list(self._expressions.keys()))
return code + '\n' |
<SYSTEM_TASK:>
Append a new function to the end of this chain.
<END_TASK>
<USER_TASK:>
Description:
def append(self, function, update=True):
    """ Append a new function to the end of this chain.
    """ |
    self._funcs.append(function)
    # Track the function as a dependency of this chain.
    self._add_dep(function)
    if update:
        self._update() |
<SYSTEM_TASK:>
Add an item to the list unless it is already present.
<END_TASK>
<USER_TASK:>
Description:
def add(self, item, position=5):
    """Add an item to the list unless it is already present.
    If the item is an expression, then a semicolon will be appended to it
    in the final compiled code.
    """
    # Duplicates are silently ignored; the original position wins.
    if item not in self.items:
        self.items[item] = position
        self._add_dep(item)
        # Invalidate the cached ordering and notify listeners.
        self.order = None
        self.changed(code_changed=True)
<SYSTEM_TASK:>
Remove an item from the list.
<END_TASK>
<USER_TASK:>
Description:
def remove(self, item):
    """Remove an item from the list.
    """
    # KeyError propagates if the item was never added.
    del self.items[item]
    self._remove_dep(item)
    # Invalidate the cached ordering and notify listeners.
    self.order = None
    self.changed(code_changed=True)
<SYSTEM_TASK:>
Return an array of vertex indexes representing the convex hull.
<END_TASK>
<USER_TASK:>
Description:
def convex_hull(self):
    """Return an array of vertex indexes representing the convex hull.
    If faces have not been computed for this mesh, the function
    computes them.
    If no vertices or faces are specified, the function returns None.
    """ |
    # Lazy computation: the hull is produced as a side effect of
    # triangulation, so trigger it only when faces are missing.
    if self._faces is None:
        if self._vertices is None:
            # Nothing to build a hull from.
            return None
        self.triangulate()
    # NOTE(review): assumes triangulate() populates self._convex_hull
    # -- confirm against the triangulation implementation.
    return self._convex_hull
<SYSTEM_TASK:>
Triangulates the set of vertices and stores the triangles in faces and
<END_TASK>
<USER_TASK:>
Description:
def triangulate(self):
    """
    Triangulates the set of vertices and stores the triangles in faces and
    the convex hull in convex_hull.
    """ |
    npts = self._vertices.shape[0]
    # NOTE(review): this "closed polygon" test compares vertex 0 with
    # vertex 1; verify it should not compare with the *last* vertex.
    if np.any(self._vertices[0] != self._vertices[1]):
        # start != end, so edges must wrap around to beginning.
        edges = np.empty((npts, 2), dtype=np.uint32)
        edges[:, 0] = np.arange(npts)
        edges[:, 1] = edges[:, 0] + 1
        edges[-1, 1] = 0
    else:
        # start == end; no wrapping required.
        edges = np.empty((npts-1, 2), dtype=np.uint32)
        edges[:, 0] = np.arange(npts)
        edges[:, 1] = edges[:, 0] + 1
    tri = Triangulation(self._vertices, edges)
    tri.triangulate()
    # NOTE(review): the docstring says results are *stored*, but this
    # returns the points/triangles instead -- confirm intended contract.
    return tri.pts, tri.tris
<SYSTEM_TASK:>
Locate a filename in the shader library.
<END_TASK>
<USER_TASK:>
Description:
def find(name):
    """Locate a filename in the shader library.""" |
    # 1. An existing path (absolute or relative to cwd) wins outright.
    if op.exists(name):
        return name
    # 2. Search this module's directory plus the configured include path.
    path = op.dirname(__file__) or '.'
    paths = [path] + config['include_path']
    for path in paths:
        filename = op.abspath(op.join(path, name))
        if op.exists(filename):
            return filename
        # 3. Also look one level down, in immediate subdirectories.
        for d in os.listdir(path):
            fullpath = op.abspath(op.join(path, d))
            if op.isdir(fullpath):
                filename = op.abspath(op.join(fullpath, name))
                if op.exists(filename):
                    return filename
    # Not found anywhere.
    return None
<SYSTEM_TASK:>
Call a function repeatedly, up to ``times`` attempts, sleeping between attempts.
<END_TASK>
<USER_TASK:>
Description:
def expect(func, args, times=7, sleep_t=0.5):
    """Retry ``func(*args)`` up to ``times`` times, sleeping ``sleep_t``
    seconds between failed attempts; return the first successful result.""" |
    while times > 0:
        try:
            return func(*args)
        except Exception as e:
            times -= 1
            logger.debug("expect failed - attempts left: %d" % times)
            time.sleep(sleep_t)
            # Out of attempts: wrap and re-raise the last exception.
            # (Must stay inside the except block, where `e` is bound.)
            if times == 0:
                raise exceptions.BaseExc(e)
<SYSTEM_TASK:>
convert a string to float
<END_TASK>
<USER_TASK:>
Description:
def num(string):
    """Convert a numeric string such as "£ -3.25" to a float.

    Non-numeric characters are stripped before parsing and the first
    number found is returned.

    Parameters
    ----------
    string : str
        Text possibly containing a number.

    Returns
    -------
    float or None
        The first number found, or None if the string contains no number.

    Raises
    ------
    ValueError
        If *string* is not a str instance.
    """
    if not isinstance(string, str):
        raise ValueError("expected a str, got %r" % type(string))
    try:
        # Keep only characters that can be part of a number token.
        # (Raw strings avoid the invalid '\.' escape of the original.)
        cleaned = re.sub(r'[^a-zA-Z0-9.\-]', '', string)
        matches = re.findall(r'[-+]?\d*\.\d+|[-+]?\d+', cleaned)
        return float(matches[0])
    except IndexError:
        # findall returned no match: no number present.
        logger = logging.getLogger('tradingAPI.utils.num')
        logger.debug("number not found in %s" % string)
        return None
<SYSTEM_TASK:>
get the unit of number
<END_TASK>
<USER_TASK:>
Description:
def get_number_unit(number):
    """Return the smallest decimal unit of *number*.

    E.g. 3.25 -> 0.01, 2.5 -> 0.1, 7 -> 1.0.
    """
    # Normalise through float() so ints gain a '.0' fractional part.
    _, frac = str(float(number)).split('.')
    if float(frac) == 0:
        # No fractional digits: the unit is one.
        return 1.0
    # One '0' per extra fractional digit, then a trailing '1'.
    return float('0.' + '0' * (len(frac) - 1) + '1')
<SYSTEM_TASK:>
Set current capacity of the underlying array
<END_TASK>
<USER_TASK:>
Description:
def reserve(self, capacity):
    """Grow the underlying array so it can hold *capacity* items.

    The new size is rounded up to the next power of two; nothing happens
    if the array is already strictly larger than *capacity*.
    """
    if capacity < self._data.size:
        return  # already large enough
    rounded = int(2 ** np.ceil(np.log2(capacity)))  # next power of two
    self._data = np.resize(self._data, rounded)
<SYSTEM_TASK:>
Append data to the end.
<END_TASK>
<USER_TASK:>
Description:
def append(self, data, itemsize=None):
    """
    Append data to the end.
    Parameters
    ----------
    data : array_like
        An array, any object exposing the array interface, an object
        whose __array__ method returns an array, or any (nested) sequence.
    itemsize: int or 1-D array
        If `itemsize is an integer, N, the array will be divided
        into elements of size N. If such partition is not possible,
        an error is raised.
        If `itemsize` is 1-D array, the array will be divided into
        elements whose succesive sizes will be picked from itemsize.
        If the sum of itemsize values is different from array size,
        an error is raised.
    """ |
    # Appending is just inserting at the current end position.
    self.insert(len(self), data, itemsize)
<SYSTEM_TASK:>
Link this axis to a ViewBox
<END_TASK>
<USER_TASK:>
Description:
def link_view(self, view):
    """Link this axis to a ViewBox
    This makes it so that the axis's domain always matches the
    visible range in the ViewBox.
    Parameters
    ----------
    view : instance of ViewBox
        The ViewBox to link.
    """ |
    if view is self._linked_view:
        # Already linked to this view; nothing to do.
        return
    if self._linked_view is not None:
        # Unhook the previous view before switching.
        self._linked_view.scene.transform.changed.disconnect(
            self._view_changed)
    self._linked_view = view
    # Track transform changes so the axis domain follows the view.
    view.scene.transform.changed.connect(self._view_changed)
    # Sync immediately with the current view state.
    self._view_changed()
<SYSTEM_TASK:>
Return next power of 2 greater than or equal to n
<END_TASK>
<USER_TASK:>
Description:
def next_power_of_2(n):
    """Return the smallest power of two >= n, clamped to a minimum of 4."""
    n -= 1  # so that an exact power of two maps to itself
    bit = 1
    # Smear the highest set bit downward; stops once n+1 is a power of 2.
    while (n + 1) & n:
        n |= n >> bit
        bit <<= 1
    return max(4, n + 1)
<SYSTEM_TASK:>
Retrieve a graph layout
<END_TASK>
<USER_TASK:>
Description:
def get_layout(name, *args, **kwargs):
    """
    Retrieve a graph layout
    Some graph layouts accept extra options. Please refer to their
    documentation for more information.
    Parameters
    ----------
    name : string
        The name of the layout. The variable `AVAILABLE_LAYOUTS`
        contains all available layouts.
    *args
        Positional arguments which are passed to the layout.
    **kwargs
        Keyword arguments which are passed to the layout.
    Returns
    -------
    layout : callable
        The callable generator which will calculate the graph layout
    """ |
    if name not in _layout_map:
        raise KeyError("Graph layout '%s' not found. Should be one of %s"
                       % (name, AVAILABLE_LAYOUTS))
    layout = _layout_map[name]
    # Class-based layouts are instantiated with the user's arguments;
    # plain functions are returned as-is (args/kwargs are ignored then).
    if inspect.isclass(layout):
        layout = layout(*args, **kwargs)
    return layout
<SYSTEM_TASK:>
Given viewer session information, make sure the session information is
<END_TASK>
<USER_TASK:>
Description:
def update_viewer_state(rec, context):
    """
    Given viewer session information, make sure the session information is
    compatible with the current version of the viewers, and if not, update
    the session information in-place.
    """ |
    # Legacy sessions (no '_protocol' key) store options/properties in the
    # old flat layout; migrate them to the new 'state' structure.
    if '_protocol' not in rec:
        rec.pop('properties')
        rec['state'] = {}
        rec['state']['values'] = rec.pop('options')
        layer_states = []
        for layer in rec['layers']:
            state_id = str(uuid.uuid4())
            state_cls = STATE_CLASS[layer['_type'].split('.')[-1]]
            state = state_cls(layer=context.object(layer.pop('layer')))
            # Apply remaining layer properties in priority order so
            # dependent properties are set after their prerequisites.
            properties = set(layer.keys()) - set(['_type'])
            for prop in sorted(properties, key=state.update_priority, reverse=True):
                value = layer.pop(prop)
                value = context.object(value)
                # Old sessions used lowercase mode names; normalise them.
                if isinstance(value, six.string_types) and value == 'fixed':
                    value = 'Fixed'
                if isinstance(value, six.string_types) and value == 'linear':
                    value = 'Linear'
                setattr(state, prop, value)
            context.register_object(state_id, state)
            layer['state'] = state_id
            layer_states.append(state)
        list_id = str(uuid.uuid4())
        context.register_object(list_id, layer_states)
        rec['state']['values']['layers'] = list_id
        # 'visible_box' was renamed to 'visible_axes'.
        rec['state']['values']['visible_axes'] = rec['state']['values'].pop('visible_box')
<SYSTEM_TASK:>
Remove C-style comment from GLSL code string.
<END_TASK>
<USER_TASK:>
Description:
def remove_comments(code):
    """Strip C-style ``//`` and ``/* */`` comments from GLSL source.

    Quoted string literals are preserved even when they contain comment
    markers.
    """
    # Group 1 matches quoted strings, group 2 matches real comments; the
    # alternation guarantees comment markers inside strings are ignored.
    pattern = r"(\".*?\"|\'.*?\')|(/\*.*?\*/|//[^\r\n]*\n)"
    regex = re.compile(pattern, re.MULTILINE | re.DOTALL)

    def _sub(match):
        # Drop comments, keep quoted strings untouched.
        return "" if match.group(2) is not None else match.group(1)

    return regex.sub(_sub, code)
<SYSTEM_TASK:>
Merge all includes recursively.
<END_TASK>
<USER_TASK:>
Description:
def merge_includes(code):
    """Merge all includes recursively.""" |
    pattern = '\#\s*include\s*"(?P<filename>[a-zA-Z0-9\_\-\.\/]+)"'
    regex = re.compile(pattern)
    includes = []
    def replace(match):
        filename = match.group("filename")
        # Each file is inlined only once; later includes of the same
        # file are replaced with an empty string (deduplication).
        if filename not in includes:
            includes.append(filename)
            path = glsl.find(filename)
            if not path:
                logger.critical('"%s" not found' % filename)
                raise RuntimeError("File not found", filename)
            text = '\n// --- start of "%s" ---\n' % filename
            with open(path) as fh:
                text += fh.read()
            text += '// --- end of "%s" ---\n' % filename
            return text
        return ''
    # Limit recursion to depth 10
    # (inlined files may themselves contain #include directives).
    for i in range(10):
        if re.search(regex, code):
            code = re.sub(regex, replace, code)
        else:
            break
    return code
<SYSTEM_TASK:>
Add a new widget to this grid. This will cause other widgets in the
<END_TASK>
<USER_TASK:>
Description:
def add_widget(self, widget=None, row=None, col=None, row_span=1,
               col_span=1, **kwargs):
    """
    Add a new widget to this grid. This will cause other widgets in the
    grid to be resized to make room for the new widget. Can be used
    to replace a widget as well
    Parameters
    ----------
    widget : Widget | None
        The Widget to add. New widget is constructed if widget is None.
    row : int
        The row in which to add the widget (0 is the topmost row)
    col : int
        The column in which to add the widget (0 is the leftmost column)
    row_span : int
        The number of rows to be occupied by this widget. Default is 1.
    col_span : int
        The number of columns to be occupied by this widget. Default is 1.
    **kwargs : dict
        parameters sent to the new Widget that is constructed if
        widget is None
    Notes
    -----
    The widget's parent is automatically set to this grid, and all other
    parent(s) are removed.
    """ |
    # Default placement: the cell immediately after the last added widget.
    if row is None:
        row = self._next_cell[0]
    if col is None:
        col = self._next_cell[1]
    if widget is None:
        widget = Widget(**kwargs)
    else:
        # kwargs only make sense when we construct the widget ourselves.
        if kwargs:
            raise ValueError("cannot send kwargs if widget is given")
    _row = self._cells.setdefault(row, {})
    _row[col] = widget
    self._grid_widgets[self._n_added] = (row, col, row_span, col_span,
                                         widget)
    self._n_added += 1
    widget.parent = self
    # Next default cell: same row, just right of this widget's span.
    self._next_cell = [row, col+col_span]
    # Layout-solver variables for this cell's width/height.
    widget._var_w = Variable("w-(row: %s | col: %s)" % (row, col))
    widget._var_h = Variable("h-(row: %s | col: %s)" % (row, col))
    # update stretch based on colspan/rowspan
    # usually, if you make something consume more grids or columns,
    # you also want it to actually *take it up*, ratio wise.
    # otherwise, it will never *use* the extra rows and columns,
    # thereby collapsing the extras to 0.
    stretch = list(widget.stretch)
    stretch[0] = col_span if stretch[0] is None else stretch[0]
    stretch[1] = row_span if stretch[1] is None else stretch[1]
    widget.stretch = stretch
    self._need_solver_recreate = True
    return widget
<SYSTEM_TASK:>
Remove a widget from this grid
<END_TASK>
<USER_TASK:>
Description:
def remove_widget(self, widget):
    """Detach *widget* from this grid.

    Parameters
    ----------
    widget : Widget
        The Widget to remove.
    """
    # Keep every registration whose stored widget is not the one removed.
    kept = {key: val for key, val in self._grid_widgets.items()
            if val[-1] != widget}
    self._grid_widgets = kept
    # The layout constraints must be rebuilt without this widget.
    self._need_solver_recreate = True
<SYSTEM_TASK:>
Resize a widget in the grid to new dimensions.
<END_TASK>
<USER_TASK:>
Description:
def resize_widget(self, widget, row_span, col_span):
    """Resize a widget in the grid to new dimensions.
    Parameters
    ----------
    widget : Widget
        The widget to resize
    row_span : int
        The number of rows to be occupied by this widget.
    col_span : int
        The number of columns to be occupied by this widget.
    """ |
    # Linear search for the widget's current cell position.
    row = None
    col = None
    for (r, c, rspan, cspan, w) in self._grid_widgets.values():
        if w == widget:
            row = r
            col = c
            break
    if row is None or col is None:
        raise ValueError("%s not found in grid" % widget)
    # Resizing is implemented as remove + re-add with the new spans,
    # keeping the original (row, col) anchor.
    self.remove_widget(widget)
    self.add_widget(widget, row, col, row_span, col_span)
    self._need_solver_recreate = True
<SYSTEM_TASK:>
Helper to get vispy calling function from the stack
<END_TASK>
<USER_TASK:>
Description:
def _get_vispy_caller():
    """Helper to get vispy calling function from the stack""" |
    records = inspect.stack()
    # first few records are vispy-based logging calls
    # (skip them, then report the first frame from a vispy module).
    for record in records[5:]:
        module = record[0].f_globals['__name__']
        if module.startswith('vispy'):
            line = str(record[0].f_lineno)
            func = record[3]
            # Prefix with the class name for bound methods, if any.
            cls = record[0].f_locals.get('self', None)
            clsname = "" if cls is None else cls.__class__.__name__ + '.'
            caller = "{0}:{1}{2}({3}): ".format(module, clsname, func, line)
            return caller
    return 'unknown'
<SYSTEM_TASK:>
Convenience function for setting the logging level
<END_TASK>
<USER_TASK:>
Description:
def set_log_level(verbose, match=None, return_old=False):
    """Convenience function for setting the logging level
    Parameters
    ----------
    verbose : bool, str, int, or None
        The verbosity of messages to print. If a str, it can be either DEBUG,
        INFO, WARNING, ERROR, or CRITICAL. Note that these are for
        convenience and are equivalent to passing in logging.DEBUG, etc.
        For bool, True is the same as 'INFO', False is the same as 'WARNING'.
    match : str | None
        String to match. Only those messages that both contain a substring
        that regexp matches ``'match'`` (and the ``verbose`` level) will be
        displayed.
    return_old : bool
        If True, return the old verbosity level and old match.
    Notes
    -----
    If ``verbose=='debug'``, then the ``vispy`` method emitting the log
    message will be prepended to each log message, which is useful for
    debugging. If ``verbose=='debug'`` or ``match is not None``, then a
    small performance overhead is added. Thus it is suggested to only use
    these options when performance is not crucial.
    See also
    --------
    vispy.util.use_log_level
    """ |
    # This method is responsible for setting properties of the handler and
    # formatter such that proper messages (possibly with the vispy caller
    # prepended) are displayed. Storing log messages is only available
    # via the context handler (use_log_level), so that configuration is
    # done by the context handler itself.
    # Normalise bool/str inputs to a numeric logging level.
    if isinstance(verbose, bool):
        verbose = 'info' if verbose else 'warning'
    if isinstance(verbose, string_types):
        verbose = verbose.lower()
        if verbose not in logging_types:
            raise ValueError('Invalid argument "%s"' % verbose)
        verbose = logging_types[verbose]
    else:
        raise TypeError('verbose must be a bool or string')
    logger = logging.getLogger('vispy')
    old_verbose = logger.level
    old_match = _lh._vispy_set_match(match)
    logger.setLevel(verbose)
    # At DEBUG level (or lower), prepend the vispy caller to messages.
    if verbose <= logging.DEBUG:
        _lf._vispy_set_prepend(True)
    else:
        _lf._vispy_set_prepend(False)
    out = None
    if return_old:
        out = (old_verbose, old_match)
    return out
<SYSTEM_TASK:>
Helper for printing errors in callbacks
<END_TASK>
<USER_TASK:>
Description:
def _handle_exception(ignore_callback_errors, print_callback_errors, obj,
                      cb_event=None, node=None):
    """Helper for printing errors in callbacks
    See EventEmitter._invoke_callback for a use example.

    Must be called from inside an ``except`` block: it inspects
    ``sys.exc_info()`` and may re-raise the active exception.
    """ |
    # Per-object registry counting how often each callback/node has failed.
    if not hasattr(obj, '_vispy_err_registry'):
        obj._vispy_err_registry = {}
    registry = obj._vispy_err_registry
    if cb_event is not None:
        cb, event = cb_event
        exp_type = 'callback'
    else:
        exp_type = 'node'
    type_, value, tb = sys.exc_info()
    tb = tb.tb_next  # Skip *this* frame
    # Mirror what the default excepthook stores, for post-mortem debugging.
    sys.last_type = type_
    sys.last_value = value
    sys.last_traceback = tb
    del tb  # Get rid of it in this namespace
    # Handle
    if not ignore_callback_errors:
        # Bare raise re-raises the exception currently being handled.
        raise
    if print_callback_errors != "never":
        this_print = 'full'
        if print_callback_errors in ('first', 'reminders'):
            # need to check to see if we've hit this yet
            if exp_type == 'callback':
                key = repr(cb) + repr(event)
            else:
                key = repr(node)
            if key in registry:
                registry[key] += 1
                if print_callback_errors == 'first':
                    this_print = None
                else:  # reminders
                    ii = registry[key]
                    # Use logarithmic selection
                    # (1, 2, ..., 10, 20, ..., 100, 200, ...)
                    if ii == (2 ** int(np.log2(ii))):
                        this_print = ii
                    else:
                        this_print = None
            else:
                registry[key] = 1
        if this_print == 'full':
            logger.log_exception()
            if exp_type == 'callback':
                logger.error("Invoking %s for %s" % (cb, event))
            else:  # == 'node':
                logger.error("Drawing node %s" % node)
        elif this_print is not None:
            if exp_type == 'callback':
                logger.error("Invoking %s repeat %s"
                             % (cb, this_print))
            else:  # == 'node':
                logger.error("Drawing node %s repeat %s"
                             % (node, this_print))
<SYSTEM_TASK:>
Serialize a NumPy array.
<END_TASK>
<USER_TASK:>
Description:
def _serialize_buffer(buffer, array_serialization=None):
"""Serialize a NumPy array.""" |
if array_serialization == 'binary':
# WARNING: in NumPy 1.9, tostring() has been renamed to tobytes()
# but tostring() is still here for now for backward compatibility.
return buffer.ravel().tostring()
elif array_serialization == 'base64':
return {'storage_type': 'base64',
'buffer': base64.b64encode(buffer).decode('ascii')
}
raise ValueError("The array serialization method should be 'binary' or "
"'base64'.") |
<SYSTEM_TASK:>
Called when a dependency's expression has changed.
<END_TASK>
<USER_TASK:>
Description:
def _dep_changed(self, dep, code_changed=False, value_changed=False):
""" Called when a dependency's expression has changed.
""" |
self.changed(code_changed, value_changed) |
<SYSTEM_TASK:>
Inform dependents that this shaderobject has changed.
<END_TASK>
<USER_TASK:>
Description:
def changed(self, code_changed=False, value_changed=False):
    """Notify every registered dependent that this shader object changed."""
    for dependent in self._dependents:
        dependent._dep_changed(self, code_changed=code_changed,
                               value_changed=value_changed)
<SYSTEM_TASK:>
Pan the view.
<END_TASK>
<USER_TASK:>
Description:
def pan(self, *pan):
    """Pan the view.

    Parameters
    ----------
    *pan : length-2 sequence
        The distance to pan the view, in the coordinate system of the
        scene.  Accepts both ``pan(dx, dy)`` and ``pan((dx, dy))``.
    """
    # Unwrap a single sequence argument so both call styles work.
    offset = pan[0] if len(pan) == 1 else pan
    self.rect = self.rect + offset
<SYSTEM_TASK:>
The SubScene received a mouse event; update transform
<END_TASK>
<USER_TASK:>
Description:
def viewbox_mouse_event(self, event):
    """
    The SubScene received a mouse event; update transform
    accordingly.
    Parameters
    ----------
    event : instance of Event
        The event.
    """ |
    if event.handled or not self.interactive:
        return
    # Scrolling
    BaseCamera.viewbox_mouse_event(self, event)
    if event.type == 'mouse_wheel':
        # Zoom about the cursor position, scaled by the wheel delta.
        center = self._scene_transform.imap(event.pos)
        self.zoom((1 + self.zoom_factor) ** (-event.delta[1] * 30), center)
        event.handled = True
    elif event.type == 'mouse_move':
        if event.press_event is None:
            return
        modifiers = event.mouse_event.modifiers
        p1 = event.mouse_event.press_event.pos
        p2 = event.mouse_event.pos
        if 1 in event.buttons and not modifiers:
            # Translate: pan by the scene-space delta since last event.
            p1 = np.array(event.last_event.pos)[:2]
            p2 = np.array(event.pos)[:2]
            p1s = self._transform.imap(p1)
            p2s = self._transform.imap(p2)
            self.pan(p1s-p2s)
            event.handled = True
        elif 2 in event.buttons and not modifiers:
            # Zoom: drag distance maps to an exponential scale factor;
            # x/y are inverted relative to each other ([1, -1]).
            p1c = np.array(event.last_event.pos)[:2]
            p2c = np.array(event.pos)[:2]
            scale = ((1 + self.zoom_factor) **
                     ((p1c-p2c) * np.array([1, -1])))
            center = self._transform.imap(event.press_event.pos[:2])
            self.zoom(scale, center)
            event.handled = True
        else:
            event.handled = False
    elif event.type == 'mouse_press':
        # accept the event if it is button 1 or 2.
        # This is required in order to receive future events
        event.handled = event.button in [1, 2]
    else:
        event.handled = False
<SYSTEM_TASK:>
Set the volume data.
<END_TASK>
<USER_TASK:>
Description:
def set_data(self, vol, clim=None):
    """ Set the volume data.
    Parameters
    ----------
    vol : ndarray
        The 3D volume.
    clim : tuple | None
        Colormap limits to use. None will use the min and max values.
    """ |
    # Check volume
    if not isinstance(vol, np.ndarray):
        raise ValueError('Volume visual needs a numpy array.')
    if not ((vol.ndim == 3) or (vol.ndim == 4 and vol.shape[-1] <= 4)):
        raise ValueError('Volume visual needs a 3D image.')
    # Handle clim
    if clim is not None:
        clim = np.array(clim, float)
        if not (clim.ndim == 1 and clim.size == 2):
            raise ValueError('clim must be a 2-element array-like')
        self._clim = tuple(clim)
    if self._clim is None:
        self._clim = vol.min(), vol.max()
    # Apply clim: normalise the data into [0, 1].
    # NOTE(review): copy=False means a float32 input array may be
    # modified in place by the operations below -- confirm intended.
    vol = np.array(vol, dtype='float32', copy=False)
    if self._clim[1] == self._clim[0]:
        # Degenerate range: avoid division by zero.
        if self._clim[0] != 0.:
            vol *= 1.0 / self._clim[0]
    else:
        vol -= self._clim[0]
        vol /= self._clim[1] - self._clim[0]
    # Apply to texture
    self._tex.set_data(vol)  # will be efficient if vol is same shape
    # Shader expects the shape in (x, y, z) order, i.e. reversed.
    self.shared_program['u_shape'] = (vol.shape[2], vol.shape[1],
                                      vol.shape[0])
    shape = vol.shape[:3]
    if self._vol_shape != shape:
        self._vol_shape = shape
        self._need_vertex_update = True
    # NOTE(review): this assignment is redundant (already set above).
    self._vol_shape = shape
    # Get some stats
    self._kb_for_texture = np.prod(self._vol_shape) / 1024
<SYSTEM_TASK:>
This method is called immediately before each draw.
<END_TASK>
<USER_TASK:>
Description:
def _prepare_draw(self, view=None):
    """This method is called immediately before each draw.
    The *view* argument indicates which view is about to be drawn.

    Returns True to allow the draw to proceed.
    """ |
    # Upload position data only when it has changed since last draw.
    if self._changed['pos']:
        self.pos_buf.set_data(self._pos)
        self._changed['pos'] = False
    # Same for per-vertex colors; also rebind the shader attribute.
    if self._changed['color']:
        self.color_buf.set_data(self._color)
        self._program.vert['color'] = self.color_buf
        self._changed['color'] = False
    return True
<SYSTEM_TASK:>
Merge overlapping intervals.
<END_TASK>
<USER_TASK:>
Description:
def _merge_intervals(self, min_depth):
    """
    Merge overlapping intervals.
    This method is called only once in the constructor.
    """ |
    def add_interval(ret, start, stop):
        # When a minimum depth is requested, split [start, stop) into
        # pieces aligned on HEALPix cells of order ``min_depth``.
        if min_depth is not None:
            shift = 2 * (29 - min_depth)  # 2 bits per order step
            mask = (int(1) << shift) - 1
            if stop - start < mask:
                # Interval smaller than one aligned cell: keep as-is.
                ret.append((start, stop))
            else:
                ofs = start & mask  # offset of start within its cell
                st = start
                if ofs > 0:
                    # Leading partial cell up to the next boundary.
                    st = (start - ofs) + (mask + 1)
                    ret.append((start, st))
                # Full aligned cells.
                while st + mask + 1 < stop:
                    ret.append((st, st + mask + 1))
                    st = st + mask + 1
                # Trailing remainder.
                ret.append((st, stop))
        else:
            ret.append((start, stop))
    ret = []
    start = stop = None
    # Use numpy sort method
    # NOTE(review): axis=0 sorts columns independently; this is valid
    # only if intervals do not interleave pathologically -- confirm.
    self._intervals.sort(axis=0)
    for itv in self._intervals:
        if start is None:
            start, stop = itv
            continue
        # gap between intervals
        if itv[0] > stop:
            add_interval(ret, start, stop)
            start, stop = itv
        else:
            # merge intervals
            if itv[1] > stop:
                stop = itv[1]
    if start is not None and stop is not None:
        add_interval(ret, start, stop)
    self._intervals = np.asarray(ret)
<SYSTEM_TASK:>
Return the union between self and ``another_is``.
<END_TASK>
<USER_TASK:>
Description:
def union(self, another_is):
    """
    Return the union between self and ``another_is``.
    Parameters
    ----------
    another_is : `IntervalSet`
        an IntervalSet object.
    Returns
    -------
    interval : `IntervalSet`
        the union of self with ``another_is``.
    """ |
    result = IntervalSet()
    # Trivial cases: union with an empty set is the other operand.
    if another_is.empty():
        result._intervals = self._intervals
    elif self.empty():
        result._intervals = another_is._intervals
    else:
        # res has no overlapping intervals
        # ("in A or in B" is the union predicate for the sweep merge).
        result._intervals = IntervalSet.merge(self._intervals,
                                              another_is._intervals,
                                              lambda in_a, in_b: in_a or in_b)
    return result
<SYSTEM_TASK:>
Convert an IntervalSet using the NESTED numbering scheme to an IntervalSet containing UNIQ numbers for HEALPix
<END_TASK>
<USER_TASK:>
Description:
def to_nuniq_interval_set(cls, nested_is):
    """
    Convert an IntervalSet using the NESTED numbering scheme to an IntervalSet containing UNIQ numbers for HEALPix
    cells.
    Parameters
    ----------
    nested_is : `IntervalSet`
        IntervalSet object storing HEALPix cells as [ipix*4^(29-order), (ipix+1)*4^(29-order)[ intervals.
    Returns
    -------
    interval : `IntervalSet`
        IntervalSet object storing HEALPix cells as [ipix + 4*4^(order), ipix+1 + 4*4^(order)[ intervals.
    """ |
    r2 = nested_is.copy()
    res = []
    if r2.empty():
        return IntervalSet()
    # Walk orders from coarse to fine; at each order, extract the part of
    # the remaining intervals that is exactly representable at that order.
    order = 0
    while not r2.empty():
        shift = int(2 * (IntervalSet.HPY_MAX_ORDER - order))
        ofs = (int(1) << shift) - 1
        ofs2 = int(1) << (2 * order + 2)  # UNIQ offset: 4 * 4^order
        r4 = []
        for iv in r2._intervals:
            # Round the interval inward to cell boundaries at this order.
            a = (int(iv[0]) + ofs) >> shift
            b = int(iv[1]) >> shift
            c = a << shift
            d = b << shift
            if d > c:
                # [c, d) is covered at this order; record its UNIQ range
                # and remember it for removal from the working set.
                r4.append((c, d))
                res.append((a + ofs2, b + ofs2))
        if len(r4) > 0:
            r4_is = IntervalSet(np.asarray(r4))
            r2 = r2.difference(r4_is)
        order += 1
    return IntervalSet(np.asarray(res))
<SYSTEM_TASK:>
Convert an IntervalSet containing NUNIQ intervals to an IntervalSet representing HEALPix
<END_TASK>
<USER_TASK:>
Description:
def from_nuniq_interval_set(cls, nuniq_is):
    """
    Convert an IntervalSet containing NUNIQ intervals to an IntervalSet representing HEALPix
    cells following the NESTED numbering scheme.
    Parameters
    ----------
    nuniq_is : `IntervalSet`
        IntervalSet object storing HEALPix cells as [ipix + 4*4^(order), ipix+1 + 4*4^(order)[ intervals.
    Returns
    -------
    interval : `IntervalSet`
        IntervalSet object storing HEALPix cells as [ipix*4^(29-order), (ipix+1)*4^(29-order)[ intervals.
    """ |
    nested_is = IntervalSet()
    # Appending a list is faster than appending a numpy array
    # For these algorithms we append a list and create the interval set from the finished list
    rtmp = []
    last_order = 0
    intervals = nuniq_is._intervals
    diff_order = IntervalSet.HPY_MAX_ORDER
    shift_order = 2 * diff_order
    # Expand each UNIQ pixel to its NESTED range at maximum order;
    # flush the pending list whenever the order changes.
    for interval in intervals:
        for j in range(interval[0], interval[1]):
            order, i_pix = uniq2orderipix(j)
            if order != last_order:
                nested_is = nested_is.union(IntervalSet(np.asarray(rtmp)))
                rtmp = []
                last_order = order
                diff_order = IntervalSet.HPY_MAX_ORDER - order
                shift_order = 2 * diff_order
            rtmp.append((i_pix << shift_order, (i_pix + 1) << shift_order))
    # Flush the final batch.
    nested_is = nested_is.union(IntervalSet(np.asarray(rtmp)))
    return nested_is
<SYSTEM_TASK:>
Merge two lists of intervals according to the boolean function op
<END_TASK>
<USER_TASK:>
Description:
def merge(a_intervals, b_intervals, op):
    """
    Merge two lists of intervals according to the boolean function op
    ``a_intervals`` and ``b_intervals`` need to be sorted and consistent (no overlapping intervals).
    This operation keeps the resulting interval set consistent.
    Parameters
    ----------
    a_intervals : `~numpy.ndarray`
        A sorted merged list of intervals represented as a N x 2 numpy array
    b_intervals : `~numpy.ndarray`
        A sorted merged list of intervals represented as a N x 2 numpy array
    op : `function`
        Lambda function taking two params and returning the result of the operation between
        these two params.
        Example : lambda in_a, in_b: in_a and in_b describes the intersection of ``a_intervals`` and
        ``b_intervals`` whereas lambda in_a, in_b: in_a or in_b describes the union of ``a_intervals`` and
        ``b_intervals``.
    Returns
    -------
    array : `numpy.ndarray`
        a N x 2 numpy containing intervals resulting from the op between ``a_intervals`` and ``b_intervals``.
    """ |
    # Classic sweep-line merge over the flattened endpoint lists.
    a_endpoints = a_intervals.flatten().tolist()
    b_endpoints = b_intervals.flatten().tolist()
    # Sentinel beyond every endpoint terminates the sweep.
    sentinel = max(a_endpoints[-1], b_endpoints[-1]) + 1
    a_endpoints += [sentinel]
    b_endpoints += [sentinel]
    a_index = 0
    b_index = 0
    res = []
    scan = min(a_endpoints[0], b_endpoints[0])
    while scan < sentinel:
        # Even index -> before an interval start; odd -> inside one.
        in_a = not ((scan < a_endpoints[a_index]) ^ (a_index % 2))
        in_b = not ((scan < b_endpoints[b_index]) ^ (b_index % 2))
        in_res = op(in_a, in_b)
        # Emit an endpoint whenever the combined in/out state flips.
        if in_res ^ (len(res) % 2):
            res += [scan]
        if scan == a_endpoints[a_index]:
            a_index += 1
        if scan == b_endpoints[b_index]:
            b_index += 1
        scan = min(a_endpoints[a_index], b_endpoints[b_index])
    # Pair up the emitted endpoints into (start, stop) rows.
    return np.asarray(res).reshape((-1, 2))
<SYSTEM_TASK:>
Delete the object from GPU memory.
<END_TASK>
<USER_TASK:>
Description:
def delete(self):
    """ Delete the object from GPU memory.
    Note that the GPU object will also be deleted when this gloo
    object is about to be deleted. However, sometimes you want to
    delete the GPU object explicitly.
    """ |
    # We only allow the object from being deleted once, otherwise
    # we might be deleting another GPU object that got our gl-id
    # after our GPU object was deleted. Also note that e.g.
    # DataBufferView does not have the _glir attribute.
    if hasattr(self, '_glir'):
        # Send our final command into the queue
        self._glir.command('DELETE', self._id)
        # Tell master glir queue that this queue is no longer being used
        self._glir._deletable = True
        # Detach the queue (makes the hasattr guard above fail next time)
        del self._glir
<SYSTEM_TASK:>
Rebuild the _data_lookup_fn using different interpolations within
<END_TASK>
<USER_TASK:>
Description:
def _build_interpolation(self):
    """Rebuild the _data_lookup_fn using different interpolations within
    the shader
    """ |
    interpolation = self._interpolation
    # Pick the shader snippet implementing the requested interpolation.
    self._data_lookup_fn = self._interpolation_fun[interpolation]
    self.shared_program.frag['get_data'] = self._data_lookup_fn
    # only 'bilinear' uses 'linear' texture interpolation
    if interpolation == 'bilinear':
        texture_interpolation = 'linear'
    else:
        # 'nearest' (and also 'bilinear') doesn't use spatial_filters.frag
        # so u_kernel and shape setting is skipped
        texture_interpolation = 'nearest'
        if interpolation != 'nearest':
            # Kernel-based filters need the filter kernel and data shape.
            self.shared_program['u_kernel'] = self._kerneltex
            self._data_lookup_fn['shape'] = self._data.shape[:2][::-1]
    if self._texture.interpolation != texture_interpolation:
        self._texture.interpolation = texture_interpolation
    self._data_lookup_fn['texture'] = self._texture
    self._need_interpolation_update = False
<SYSTEM_TASK:>
Rebuild the vertex buffers used for rendering the image when using
<END_TASK>
<USER_TASK:>
Description:
def _build_vertex_data(self):
    """Rebuild the vertex buffers used for rendering the image when using
    the subdivide method.
    """ |
    grid = self._grid
    # Size of one grid cell in normalized [0, 1] texture coordinates.
    w = 1.0 / grid[1]
    h = 1.0 / grid[0]
    # Two triangles (6 vertices) forming one cell-sized quad at origin.
    quad = np.array([[0, 0, 0], [w, 0, 0], [w, h, 0],
                     [0, 0, 0], [w, h, 0], [0, h, 0]],
                    dtype=np.float32)
    quads = np.empty((grid[1], grid[0], 6, 3), dtype=np.float32)
    quads[:] = quad
    # Per-cell offsets: translate each quad copy to its grid position.
    mgrid = np.mgrid[0.:grid[1], 0.:grid[0]].transpose(1, 2, 0)
    mgrid = mgrid[:, :, np.newaxis, :]
    mgrid[..., 0] *= w
    mgrid[..., 1] *= h
    quads[..., :2] += mgrid
    tex_coords = quads.reshape(grid[1]*grid[0]*6, 3)
    tex_coords = np.ascontiguousarray(tex_coords[:, :2])
    # Vertex positions are texture coordinates scaled to image size.
    vertices = tex_coords * self.size
    self._subdiv_position.set_data(vertices.astype('float32'))
    self._subdiv_texcoord.set_data(tex_coords.astype('float32'))
<SYSTEM_TASK:>
Given a path P, return the baked vertices as they should be copied in
<END_TASK>
<USER_TASK:>
Description:
def bake(self, P, key='curr', closed=False, itemsize=None):
    """
    Given a path P, return the baked vertices as they should be copied in
    the collection if the path has already been appended.
    Example:
    --------
    paths.append(P)
    P *= 2
    paths['prev'][0] = bake(P,'prev')
    paths['curr'][0] = bake(P,'curr')
    paths['next'][0] = bake(P,'next')
    """ |
    itemsize = itemsize or len(P)
    itemcount = len(P) / itemsize  # noqa
    n = itemsize
    # Build an index array I that selects, for each segment vertex, the
    # previous/current/next path point (with wrap-around when closed).
    if closed:
        I = np.arange(n + 3)
        if key == 'prev':
            I -= 2
            I[0], I[1], I[-1] = n - 1, n - 1, n - 1
        elif key == 'next':
            I[0], I[-3], I[-2], I[-1] = 1, 0, 1, 1
        else:
            I -= 1
            I[0], I[-1], I[n + 1] = 0, 0, 0
    else:
        I = np.arange(n + 2)
        if key == 'prev':
            I -= 2
            I[0], I[1], I[-1] = 0, 0, n - 2
        elif key == 'next':
            I[0], I[-1], I[-2] = 1, n - 1, n - 1
        else:
            I -= 1
            I[0], I[-1] = 0, n - 1
    # Each path point produces two segment vertices (for line thickness).
    I = np.repeat(I, 2)
    return P[I]
<SYSTEM_TASK:>
Stop all timers in a canvas.
<END_TASK>
<USER_TASK:>
Description:
def _stop_timers(canvas):
    """Stop all timers in a canvas.""" |
    # Scan every attribute of the canvas and stop those that are Timers.
    for attr in dir(canvas):
        try:
            attr_obj = getattr(canvas, attr)
        except NotImplementedError:
            # This try/except is needed because canvas.position raises
            # an error (it is not implemented in this backend).
            attr_obj = None
        if isinstance(attr_obj, Timer):
            attr_obj.stop()
<SYSTEM_TASK:>
Print stack trace from call that didn't originate from here
<END_TASK>
<USER_TASK:>
Description:
def _last_stack_str():
"""Print stack trace from call that didn't originate from here""" |
stack = extract_stack()
for s in stack[::-1]:
if op.join('vispy', 'gloo', 'buffer.py') not in __file__:
break
return format_list([s])[0] |
<SYSTEM_TASK:>
GLSL declaration strings required for a variable to hold this data.
<END_TASK>
<USER_TASK:>
Description:
def glsl_type(self):
    """Return the (qualifier, type) pair for declaring this data in GLSL.

    Returns None when no dtype has been set yet.
    """
    if self.dtype is None:
        return None
    field = self.dtype[0]
    # Sub-array fields (e.g. float32 x 3) map to vecN; scalars to
    # float/int depending on the base kind.
    count = field.shape[0] if field.shape else 1
    if count > 1:
        glsl = 'vec%d' % count
    elif 'f' in field.base.kind:
        glsl = 'float'
    else:
        glsl = 'int'
    return 'attribute', glsl
<SYSTEM_TASK:>
Rename all objects like "name_1" to avoid conflicts. Objects are
<END_TASK>
<USER_TASK:>
Description:
def _rename_objects_pretty(self):
    """ Rename all objects like "name_1" to avoid conflicts. Objects are
    only renamed if necessary.
    This method produces more readable GLSL, but is rather slow.
    """ |
    #
    # 1. For each object, add its static names to the global namespace
    # and make a list of the shaders used by the object.
    #
    # {name: obj} mapping for finding unique names
    # initialize with reserved keywords.
    self._global_ns = dict([(kwd, None) for kwd in gloo.util.KEYWORDS])
    # functions are local per-shader
    self._shader_ns = dict([(shader, {}) for shader in self.shaders])
    # for each object, keep a list of shaders the object appears in
    obj_shaders = {}
    for shader_name, deps in self._shader_deps.items():
        for dep in deps:
            # Add static names to namespace
            for name in dep.static_names():
                self._global_ns[name] = None
            obj_shaders.setdefault(dep, []).append(shader_name)
    #
    # 2. Assign new object names
    #
    # Per-base-name counter used to generate the "_1", "_2", ... suffixes.
    name_index = {}
    for obj, shaders in obj_shaders.items():
        name = obj.name
        if self._name_available(obj, name, shaders):
            # hooray, we get to keep this name
            self._assign_name(obj, name, shaders)
        else:
            # boo, find a new name
            # Keep incrementing the suffix until a free name is found.
            while True:
                index = name_index.get(name, 0) + 1
                name_index[name] = index
                ext = '_%d' % index
                # Truncate so that base name + suffix stays within 32
                # characters — presumably a GLSL identifier-length safety
                # margin; TODO confirm against gloo conventions.
                new_name = name[:32-len(ext)] + ext
                if self._name_available(obj, new_name, shaders):
                    self._assign_name(obj, new_name, shaders)
                    break
<SYSTEM_TASK:>
updates the positions of the colorbars and labels
<END_TASK>
<USER_TASK:>
Description:
def _update_positions(self):
    """Recompute and apply positions/anchors for the colorbar, its border,
    the label and both tick labels.
    """
    self._colorbar.pos = self._pos
    self._border.pos = self._pos
    # Vertical bars get a rotated (vertical) label.
    if self._orientation in ("right", "left"):
        self._label.rotation = -90
    label_anchors = ColorBarVisual._get_label_anchors(
        center=self._pos,
        halfdim=self._halfdim,
        orientation=self._orientation,
        transforms=self.label.transforms)
    self._label.anchors = label_anchors
    tick_anchors = ColorBarVisual._get_ticks_anchors(
        center=self._pos,
        halfdim=self._halfdim,
        orientation=self._orientation,
        transforms=self.label.transforms)
    for tick in self._ticks:
        tick.anchors = tick_anchors
    label_pos, ticks_pos = ColorBarVisual._calc_positions(
        center=self._pos,
        halfdim=self._halfdim,
        border_width=self.border_width,
        orientation=self._orientation,
        transforms=self.transforms)
    self._label.pos = label_pos
    self._ticks[0].pos = ticks_pos[0]
    self._ticks[1].pos = ticks_pos[1]
<SYSTEM_TASK:>
Calculate the text centeritions given the ColorBar
<END_TASK>
<USER_TASK:>
Description:
def _calc_positions(center, halfdim, border_width,
                    orientation, transforms):
    """
    Calculate the label and tick positions given the ColorBar parameters.
    Note
    ----
    This is static because in principle, this
    function does not need access to the state of the ColorBar
    at all. It's a computation function that computes coordinate
    transforms
    Parameters
    ----------
    center: tuple (x, y)
        Center of the ColorBar
    halfdim: tuple (halfw, halfh)
        Half of the dimensions measured from the center
    border_width: float
        Width of the border of the ColorBar
    orientation: "top" | "bottom" | "left" | "right"
        Position of the label with respect to the ColorBar
    transforms: TransformSystem
        the transforms of the ColorBar
    Returns
    -------
    (label_pos, ticks_pos) in visual coordinates.
    Raises
    ------
    ValueError
        If ``orientation`` is not one of the four allowed values.
    """
    (x, y) = center
    (halfw, halfh) = halfdim
    visual_to_doc = transforms.get_transform('visual', 'document')
    doc_to_visual = transforms.get_transform('document', 'visual')
    # Half-extent vectors of the bar expressed in document coordinates.
    doc_x = visual_to_doc.map(np.array([halfw, 0, 0, 0], dtype=np.float32))
    doc_y = visual_to_doc.map(np.array([0, halfh, 0, 0], dtype=np.float32))
    # Normalize orientation so the vectors always point in the positive
    # document direction, regardless of any axis flip in the transform.
    if doc_x[0] < 0:
        doc_x *= -1
    if doc_y[1] < 0:
        doc_y *= -1
    # Vector pointing from the bar towards the side where the label goes.
    if orientation == "top":
        doc_perp_vector = -doc_y
    elif orientation == "bottom":
        doc_perp_vector = doc_y
    elif orientation == "left":
        doc_perp_vector = -doc_x
    elif orientation == "right":
        # Bug fix: this branch used to be a bare ``if`` after the chain;
        # an invalid orientation then fell through to a NameError below.
        doc_perp_vector = doc_x
    else:
        raise ValueError('orientation must be one of "top", "bottom", '
                         '"left", "right", got %r' % (orientation,))
    perp_len = np.linalg.norm(doc_perp_vector)
    doc_perp_vector /= perp_len
    # Push the text past the border plus a small fixed pixel margin,
    # scaled by the visual's padding factor.
    perp_len += border_width
    perp_len += 5  # pixels
    perp_len *= ColorBarVisual.text_padding_factor
    doc_perp_vector *= perp_len
    doc_center = visual_to_doc.map(np.array([x, y, 0, 0],
                                            dtype=np.float32))
    doc_label_pos = doc_center + doc_perp_vector
    visual_label_pos = doc_to_visual.map(doc_label_pos)[:3]
    # Ticks sit at both ends of the bar, on the same side as the label.
    if orientation in ["top", "bottom"]:
        doc_ticks_pos = [doc_label_pos - doc_x,
                         doc_label_pos + doc_x]
    else:
        doc_ticks_pos = [doc_label_pos + doc_y,
                         doc_label_pos - doc_y]
    visual_ticks_pos = []
    visual_ticks_pos.append(doc_to_visual.map(doc_ticks_pos[0])[:3])
    visual_ticks_pos.append(doc_to_visual.map(doc_ticks_pos[1])[:3])
    return (visual_label_pos, visual_ticks_pos)
<SYSTEM_TASK:>
The size of the ColorBar
<END_TASK>
<USER_TASK:>
Description:
def size(self):
    """ The size of the ColorBar
    Returns
    -------
    size: (major_axis_length, minor_axis_length)
        major and minor axis are defined by the
        orientation of the ColorBar
    """
    halfw, halfh = self._halfdim
    horizontal = self.orientation in ("top", "bottom")
    major, minor = (halfw, halfh) if horizontal else (halfh, halfw)
    return (major * 2., minor * 2.)
<SYSTEM_TASK:>
Return a Rect covering the same area, but with height and width
<END_TASK>
<USER_TASK:>
Description:
def normalized(self):
    """Return an equivalent Rect whose width and height are guaranteed
    to be non-negative.
    """
    x0 = min(self.left, self.right)
    y0 = min(self.top, self.bottom)
    return Rect(pos=(x0, y0),
                size=(abs(self.width), abs(self.height)))
<SYSTEM_TASK:>
Return a Rect with the same bounds but with axes inverted
<END_TASK>
<USER_TASK:>
Description:
def flipped(self, x=False, y=True):
    """Return a Rect with the same bounds but with the requested axes
    inverted.
    Parameters
    ----------
    x : bool
        Flip the X axis.
    y : bool
        Flip the Y axis.
    Returns
    -------
    rect : instance of Rect
        The flipped rectangle.
    """
    pos = list(self.pos)
    size = list(self.size)
    # Flipping an axis moves the origin to the far edge and negates
    # the extent, so the rectangle covers the same area.
    for axis, do_flip in ((0, x), (1, y)):
        if do_flip:
            pos[axis] = pos[axis] + size[axis]
            size[axis] = -size[axis]
    return Rect(pos, size)
<SYSTEM_TASK:>
Return array of coordinates that can be mapped by Transform
<END_TASK>
<USER_TASK:>
Description:
def _transform_in(self):
"""Return array of coordinates that can be mapped by Transform
classes.""" |
return np.array([
[self.left, self.bottom, 0, 1],
[self.right, self.top, 0, 1]]) |
<SYSTEM_TASK:>
Helper to calculate the delta position
<END_TASK>
<USER_TASK:>
Description:
def _calculate_delta_pos(adjacency_arr, pos, t, optimal):
"""Helper to calculate the delta position""" |
# XXX eventually this should be refactored for the sparse case to only
# do the necessary pairwise distances
delta = pos[:, np.newaxis, :] - pos
# Distance between points
distance2 = (delta*delta).sum(axis=-1)
# Enforce minimum distance of 0.01
distance2 = np.where(distance2 < 0.0001, 0.0001, distance2)
distance = np.sqrt(distance2)
# Displacement "force"
displacement = np.zeros((len(delta), 2))
for ii in range(2):
displacement[:, ii] = (
delta[:, :, ii] *
((optimal * optimal) / (distance*distance) -
(adjacency_arr * distance) / optimal)).sum(axis=1)
length = np.sqrt((displacement**2).sum(axis=1))
length = np.where(length < 0.01, 0.1, length)
delta_pos = displacement * t / length[:, np.newaxis]
return delta_pos |
<SYSTEM_TASK:>
You can insert arbitrary business logic code here
<END_TASK>
<USER_TASK:>
Description:
def get_recipe_intent_handler(request):
    """
    Handle the GetRecipeIntent: read the 'Ingredient' slot, remember it in
    the session, and return a speech response with an attached card.
    """
    # Gets an Ingredient Slot from the Request object.
    ingredient = request.slots["Ingredient"]
    # Bug fix: compare to None with ``is`` (PEP 8), not ``==``.
    if ingredient is None:
        return alexa.create_response("Could not find an ingredient!")
    # All manipulations to the request's session object are automatically
    # reflected in the request returned to Amazon. E.g. this statement adds
    # a new session attribute (automatically returned with the response)
    # storing the last seen ingredient value in the 'last_ingredient' key.
    # Modifying state like this saves us from explicitly having to return
    # Session objects after every response.
    request.session['last_ingredient'] = ingredient
    # alexa can also build cards which can be sent as part of the response
    card = alexa.create_card(title="GetRecipeIntent activated", subtitle=None,
                             content="asked alexa to find a recipe using {}".format(ingredient))
    return alexa.create_response("Finding a recipe with the ingredient {}".format(ingredient),
                                 end_session=False, card_obj=card)
<SYSTEM_TASK:>
Set the usage options for vispy
<END_TASK>
<USER_TASK:>
Description:
def use(app=None, gl=None):
    """Select the vispy app backend and/or GL backend.
    Parameters
    ----------
    app : str | None
        App backend name (case insensitive). Standard backends: 'PyQt4',
        'PyQt5', 'PySide', 'PyGlet', 'Glfw', 'SDL2', 'osmesa'; additional:
        'ipynb_vnc' (experimental). If given, ``vispy.app.use_app()`` is
        called.
    gl : str | None
        GL backend name (case insensitive): 'gl2', 'pyopengl2', 'es2'
        (to come), or 'gl+'. If given, ``vispy.gloo.use_gl()`` is called.
    Notes
    -----
    At least one of *app* or *gl* must be provided. If an app backend
    name is provided and that backend could not be loaded, an error is
    raised. If no backend name is provided, vispy first tries already
    imported GUI toolkits, then the configured 'default_backend', then a
    predetermined order.
    See Also
    --------
    vispy.app.use_app
    vispy.gloo.gl.use_gl
    """
    if app is None and gl is None:
        raise TypeError('Must specify at least one of "app" or "gl".')
    # Example for future. This wont work (yet).
    if app == 'ipynb_webgl':
        app, gl = 'headless', 'webgl'
    if app == 'osmesa':
        # OSMesa supplies its own GL library; an explicit gl is an error.
        from ..util.osmesa_gl import fix_osmesa_gl_lib
        fix_osmesa_gl_lib()
        if gl is not None:
            raise ValueError("Do not specify gl when using osmesa")
    # Apply now
    if gl:
        from .. import gloo, config
        config['gl_backend'] = gl
        gloo.gl.use_gl(gl)
    if app:
        from ..app import use_app
        use_app(app)
<SYSTEM_TASK:>
Run command using subprocess.Popen
<END_TASK>
<USER_TASK:>
Description:
def run_subprocess(command, return_code=False, **kwargs):
    """Run command using subprocess.Popen
    Run command and wait for command to complete. If the return code was
    zero then return, otherwise raise CalledProcessError.
    By default, this will also add stdout= and stderr=subprocess.PIPE
    to the call to Popen to suppress printing to the terminal.
    Parameters
    ----------
    command : list of str
        Command to run as subprocess (see subprocess.Popen documentation).
    return_code : bool
        If True, the returncode will be returned, and no error checking
        will be performed (so this function should always return without
        error).
    **kwargs : dict
        Additional kwargs to pass to ``subprocess.Popen``.
    Returns
    -------
    stdout : str
        Stdout returned by the process.
    stderr : str
        Stderr returned by the process.
    code : int
        The command exit code. Only returned if ``return_code`` is True.
    """
    # code adapted with permission from mne-python
    use_kwargs = dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    use_kwargs.update(kwargs)
    p = subprocess.Popen(command, **use_kwargs)
    output = p.communicate()
    # communicate() may return bytes, str, or None depending on the kwargs
    # passed to Popen(). Convert all to unicode str:
    output = ['' if s is None else s for s in output]
    output = [s.decode('utf-8') if isinstance(s, bytes) else s for s in output]
    output = tuple(output)
    if not return_code and p.returncode:
        print(output[0])
        print(output[1])
        # Bug fix: the old code probed CalledProcessError.__init__ with
        # inspect.getargspec, which was removed in Python 3.11 and made
        # this error path itself crash. On Python 3 the three-argument
        # form (with ``output``) is always available.
        raise subprocess.CalledProcessError(p.returncode, command, output)
    if return_code:
        output = output + (p.returncode,)
    return output
<SYSTEM_TASK:>
Start the timer.
<END_TASK>
<USER_TASK:>
Description:
def start(self, interval=None, iterations=None):
    """Start the timer.
    A timeout event is generated every *interval* seconds; if *interval*
    is None the current ``self.interval`` is reused. *iterations* caps
    how many events are emitted before the timer stops (a negative value
    means run until ``stop()`` is called); if None, the previous
    ``self.max_iterations`` is kept.
    If the timer is already running, this call is a no-op: interval,
    iteration count and events are all left untouched.
    """
    if self.running:
        return  # no-op while already running
    self.iter_count = 0
    if interval is not None:
        self.interval = interval
    if iterations is not None:
        self.max_iterations = iterations
    self._backend._vispy_start(self.interval)
    self._running = True
    self._first_emit_time = precision_time()
    self._last_emit_time = precision_time()
    self.events.start(type='timer_start')
<SYSTEM_TASK:>
Returns a numpy array of all the HEALPix indexes contained in the MOC at its max order.
<END_TASK>
<USER_TASK:>
Description:
def _best_res_pixels(self):
    """
    Returns a numpy array of all the HEALPix indexes contained in the MOC
    at its max order.
    Returns
    -------
    result : `~numpy.ndarray`
        The array of HEALPix at ``max_order``
    """
    # Bit shift between ``max_order`` and the deepest internal HEALPix
    # order used by the interval representation.
    shift = 2 * (AbstractMOC.HPY_MAX_NORDER - self.max_order)
    # Vectorized replacement of the old per-value python append loop:
    # each interval [a, b) at the deepest order maps to the pixel range
    # [a >> shift, b >> shift) at ``max_order``.
    ranges = [np.arange(iv[0] >> shift, iv[1] >> shift)
              for iv in self._interval_set._intervals]
    if not ranges:
        # Keep an integer dtype even for an empty MOC (np.asarray([])
        # would yield float64).
        return np.array([], dtype=np.int64)
    return np.concatenate(ranges)
<SYSTEM_TASK:>
Extends the MOC instance so that it includes the HEALPix cells touching its border.
<END_TASK>
<USER_TASK:>
Description:
def add_neighbours(self):
    """
    Extend this MOC in place so it also covers the HEALPix cells touching
    its border. The added cells have the MOC's maximum depth.
    Returns
    -------
    moc : `~mocpy.moc.MOC`
        self extended by one degree of neighbours.
    """
    # Pixels of the MOC at its deepest order.
    ipix = self._best_res_pixels()
    hp = HEALPix(nside=(1 << self.max_order), order='nested')
    # All MOC pixels plus one ring of neighbours around them...
    extended = AbstractMOC._neighbour_pixels(hp, ipix)
    # ...keep only the newly-added ring lying at the border.
    border = np.setdiff1d(extended, ipix)
    shift = 2 * (AbstractMOC.HPY_MAX_NORDER - self.max_order)
    border_itv = np.vstack((border << shift, (border + 1) << shift)).T
    # Merge the border cells into this MOC's interval set.
    self._interval_set = self._interval_set.union(IntervalSet(border_itv))
    return self
<SYSTEM_TASK:>
Removes from the MOC instance the HEALPix cells located at its border.
<END_TASK>
<USER_TASK:>
Description:
def remove_neighbours(self):
    """
    Shrink this MOC in place by removing the HEALPix cells located at its
    border. The removed cells have the MOC's maximum depth.
    Returns
    -------
    moc : `~mocpy.moc.MOC`
        self minus its HEALPix cells located at its border.
    """
    # Pixels of the MOC at its deepest order.
    ipix = self._best_res_pixels()
    hp = HEALPix(nside=(1 << self.max_order), order='nested')
    # MOC pixels plus one ring of outside neighbours.
    extended = AbstractMOC._neighbour_pixels(hp, ipix)
    # Cells in exactly one of (extended, ipix) == the outside ring.
    outside_ring = np.setxor1d(extended, ipix)
    # Cells adjacent to the outside ring include the MOC's own border;
    # drop those from the original pixel set.
    adjacent = AbstractMOC._neighbour_pixels(hp, outside_ring)
    kept = np.setdiff1d(ipix, adjacent)
    # Rebuild the interval set from the surviving pixels.
    shift = 2 * (AbstractMOC.HPY_MAX_NORDER - self.max_order)
    kept_itv = np.vstack((kept << shift, (kept + 1) << shift)).T
    self._interval_set = IntervalSet(kept_itv)
    return self
<SYSTEM_TASK:>
Draws the MOC on a matplotlib axis.
<END_TASK>
<USER_TASK:>
Description:
def fill(self, ax, wcs, **kw_mpl_pathpatch):
    """
    Draws the MOC on a matplotlib axis.
    This performs the projection of the cells from the world coordinate system to the pixel image coordinate system.
    You are able to specify various styling kwargs for `matplotlib.patches.PathPatch`
    (see the `list of valid keywords <https://matplotlib.org/api/_as_gen/matplotlib.patches.PathPatch.html#matplotlib.patches.PathPatch>`__).
    Parameters
    ----------
    ax : `matplotlib.axes.Axes`
        Matplotlib axis.
    wcs : `astropy.wcs.WCS`
        WCS defining the World system <-> Image system projection.
    kw_mpl_pathpatch
        Plotting arguments for `matplotlib.patches.PathPatch`.
    Examples
    --------
    >>> from mocpy import MOC, WCS
    >>> from astropy.coordinates import Angle, SkyCoord
    >>> import astropy.units as u
    >>> # Load a MOC, e.g. the MOC of GALEXGR6-AIS-FUV
    >>> filename = './../resources/P-GALEXGR6-AIS-FUV.fits'
    >>> moc = MOC.from_fits(filename)
    >>> # Plot the MOC using matplotlib
    >>> import matplotlib.pyplot as plt
    >>> fig = plt.figure(111, figsize=(15, 15))
    >>> # Define a WCS as a context
    >>> with WCS(fig,
    ...         fov=50 * u.deg,
    ...         center=SkyCoord(0, 20, unit='deg', frame='icrs'),
    ...         coordsys="icrs",
    ...         rotation=Angle(0, u.degree),
    ...         projection="AIT") as wcs:
    ...     ax = fig.add_subplot(1, 1, 1, projection=wcs)
    ...     # Call fill giving the matplotlib axe and the `~astropy.wcs.WCS` object.
    ...     # We will set the matplotlib keyword linewidth to 0 so that it does not plot
    ...     # the border of each HEALPix cell.
    ...     # The color can also be specified along with an alpha value.
    ...     moc.fill(ax=ax, wcs=wcs, linewidth=0, alpha=0.5, fill=True, color="green")
    >>> plt.xlabel('ra')
    >>> plt.ylabel('dec')
    >>> plt.grid(color="black", linestyle="dotted")
    """ |
    # Delegate to the ``fill`` helper module (which shadows this method's
    # name) — presumably it projects the cells through ``wcs`` and adds a
    # PathPatch to ``ax``; verify against the fill module.
    fill.fill(self, ax, wcs, **kw_mpl_pathpatch)
<SYSTEM_TASK:>
Creates a `~mocpy.moc.MOC` from an image stored as a FITS file.
<END_TASK>
<USER_TASK:>
Description:
def from_image(cls, header, max_norder, mask=None):
    """
    Creates a `~mocpy.moc.MOC` from an image stored as a FITS file.
    Parameters
    ----------
    header : `astropy.io.fits.Header`
        FITS header containing all the info of where the image is located (position, size, etc...)
    max_norder : int
        The moc resolution.
    mask : `numpy.ndarray`, optional
        A boolean array of the same size of the image where pixels having the value 1 are part of
        the final MOC and pixels having the value 0 are not.
    Returns
    -------
    moc : `~mocpy.moc.MOC`
        The resulting MOC.
    """ |
    # load the image data
    # Assumes NAXIS1/NAXIS2 describe the full image extent — TODO confirm
    # for compressed/multi-extension images.
    height = header['NAXIS2']
    width = header['NAXIS1']
    # use wcs from astropy to locate the image in the world coordinates
    w = wcs.WCS(header)
    if mask is not None:
        # We have an array of pixels that are part of of survey
        # np.where on the boolean mask yields (row, col) == (y, x).
        y, x = np.where(mask)
        pix_crd = np.dstack((x, y))[0]
    else:
        # If we do not have a mask array we create the moc of all the image
        #
        step_pix = 1
        """
        Coords returned by wcs_pix2world method correspond to pixel centers. We want to retrieve the moc pix
        crossing the borders of the image so we have to add 1/2 to the pixels coords before computing the lonlat.
        The step between two pix_crd is set to `step_pix` but can be diminished to have a better precision at the
        borders so that all the image is covered (a too big step does not retrieve all
        the moc pix crossing the borders of the image).
        """
        x, y = np.mgrid[0.5:(width + 0.5 + step_pix):step_pix, 0.5:(height + 0.5 + step_pix):step_pix]
        pix_crd = np.dstack((x.ravel(), y.ravel()))[0]
    frame = wcs.utils.wcs_to_celestial_frame(w)
    # The ``1`` is the FITS origin convention (1-based pixel coordinates).
    world_pix_crd = SkyCoord(w.wcs_pix2world(pix_crd, 1), unit='deg', frame=frame)
    hp = HEALPix(nside=(1 << max_norder), order='nested', frame=ICRS())
    ipix = hp.skycoord_to_healpix(world_pix_crd)
    # remove doubles
    ipix = np.unique(ipix)
    # Widen each max_norder pixel into a [a, b) interval at the deepest
    # internal HEALPix order.
    shift = 2 * (AbstractMOC.HPY_MAX_NORDER - max_norder)
    intervals_arr = np.vstack((ipix << shift, (ipix + 1) << shift)).T
    # This MOC will be consistent when one will do operations on the moc (union, inter, ...) or
    # simply write it to a fits or json file
    interval_set = IntervalSet(intervals_arr)
    return cls(interval_set=interval_set)
<SYSTEM_TASK:>
Loads a MOC from a set of FITS file images.
<END_TASK>
<USER_TASK:>
Description:
def from_fits_images(cls, path_l, max_norder):
    """
    Load a MOC covering a set of FITS images.
    Parameters
    ----------
    path_l : [str]
        A list of path where the fits image are located.
    max_norder : int
        The MOC resolution.
    Returns
    -------
    moc : `~mocpy.moc.MOC`
        The union of all the MOCs created from the paths found in ``path_l``.
    """
    # Start from an empty MOC and accumulate the union of each image MOC.
    result = MOC()
    for image_path in path_l:
        hdr = fits.getheader(image_path)
        image_moc = MOC.from_image(header=hdr, max_norder=max_norder)
        result = result.union(image_moc)
    return result
<SYSTEM_TASK:>
Creates a `~mocpy.moc.MOC` object from a VizieR table.
<END_TASK>
<USER_TASK:>
Description:
def from_vizier_table(cls, table_id, nside=256):
    """
    Create a `~mocpy.moc.MOC` object from a VizieR table.
    **Info**: This method is already implemented in `astroquery.cds
    <https://astroquery.readthedocs.io/en/latest/cds/cds.html>`__, which
    can return a `mocpy.moc.MOC` for a vizier catalog ID.
    Parameters
    ----------
    table_id : str
        table index
    nside : int, optional
        256 by default
    Returns
    -------
    result : `~mocpy.moc.MOC`
        The resulting MOC.
    """
    valid_nsides = (8, 16, 32, 64, 128, 256, 512)
    if nside not in valid_nsides:
        raise ValueError('Bad value for nside. Must be in {0}'.format(valid_nsides))
    return cls.from_ivorn('ivo://CDS/' + table_id, nside)
<SYSTEM_TASK:>
Creates a `~mocpy.moc.MOC` object from a given ivorn.
<END_TASK>
<USER_TASK:>
Description:
def from_ivorn(cls, ivorn, nside=256):
    """
    Create a `~mocpy.moc.MOC` object from a given ivorn.
    Parameters
    ----------
    ivorn : str
    nside : int, optional
        256 by default
    Returns
    -------
    result : `~mocpy.moc.MOC`
        The resulting MOC.
    """
    # The MOC server expects a depth ("order"), not an nside.
    order = int(np.log2(nside))
    query = urlencode({
        'ivorn': ivorn,
        'get': 'moc',
        'order': order
    })
    return cls.from_url('%s?%s' % (MOC.MOC_SERVER_ROOT_URL, query))
<SYSTEM_TASK:>
Creates a `~mocpy.moc.MOC` object from a given url.
<END_TASK>
<USER_TASK:>
Description:
def from_url(cls, url):
    """
    Create a `~mocpy.moc.MOC` object from a given url.
    Parameters
    ----------
    url : str
        The url of a FITS file storing a MOC.
    Returns
    -------
    result : `~mocpy.moc.MOC`
        The resulting MOC.
    """
    # Download to a local temporary path, then parse it as a FITS MOC.
    local_path = download_file(url, show_progress=False, timeout=60)
    return cls.from_fits(local_path)
<SYSTEM_TASK:>
Creates a MOC from an `astropy.coordinates.SkyCoord`.
<END_TASK>
<USER_TASK:>
Description:
def from_skycoords(cls, skycoords, max_norder):
    """
    Create a MOC covering the given `astropy.coordinates.SkyCoord`
    positions.
    Parameters
    ----------
    skycoords : `astropy.coordinates.SkyCoord`
        The sky coordinates that will belong to the MOC.
    max_norder : int
        The depth of the smallest HEALPix cells contained in the MOC.
    Returns
    -------
    result : `~mocpy.moc.MOC`
        The resulting MOC
    """
    hp = HEALPix(nside=(1 << max_norder), order='nested')
    # Index of the max-depth cell containing each coordinate (ICRS frame).
    ipix = hp.lonlat_to_healpix(skycoords.icrs.ra, skycoords.icrs.dec)
    # One [a, b) interval per pixel at the deepest internal order.
    shift = 2 * (AbstractMOC.HPY_MAX_NORDER - max_norder)
    itv = np.vstack((ipix << shift, (ipix + 1) << shift)).T
    return cls(IntervalSet(itv))
<SYSTEM_TASK:>
Creates a MOC from a polygon.
<END_TASK>
<USER_TASK:>
Description:
def from_polygon_skycoord(cls, skycoord, inside=None, max_depth=10):
    """
    Creates a MOC from a polygon.
    The polygon is given as an `astropy.coordinates.SkyCoord` that contains the
    vertices of the polygon. Concave and convex polygons are accepted but
    self-intersecting ones are currently not properly handled.
    Parameters
    ----------
    skycoord : `astropy.coordinates.SkyCoord`
        The sky coordinates defining the vertices of a polygon. It can describe a convex or
        concave polygon but not a self-intersecting one.
    inside : `astropy.coordinates.SkyCoord`, optional
        A point that will be inside the MOC is needed as it is not possible to determine the inside area of a polygon
        on the unit sphere (there is no infinite area that can be considered as the outside because on the sphere,
        a closed polygon delimits two finite areas).
        Possible improvement: take the inside area as the one covering the smallest region on the sphere.
        If inside=None (default behavior), the mean of all the vertices is taken as lying inside the polygon. That approach may not work for
        concave polygons.
    max_depth : int, optional
        The resolution of the MOC. Set to 10 by default.
    Returns
    -------
    result : `~mocpy.moc.MOC`
        The resulting MOC
    """ |
    # Delegate to ``MOC.from_polygon`` with the vertices converted to
    # ICRS lon/lat; all other semantics (inside point, depth) pass through.
    return MOC.from_polygon(lon=skycoord.icrs.ra, lat=skycoord.icrs.dec,
                            inside=inside, max_depth=max_depth)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.