text_prompt | code_prompt
---|---
<SYSTEM_TASK:>
Creates a MOC from a polygon
<END_TASK>
<USER_TASK:>
Description:
def from_polygon(cls, lon, lat, inside=None, max_depth=10):
"""
Creates a MOC from a polygon
The polygon is given as lon and lat `astropy.units.Quantity` that define the
vertices of the polygon. Concave and convex polygons are accepted but
self-intersecting ones are currently not properly handled.
Parameters
----------
lon : `astropy.units.Quantity`
The longitudes defining the polygon. Can describe convex and
concave polygons but not self-intersecting ones.
lat : `astropy.units.Quantity`
The latitudes defining the polygon. Can describe convex and concave
polygons but not self-intersecting ones.
inside : `astropy.coordinates.SkyCoord`, optional
        A point that will be inside the MOC. It is needed because a closed
        polygon on the unit sphere delimits two finite areas, so there is no
        infinite region that can unambiguously be taken as the outside.
        Possible improvement: take the inside area as the one covering the
        smallest region on the sphere.
        If inside=None (default behavior), the mean of all the vertices is taken
        as lying inside the polygon. That approach may not work for concave
        polygons.
max_depth : int, optional
The resolution of the MOC. Set to 10 by default.
Returns
-------
result : `~mocpy.moc.MOC`
The resulting MOC
""" |
from .polygon import PolygonComputer
polygon_computer = PolygonComputer(lon, lat, inside, max_depth)
# Create the moc from the python dictionary
moc = MOC.from_json(polygon_computer.ipix)
# We degrade it to the user-requested order
if polygon_computer.degrade_to_max_depth:
moc = moc.degrade_to_order(max_depth)
return moc |
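A minimal usage sketch (assuming mocpy is installed and exposes MOC as above; the vertex values are purely illustrative):

import astropy.units as u
from mocpy import MOC

# Four vertices of a small quadrilateral on the sky (illustrative values)
lon = [0, 10, 10, 0] * u.deg
lat = [0, 0, 10, 10] * u.deg
moc = MOC.from_polygon(lon, lat, max_depth=9)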
<SYSTEM_TASK:>
Internal method to query Simbad or a VizieR table
<END_TASK>
<USER_TASK:>
Description:
def _query(self, resource_id, max_rows):
"""
Internal method to query Simbad or a VizieR table
for sources in the coverage of the MOC instance
""" |
from astropy.io.votable import parse_single_table
if max_rows is not None and max_rows >= 0:
max_rows_str = str(max_rows)
else:
max_rows_str = str(9999999999)
tmp_moc = tempfile.NamedTemporaryFile(delete=False)
self.write(tmp_moc.name)
r = requests.post('http://cdsxmatch.u-strasbg.fr/QueryCat/QueryCat',
data={'mode': 'mocfile',
'catName': resource_id,
'format': 'votable',
'limit': max_rows_str},
files={'moc': open(tmp_moc.name, 'rb')},
headers={'User-Agent': 'MOCPy'},
stream=True)
    tmp_vot = BytesIO()
    tmp_vot.write(r.content)
    # rewind the buffer, otherwise the parser starts reading at the end
    tmp_vot.seek(0)
    table = parse_single_table(tmp_vot).to_table()
# finally delete temp files
os.unlink(tmp_moc.name)
return table |
<SYSTEM_TASK:>
The inverse of this transform.
<END_TASK>
<USER_TASK:>
Description:
def inverse(self):
""" The inverse of this transform.
""" |
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse |
<SYSTEM_TASK:>
Tiles tick marks along the axis.
<END_TASK>
<USER_TASK:>
Description:
def _tile_ticks(self, frac, tickvec):
"""Tiles tick marks along the axis.""" |
origins = np.tile(self.axis._vec, (len(frac), 1))
origins = self.axis.pos[0].T + (origins.T*frac).T
endpoints = tickvec + origins
return origins, endpoints |
<SYSTEM_TASK:>
Get the major ticks, minor ticks, and major labels
<END_TASK>
<USER_TASK:>
Description:
def _get_tick_frac_labels(self):
"""Get the major ticks, minor ticks, and major labels""" |
minor_num = 4 # number of minor ticks per major division
if (self.axis.scale_type == 'linear'):
domain = self.axis.domain
if domain[1] < domain[0]:
flip = True
domain = domain[::-1]
else:
flip = False
offset = domain[0]
scale = domain[1] - domain[0]
transforms = self.axis.transforms
length = self.axis.pos[1] - self.axis.pos[0] # in logical coords
n_inches = np.sqrt(np.sum(length ** 2)) / transforms.dpi
# major = np.linspace(domain[0], domain[1], num=11)
# major = MaxNLocator(10).tick_values(*domain)
major = _get_ticks_talbot(domain[0], domain[1], n_inches, 2)
labels = ['%g' % x for x in major]
majstep = major[1] - major[0]
minor = []
minstep = majstep / (minor_num + 1)
minstart = 0 if self.axis._stop_at_major[0] else -1
minstop = -1 if self.axis._stop_at_major[1] else 0
for i in range(minstart, len(major) + minstop):
maj = major[0] + i * majstep
minor.extend(np.linspace(maj + minstep,
maj + majstep - minstep,
minor_num))
major_frac = (major - offset) / scale
minor_frac = (np.array(minor) - offset) / scale
major_frac = major_frac[::-1] if flip else major_frac
use_mask = (major_frac > -0.0001) & (major_frac < 1.0001)
major_frac = major_frac[use_mask]
labels = [l for li, l in enumerate(labels) if use_mask[li]]
minor_frac = minor_frac[(minor_frac > -0.0001) &
(minor_frac < 1.0001)]
    elif self.axis.scale_type == 'logarithmic':
        raise NotImplementedError
    elif self.axis.scale_type == 'power':
        raise NotImplementedError
return major_frac, minor_frac, labels |
<SYSTEM_TASK:>
Write PNG file to `outfile`. The pixel data comes from `rows`
<END_TASK>
<USER_TASK:>
Description:
def write_packed(self, outfile, rows):
"""
Write PNG file to `outfile`. The pixel data comes from `rows`
which should be in boxed row packed format. Each row should be
a sequence of packed bytes.
Technically, this method does work for interlaced images but it
is best avoided. For interlaced images, the rows should be
presented in the order that they appear in the file.
This method should not be used when the source image bit depth
is not one naturally supported by PNG; the bit depth should be
1, 2, 4, 8, or 16.
""" |
if self.rescale:
raise Error("write_packed method not suitable for bit depth %d" %
self.rescale[0])
return self.write_passes(outfile, rows, packed=True) |
<SYSTEM_TASK:>
Convert a PPM and PGM file containing raw pixel data into a
<END_TASK>
<USER_TASK:>
Description:
def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile):
"""
Convert a PPM and PGM file containing raw pixel data into a
PNG outfile with the parameters set in the writer object.
""" |
    pixels = array('B')
    pixels.fromfile(ppmfile,
                    (self.bitdepth//8) * self.color_planes *
                    self.width * self.height)
    apixels = array('B')
    apixels.fromfile(pgmfile,
                     (self.bitdepth//8) *
                     self.width * self.height)
    # use integer division: array.fromfile and interleave_planes expect
    # integer counts (bitdepth is 8 or 16 here, so the division is exact)
    pixels = interleave_planes(pixels, apixels,
                               (self.bitdepth//8) * self.color_planes,
                               (self.bitdepth//8))
if self.interlace:
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.array_scanlines(pixels)) |
<SYSTEM_TASK:>
Generator for interlaced scanlines from an array. `pixels` is
<END_TASK>
<USER_TASK:>
Description:
def array_scanlines_interlace(self, pixels):
"""
Generator for interlaced scanlines from an array. `pixels` is
the full source image in flat row flat pixel format. The
generator yields each scanline of the reduced passes in turn, in
boxed row flat pixel format.
""" |
# http://www.w3.org/TR/PNG/#8InterlaceMethods
# Array type.
fmt = 'BH'[self.bitdepth > 8]
# Value per row
vpr = self.width * self.planes
for xstart, ystart, xstep, ystep in _adam7:
if xstart >= self.width:
continue
# Pixels per row (of reduced image)
ppr = int(math.ceil((self.width-xstart)/float(xstep)))
# number of values in reduced image row.
row_len = ppr*self.planes
for y in range(ystart, self.height, ystep):
if xstep == 1:
offset = y * vpr
yield pixels[offset:offset+vpr]
else:
row = array(fmt)
# There's no easier way to set the length of an array
row.extend(pixels[0:row_len])
offset = y * vpr + xstart * self.planes
end_offset = (y+1) * vpr
skip = self.planes * xstep
for i in range(self.planes):
row[i::self.planes] = \
pixels[offset+i:end_offset:skip]
yield row |
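For reference, the Adam7 pass parameters iterated above are fixed by the PNG specification (see the URL cited in the code); a sketch of the _adam7 constant as (xstart, ystart, xstep, ystep) tuples, assuming this module stores it in that layout:

_adam7 = ((0, 0, 8, 8),
          (4, 0, 8, 8),
          (0, 4, 4, 8),
          (2, 4, 4, 8),
          (0, 2, 2, 4),
          (1, 2, 2, 4),
          (0, 1, 1, 2))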
<SYSTEM_TASK:>
Read raw pixel data, undo filters, deinterlace, and flatten.
<END_TASK>
<USER_TASK:>
Description:
def deinterlace(self, raw):
"""
Read raw pixel data, undo filters, deinterlace, and flatten.
Return in flat row flat pixel format.
""" |
# Values per row (of the target image)
vpr = self.width * self.planes
# Make a result array, and make it big enough. Interleaving
# writes to the output array randomly (well, not quite), so the
# entire output array must be in memory.
fmt = 'BH'[self.bitdepth > 8]
a = array(fmt, [0]*vpr*self.height)
source_offset = 0
for xstart, ystart, xstep, ystep in _adam7:
if xstart >= self.width:
continue
# The previous (reconstructed) scanline. None at the
# beginning of a pass to indicate that there is no previous
# line.
recon = None
# Pixels per row (reduced pass image)
ppr = int(math.ceil((self.width-xstart)/float(xstep)))
# Row size in bytes for this pass.
row_size = int(math.ceil(self.psize * ppr))
for y in range(ystart, self.height, ystep):
filter_type = raw[source_offset]
source_offset += 1
scanline = raw[source_offset:source_offset+row_size]
source_offset += row_size
recon = self.undo_filter(filter_type, scanline, recon)
# Convert so that there is one element per pixel value
flat = self.serialtoflat(recon, ppr)
if xstep == 1:
assert xstart == 0
offset = y * vpr
a[offset:offset+vpr] = flat
else:
offset = y * vpr + xstart * self.planes
end_offset = (y+1) * vpr
skip = self.planes * xstep
for i in range(self.planes):
a[offset+i:end_offset:skip] = \
flat[i::self.planes]
return a |
<SYSTEM_TASK:>
Iterator that yields each scanline in boxed row flat pixel
<END_TASK>
<USER_TASK:>
Description:
def iterboxed(self, rows):
"""Iterator that yields each scanline in boxed row flat pixel
format. `rows` should be an iterator that yields the bytes of
each row in turn.
""" |
def asvalues(raw):
"""Convert a row of raw bytes into a flat row. Result will
be a freshly allocated object, not shared with
argument.
"""
if self.bitdepth == 8:
return array('B', raw)
if self.bitdepth == 16:
raw = tostring(raw)
return array('H', struct.unpack('!%dH' % (len(raw)//2), raw))
assert self.bitdepth < 8
width = self.width
# Samples per byte
spb = 8//self.bitdepth
out = array('B')
mask = 2**self.bitdepth - 1
        # materialize the shifts so they can be reused for every byte
        # (a bare map() would be an exhaustible iterator on Python 3)
        shifts = list(map(self.bitdepth.__mul__, reversed(range(spb))))
        for o in raw:
            out.extend(map(lambda i: mask & (o >> i), shifts))
return out[:width]
return imap(asvalues, rows) |
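To illustrate the sub-byte unpacking in asvalues: for bitdepth == 2 there are four samples per byte, mask is 0b11, and the shifts are 6, 4, 2, 0, so a single byte expands as follows (a standalone sketch of the same logic):

spb, mask, shifts = 4, 0b11, (6, 4, 2, 0)
o = 0b11001001
print([mask & (o >> i) for i in shifts])  # -> [3, 0, 2, 1]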
<SYSTEM_TASK:>
Get a standard vispy demo data file
<END_TASK>
<USER_TASK:>
Description:
def load_data_file(fname, directory=None, force_download=False):
"""Get a standard vispy demo data file
Parameters
----------
fname : str
The filename on the remote ``demo-data`` repository to download,
e.g. ``'molecular_viewer/micelle.npy'``. These correspond to paths
on ``https://github.com/vispy/demo-data/``.
directory : str | None
Directory to use to save the file. By default, the vispy
configuration directory is used.
force_download : bool | str
If True, the file will be downloaded even if a local copy exists
(and this copy will be overwritten). Can also be a YYYY-MM-DD date
to ensure a file is up-to-date (modified date of a file on disk,
if present, is checked).
Returns
-------
fname : str
The path to the file on the local system.
""" |
_url_root = 'http://github.com/vispy/demo-data/raw/master/'
url = _url_root + fname
if directory is None:
directory = config['data_path']
if directory is None:
raise ValueError('config["data_path"] is not defined, '
'so directory must be supplied')
fname = op.join(directory, op.normcase(fname)) # convert to native
if op.isfile(fname):
if not force_download: # we're done
return fname
if isinstance(force_download, string_types):
ntime = time.strptime(force_download, '%Y-%m-%d')
ftime = time.gmtime(op.getctime(fname))
if ftime >= ntime:
return fname
else:
print('File older than %s, updating...' % force_download)
if not op.isdir(op.dirname(fname)):
os.makedirs(op.abspath(op.dirname(fname)))
# let's go get the file
_fetch_file(url, fname)
return fname |
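A usage sketch with the path quoted in the docstring (the first call downloads the file, later calls reuse the cached copy; network access and numpy are assumed):

import numpy as np
fname = load_data_file('molecular_viewer/micelle.npy')
data = np.load(fname)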
<SYSTEM_TASK:>
Write a chunk to file and update the progress bar
<END_TASK>
<USER_TASK:>
Description:
def _chunk_write(chunk, local_file, progress):
"""Write a chunk to file and update the progress bar""" |
local_file.write(chunk)
progress.update_with_increment_value(len(chunk)) |
<SYSTEM_TASK:>
Load requested file, downloading it if needed or requested
<END_TASK>
<USER_TASK:>
Description:
def _fetch_file(url, file_name, print_destination=True):
"""Load requested file, downloading it if needed or requested
Parameters
----------
url: string
The url of file to be downloaded.
file_name: string
Name, along with the path, of where downloaded file will be saved.
print_destination: bool, optional
If true, destination of where file was saved will be printed after
download finishes.
""" |
# Adapted from NISL:
# https://github.com/nisl/tutorial/blob/master/nisl/datasets.py
temp_file_name = file_name + ".part"
local_file = None
initial_size = 0
# Checking file size and displaying it alongside the download url
n_try = 3
for ii in range(n_try):
try:
data = urllib.request.urlopen(url, timeout=15.)
        except Exception as e:
            if ii == n_try - 1:
                raise RuntimeError('Error while fetching file %s.\n'
                                   'Dataset fetching aborted (%s)' % (url, e))
        else:
            break  # success, stop retrying
try:
file_size = int(data.headers['Content-Length'].strip())
print('Downloading data from %s (%s)' % (url, sizeof_fmt(file_size)))
local_file = open(temp_file_name, "wb")
_chunk_read(data, local_file, initial_size=initial_size)
# temp file must be closed prior to the move
if not local_file.closed:
local_file.close()
shutil.move(temp_file_name, file_name)
if print_destination is True:
sys.stdout.write('File saved as %s.\n' % file_name)
except Exception as e:
raise RuntimeError('Error while fetching file %s.\n'
'Dataset fetching aborted (%s)' % (url, e))
finally:
if local_file is not None:
if not local_file.closed:
local_file.close() |
<SYSTEM_TASK:>
Update progressbar with current value of process
<END_TASK>
<USER_TASK:>
Description:
def update(self, cur_value, mesg=None):
"""Update progressbar with current value of process
Parameters
----------
cur_value : number
Current value of process. Should be <= max_value (but this is not
enforced). The percent of the progressbar will be computed as
(cur_value / max_value) * 100
mesg : str
Message to display to the right of the progressbar. If None, the
last message provided will be used. To clear the current message,
pass a null string, ''.
""" |
# Ensure floating-point division so we can get fractions of a percent
# for the progressbar.
self.cur_value = cur_value
progress = float(self.cur_value) / self.max_value
num_chars = int(progress * self.max_chars)
num_left = self.max_chars - num_chars
# Update the message
if mesg is not None:
self.mesg = mesg
# The \r tells the cursor to return to the beginning of the line rather
# than starting a new line. This allows us to have a progressbar-style
# display in the console window.
bar = self.template.format(self.progress_character * num_chars,
' ' * num_left,
progress * 100,
self.spinner_symbols[self.spinner_index],
self.mesg)
sys.stdout.write(bar)
    # Increment the spinner
if self.spinner:
self.spinner_index = (self.spinner_index + 1) % self.n_spinner
# Force a flush because sometimes when using bash scripts and pipes,
# the output is not printed until after the program exits.
sys.stdout.flush() |
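A hypothetical driving loop (the constructor arguments are assumptions; only update() is documented above):

bar = ProgressBar(max_value=100)  # hypothetical constructor signature
for i in range(100):
    bar.update(i + 1, mesg='processing')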
<SYSTEM_TASK:>
Returns the default widget that occupies the entire area of the
<END_TASK>
<USER_TASK:>
Description:
def central_widget(self):
""" Returns the default widget that occupies the entire area of the
canvas.
""" |
if self._central_widget is None:
self._central_widget = Widget(size=self.size, parent=self.scene)
return self._central_widget |
<SYSTEM_TASK:>
Return the visual at a given position
<END_TASK>
<USER_TASK:>
Description:
def visual_at(self, pos):
"""Return the visual at a given position
Parameters
----------
pos : tuple
The position in logical coordinates to query.
Returns
-------
visual : instance of Visual | None
The visual at the position, if it exists.
""" |
tr = self.transforms.get_transform('canvas', 'framebuffer')
fbpos = tr.map(pos)[:2]
try:
id_ = self._render_picking(region=(fbpos[0], fbpos[1],
1, 1))
vis = VisualNode._visual_ids.get(id_[0, 0], None)
except RuntimeError:
# Don't have read_pixels() support for IPython. Fall back to
# bounds checking.
return self._visual_bounds_at(pos)
return vis |
<SYSTEM_TASK:>
Render the scene in picking mode, returning a 2D array of visual
<END_TASK>
<USER_TASK:>
Description:
def _render_picking(self, **kwargs):
"""Render the scene in picking mode, returning a 2D array of visual
IDs.
""" |
try:
self._scene.picking = True
img = self.render(bgcolor=(0, 0, 0, 0), **kwargs)
finally:
self._scene.picking = False
img = img.astype('int32') * [2**0, 2**8, 2**16, 2**24]
id_ = img.sum(axis=2).astype('int32')
return id_ |
<SYSTEM_TASK:>
Close event handler
<END_TASK>
<USER_TASK:>
Description:
def on_close(self, event):
"""Close event handler
Parameters
----------
event : instance of Event
The event.
""" |
self.events.mouse_press.disconnect(self._process_mouse_event)
self.events.mouse_move.disconnect(self._process_mouse_event)
self.events.mouse_release.disconnect(self._process_mouse_event)
self.events.mouse_wheel.disconnect(self._process_mouse_event) |
<SYSTEM_TASK:>
Pop a viewport from the stack.
<END_TASK>
<USER_TASK:>
Description:
def pop_viewport(self):
""" Pop a viewport from the stack.
""" |
vp = self._vp_stack.pop()
# Activate latest
if len(self._vp_stack) > 0:
self.context.set_viewport(*self._vp_stack[-1])
else:
self.context.set_viewport(0, 0, *self.physical_size)
self._update_transforms()
return vp |
<SYSTEM_TASK:>
Push an FBO on the stack.
<END_TASK>
<USER_TASK:>
Description:
def push_fbo(self, fbo, offset, csize):
""" Push an FBO on the stack.
This activates the framebuffer and causes subsequent rendering to be
written to the framebuffer rather than the canvas's back buffer. This
will also set the canvas viewport to cover the boundaries of the
framebuffer.
Parameters
----------
fbo : instance of FrameBuffer
The framebuffer object .
offset : tuple
The location of the fbo origin relative to the canvas's framebuffer
origin.
csize : tuple
The size of the region in the canvas's framebuffer that should be
covered by this framebuffer object.
""" |
self._fb_stack.append((fbo, offset, csize))
try:
fbo.activate()
h, w = fbo.color_buffer.shape[:2]
self.push_viewport((0, 0, w, h))
except Exception:
self._fb_stack.pop()
raise
self._update_transforms() |
<SYSTEM_TASK:>
Pop an FBO from the stack.
<END_TASK>
<USER_TASK:>
Description:
def pop_fbo(self):
""" Pop an FBO from the stack.
""" |
fbo = self._fb_stack.pop()
fbo[0].deactivate()
self.pop_viewport()
if len(self._fb_stack) > 0:
old_fbo = self._fb_stack[-1]
old_fbo[0].activate()
self._update_transforms()
return fbo |
<SYSTEM_TASK:>
Update the canvas's TransformSystem to correct for the current
<END_TASK>
<USER_TASK:>
Description:
def _update_transforms(self):
"""Update the canvas's TransformSystem to correct for the current
canvas size, framebuffer, and viewport.
""" |
if len(self._fb_stack) == 0:
fb_size = fb_rect = None
else:
fb, origin, fb_size = self._fb_stack[-1]
fb_rect = origin + fb_size
if len(self._vp_stack) == 0:
viewport = None
else:
viewport = self._vp_stack[-1]
self.transforms.configure(viewport=viewport, fbo_size=fb_size,
fbo_rect=fb_rect) |
<SYSTEM_TASK:>
Texture wrapping mode
<END_TASK>
<USER_TASK:>
Description:
def wrapping(self):
""" Texture wrapping mode """ |
value = self._wrapping
return value[0] if all([v == value[0] for v in value]) else value |
<SYSTEM_TASK:>
Internal method for resize.
<END_TASK>
<USER_TASK:>
Description:
def _resize(self, shape, format=None, internalformat=None):
"""Internal method for resize.
""" |
shape = self._normalize_shape(shape)
# Check
if not self._resizable:
raise RuntimeError("Texture is not resizable")
# Determine format
if format is None:
format = self._formats[shape[-1]]
# Keep current format if channels match
if self._format and \
self._inv_formats[self._format] == self._inv_formats[format]:
format = self._format
else:
format = check_enum(format)
if internalformat is None:
# Keep current internalformat if channels match
if self._internalformat and \
self._inv_internalformats[self._internalformat] == shape[-1]:
internalformat = self._internalformat
else:
internalformat = check_enum(internalformat)
# Check
if format not in self._inv_formats:
raise ValueError('Invalid texture format: %r.' % format)
elif shape[-1] != self._inv_formats[format]:
raise ValueError('Format does not match with given shape. '
'(format expects %d elements, data has %d)' %
(self._inv_formats[format], shape[-1]))
if internalformat is None:
pass
elif internalformat not in self._inv_internalformats:
raise ValueError(
'Invalid texture internalformat: %r. Allowed formats: %r'
% (internalformat, self._inv_internalformats)
)
elif shape[-1] != self._inv_internalformats[internalformat]:
raise ValueError('Internalformat does not match with given shape.')
# Store and send GLIR command
self._shape = shape
self._format = format
self._internalformat = internalformat
self._glir.command('SIZE', self._id, self._shape, self._format,
self._internalformat) |
<SYSTEM_TASK:>
Get a free region of given size and allocate it
<END_TASK>
<USER_TASK:>
Description:
def get_free_region(self, width, height):
"""Get a free region of given size and allocate it
Parameters
----------
width : int
Width of region to allocate
height : int
Height of region to allocate
Returns
-------
bounds : tuple | None
A newly allocated region as (x, y, w, h) or None
(if failed).
""" |
best_height = best_width = np.inf
best_index = -1
for i in range(len(self._atlas_nodes)):
y = self._fit(i, width, height)
if y >= 0:
node = self._atlas_nodes[i]
if (y+height < best_height or
(y+height == best_height and node[2] < best_width)):
best_height = y+height
best_index = i
best_width = node[2]
region = node[0], y, width, height
if best_index == -1:
return None
node = region[0], region[1] + height, width
self._atlas_nodes.insert(best_index, node)
i = best_index+1
while i < len(self._atlas_nodes):
node = self._atlas_nodes[i]
prev_node = self._atlas_nodes[i-1]
if node[0] < prev_node[0]+prev_node[2]:
shrink = prev_node[0]+prev_node[2] - node[0]
x, y, w = self._atlas_nodes[i]
self._atlas_nodes[i] = x+shrink, y, w-shrink
if self._atlas_nodes[i][2] <= 0:
del self._atlas_nodes[i]
i -= 1
else:
break
else:
break
i += 1
# Merge nodes
i = 0
while i < len(self._atlas_nodes)-1:
node = self._atlas_nodes[i]
next_node = self._atlas_nodes[i+1]
if node[1] == next_node[1]:
self._atlas_nodes[i] = node[0], node[1], node[2]+next_node[2]
del self._atlas_nodes[i+1]
else:
i += 1
return region |
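This is a skyline bottom-left packing strategy: _fit presumably returns the lowest y at which a (width, height) region fits at node i, and the best candidate minimizes the resulting height, with ties broken by the narrower node. A hypothetical call sketch:

region = atlas.get_free_region(32, 32)  # atlas instance assumed
if region is not None:
    x, y, w, h = region  # paste a 32x32 glyph at (x, y)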
<SYSTEM_TASK:>
Convert an object to either a scalar or a row or column vector.
<END_TASK>
<USER_TASK:>
Description:
def _vector_or_scalar(x, type='row'):
"""Convert an object to either a scalar or a row or column vector.""" |
if isinstance(x, (list, tuple)):
x = np.array(x)
if isinstance(x, np.ndarray):
assert x.ndim == 1
if type == 'column':
x = x[:, None]
return x |
<SYSTEM_TASK:>
Convert an object to a row or column vector.
<END_TASK>
<USER_TASK:>
Description:
def _vector(x, type='row'):
"""Convert an object to a row or column vector.""" |
if isinstance(x, (list, tuple)):
x = np.array(x, dtype=np.float32)
elif not isinstance(x, np.ndarray):
x = np.array([x], dtype=np.float32)
assert x.ndim == 1
if type == 'column':
x = x[:, None]
return x |
<SYSTEM_TASK:>
performs smooth Hermite interpolation
<END_TASK>
<USER_TASK:>
Description:
def smoothstep(edge0, edge1, x):
""" performs smooth Hermite interpolation
between 0 and 1 when edge0 < x < edge1. """ |
# Scale, bias and saturate x to 0..1 range
x = np.clip((x - edge0)/(edge1 - edge0), 0.0, 1.0)
# Evaluate polynomial
return x*x*(3 - 2*x) |
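A quick numeric check of the clamp-then-cubic behaviour:

import numpy as np
print(smoothstep(0.0, 1.0, np.array([-1.0, 0.25, 0.5, 1.5])))
# -> [0.      0.15625 0.5     1.     ]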
<SYSTEM_TASK:>
Generate a GLSL template function from a given interpolation patterns
<END_TASK>
<USER_TASK:>
Description:
def _glsl_mix(controls=None):
"""Generate a GLSL template function from a given interpolation patterns
and control points.""" |
assert (controls[0], controls[-1]) == (0., 1.)
ncolors = len(controls)
assert ncolors >= 2
if ncolors == 2:
s = " return mix($color_0, $color_1, t);\n"
else:
s = ""
for i in range(ncolors-1):
if i == 0:
ifs = 'if (t < %.6f)' % (controls[i+1])
elif i == (ncolors-2):
ifs = 'else'
else:
ifs = 'else if (t < %.6f)' % (controls[i+1])
adj_t = '(t - %s) / %s' % (controls[i],
controls[i+1] - controls[i])
s += ("%s {\n return mix($color_%d, $color_%d, %s);\n} " %
(ifs, i, i+1, adj_t))
return "vec4 colormap(float t) {\n%s\n}" % s |
<SYSTEM_TASK:>
Obtain a colormap
<END_TASK>
<USER_TASK:>
Description:
def get_colormap(name, *args, **kwargs):
"""Obtain a colormap
Some colormaps can have additional configuration parameters. Refer to
their corresponding documentation for more information.
Parameters
----------
name : str | Colormap
Colormap name. Can also be a Colormap for pass-through.
Examples
--------
>>> get_colormap('autumn')
>>> get_colormap('single_hue', hue=10)
""" |
if isinstance(name, BaseColormap):
cmap = name
else:
if not isinstance(name, string_types):
raise TypeError('colormap must be a Colormap or string name')
if name not in _colormaps:
raise KeyError('colormap name %s not found' % name)
cmap = _colormaps[name]
if inspect.isclass(cmap):
cmap = cmap(*args, **kwargs)
return cmap |
<SYSTEM_TASK:>
The border width in visual coordinates
<END_TASK>
<USER_TASK:>
Description:
def visual_border_width(self):
""" The border width in visual coordinates
""" |
render_to_doc = \
self.transforms.get_transform('document', 'visual')
vec = render_to_doc.map([self.border_width, self.border_width, 0])
origin = render_to_doc.map([0, 0, 0])
visual_border_width = [vec[0] - origin[0], vec[1] - origin[1]]
# we need to flip the y axis because coordinate systems are inverted
visual_border_width[1] *= -1
return visual_border_width |
<SYSTEM_TASK:>
Run the exporter on the given figure
<END_TASK>
<USER_TASK:>
Description:
def run(self, fig):
"""
Run the exporter on the given figure
    Parameters
    ----------
fig : matplotlib.Figure instance
The figure to export
""" |
# Calling savefig executes the draw() command, putting elements
# in the correct place.
fig.savefig(io.BytesIO(), format='png', dpi=fig.dpi)
if self.close_mpl:
import matplotlib.pyplot as plt
plt.close(fig)
self.crawl_fig(fig) |
<SYSTEM_TASK:>
Process the transform and convert data to figure or data coordinates
<END_TASK>
<USER_TASK:>
Description:
def process_transform(transform, ax=None, data=None, return_trans=False,
force_trans=None):
"""Process the transform and convert data to figure or data coordinates
Parameters
----------
transform : matplotlib Transform object
The transform applied to the data
ax : matplotlib Axes object (optional)
The axes the data is associated with
data : ndarray (optional)
The array of data to be transformed.
return_trans : bool (optional)
If true, return the final transform of the data
force_trans : matplotlib.transform instance (optional)
If supplied, first force the data to this transform
Returns
-------
code : string
Code is either "data", "axes", "figure", or "display", indicating
the type of coordinates output.
transform : matplotlib transform
the transform used to map input data to output data.
Returned only if return_trans is True
new_data : ndarray
Data transformed to match the given coordinate code.
Returned only if data is specified
""" |
if isinstance(transform, transforms.BlendedGenericTransform):
warnings.warn("Blended transforms not yet supported. "
"Zoom behavior may not work as expected.")
if force_trans is not None:
if data is not None:
data = (transform - force_trans).transform(data)
transform = force_trans
code = "display"
if ax is not None:
for (c, trans) in [("data", ax.transData),
("axes", ax.transAxes),
("figure", ax.figure.transFigure),
("display", transforms.IdentityTransform())]:
if transform.contains_branch(trans):
code, transform = (c, transform - trans)
break
if data is not None:
if return_trans:
return code, transform.transform(data), transform
else:
return code, transform.transform(data)
else:
if return_trans:
return code, transform
else:
return code |
<SYSTEM_TASK:>
Crawl the figure and process all axes
<END_TASK>
<USER_TASK:>
Description:
def crawl_fig(self, fig):
"""Crawl the figure and process all axes""" |
with self.renderer.draw_figure(fig=fig,
props=utils.get_figure_properties(fig)):
for ax in fig.axes:
self.crawl_ax(ax) |
<SYSTEM_TASK:>
Crawl the axes and process all elements within
<END_TASK>
<USER_TASK:>
Description:
def crawl_ax(self, ax):
"""Crawl the axes and process all elements within""" |
with self.renderer.draw_axes(ax=ax,
props=utils.get_axes_properties(ax)):
for line in ax.lines:
self.draw_line(ax, line)
for text in ax.texts:
self.draw_text(ax, text)
for (text, ttp) in zip([ax.xaxis.label, ax.yaxis.label, ax.title],
["xlabel", "ylabel", "title"]):
if(hasattr(text, 'get_text') and text.get_text()):
self.draw_text(ax, text, force_trans=ax.transAxes,
text_type=ttp)
for artist in ax.artists:
# TODO: process other artists
if isinstance(artist, matplotlib.text.Text):
self.draw_text(ax, artist)
for patch in ax.patches:
self.draw_patch(ax, patch)
for collection in ax.collections:
self.draw_collection(ax, collection)
for image in ax.images:
self.draw_image(ax, image)
legend = ax.get_legend()
if legend is not None:
props = utils.get_legend_properties(ax, legend)
with self.renderer.draw_legend(legend=legend, props=props):
if props['visible']:
self.crawl_legend(ax, legend) |
<SYSTEM_TASK:>
Recursively look through objects in legend children
<END_TASK>
<USER_TASK:>
Description:
def crawl_legend(self, ax, legend):
"""
Recursively look through objects in legend children
""" |
legendElements = list(utils.iter_all_children(legend._legend_box,
skipContainers=True))
legendElements.append(legend.legendPatch)
for child in legendElements:
# force a large zorder so it appears on top
child.set_zorder(1E6 + child.get_zorder())
try:
# What kind of object...
if isinstance(child, matplotlib.patches.Patch):
self.draw_patch(ax, child, force_trans=ax.transAxes)
elif isinstance(child, matplotlib.text.Text):
if not (child is legend.get_children()[-1]
and child.get_text() == 'None'):
self.draw_text(ax, child, force_trans=ax.transAxes)
elif isinstance(child, matplotlib.lines.Line2D):
self.draw_line(ax, child, force_trans=ax.transAxes)
elif isinstance(child, matplotlib.collections.Collection):
self.draw_collection(ax, child,
force_pathtrans=ax.transAxes)
else:
warnings.warn("Legend element %s not impemented" % child)
except NotImplementedError:
warnings.warn("Legend element %s not impemented" % child) |
<SYSTEM_TASK:>
Process a matplotlib line and call renderer.draw_line
<END_TASK>
<USER_TASK:>
Description:
def draw_line(self, ax, line, force_trans=None):
"""Process a matplotlib line and call renderer.draw_line""" |
coordinates, data = self.process_transform(line.get_transform(),
ax, line.get_xydata(),
force_trans=force_trans)
linestyle = utils.get_line_style(line)
if linestyle['dasharray'] is None:
linestyle = None
markerstyle = utils.get_marker_style(line)
if (markerstyle['marker'] in ['None', 'none', None]
or markerstyle['markerpath'][0].size == 0):
markerstyle = None
label = line.get_label()
if markerstyle or linestyle:
self.renderer.draw_marked_line(data=data, coordinates=coordinates,
linestyle=linestyle,
markerstyle=markerstyle,
label=label,
mplobj=line) |
<SYSTEM_TASK:>
Process a matplotlib patch object and call renderer.draw_path
<END_TASK>
<USER_TASK:>
Description:
def draw_patch(self, ax, patch, force_trans=None):
"""Process a matplotlib patch object and call renderer.draw_path""" |
vertices, pathcodes = utils.SVG_path(patch.get_path())
transform = patch.get_transform()
coordinates, vertices = self.process_transform(transform,
ax, vertices,
force_trans=force_trans)
linestyle = utils.get_path_style(patch, fill=patch.get_fill())
self.renderer.draw_path(data=vertices,
coordinates=coordinates,
pathcodes=pathcodes,
style=linestyle,
mplobj=patch) |
<SYSTEM_TASK:>
Process a matplotlib image object and call renderer.draw_image
<END_TASK>
<USER_TASK:>
Description:
def draw_image(self, ax, image):
"""Process a matplotlib image object and call renderer.draw_image""" |
self.renderer.draw_image(imdata=utils.image_to_base64(image),
extent=image.get_extent(),
coordinates="data",
style={"alpha": image.get_alpha(),
"zorder": image.get_zorder()},
mplobj=image) |
<SYSTEM_TASK:>
Draw a line that also has markers.
<END_TASK>
<USER_TASK:>
Description:
def draw_marked_line(self, data, coordinates, linestyle, markerstyle,
label, mplobj=None):
"""Draw a line that also has markers.
If this isn't reimplemented by a renderer object, by default, it will
make a call to BOTH draw_line and draw_markers when both markerstyle
and linestyle are not None in the same Line2D object.
""" |
if linestyle is not None:
self.draw_line(data, coordinates, linestyle, label, mplobj)
if markerstyle is not None:
self.draw_markers(data, coordinates, markerstyle, label, mplobj) |
<SYSTEM_TASK:>
Build an iterator over the elements of the path collection
<END_TASK>
<USER_TASK:>
Description:
def _iter_path_collection(paths, path_transforms, offsets, styles):
"""Build an iterator over the elements of the path collection""" |
N = max(len(paths), len(offsets))
if not path_transforms:
path_transforms = [np.eye(3)]
edgecolor = styles['edgecolor']
if np.size(edgecolor) == 0:
edgecolor = ['none']
facecolor = styles['facecolor']
if np.size(facecolor) == 0:
facecolor = ['none']
elements = [paths, path_transforms, offsets,
edgecolor, styles['linewidth'], facecolor]
it = itertools
return it.islice(py3k.zip(*py3k.map(it.cycle, elements)), N) |
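The cycle/islice combination broadcasts style sequences of unequal length over the N collection elements; a standalone illustration (plain Python 3, without the py3k compatibility shims):

import itertools as it
styles = [['red', 'blue'], [1.0]]
print(list(it.islice(zip(*map(it.cycle, styles)), 3)))
# -> [('red', 1.0), ('blue', 1.0), ('red', 1.0)]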
<SYSTEM_TASK:>
Draw a path.
<END_TASK>
<USER_TASK:>
Description:
def draw_path(self, data, coordinates, pathcodes, style,
offset=None, offset_coordinates="data", mplobj=None):
"""
Draw a path.
In matplotlib, paths are created by filled regions, histograms,
contour plots, patches, etc.
Parameters
----------
data : array_like
A shape (N, 2) array of datapoints.
coordinates : string
A string code, which should be either 'data' for data coordinates,
'figure' for figure (pixel) coordinates, or "points" for raw
point coordinates (useful in conjunction with offsets, below).
pathcodes : list
A list of single-character SVG pathcodes associated with the data.
Path codes are one of ['M', 'm', 'L', 'l', 'Q', 'q', 'T', 't',
'S', 's', 'C', 'c', 'Z', 'z']
See the SVG specification for details. Note that some path codes
consume more than one datapoint (while 'Z' consumes none), so
in general, the length of the pathcodes list will not be the same
as that of the data array.
style : dictionary
a dictionary specifying the appearance of the line.
offset : list (optional)
the (x, y) offset of the path. If not given, no offset will
be used.
offset_coordinates : string (optional)
A string code, which should be either 'data' for data coordinates,
or 'figure' for figure (pixel) coordinates.
mplobj : matplotlib object
the matplotlib plot element which generated this path
""" |
raise NotImplementedError() |
<SYSTEM_TASK:>
Create a TimeMOC from a `astropy.time.Time`
<END_TASK>
<USER_TASK:>
Description:
def from_times(cls, times, delta_t=DEFAULT_OBSERVATION_TIME):
"""
Create a TimeMOC from a `astropy.time.Time`
Parameters
----------
times : `astropy.time.Time`
astropy observation times
delta_t : `astropy.time.TimeDelta`, optional
        the duration of one observation. It is set to 30 min by default. This value is used to compute
        the most efficient TimeMOC order to represent the observations (best order = the least precise
        order which is able to discriminate two observations separated by ``delta_t``).
Returns
-------
time_moc : `~mocpy.tmoc.TimeMOC`
""" |
times_arr = np.asarray(times.jd * TimeMOC.DAY_MICRO_SEC, dtype=int)
intervals_arr = np.vstack((times_arr, times_arr + 1)).T
    # degrade the TimeMOC to the order computed from ``delta_t``
order = TimeMOC.time_resolution_to_order(delta_t)
return TimeMOC(IntervalSet(intervals_arr)).degrade_to_order(order) |
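A usage sketch (assuming TimeMOC is importable from mocpy; the observation times are illustrative):

from astropy.time import Time, TimeDelta
from mocpy import TimeMOC

times = Time(['2019-02-02T13:10:00', '2019-02-02T15:40:00'], format='isot', scale='tdb')
tmoc = TimeMOC.from_times(times, delta_t=TimeDelta(1800, format='sec'))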
<SYSTEM_TASK:>
Create a TimeMOC from a range defined by two `astropy.time.Time`
<END_TASK>
<USER_TASK:>
Description:
def from_time_ranges(cls, min_times, max_times, delta_t=DEFAULT_OBSERVATION_TIME):
"""
Create a TimeMOC from a range defined by two `astropy.time.Time`
Parameters
----------
min_times : `astropy.time.Time`
astropy times defining the left part of the intervals
max_times : `astropy.time.Time`
astropy times defining the right part of the intervals
delta_t : `astropy.time.TimeDelta`, optional
        the duration of one observation. It is set to 30 min by default. This value is used to compute
        the most efficient TimeMOC order to represent the observations (best order = the least precise
        order which is able to discriminate two observations separated by ``delta_t``).
Returns
-------
time_moc : `~mocpy.tmoc.TimeMOC`
""" |
min_times_arr = np.asarray(min_times.jd * TimeMOC.DAY_MICRO_SEC, dtype=int)
max_times_arr = np.asarray(max_times.jd * TimeMOC.DAY_MICRO_SEC, dtype=int)
intervals_arr = np.vstack((min_times_arr, max_times_arr + 1)).T
    # degrade the TimeMOC to the order computed from ``delta_t``
order = TimeMOC.time_resolution_to_order(delta_t)
return TimeMOC(IntervalSet(intervals_arr)).degrade_to_order(order) |
<SYSTEM_TASK:>
Add all the pixels at max order in the neighbourhood of the moc
<END_TASK>
<USER_TASK:>
Description:
def add_neighbours(self):
"""
Add all the pixels at max order in the neighbourhood of the moc
""" |
time_delta = 1 << (2*(IntervalSet.HPY_MAX_ORDER - self.max_order))
intervals_arr = self._interval_set._intervals
intervals_arr[:, 0] = np.maximum(intervals_arr[:, 0] - time_delta, 0)
intervals_arr[:, 1] = np.minimum(intervals_arr[:, 1] + time_delta, (1 << 58) - 1)
self._interval_set = IntervalSet(intervals_arr) |
<SYSTEM_TASK:>
Remove all the pixels at max order located at the bound of the moc
<END_TASK>
<USER_TASK:>
Description:
def remove_neighbours(self):
"""
Remove all the pixels at max order located at the bound of the moc
""" |
time_delta = 1 << (2*(IntervalSet.HPY_MAX_ORDER - self.max_order))
intervals_arr = self._interval_set._intervals
intervals_arr[:, 0] = np.minimum(intervals_arr[:, 0] + time_delta, (1 << 58) - 1)
intervals_arr[:, 1] = np.maximum(intervals_arr[:, 1] - time_delta, 0)
good_intervals = intervals_arr[:, 1] > intervals_arr[:, 0]
self._interval_set = IntervalSet(intervals_arr[good_intervals]) |
<SYSTEM_TASK:>
Intersection between self and moc. ``delta_t`` gives the possibility to the user
<END_TASK>
<USER_TASK:>
Description:
def intersection(self, another_moc, delta_t=DEFAULT_OBSERVATION_TIME):
"""
    Intersection between self and another MOC. ``delta_t`` lets the user
    set a time resolution for performing the tmoc intersection
Parameters
----------
another_moc : `~mocpy.abstract_moc.AbstractMOC`
the MOC/TimeMOC used for performing the intersection with self
delta_t : `~astropy.time.TimeDelta`, optional
        the duration of one observation. It is set to 30 min by default. This value is used to compute
        the most efficient TimeMOC order to represent the observations (best order = the least precise
        order which is able to discriminate two observations separated by ``delta_t``).
Returns
-------
result : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
MOC object whose interval set corresponds to : self & ``moc``
""" |
order_op = TimeMOC.time_resolution_to_order(delta_t)
self_degraded, moc_degraded = self._process_degradation(another_moc, order_op)
return super(TimeMOC, self_degraded).intersection(moc_degraded) |
<SYSTEM_TASK:>
Get the total duration covered by the temporal moc
<END_TASK>
<USER_TASK:>
Description:
def total_duration(self):
"""
Get the total duration covered by the temporal moc
Returns
-------
duration : `~astropy.time.TimeDelta`
        total duration of all the observation times of the tmoc
""" |
if self._interval_set.empty():
return 0
total_time_us = 0
    # the intervals are assumed consolidated; sum their durations (in microseconds)
for (start_time, stop_time) in self._interval_set._intervals:
total_time_us = total_time_us + (stop_time - start_time)
duration = TimeDelta(total_time_us / 1e6, format='sec', scale='tdb')
return duration |
<SYSTEM_TASK:>
Get a percentage of fill between the min and max time the moc is defined.
<END_TASK>
<USER_TASK:>
Description:
def consistency(self):
"""
Get a percentage of fill between the min and max time the moc is defined.
    A value near 0 shows a sparse temporal moc (i.e. the moc does not cover a lot
    of time and covers very distant times). A value near 1 means that the moc covers
    a lot of time without big pauses.
Returns
-------
result : float
fill percentage (between 0 and 1.)
""" |
result = self.total_duration.jd / (self.max_time - self.min_time).jd
return result |
<SYSTEM_TASK:>
Plot the TimeMoc in a time window.
<END_TASK>
<USER_TASK:>
Description:
def plot(self, title='TimeMoc', view=(None, None)):
"""
Plot the TimeMoc in a time window.
This method uses interactive matplotlib. The user can move its mouse through the plot to see the
time (at the mouse position).
Parameters
----------
title : str, optional
The title of the plot. Set to 'TimeMoc' by default.
view : (`~astropy.time.Time`, `~astropy.time.Time`), optional
Define the view window in which the observations are plotted. Set to (None, None) by default (i.e.
all the observation time window is rendered).
""" |
from matplotlib.colors import LinearSegmentedColormap
import matplotlib.pyplot as plt
if self._interval_set.empty():
print('Nothing to print. This TimeMoc object is empty.')
return
plot_order = 15
if self.max_order > plot_order:
plotted_moc = self.degrade_to_order(plot_order)
else:
plotted_moc = self
min_jd = plotted_moc.min_time.jd if not view[0] else view[0].jd
max_jd = plotted_moc.max_time.jd if not view[1] else view[1].jd
if max_jd < min_jd:
raise ValueError("Invalid selection: max_jd = {0} must be > to min_jd = {1}".format(max_jd, min_jd))
fig1 = plt.figure(figsize=(9.5, 5))
ax = fig1.add_subplot(111)
ax.set_xlabel('iso')
ax.get_yaxis().set_visible(False)
size = 2000
delta = (max_jd - min_jd) / size
min_jd_time = min_jd
ax.set_xticks([0, size])
ax.set_xticklabels(Time([min_jd_time, max_jd], format='jd', scale='tdb').iso, rotation=70)
y = np.zeros(size)
for (s_time_us, e_time_us) in plotted_moc._interval_set._intervals:
s_index = int((s_time_us / TimeMOC.DAY_MICRO_SEC - min_jd_time) / delta)
e_index = int((e_time_us / TimeMOC.DAY_MICRO_SEC - min_jd_time) / delta)
y[s_index:(e_index+1)] = 1.0
# hack in case of full time mocs.
if np.all(y):
y[0] = 0
z = np.tile(y, (int(size//10), 1))
plt.title(title)
color_map = LinearSegmentedColormap.from_list('w2r', ['#fffff0', '#aa0000'])
color_map.set_under('w')
color_map.set_bad('gray')
plt.imshow(z, interpolation='bilinear', cmap=color_map)
def on_mouse_motion(event):
for txt in ax.texts:
txt.set_visible(False)
text = ax.text(0, 0, "", va="bottom", ha="left")
time = Time(event.xdata * delta + min_jd_time, format='jd', scale='tdb')
tx = '{0}'.format(time.iso)
text.set_position((event.xdata - 50, 700))
text.set_rotation(70)
text.set_text(tx)
cid = fig1.canvas.mpl_connect('motion_notify_event', on_mouse_motion)
plt.show() |
<SYSTEM_TASK:>
Handle a new update.
<END_TASK>
<USER_TASK:>
Description:
def handle(self, client, subhooks=()):
"""Handle a new update.
Fetches new data from the client, then compares it to the previous
lookup.
Returns:
(bool, new_data): whether changes occurred, and the new value.
""" |
new_data = self.fetch(client)
# Holds the list of updated fields.
updated = {}
if not subhooks:
# We always want to compare to previous values.
subhooks = [self.name]
for subhook in subhooks:
new_key = self.extract_key(new_data, subhook)
if new_key != self.previous_keys.get(subhook):
updated[subhook] = new_key
if updated:
logger.debug("Hook %s: data changed from %r to %r", self.name, self.previous_keys, updated)
self.previous_keys.update(updated)
return (True, new_data)
return (False, None) |
<SYSTEM_TASK:>
Build and store a glyph corresponding to an individual character
<END_TASK>
<USER_TASK:>
Description:
def _load_char(self, char):
"""Build and store a glyph corresponding to an individual character
Parameters
----------
char : str
A single character to be represented.
""" |
assert isinstance(char, string_types) and len(char) == 1
assert char not in self._glyphs
# load new glyph data from font
_load_glyph(self._font, char, self._glyphs)
# put new glyph into the texture
glyph = self._glyphs[char]
bitmap = glyph['bitmap']
# convert to padded array
data = np.zeros((bitmap.shape[0] + 2*self._spread,
bitmap.shape[1] + 2*self._spread), np.uint8)
data[self._spread:-self._spread, self._spread:-self._spread] = bitmap
# Store, while scaling down to proper size
height = data.shape[0] // self.ratio
width = data.shape[1] // self.ratio
region = self._atlas.get_free_region(width + 2, height + 2)
if region is None:
raise RuntimeError('Cannot store glyph')
x, y, w, h = region
x, y, w, h = x + 1, y + 1, w - 2, h - 2
self._renderer.render_to_texture(data, self._atlas, (x, y), (w, h))
u0 = x / float(self._atlas.shape[1])
v0 = y / float(self._atlas.shape[0])
u1 = (x+w) / float(self._atlas.shape[1])
v1 = (y+h) / float(self._atlas.shape[0])
texcoords = (u0, v0, u1, v1)
glyph.update(dict(size=(w, h), texcoords=texcoords)) |
<SYSTEM_TASK:>
Get a font described by face and size
<END_TASK>
<USER_TASK:>
Description:
def get_font(self, face, bold=False, italic=False):
"""Get a font described by face and size""" |
key = '%s-%s-%s' % (face, bold, italic)
if key not in self._fonts:
font = dict(face=face, bold=bold, italic=italic)
self._fonts[key] = TextureFont(font, self._renderer)
return self._fonts[key] |
<SYSTEM_TASK:>
Return frequencies for DFT
<END_TASK>
<USER_TASK:>
Description:
def fft_freqs(n_fft, fs):
"""Return frequencies for DFT
Parameters
----------
n_fft : int
Number of points in the FFT.
fs : float
The sampling rate.
""" |
return np.arange(0, (n_fft // 2 + 1)) / float(n_fft) * float(fs) |
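For example, an 8-point DFT at a 1 kHz sampling rate yields the five non-negative bin frequencies:

print(fft_freqs(8, 1000.))  # -> [  0. 125. 250. 375. 500.]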
<SYSTEM_TASK:>
Set the data used for this visual
<END_TASK>
<USER_TASK:>
Description:
def set_data(self, pos=None, color=None, width=None, connect=None,
arrows=None):
"""Set the data used for this visual
Parameters
----------
pos : array
Array of shape (..., 2) or (..., 3) specifying vertex coordinates.
color : Color, tuple, or array
The color to use when drawing the line. If an array is given, it
must be of shape (..., 4) and provide one rgba color per vertex.
Can also be a colormap name, or appropriate `Function`.
    width : float
        The width of the line in px. Line widths > 1px are only
        guaranteed to work when using the 'agg' method.
connect : str or array
Determines which vertices are connected by lines.
* "strip" causes the line to be drawn with each vertex
connected to the next.
* "segments" causes each pair of vertices to draw an
independent line segment
* numpy arrays specify the exact set of segment pairs to
connect.
arrows : array
A Nx4 matrix where each row contains the x and y coordinate of the
first and second vertex of the arrow body. Remember that the second
vertex is used as center point for the arrow head, and the first
vertex is only used for determining the arrow head orientation.
""" |
if arrows is not None:
self._arrows = arrows
self._arrows_changed = True
LineVisual.set_data(self, pos, color, width, connect) |
<SYSTEM_TASK:>
Helper function to post a tweet
<END_TASK>
<USER_TASK:>
Description:
def post_tweet(user_id, message, additional_params={}):
"""
Helper function to post a tweet
""" |
url = "https://api.twitter.com/1.1/statuses/update.json"
params = { "status" : message }
params.update(additional_params)
r = make_twitter_request(url, user_id, params, request_type='POST')
    print(r.text)
return "Successfully posted a tweet {}".format(message) |
<SYSTEM_TASK:>
Generically make a request to twitter API using a particular user's authorization
<END_TASK>
<USER_TASK:>
Description:
def make_twitter_request(url, user_id, params={}, request_type='GET'):
""" Generically make a request to twitter API using a particular user's authorization """ |
if request_type == "GET":
return requests.get(url, auth=get_twitter_auth(user_id), params=params)
elif request_type == "POST":
return requests.post(url, auth=get_twitter_auth(user_id), params=params) |
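A hypothetical call against a standard v1.1 endpoint (the endpoint and parameters are illustrative, not taken from the code above):

url = "https://api.twitter.com/1.1/statuses/user_timeline.json"
r = make_twitter_request(url, user_id, params={"count": 5})
tweets = r.json()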
<SYSTEM_TASK:>
Search for a location - free form
<END_TASK>
<USER_TASK:>
Description:
def geo_search(user_id, search_location):
"""
Search for a location - free form
""" |
url = "https://api.twitter.com/1.1/geo/search.json"
params = {"query" : search_location }
response = make_twitter_request(url, user_id, params).json()
return response |
<SYSTEM_TASK:>
add value in form of dict
<END_TASK>
<USER_TASK:>
Description:
def add_val(self, val):
"""add value in form of dict""" |
        if not isinstance(val, dict):
            raise ValueError('val must be a dict, got %s' % type(val))
self.read()
self.config.update(val)
self.save() |
<SYSTEM_TASK:>
Read mesh data from file.
<END_TASK>
<USER_TASK:>
Description:
def read_mesh(fname):
"""Read mesh data from file.
Parameters
----------
fname : str
File name to read. Format will be inferred from the filename.
Currently only '.obj' and '.obj.gz' are supported.
Returns
-------
vertices : array
Vertices.
faces : array | None
Triangle face definitions.
normals : array
Normals for the mesh.
texcoords : array | None
Texture coordinates.
""" |
# Check format
fmt = op.splitext(fname)[1].lower()
if fmt == '.gz':
fmt = op.splitext(op.splitext(fname)[0])[1].lower()
    if fmt in ('.obj',):
        return WavefrontReader.read(fname)
    elif not fmt:
        raise ValueError('read_mesh could not determine format.')
else:
raise ValueError('read_mesh does not understand format %s.' % fmt) |
<SYSTEM_TASK:>
Write mesh data to file.
<END_TASK>
<USER_TASK:>
Description:
def write_mesh(fname, vertices, faces, normals, texcoords, name='',
format='obj', overwrite=False, reshape_faces=True):
""" Write mesh data to file.
Parameters
----------
fname : str
Filename to write. Must end with ".obj" or ".gz".
vertices : array
Vertices.
faces : array | None
Triangle face definitions.
normals : array
Normals for the mesh.
texcoords : array | None
Texture coordinates.
name : str
Name of the object.
format : str
Currently only "obj" is supported.
overwrite : bool
If the file exists, overwrite it.
reshape_faces : bool
Reshape the `faces` array to (Nf, 3). Set to `False`
if you need to write a mesh with non triangular faces.
""" |
# Check file
if op.isfile(fname) and not overwrite:
raise IOError('file "%s" exists, use overwrite=True' % fname)
# Check format
    if format not in ('obj',):
raise ValueError('Only "obj" format writing currently supported')
WavefrontWriter.write(fname, vertices, faces,
normals, texcoords, name, reshape_faces) |
<SYSTEM_TASK:>
Parse uniforms, attributes and varyings from the source code.
<END_TASK>
<USER_TASK:>
Description:
def _parse_variables_from_code(self):
""" Parse uniforms, attributes and varyings from the source code.
""" |
# Get one string of code with comments removed
code = '\n\n'.join(self._shaders)
    code = re.sub(r'(.*)(//.*)', r'\1', code, flags=re.M)
# Regexp to look for variable names
    var_regexp = (r"\s*VARIABLE\s+"  # kind of variable
                  r"((highp|mediump|lowp)\s+)?"  # Precision (optional)
                  r"(?P<type>\w+)\s+"  # type
                  r"(?P<name>\w+)\s*"  # name
                  r"(\[(?P<size>\d+)\])?"  # size (optional)
                  r"(\s*\=\s*[0-9.]+)?"  # default value (optional)
                  r"\s*;"  # end
                  )
# Parse uniforms, attributes and varyings
self._code_variables = {}
for kind in ('uniform', 'attribute', 'varying', 'const'):
regex = re.compile(var_regexp.replace('VARIABLE', kind),
flags=re.MULTILINE)
for m in re.finditer(regex, code):
gtype = m.group('type')
size = int(m.group('size')) if m.group('size') else -1
this_kind = kind
if size >= 1:
# uniform arrays get added both as individuals and full
for i in range(size):
name = '%s[%d]' % (m.group('name'), i)
self._code_variables[name] = kind, gtype, name, -1
this_kind = 'uniform_array'
name = m.group('name')
self._code_variables[name] = this_kind, gtype, name, size
# Now that our code variables are up-to date, we can process
# the variables that were set but yet unknown.
self._process_pending_variables() |
<SYSTEM_TASK:>
Try to apply the variables that were set but not known yet.
<END_TASK>
<USER_TASK:>
Description:
def _process_pending_variables(self):
""" Try to apply the variables that were set but not known yet.
""" |
# Clear our list of pending variables
self._pending_variables, pending = {}, self._pending_variables
# Try to apply it. On failure, it will be added again
for name, data in pending.items():
self[name] = data |
<SYSTEM_TASK:>
Draw the attribute arrays in the specified mode.
<END_TASK>
<USER_TASK:>
Description:
def draw(self, mode='triangles', indices=None, check_error=True):
""" Draw the attribute arrays in the specified mode.
Parameters
----------
mode : str | GL_ENUM
'points', 'lines', 'line_strip', 'line_loop', 'triangles',
'triangle_strip', or 'triangle_fan'.
indices : array
Array of indices to draw.
check_error:
Check error after draw.
""" |
# Invalidate buffer (data has already been sent)
self._buffer = None
# Check if mode is valid
mode = check_enum(mode)
if mode not in ['points', 'lines', 'line_strip', 'line_loop',
'triangles', 'triangle_strip', 'triangle_fan']:
raise ValueError('Invalid draw mode: %r' % mode)
# Check leftover variables, warn, discard them
# In GLIR we check whether all attributes are indeed set
for name in self._pending_variables:
        logger.warning('Variable %r is given but not known.' % name)
self._pending_variables = {}
# Check attribute sizes
attributes = [vbo for vbo in self._user_variables.values()
if isinstance(vbo, DataBuffer)]
sizes = [a.size for a in attributes]
if len(attributes) < 1:
raise RuntimeError('Must have at least one attribute')
if not all(s == sizes[0] for s in sizes[1:]):
msg = '\n'.join(['%s: %s' % (str(a), a.size) for a in attributes])
raise RuntimeError('All attributes must have the same size, got:\n'
'%s' % msg)
# Get the glir queue that we need now
canvas = get_current_canvas()
assert canvas is not None
# Associate canvas
canvas.context.glir.associate(self.glir)
# Indexbuffer
if isinstance(indices, IndexBuffer):
canvas.context.glir.associate(indices.glir)
logger.debug("Program drawing %r with index buffer" % mode)
gltypes = {np.dtype(np.uint8): 'UNSIGNED_BYTE',
np.dtype(np.uint16): 'UNSIGNED_SHORT',
np.dtype(np.uint32): 'UNSIGNED_INT'}
selection = indices.id, gltypes[indices.dtype], indices.size
canvas.context.glir.command('DRAW', self._id, mode, selection)
elif indices is None:
selection = 0, attributes[0].size
logger.debug("Program drawing %r with %r" % (mode, selection))
canvas.context.glir.command('DRAW', self._id, mode, selection)
else:
raise TypeError("Invalid index: %r (must be IndexBuffer)" %
indices)
# Process GLIR commands
canvas.context.flush_commands() |
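A minimal sketch of typical usage, assuming a Program built from a vertex/fragment shader pair and called inside a canvas draw callback (shader sources and canvas are assumed):

import numpy as np

program = Program(vert_source, frag_source)  # shader sources assumed
program['a_position'] = np.zeros((4, 2), dtype=np.float32)  # one attribute
program.draw('triangle_strip')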
<SYSTEM_TASK:>
Update the data in this surface plot.
<END_TASK>
<USER_TASK:>
Description:
def set_data(self, x=None, y=None, z=None, colors=None):
"""Update the data in this surface plot.
Parameters
----------
x : ndarray | None
1D array of values specifying the x positions of vertices in the
grid. If None, values will be assumed to be integers.
y : ndarray | None
        1D array of values specifying the y positions of vertices in the
grid. If None, values will be assumed to be integers.
z : ndarray
2D array of height values for each grid vertex.
colors : ndarray
(width, height, 4) array of vertex colors.
""" |
if x is not None:
if self._x is None or len(x) != len(self._x):
self.__vertices = None
self._x = x
if y is not None:
if self._y is None or len(y) != len(self._y):
self.__vertices = None
self._y = y
if z is not None:
if self._x is not None and z.shape[0] != len(self._x):
raise TypeError('Z values must have shape (len(x), len(y))')
if self._y is not None and z.shape[1] != len(self._y):
raise TypeError('Z values must have shape (len(x), len(y))')
self._z = z
if (self.__vertices is not None and
self._z.shape != self.__vertices.shape[:2]):
self.__vertices = None
if self._z is None:
return
update_mesh = False
new_vertices = False
# Generate vertex and face array
if self.__vertices is None:
new_vertices = True
self.__vertices = np.empty((self._z.shape[0], self._z.shape[1], 3),
dtype=np.float32)
self.generate_faces()
self.__meshdata.set_faces(self.__faces)
update_mesh = True
# Copy x, y, z data into vertex array
if new_vertices or x is not None:
if x is None:
if self._x is None:
x = np.arange(self._z.shape[0])
else:
x = self._x
self.__vertices[:, :, 0] = x.reshape(len(x), 1)
update_mesh = True
if new_vertices or y is not None:
if y is None:
if self._y is None:
y = np.arange(self._z.shape[1])
else:
y = self._y
self.__vertices[:, :, 1] = y.reshape(1, len(y))
update_mesh = True
if new_vertices or z is not None:
self.__vertices[..., 2] = self._z
update_mesh = True
if colors is not None:
self.__meshdata.set_vertex_colors(colors)
update_mesh = True
# Update MeshData
if update_mesh:
self.__meshdata.set_vertices(
self.__vertices.reshape(self.__vertices.shape[0] *
self.__vertices.shape[1], 3))
MeshVisual.set_data(self, meshdata=self.__meshdata) |
<SYSTEM_TASK:>
A simplified representation of the same transformation.
<END_TASK>
<USER_TASK:>
Description:
def simplified(self):
"""A simplified representation of the same transformation.
""" |
if self._simplified is None:
self._simplified = SimplifiedChainTransform(self)
return self._simplified |
<SYSTEM_TASK:>
Add a new transform to the end of this chain.
<END_TASK>
<USER_TASK:>
Description:
def append(self, tr):
"""
Add a new transform to the end of this chain.
Parameters
----------
tr : instance of Transform
The transform to use.
""" |
self.transforms.append(tr)
tr.changed.connect(self._subtr_changed)
self._rebuild_shaders()
self.update() |
<SYSTEM_TASK:>
Add a new transform to the beginning of this chain.
<END_TASK>
<USER_TASK:>
Description:
def prepend(self, tr):
"""
Add a new transform to the beginning of this chain.
Parameters
----------
tr : instance of Transform
The transform to use.
""" |
self.transforms.insert(0, tr)
tr.changed.connect(self._subtr_changed)
self._rebuild_shaders()
self.update() |
<SYSTEM_TASK:>
Generate a simplified chain by joining adjacent transforms.
<END_TASK>
<USER_TASK:>
Description:
def source_changed(self, event):
"""Generate a simplified chain by joining adjacent transforms.
""" |
# bail out early if the chain is empty
transforms = self._chain.transforms[:]
if len(transforms) == 0:
self.transforms = []
return
# If the change signal comes from a transform that already appears in
# our simplified transform list, then there is no need to re-simplify.
if event is not None:
for source in event.sources[::-1]:
if source in self.transforms:
self.update(event)
return
# First flatten the chain by expanding all nested chains
new_chain = []
while len(transforms) > 0:
tr = transforms.pop(0)
if isinstance(tr, ChainTransform) and not tr.dynamic:
transforms = tr.transforms[:] + transforms
else:
new_chain.append(tr)
# Now combine together all compatible adjacent transforms
cont = True
tr = new_chain
while cont:
new_tr = [tr[0]]
cont = False
for t2 in tr[1:]:
t1 = new_tr[-1]
pr = t1 * t2
if (not t1.dynamic and not t2.dynamic and not
isinstance(pr, ChainTransform)):
cont = True
new_tr.pop()
new_tr.append(pr)
else:
new_tr.append(t2)
tr = new_tr
self.transforms = tr |
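The combine step above in miniature, with a hypothetical stand-in transform
whose multiplication always merges (real transforms may instead produce a
ChainTransform, which keeps them separate):
class Static:  # hypothetical stand-in, not a vispy class
    dynamic = False
    def __init__(self, name):
        self.name = name
    def __mul__(self, other):
        return Static(self.name + other.name)  # merge succeeds

chain = [Static('A'), Static('B'), Static('C')]
merged = [chain[0]]
for t2 in chain[1:]:
    t1 = merged[-1]
    if not t1.dynamic and not t2.dynamic:
        merged[-1] = t1 * t2  # collapse adjacent static transforms
    else:
        merged.append(t2)
print([t.name for t in merged])  # ['ABC']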
<SYSTEM_TASK:>
Clean queue items from a previous session.
<END_TASK>
<USER_TASK:>
Description:
def clean(self):
"""Clean queue items from a previous session.
In case a previous session crashed and there are still unfinished
entries in the queue ('paused', 'running', 'stopping', 'killing'),
we reset those and enqueue them again.
""" |
for _, item in self.queue.items():
if item['status'] in ['paused', 'running', 'stopping', 'killing']:
item['status'] = 'queued'
item['start'] = ''
item['end'] = '' |
<SYSTEM_TASK:>
Remove all completed tasks from the queue.
<END_TASK>
<USER_TASK:>
Description:
def clear(self):
"""Remove all completed tasks from the queue.""" |
for key in list(self.queue.keys()):
if self.queue[key]['status'] in ['done', 'failed']:
del self.queue[key]
self.write() |
<SYSTEM_TASK:>
Get the next processable item of the queue.
<END_TASK>
<USER_TASK:>
Description:
def next(self):
"""Get the next processable item of the queue.
A processable item is supposed to have the status `queued`.
Returns:
None: If no queued entry is found.
int: The smallest queued key otherwise.
""" |
smallest = None
for key in self.queue.keys():
if self.queue[key]['status'] == 'queued':
if smallest is None or key < smallest:
smallest = key
return smallest |
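The selection rule in isolation; a plain dict stands in for the queue, so this
is a sketch of the logic rather than of the class:
queue = {
    0: {'status': 'done'},
    2: {'status': 'queued'},
    1: {'status': 'queued'},
}
smallest = min((key for key, entry in queue.items()
                if entry['status'] == 'queued'), default=None)
assert smallest == 1  # lowest key whose status is 'queued'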
<SYSTEM_TASK:>
Write the current queue to a file. We need this to continue an earlier session.
<END_TASK>
<USER_TASK:>
Description:
def write(self):
"""Write the current queue to a file. We need this to continue an earlier session.""" |
queue_path = os.path.join(self.config_dir, 'queue')
try:
    with open(queue_path, 'wb+') as queue_file:
        pickle.dump(self.queue, queue_file, -1)
except Exception:
    print('Error while writing to queue file. Wrong file permissions?') |
<SYSTEM_TASK:>
Add a new entry to the queue.
<END_TASK>
<USER_TASK:>
Description:
def add_new(self, command):
"""Add a new entry to the queue.""" |
self.queue[self.next_key] = command
self.queue[self.next_key]['status'] = 'queued'
self.queue[self.next_key]['returncode'] = ''
self.queue[self.next_key]['stdout'] = ''
self.queue[self.next_key]['stderr'] = ''
self.queue[self.next_key]['start'] = ''
self.queue[self.next_key]['end'] = ''
self.next_key += 1
self.write() |
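A sketch of the entry shape `add_new` expects; the `queue` instance and the
command are assumptions for illustration:
new_entry = {
    'command': 'sleep 60',  # shell command to execute
    'path': '/tmp',         # working directory for the command
}
# queue.add_new(new_entry)  # assigns the next key and status 'queued'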
<SYSTEM_TASK:>
Remove a key from the queue, return `False` if no such key exists.
<END_TASK>
<USER_TASK:>
Description:
def remove(self, key):
"""Remove a key from the queue, return `False` if no such key exists.""" |
if key in self.queue:
del self.queue[key]
self.write()
return True
return False |
<SYSTEM_TASK:>
Restart a previously finished entry.
<END_TASK>
<USER_TASK:>
Description:
def restart(self, key):
"""Restart a previously finished entry.""" |
if key in self.queue:
if self.queue[key]['status'] in ['failed', 'done']:
new_entry = {'command': self.queue[key]['command'],
'path': self.queue[key]['path']}
self.add_new(new_entry)
self.write()
return True
return False |
<SYSTEM_TASK:>
Switch two entries in the queue. Return False if an entry doesn't exist.
<END_TASK>
<USER_TASK:>
Description:
def switch(self, first, second):
"""Switch two entries in the queue. Return False if an entry doesn't exist.""" |
allowed_states = ['queued', 'stashed']
if first in self.queue and second in self.queue \
and self.queue[first]['status'] in allowed_states\
and self.queue[second]['status'] in allowed_states:
tmp = self.queue[second].copy()
self.queue[second] = self.queue[first].copy()
self.queue[first] = tmp
self.write()
return True
return False |
<SYSTEM_TASK:>
Receive an answer from the daemon and return the response.
<END_TASK>
<USER_TASK:>
Description:
def receive_data(socket):
"""Receive an answer from the daemon and return the response.
Args:
socket (socket.socket): A socket that is connected to the daemon.
Returns:
dir or string: The unpickled answer.
""" |
answer = b""
while True:
packet = socket.recv(4096)
if not packet:
    break
answer += packet
response = pickle.loads(answer)
socket.close()
return response |
<SYSTEM_TASK:>
Connect to a daemon's socket.
<END_TASK>
<USER_TASK:>
Description:
def connect_socket(root_dir):
"""Connect to a daemon's socket.
Args:
root_dir (str): The directory that used as root by the daemon.
Returns:
socket.socket: A socket that is connected to the daemon.
""" |
# Get config directory where the daemon socket is located
config_dir = os.path.join(root_dir, '.config/pueue')
# Create Socket and exit with 1, if socket can't be created
try:
client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
socket_path = os.path.join(config_dir, 'pueue.sock')
if os.path.exists(socket_path):
client.connect(socket_path)
else:
print("Socket doesn't exist")
raise Exception
except Exception:
print("Error connecting to socket. Make sure the daemon is running")
sys.exit(1)
return client |
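A hedged round-trip built from the two helpers above. It assumes a running
daemon, and the payload shape is an assumption about the daemon's protocol:
import os
import pickle

client = connect_socket(os.path.expanduser('~'))
client.send(pickle.dumps({'mode': 'status'}, -1))  # payload shape assumed
print(receive_data(client))  # receive_data also closes the socket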
<SYSTEM_TASK:>
Spawn the process, then repeatedly attach to the process.
<END_TASK>
<USER_TASK:>
Description:
def attach_loop(argv):
"""Spawn the process, then repeatedly attach to the process.""" |
# Check if the pdbhandler module is built into python.
p = Popen((sys.executable, '-X', 'pdbhandler', '-c',
'import pdbhandler; pdbhandler.get_handler().host'),
stdout=PIPE, stderr=STDOUT)
p.wait()
use_xoption = p.returncode == 0
# Spawn the process.
args = [sys.executable]
if use_xoption:
# Use SIGUSR2 as faulthandler is set on python test suite with
# SIGUSR1.
args.extend(['-X', 'pdbhandler=localhost 7935 %d' % signal.SIGUSR2])
args.extend(argv)
proc = Popen(args)
else:
args.extend(argv)
proc = Popen(args)
# Repeatedly attach to the process using the '-X' python option or gdb.
ctx = Context()
error = None
time.sleep(.5 + random.random())
while not error and proc.poll() is None:
if use_xoption:
os.kill(proc.pid, signal.SIGUSR2)
connections = {}
dev_null = io.StringIO() if PY3 else StringIO.StringIO()
asock = AttachSocketWithDetach(connections, stdout=dev_null)
asock.create_socket(socket.AF_INET, socket.SOCK_STREAM)
connect_process(asock, ctx, proc)
asyncore.loop(map=connections)
else:
error = spawn_gdb(proc.pid, ctx=ctx, proc_iut=proc)
time.sleep(random.random())
if error and gdb_terminated(error):
error = None
if proc.poll() is None:
proc.terminate()
else:
print('pdb-attach: program under test return code:', proc.wait())
result = str(ctx.result)
if result:
print(result)
return error |
<SYSTEM_TASK:>
Skip this py-pdb command to avoid attaching within the same loop.
<END_TASK>
<USER_TASK:>
Description:
def skip(self):
"""Skip this py-pdb command to avoid attaching within the same loop.""" |
line = self.line
self.line = ''
# 'line' is the statement line of the previous py-pdb command.
if line in self.lines:
if not self.skipping:
self.skipping = True
printflush('Skipping lines', end='')
printflush('.', end='')
return True
elif line:
self.lines.append(line)
if len(self.lines) > 30:
self.lines.popleft()
return False |
<SYSTEM_TASK:>
Move the current log to a new file with timestamp and create a new empty log file.
<END_TASK>
<USER_TASK:>
Description:
def rotate(self, log):
"""Move the current log to a new file with timestamp and create a new empty log file.""" |
self.write(log, rotate=True)
self.write({}) |
<SYSTEM_TASK:>
Write the output of all finished processes to a compiled log file.
<END_TASK>
<USER_TASK:>
Description:
def write(self, log, rotate=False):
"""Write the output of all finished processes to a compiled log file.""" |
# Get path for logfile
if rotate:
    timestamp = time.strftime('-%Y%m%d-%H%M')
    log_path = os.path.join(self.log_dir, 'queue{}.log'.format(timestamp))
else:
    log_path = os.path.join(self.log_dir, 'queue.log')
# Remove existing log
if os.path.exists(log_path):
    os.remove(log_path)
log_file = open(log_path, 'w')
log_file.write('Pueue log for executed Commands: \n \n')
# Format, color and write log
for key, logentry in log.items():
if logentry.get('returncode') is not None:
try:
# Get returncode color:
returncode = logentry['returncode']
if returncode == 0:
returncode = Color('{autogreen}' + '{}'.format(returncode) + '{/autogreen}')
else:
returncode = Color('{autored}' + '{}'.format(returncode) + '{/autored}')
# Write command id with returncode and actual command
log_file.write(
Color('{autoyellow}' + 'Command #{} '.format(key) + '{/autoyellow}') +
'exited with returncode {}: \n'.format(returncode) +
'"{}" \n'.format(logentry['command'])
)
# Write path
log_file.write('Path: {} \n'.format(logentry['path']))
# Write times
log_file.write('Start: {}, End: {} \n'
.format(logentry['start'], logentry['end']))
# Write STDERR
if logentry['stderr']:
log_file.write(Color('{autored}Stderr output: {/autored}\n ') + logentry['stderr'])
# Write STDOUT
if len(logentry['stdout']) > 0:
log_file.write(Color('{autogreen}Stdout output: {/autogreen}\n ') + logentry['stdout'])
log_file.write('\n')
except Exception as error:
    print('Failed while writing to log file. Wrong file permissions?')
    print('Exception: {}'.format(error))
log_file.close() |
<SYSTEM_TASK:>
Remove all logs which are older than the specified time.
<END_TASK>
<USER_TASK:>
Description:
def remove_old(self, max_log_time):
"""Remove all logs which are older than the specified time.""" |
files = glob.glob('{}/queue-*'.format(self.log_dir))
files = [os.path.basename(path) for path in files]
for log_file in files:
# Get time stamp from filename
name = os.path.splitext(log_file)[0]
timestamp = name.split('-', maxsplit=1)[1]
# Get datetime from time stamp (avoid shadowing the `time` module)
log_time = datetime.strptime(timestamp, '%Y%m%d-%H%M')
now = datetime.now()
# Get total delta in seconds
delta = now - log_time
seconds = delta.total_seconds()
# Delete log file, if the delta is bigger than the specified log time
if seconds > int(max_log_time):
log_file_path = os.path.join(self.log_dir, log_file)
os.remove(log_file_path) |
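The filename round-trip used above, shown on a made-up log name:
import os
from datetime import datetime

name = os.path.splitext('queue-20240131-0905.log')[0]  # hypothetical file
timestamp = name.split('-', maxsplit=1)[1]
print(datetime.strptime(timestamp, '%Y%m%d-%H%M'))  # 2024-01-31 09:05:00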
<SYSTEM_TASK:>
Get MediaFireHashInfo structure from the fd, unit_size
<END_TASK>
<USER_TASK:>
Description:
def compute_hash_info(fd, unit_size=None):
"""Get MediaFireHashInfo structure from the fd, unit_size
fd -- file descriptor - expects exclusive access because of seeking
unit_size -- size of a single unit
Returns MediaFireHashInfo:
hi.file -- sha256 of the whole file
hi.units -- list of sha256 hashes for each unit
""" |
logger.debug("compute_hash_info(%s, unit_size=%s)", fd, unit_size)
fd.seek(0, os.SEEK_END)
file_size = fd.tell()
fd.seek(0, os.SEEK_SET)
units = []
unit_counter = 0
file_hash = hashlib.sha256()
unit_hash = hashlib.sha256()
for chunk in iter(lambda: fd.read(HASH_CHUNK_SIZE_BYTES), b''):
file_hash.update(chunk)
unit_hash.update(chunk)
unit_counter += len(chunk)
if unit_size is not None and unit_counter == unit_size:
# flush the current unit hash
units.append(unit_hash.hexdigest().lower())
unit_counter = 0
unit_hash = hashlib.sha256()
if unit_size is not None and unit_counter > 0:
# leftover block
units.append(unit_hash.hexdigest().lower())
fd.seek(0, os.SEEK_SET)
return MediaFireHashInfo(
file=file_hash.hexdigest().lower(),
units=units,
size=file_size
) |
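A runnable example against an in-memory file; without `unit_size` only the
whole-file digest is produced:
import io

fd = io.BytesIO(b'hello world!')
info = compute_hash_info(fd)
print(info.file)   # sha256 hex digest of the 12-byte payload
print(info.units)  # [] -- no unit hashes when unit_size is None
print(info.size)   # 12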
<SYSTEM_TASK:>
Upload file, returns UploadResult object
<END_TASK>
<USER_TASK:>
Description:
def upload(self, fd, name=None, folder_key=None, filedrop_key=None,
path=None, action_on_duplicate=None):
"""Upload file, returns UploadResult object
fd -- file-like object to upload from, expects exclusive access
name -- file name
folder_key -- folderkey of the target folder
path -- path to file relative to folder_key
filedrop_key -- filedrop to use instead of folder_key
action_on_duplicate -- skip, keep, replace
""" |
# Get file handle content length in the most reliable way
fd.seek(0, os.SEEK_END)
size = fd.tell()
fd.seek(0, os.SEEK_SET)
if size > UPLOAD_SIMPLE_LIMIT_BYTES:
resumable = True
else:
resumable = False
logger.debug("Calculating checksum")
hash_info = compute_hash_info(fd)
if hash_info.size != size:
# Has the file changed between computing the hash
# and calling upload()?
raise ValueError("hash_info.size mismatch")
upload_info = _UploadInfo(fd=fd, name=name, folder_key=folder_key,
hash_info=hash_info, size=size, path=path,
filedrop_key=filedrop_key,
action_on_duplicate=action_on_duplicate)
# Check whether file is present
check_result = self._upload_check(upload_info, resumable)
upload_result = None
upload_func = None
folder_key = check_result.get('folder_key', None)
if folder_key is not None:
# We know precisely what folder_key to use, drop path
upload_info.folder_key = folder_key
upload_info.path = None
if check_result['hash_exists'] == 'yes':
# file exists somewhere in MediaFire
if check_result['in_folder'] == 'yes' and \
check_result['file_exists'] == 'yes':
# file exists in this directory
different_hash = check_result.get('different_hash', 'no')
if different_hash == 'no':
# file is already there
upload_func = self._upload_none
if not upload_func:
# different hash or in other folder
upload_func = self._upload_instant
if not upload_func:
if resumable:
resumable_upload_info = check_result['resumable_upload']
upload_info.hash_info = compute_hash_info(
fd, int(resumable_upload_info['unit_size']))
upload_func = self._upload_resumable
else:
upload_func = self._upload_simple
# Retry retriable exceptions
retries = UPLOAD_RETRY_COUNT
while retries > 0:
try:
# Provide check_result to avoid calling API twice
upload_result = upload_func(upload_info, check_result)
except (RetriableUploadError, MediaFireConnectionError):
retries -= 1
logger.exception("%s failed (%d retries left)",
upload_func.__name__, retries)
# Refresh check_result for next iteration
check_result = self._upload_check(upload_info, resumable)
except Exception:
logger.exception("%s failed", upload_func)
break
else:
break
if upload_result is None:
raise UploadError("Upload failed")
return upload_result |
<SYSTEM_TASK:>
Poll upload until quickkey is found
<END_TASK>
<USER_TASK:>
Description:
def _poll_upload(self, upload_key, action):
"""Poll upload until quickkey is found
upload_key -- upload_key returned by upload/* functions
""" |
if len(upload_key) != UPLOAD_KEY_LENGTH:
# not a regular 11-char-long upload key
# There is no API to poll filedrop uploads
return UploadResult(
action=action,
quickkey=None,
hash_=None,
filename=None,
size=None,
created=None,
revision=None
)
quick_key = None
while quick_key is None:
poll_result = self._api.upload_poll(upload_key)
doupload = poll_result['doupload']
logger.debug("poll(%s): status=%d, description=%s, filename=%s,"
" result=%d",
upload_key, int(doupload['status']),
doupload['description'], doupload['filename'],
int(doupload['result']))
if int(doupload['result']) != 0:
break
if doupload['fileerror'] != '':
# TODO: we may have to handle this a bit more dramatically
logger.warning("poll(%s): fileerror=%d", upload_key,
int(doupload['fileerror']))
break
if int(doupload['status']) == STATUS_NO_MORE_REQUESTS:
quick_key = doupload['quickkey']
elif int(doupload['status']) == STATUS_UPLOAD_IN_PROGRESS:
# BUG: http://forum.mediafiredev.com/showthread.php?588
raise RetriableUploadError(
"Invalid state transition ({})".format(
doupload['description']
)
)
else:
time.sleep(UPLOAD_POLL_INTERVAL)
return UploadResult(
action=action,
quickkey=doupload['quickkey'],
hash_=doupload['hash'],
filename=doupload['filename'],
size=doupload['size'],
created=doupload['created'],
revision=doupload['revision']
) |
<SYSTEM_TASK:>
Dummy upload function for when we don't actually upload
<END_TASK>
<USER_TASK:>
Description:
def _upload_none(self, upload_info, check_result):
"""Dummy upload function for when we don't actually upload""" |
return UploadResult(
action=None,
quickkey=check_result['duplicate_quickkey'],
hash_=upload_info.hash_info.file,
filename=upload_info.name,
size=upload_info.size,
created=None,
revision=None
) |
<SYSTEM_TASK:>
Instant upload and return quickkey
<END_TASK>
<USER_TASK:>
Description:
def _upload_instant(self, upload_info, _=None):
"""Instant upload and return quickkey
Can be used when the file is already stored somewhere in MediaFire
upload_info -- UploadInfo object
check_result -- ignored
""" |
result = self._api.upload_instant(
upload_info.name,
upload_info.size,
upload_info.hash_info.file,
path=upload_info.path,
folder_key=upload_info.folder_key,
filedrop_key=upload_info.filedrop_key,
action_on_duplicate=upload_info.action_on_duplicate
)
return UploadResult(
action='upload/instant',
quickkey=result['quickkey'],
filename=result['filename'],
revision=result['new_device_revision'],
hash_=upload_info.hash_info.file,
size=upload_info.size,
created=None
) |
<SYSTEM_TASK:>
Simple upload and return quickkey
<END_TASK>
<USER_TASK:>
Description:
def _upload_simple(self, upload_info, _=None):
"""Simple upload and return quickkey
Can be used for small files smaller than UPLOAD_SIMPLE_LIMIT_BYTES
upload_info -- UploadInfo object
check_result -- ignored
""" |
upload_result = self._api.upload_simple(
upload_info.fd,
upload_info.name,
folder_key=upload_info.folder_key,
filedrop_key=upload_info.filedrop_key,
path=upload_info.path,
file_size=upload_info.size,
file_hash=upload_info.hash_info.file,
action_on_duplicate=upload_info.action_on_duplicate)
logger.debug("upload_result: %s", upload_result)
upload_key = upload_result['doupload']['key']
return self._poll_upload(upload_key, 'upload/simple') |
<SYSTEM_TASK:>
Prepare and upload all resumable units and return upload_key
<END_TASK>
<USER_TASK:>
Description:
def _upload_resumable_all(self, upload_info, bitmap,
number_of_units, unit_size):
"""Prepare and upload all resumable units and return upload_key
upload_info -- UploadInfo object
bitmap -- bitmap node of upload/check
number_of_units -- number of units requested
unit_size -- size of a single upload unit in bytes
""" |
fd = upload_info.fd
upload_key = None
for unit_id in range(number_of_units):
upload_status = decode_resumable_upload_bitmap(
bitmap, number_of_units)
if upload_status[unit_id]:
logger.debug("Skipping unit %d/%d - already uploaded",
unit_id + 1, number_of_units)
continue
logger.debug("Uploading unit %d/%d",
unit_id + 1, number_of_units)
offset = unit_id * unit_size
with SubsetIO(fd, offset, unit_size) as unit_fd:
unit_info = _UploadUnitInfo(
upload_info=upload_info,
hash_=upload_info.hash_info.units[unit_id],
fd=unit_fd,
uid=unit_id)
upload_result = self._upload_resumable_unit(unit_info)
# upload_key is needed for polling
if upload_key is None:
upload_key = upload_result['doupload']['key']
return upload_key |
<SYSTEM_TASK:>
Remove from sys.modules the modules imported by the debuggee.
<END_TASK>
<USER_TASK:>
Description:
def reset(self):
"""Remove from sys.modules the modules imported by the debuggee.""" |
if not self.hooked:
self.hooked = True
sys.path_hooks.append(self)
sys.path.insert(0, self.PATH_ENTRY)
return
for modname in self:
if modname in sys.modules:
del sys.modules[modname]
submods = []
for subm in sys.modules:
if subm.startswith(modname + '.'):
submods.append(subm)
# All submodules of modname may not have been imported by the
# debuggee, but they are still removed from sys.modules as
# there is no way to distinguish them.
for subm in submods:
del sys.modules[subm]
self[:] = [] |
<SYSTEM_TASK:>
Get the actual breakpoint line number.
<END_TASK>
<USER_TASK:>
Description:
def get_actual_bp(self, lineno):
"""Get the actual breakpoint line number.
When an exact match cannot be found in the lnotab expansion of the
module code object or one of its subcodes, pick up the next valid
statement line number.
Return the statement line defined by the tuple (code firstlineno,
statement line number) which is at the shortest distance to line
'lineno' and greater or equal to 'lineno'. When 'lineno' is the first
line number of a subcode, use its first statement line instead.
""" |
def _distance(code, module_level=False):
"""The shortest distance to the next valid statement."""
subcodes = dict((c.co_firstlineno, c) for c in code.co_consts
if isinstance(c, types.CodeType) and not
c.co_name.startswith('<'))
# Get the shortest distance to the subcode whose first line number
# is the last to be less or equal to lineno. That is, find the
# index of the first subcode whose first_lno is the first to be
# strictly greater than lineno.
subcode_dist = None
subcodes_flnos = sorted(subcodes)
idx = bisect(subcodes_flnos, lineno)
if idx != 0:
flno = subcodes_flnos[idx-1]
subcode_dist = _distance(subcodes[flno])
# Check if lineno is a valid statement line number in the current
# code, excluding function or method definition lines.
code_lnos = sorted(code_line_numbers(code))
# Do not stop at execution of function definitions.
if not module_level and len(code_lnos) > 1:
code_lnos = code_lnos[1:]
if lineno in code_lnos and lineno not in subcodes_flnos:
return 0, (code.co_firstlineno, lineno)
# Compute the distance to the next valid statement in this code.
idx = bisect(code_lnos, lineno)
if idx == len(code_lnos):
# lineno is greater than all 'code' line numbers.
return subcode_dist
actual_lno = code_lnos[idx]
dist = actual_lno - lineno
if subcode_dist and subcode_dist[0] < dist:
return subcode_dist
if actual_lno not in subcodes_flnos:
return dist, (code.co_firstlineno, actual_lno)
else:
# The actual line number is the line number of the first
# statement of the subcode following lineno (recursively).
return _distance(subcodes[actual_lno])
if self.code:
code_dist = _distance(self.code, module_level=True)
if not self.code or not code_dist:
raise BdbSourceError('{}: line {} is after the last '
'valid statement.'.format(self.filename, lineno))
return code_dist[1] |
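The bisect lookup at the heart of `_distance`, in isolation; `code_lnos` is a
made-up set of valid statement lines:
from bisect import bisect

code_lnos = [3, 5, 9, 12]  # hypothetical valid statement lines
lineno = 6                 # requested breakpoint line
idx = bisect(code_lnos, lineno)
next_valid = code_lnos[idx] if idx < len(code_lnos) else None
assert next_valid == 9     # nearest statement at or after line 6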
<SYSTEM_TASK:>
Return the list of breakpoints set at lineno.
<END_TASK>
<USER_TASK:>
Description:
def get_breakpoints(self, lineno):
"""Return the list of breakpoints set at lineno.""" |
try:
firstlineno, actual_lno = self.bdb_module.get_actual_bp(lineno)
except BdbSourceError:
return []
if firstlineno not in self:
return []
code_bps = self[firstlineno]
if actual_lno not in code_bps:
return []
return [bp for bp in sorted(code_bps[actual_lno],
key=attrgetter('number')) if bp.line == lineno] |
<SYSTEM_TASK:>
Set or remove the trace function.
<END_TASK>
<USER_TASK:>
Description:
def settrace(self, do_set):
"""Set or remove the trace function.""" |
if do_set:
sys.settrace(self.trace_dispatch)
else:
sys.settrace(None) |
<SYSTEM_TASK:>
Restart the debugger after source code changes.
<END_TASK>
<USER_TASK:>
Description:
def restart(self):
"""Restart the debugger after source code changes.""" |
_module_finder.reset()
linecache.checkcache()
for module_bpts in self.breakpoints.values():
module_bpts.reset() |
<SYSTEM_TASK:>
Stop when the current line number in frame is greater than lineno or
<END_TASK>
<USER_TASK:>
Description:
def set_until(self, frame, lineno=None):
"""Stop when the current line number in frame is greater than lineno or
when returning from frame.""" |
if lineno is None:
lineno = frame.f_lineno + 1
self._set_stopinfo(frame, lineno) |
<SYSTEM_TASK:>
Start debugging from `frame`.
<END_TASK>
<USER_TASK:>
Description:
def set_trace(self, frame=None):
"""Start debugging from `frame`.
If frame is not specified, debugging starts from caller's frame.
""" |
# First disable tracing temporarily as set_trace() may be called while
# tracing is in use. For example when called from a signal handler and
# within a debugging session started with runcall().
self.settrace(False)
if not frame:
frame = sys._getframe().f_back
frame.f_trace = self.trace_dispatch
# Do not change botframe when the debuggee has been started from an
# instance of Pdb with one of the family of run methods.
self.reset(ignore_first_call_event=False, botframe=self.botframe)
self.topframe = frame
while frame:
if frame is self.botframe:
break
botframe = frame
frame = frame.f_back
else:
self.botframe = botframe
# Must trace the bottom frame to disable tracing on termination,
# see issue 13044.
if not self.botframe.f_trace:
self.botframe.f_trace = self.trace_dispatch
self.settrace(True) |
<SYSTEM_TASK:>
Returns list of nested files and directories for local directory by path
<END_TASK>
<USER_TASK:>
Description:
def listdir(directory):
"""Returns list of nested files and directories for local directory by path
:param directory: absolute or relative path to local directory
:return: list nested of file or directory names
""" |
file_names = list()
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
if os.path.isdir(file_path):
filename = f'{filename}{os.path.sep}'
file_names.append(filename)
return file_names |
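A runnable check of the trailing-separator convention, using a throwaway
temporary directory:
import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    open(os.path.join(tmp, 'notes.txt'), 'w').close()
    os.mkdir(os.path.join(tmp, 'images'))
    print(sorted(listdir(tmp)))  # ['images/', 'notes.txt'] on POSIX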
<SYSTEM_TASK:>
Extract options for specified option type from all options
<END_TASK>
<USER_TASK:>
Description:
def get_options(option_type, from_options):
"""Extract options for specified option type from all options
:param option_type: the object of specified type of options
:param from_options: all options dictionary
:return: the dictionary of options for the specified type; each option is filled with its value from the
all-options dictionary, or left blank when no matching option exists there
""" |
_options = dict()
for key in option_type.keys:
key_with_prefix = f'{option_type.prefix}{key}'
if key not in from_options and key_with_prefix not in from_options:
_options[key] = ''
elif key in from_options:
_options[key] = from_options.get(key)
else:
_options[key] = from_options.get(key_with_prefix)
return _options |
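A sketch with a hypothetical option-type object; the function only requires
the `keys` and `prefix` attributes:
class ClientOptions:  # hypothetical option type
    prefix = 'webdav_'
    keys = ('hostname', 'login', 'password')

all_options = {'webdav_hostname': 'https://example.com', 'login': 'user'}
print(get_options(ClientOptions, all_options))
# {'hostname': 'https://example.com', 'login': 'user', 'password': ''}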