Unnamed: 0 (int64, 0–10k) | function (stringlengths 79–138k) | label (stringclasses, 20 values) | info (stringlengths 42–261)
---|---|---|---|
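Each row below pairs a Python function whose except clause is masked with __HOLE__ (the function column) with the exception class that fills the mask (the label column) and the path of the source file (the info column). As a minimal sketch, assuming this row format, a row can be reassembled into runnable code by substituting the label back in:

    def fill_hole(function_src, label):
        """Re-insert the masked exception class into the function source."""
        return function_src.replace('__HOLE__', label)

    # e.g. fill_hole(row['function'], row['label'])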
5,000 |
def _get_allocations(self):
utils.ensure_dir(TMP_DIR)
try:
with open(self._state_file_path, 'r') as allocations_file:
contents = allocations_file.read()
except __HOLE__:
contents = None
# If the file was empty, we want to return an empty set, not {''}
return set(contents.split(',')) if contents else set()
|
IOError
|
dataset/ETHPy150Open openstack/neutron/neutron/tests/common/exclusive_resources/resource_allocator.py/ResourceAllocator._get_allocations
|
5,001 |
def fit_point_cloud(src_pts, tgt_pts, rotate=True, translate=True,
scale=0, x0=None, leastsq_args={}, out='params'):
"""Find a transform that minimizes the squared distance from each source
point to its closest target point
Uses :func:`scipy.optimize.leastsq` to find a transformation involving
a combination of rotation, translation, and scaling (in that order).
Parameters
----------
src_pts : array, shape = (n, 3)
Points to which the transform should be applied.
tgt_pts : array, shape = (m, 3)
Points to which src_pts should be fitted. Each point in tgt_pts should
correspond to the point in src_pts with the same index.
rotate : bool
Allow rotation of the ``src_pts``.
translate : bool
Allow translation of the ``src_pts``.
scale : 0 | 1 | 3
Number of scaling parameters. With 0, points are not scaled. With 1,
points are scaled by the same factor along all axes. With 3, points are
scaled by a separate factor along each axis.
x0 : None | tuple
Initial values for the fit parameters.
leastsq_args : dict
Additional parameters to submit to :func:`scipy.optimize.leastsq`.
out : 'params' | 'trans'
In what format to return the estimate: 'params' returns a tuple with
the fit parameters; 'trans' returns a transformation matrix of shape
(4, 4).
Returns
-------
x : array, shape = (n_params, )
Estimated parameters for the transformation.
Notes
-----
Assumes that the target points form a dense enough point cloud so that
the distance of each src_pt to the closest tgt_pt can be used as an
estimate of the distance of src_pt to tgt_pts.
"""
from scipy.optimize import leastsq
kwargs = {'epsfcn': 0.01}
kwargs.update(leastsq_args)
# assert correct argument types
src_pts = np.atleast_2d(src_pts)
tgt_pts = np.atleast_2d(tgt_pts)
translate = bool(translate)
rotate = bool(rotate)
scale = int(scale)
if translate:
src_pts = np.hstack((src_pts, np.ones((len(src_pts), 1))))
try:
from sklearn.neighbors import BallTree
tgt_pts = BallTree(tgt_pts)
errfunc = _point_cloud_error_balltree
except __HOLE__:
warn("Sklearn could not be imported. Fitting points will be slower. "
"To improve performance, install the sklearn module.")
errfunc = _point_cloud_error
# for efficiency, define parameter specific error function
param_info = (rotate, translate, scale)
if param_info == (True, False, 0):
x0 = x0 or (0, 0, 0)
def error(x):
rx, ry, rz = x
trans = rotation3d(rx, ry, rz)
est = dot(src_pts, trans.T)
err = errfunc(est, tgt_pts)
return err
elif param_info == (True, False, 1):
x0 = x0 or (0, 0, 0, 1)
def error(x):
rx, ry, rz, s = x
trans = rotation3d(rx, ry, rz) * s
est = dot(src_pts, trans.T)
err = errfunc(est, tgt_pts)
return err
elif param_info == (True, False, 3):
x0 = x0 or (0, 0, 0, 1, 1, 1)
def error(x):
rx, ry, rz, sx, sy, sz = x
trans = rotation3d(rx, ry, rz) * [sx, sy, sz]
est = dot(src_pts, trans.T)
err = errfunc(est, tgt_pts)
return err
elif param_info == (True, True, 0):
x0 = x0 or (0, 0, 0, 0, 0, 0)
def error(x):
rx, ry, rz, tx, ty, tz = x
trans = dot(translation(tx, ty, tz), rotation(rx, ry, rz))
est = dot(src_pts, trans.T)
err = errfunc(est[:, :3], tgt_pts)
return err
else:
raise NotImplementedError(
"The specified parameter combination is not implemented: "
"rotate=%r, translate=%r, scale=%r" % param_info)
est, _, info, msg, _ = leastsq(error, x0, full_output=True, **kwargs)
logger.debug("fit_point_cloud leastsq (%i calls) info: %s", info['nfev'],
msg)
if out == 'params':
return est
elif out == 'trans':
return _trans_from_params(param_info, est)
else:
raise ValueError("Invalid out parameter: %r. Needs to be 'params' or "
"'trans'." % out)
|
ImportError
|
dataset/ETHPy150Open mne-tools/mne-python/mne/coreg.py/fit_point_cloud
|
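Row 5,001 above shows the pattern of defining a parameter-specific residual function and passing it to scipy.optimize.leastsq. A self-contained sketch of the same pattern; the straight-line model, data, and seed are illustrative assumptions, not taken from the sample:

    import numpy as np
    from scipy.optimize import leastsq

    xs = np.linspace(0, 1, 50)
    ys = 2.0 * xs + 0.5 + 0.01 * np.random.RandomState(0).randn(50)

    def error(x):
        # residuals for the current parameter vector
        slope, intercept = x
        return ys - (slope * xs + intercept)

    est, _ = leastsq(error, x0=(1.0, 0.0))
    print(est)  # approximately [2.0, 0.5]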
5,002 |
def resolveBoundary( self, boundarySpecifier, property, client, value ):
"""Resolve a particular boundary specifier into a boundary class"""
try:
return latebind.bind( boundarySpecifier )
except (ImportError, __HOLE__):
raise BoundaryTypeError(
property, self, client, value,
"Class/type %s could not be imported"""%(boundarySpecifier,)
)
|
AttributeError
|
dataset/ETHPy150Open correl/Transmission-XBMC/resources/lib/basictypes/boundary.py/Type.resolveBoundary
|
5,003 |
def __init__(self, data, subject, **kwargs):
if isinstance(data, str):
import nibabel
nib = nibabel.load(data)
data = nib.get_data().T
self._data = data
try:
basestring
except __HOLE__:
subject = subject if isinstance(subject, str) else subject.decode('utf-8')
self.subject = subject
super(BrainData, self).__init__(**kwargs)
|
NameError
|
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset/braindata.py/BrainData.__init__
|
5,004 |
def __init__(self, data, subject, xfmname, mask=None, **kwargs):
"""Three possible variables: volume, movie, vertex. Enumerated with size:
volume movie: (t, z, y, x)
volume image: (z, y, x)
linear movie: (t, v)
linear image: (v,)
"""
if self.__class__ == VolumeData:
raise TypeError('Cannot directly instantiate VolumeData objects')
super(VolumeData, self).__init__(data, subject, **kwargs)
try:
basestring
except __HOLE__:
xfmname = xfmname if isinstance(xfmname, str) else xfmname.decode('utf-8')
self.xfmname = xfmname
self._check_size(mask)
self.masked = _masker(self)
|
NameError
|
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset/braindata.py/VolumeData.__init__
|
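Rows 5,003 and 5,004 both use the same Python 2/3 shim: merely naming basestring raises NameError on Python 3, signalling that bytes values may need decoding to str. A minimal sketch of the idiom (the helper name ensure_text is illustrative):

    def ensure_text(value):
        try:
            basestring  # defined on Python 2 only
        except NameError:  # Python 3: decode bytes to str
            if isinstance(value, bytes):
                value = value.decode('utf-8')
        return value

    print(ensure_text(b'subject01'))  # 'subject01'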
5,005 |
def __init__(self, data, subject, **kwargs):
"""Represents `data` at each vertex on a `subject`s cortex.
`data` shape possibilities:
reg linear movie: (t, v)
reg linear image: (v,)
None: creates zero-filled VertexData
where t is the number of time points, c is colors (i.e. RGB), and v is the
number of vertices (either in both hemispheres or one hemisphere).
"""
if self.__class__ == VertexData:
raise TypeError('Cannot directly instantiate VertexData objects')
super(VertexData, self).__init__(data, subject, **kwargs)
try:
left, right = db.get_surf(self.subject, "wm")
except __HOLE__:
left, right = db.get_surf(self.subject, "fiducial")
self.llen = len(left[0])
self.rlen = len(right[0])
self._set_data(data)
|
IOError
|
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset/braindata.py/VertexData.__init__
|
5,006 |
@classmethod
def empty(cls, subject, value=0, **kwargs):
try:
left, right = db.get_surf(subject, "wm")
except __HOLE__:
left, right = db.get_surf(subject, "fiducial")
nverts = len(left[0]) + len(right[0])
return cls(np.ones((nverts,))*value, subject, **kwargs)
|
IOError
|
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset/braindata.py/VertexData.empty
|
5,007 |
@classmethod
def random(cls, subject, **kwargs):
try:
left, right = db.get_surf(subject, "wm")
except __HOLE__:
left, right = db.get_surf(subject, "fiducial")
nverts = len(left[0]) + len(right[0])
return cls(np.random.randn(nverts), subject, **kwargs)
|
IOError
|
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset/braindata.py/VertexData.random
|
5,008 |
def _hdf_write(h5, data, name="data", group="/data"):
try:
node = h5.require_dataset("%s/%s"%(group, name), data.shape, data.dtype, exact=True)
except __HOLE__:
del h5[group][name]
        node = h5.create_dataset("%s/%s"%(group, name), data.shape, data.dtype)
node[:] = data
return node
|
TypeError
|
dataset/ETHPy150Open gallantlab/pycortex/cortex/dataset/braindata.py/_hdf_write
|
5,009 |
def convert_pdf(filename, type='xml'):
commands = {'text': ['pdftotext', '-layout', filename, '-'],
'text-nolayout': ['pdftotext', filename, '-'],
'xml': ['pdftohtml', '-xml', '-stdout', filename],
'html': ['pdftohtml', '-stdout', filename]}
try:
pipe = subprocess.Popen(commands[type], stdout=subprocess.PIPE, close_fds=True).stdout
except __HOLE__ as e:
        raise EnvironmentError("error running %s, missing executable? [%s]" %
                               (' '.join(commands[type]), e))
data = pipe.read()
pipe.close()
return data
|
OSError
|
dataset/ETHPy150Open opencivicdata/pupa/pupa/utils/generic.py/convert_pdf
|
5,010 |
def _find_cached_images(session, sr_ref):
"""Return a dict(uuid=vdi_ref) representing all cached images."""
cached_images = {}
for vdi_ref, vdi_rec in _get_all_vdis_in_sr(session, sr_ref):
try:
image_id = vdi_rec['other_config']['image-id']
except __HOLE__:
continue
cached_images[image_id] = vdi_ref
return cached_images
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/_find_cached_images
|
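Row 5,010 relies on the EAFP idiom: attempt the nested dictionary lookup and skip records that lack the key, rather than pre-checking every level. A self-contained sketch with made-up records:

    records = [
        {'other_config': {'image-id': 'img-1'}},
        {'other_config': {}},  # no image-id: skipped
    ]
    cached_images = {}
    for vdi_ref, vdi_rec in enumerate(records):
        try:
            image_id = vdi_rec['other_config']['image-id']
        except KeyError:
            continue
        cached_images[image_id] = vdi_ref
    print(cached_images)  # {'img-1': 0}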
5,011 |
def create_image(context, session, instance, name_label, image_id,
image_type):
"""Creates VDI from the image stored in the local cache. If the image
is not present in the cache, it streams it from glance.
Returns: A list of dictionaries that describe VDIs
"""
cache_images = CONF.xenserver.cache_images.lower()
# Determine if the image is cacheable
if image_type == ImageType.DISK_ISO:
cache = False
elif cache_images == 'all':
cache = True
elif cache_images == 'some':
sys_meta = utils.instance_sys_meta(instance)
try:
cache = strutils.bool_from_string(sys_meta['image_cache_in_nova'])
except __HOLE__:
cache = False
elif cache_images == 'none':
cache = False
else:
LOG.warning(_LW("Unrecognized cache_images value '%s', defaulting to"
" True"), CONF.xenserver.cache_images)
cache = True
# Fetch (and cache) the image
start_time = timeutils.utcnow()
if cache:
downloaded, vdis = _create_cached_image(context, session, instance,
name_label, image_id,
image_type)
else:
vdis = _fetch_image(context, session, instance, name_label,
image_id, image_type)
downloaded = True
duration = timeutils.delta_seconds(start_time, timeutils.utcnow())
LOG.info(_LI("Image creation data, cacheable: %(cache)s, "
"downloaded: %(downloaded)s duration: %(duration).2f secs "
"for image %(image_id)s"),
{'image_id': image_id, 'cache': cache, 'downloaded': downloaded,
'duration': duration})
for vdi_type, vdi in six.iteritems(vdis):
vdi_ref = session.call_xenapi('VDI.get_by_uuid', vdi['uuid'])
_set_vdi_info(session, vdi_ref, vdi_type, name_label, vdi_type,
instance)
return vdis
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/create_image
|
5,012 |
def _image_uses_bittorrent(context, instance):
bittorrent = False
torrent_images = CONF.xenserver.torrent_images.lower()
if torrent_images == 'all':
bittorrent = True
elif torrent_images == 'some':
sys_meta = utils.instance_sys_meta(instance)
try:
bittorrent = strutils.bool_from_string(
sys_meta['image_bittorrent'])
except __HOLE__:
pass
elif torrent_images == 'none':
pass
else:
LOG.warning(_LW("Invalid value '%s' for torrent_images"),
torrent_images)
return bittorrent
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/_image_uses_bittorrent
|
5,013 |
def _fetch_disk_image(context, session, instance, name_label, image_id,
image_type):
"""Fetch the image from Glance
NOTE:
Unlike _fetch_vhd_image, this method does not use the Glance
plugin; instead, it streams the disks through domU to the VDI
directly.
Returns: A single filename if image_type is KERNEL_RAMDISK
A list of dictionaries that describe VDIs, otherwise
"""
# FIXME(sirp): Since the Glance plugin seems to be required for the
# VHD disk, it may be worth using the plugin for both VHD and RAW and
# DISK restores
image_type_str = ImageType.to_string(image_type)
LOG.debug("Fetching image %(image_id)s, type %(image_type_str)s",
{'image_id': image_id, 'image_type_str': image_type_str},
instance=instance)
if image_type == ImageType.DISK_ISO:
sr_ref = _safe_find_iso_sr(session)
else:
sr_ref = safe_find_sr(session)
glance_image = image_utils.GlanceImage(context, image_id)
if glance_image.is_raw_tgz():
image = image_utils.RawTGZImage(glance_image)
else:
image = image_utils.RawImage(glance_image)
virtual_size = image.get_size()
vdi_size = virtual_size
LOG.debug("Size for image %(image_id)s: %(virtual_size)d",
{'image_id': image_id, 'virtual_size': virtual_size},
instance=instance)
if image_type == ImageType.DISK:
# Make room for MBR.
vdi_size += MBR_SIZE_BYTES
elif (image_type in (ImageType.KERNEL, ImageType.RAMDISK) and
vdi_size > CONF.xenserver.max_kernel_ramdisk_size):
max_size = CONF.xenserver.max_kernel_ramdisk_size
raise exception.NovaException(
_("Kernel/Ramdisk image is too large: %(vdi_size)d bytes, "
"max %(max_size)d bytes") %
{'vdi_size': vdi_size, 'max_size': max_size})
vdi_ref = create_vdi(session, sr_ref, instance, name_label,
image_type_str, vdi_size)
# From this point we have a VDI on Xen host;
# If anything goes wrong, we need to remember its uuid.
try:
filename = None
vdi_uuid = session.call_xenapi("VDI.get_uuid", vdi_ref)
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
_stream_disk(
session, image.stream_to, image_type, virtual_size, dev)
if image_type in (ImageType.KERNEL, ImageType.RAMDISK):
# We need to invoke a plugin for copying the
# content of the VDI into the proper path.
LOG.debug("Copying VDI %s to /boot/guest on dom0",
vdi_ref, instance=instance)
args = {}
args['vdi-ref'] = vdi_ref
# Let the plugin copy the correct number of bytes.
args['image-size'] = str(vdi_size)
if CONF.xenserver.cache_images:
args['cached-image'] = image_id
filename = session.call_plugin('kernel', 'copy_vdi', args)
# Remove the VDI as it is not needed anymore.
destroy_vdi(session, vdi_ref)
LOG.debug("Kernel/Ramdisk VDI %s destroyed", vdi_ref,
instance=instance)
vdi_role = ImageType.get_role(image_type)
return {vdi_role: dict(uuid=None, file=filename)}
else:
vdi_role = ImageType.get_role(image_type)
return {vdi_role: dict(uuid=vdi_uuid, file=None)}
except (session.XenAPI.Failure, __HOLE__, OSError) as e:
# We look for XenAPI and OS failures.
LOG.exception(_LE("Failed to fetch glance image"),
instance=instance)
e.args = e.args + ([dict(type=ImageType.to_string(image_type),
uuid=vdi_uuid,
file=filename)],)
raise
|
IOError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/_fetch_disk_image
|
5,014 |
def determine_disk_image_type(image_meta):
"""Disk Image Types are used to determine where the kernel will reside
within an image. To figure out which type we're dealing with, we use
the following rules:
1. If we're using Glance, we can use the image_type field to
determine the image_type
2. If we're not using Glance, then we need to deduce this based on
whether a kernel_id is specified.
"""
if not image_meta.obj_attr_is_set("disk_format"):
return None
disk_format_map = {
'ami': ImageType.DISK,
'aki': ImageType.KERNEL,
'ari': ImageType.RAMDISK,
'raw': ImageType.DISK_RAW,
'vhd': ImageType.DISK_VHD,
'iso': ImageType.DISK_ISO,
}
try:
image_type = disk_format_map[image_meta.disk_format]
except __HOLE__:
raise exception.InvalidDiskFormat(disk_format=image_meta.disk_format)
LOG.debug("Detected %(type)s format for image %(image)s",
{'type': ImageType.to_string(image_type),
'image': image_meta})
return image_type
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/determine_disk_image_type
|
5,015 |
def _find_sr(session):
"""Return the storage repository to hold VM images."""
host = session.host_ref
try:
tokens = CONF.xenserver.sr_matching_filter.split(':')
filter_criteria = tokens[0]
filter_pattern = tokens[1]
except __HOLE__:
# oops, flag is invalid
LOG.warning(_LW("Flag sr_matching_filter '%s' does not respect "
"formatting convention"),
CONF.xenserver.sr_matching_filter)
return None
if filter_criteria == 'other-config':
key, value = filter_pattern.split('=', 1)
for sr_ref, sr_rec in session.get_all_refs_and_recs('SR'):
if not (key in sr_rec['other_config'] and
sr_rec['other_config'][key] == value):
continue
for pbd_ref in sr_rec['PBDs']:
pbd_rec = session.get_rec('PBD', pbd_ref)
if pbd_rec and pbd_rec['host'] == host:
return sr_ref
elif filter_criteria == 'default-sr' and filter_pattern == 'true':
pool_ref = session.call_xenapi('pool.get_all')[0]
sr_ref = session.call_xenapi('pool.get_default_SR', pool_ref)
if sr_ref:
return sr_ref
# No SR found!
LOG.error(_LE("XenAPI is unable to find a Storage Repository to "
"install guest instances on. Please check your "
"configuration (e.g. set a default SR for the pool) "
"and/or configure the flag 'sr_matching_filter'."))
return None
|
IndexError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/_find_sr
|
5,016 |
def _get_rrd(server, vm_uuid):
"""Return the VM RRD XML as a string."""
try:
xml = urllib.urlopen("%s://%s:%s@%s/vm_rrd?uuid=%s" % (
server[0],
CONF.xenserver.connection_username,
CONF.xenserver.connection_password,
server[1],
vm_uuid))
return xml.read()
except __HOLE__:
LOG.exception(_LE('Unable to obtain RRD XML for VM %(vm_uuid)s with '
'server details: %(server)s.'),
{'vm_uuid': vm_uuid, 'server': server})
return None
|
IOError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/_get_rrd
|
5,017 |
def get_this_vm_uuid(session):
if session and session.is_local_connection:
# UUID is the control domain running on this host
vms = session.call_xenapi("VM.get_all_records_where",
'field "is_control_domain"="true" and '
'field "resident_on"="%s"' %
session.host_ref)
return vms[list(vms.keys())[0]]['uuid']
try:
return _get_sys_hypervisor_uuid()
except __HOLE__:
# Some guest kernels (without 5c13f8067745efc15f6ad0158b58d57c44104c25)
# cannot read from uuid after a reboot. Fall back to trying xenstore.
# See https://bugs.launchpad.net/ubuntu/+source/xen-api/+bug/1081182
domid, _ = utils.execute('xenstore-read', 'domid', run_as_root=True)
vm_key, _ = utils.execute('xenstore-read',
'/local/domain/%s/vm' % domid.strip(),
run_as_root=True)
return vm_key.strip()[4:]
|
IOError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/vm_utils.py/get_this_vm_uuid
|
5,018 |
def input(self, datum, *a, **kw):
if datum == NOTHING:
return
try:
output, newstate = self.states[self.state][datum]
except __HOLE__:
self.invalidInput(datum)
else:
OLDSTATE = self.state.upper()
NEWSTATE = newstate.upper()
DATUM = datum.upper()
self.transition(OLDSTATE, NEWSTATE, DATUM, *a, **kw)
self.output(output, *a, **kw)
|
KeyError
|
dataset/ETHPy150Open twisted/vertex/vertex/statemachine.py/StateMachine.input
|
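Row 5,018 drives a table-driven state machine: a nested dict maps (state, input) to (output, next state), and a missing entry raises KeyError, which the machine treats as invalid input. A self-contained sketch of that dispatch (states and inputs are illustrative):

    states = {
        'idle': {'start': ('starting...', 'running')},
        'running': {'stop': ('stopping...', 'idle')},
    }
    state = 'idle'
    for datum in ('start', 'stop', 'bogus'):
        try:
            output, state = states[state][datum]
            print(output)
        except KeyError:
            print('invalid input:', datum)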
5,019 |
def _find_element_by_class_name(self, class_name, index_or_name):
elements = self._find_elements_by_class_name(class_name)
if self._is_index(index_or_name):
try:
index = int(index_or_name.split('=')[-1])
element = elements[index]
except (IndexError, __HOLE__):
raise Exception, 'Cannot find the element with index "%s"' % index_or_name
else:
found = False
for element in elements:
self._info("'%s'." % element.text)
if element.text == index_or_name:
found = True
break
if not found:
raise Exception, 'Cannot find the element with name "%s"' % index_or_name
return element
|
TypeError
|
dataset/ETHPy150Open jollychang/robotframework-appiumlibrary/src/AppiumLibrary/keywords/_element.py/_ElementKeywords._find_element_by_class_name
|
5,020 |
def main(app):
target_dir = os.path.join(app.builder.srcdir, 'book_figures')
source_dir = os.path.abspath(app.builder.srcdir + '/../' + 'book_figures')
try:
plot_gallery = eval(app.builder.config.plot_gallery)
except __HOLE__:
plot_gallery = bool(app.builder.config.plot_gallery)
if not os.path.exists(source_dir):
os.makedirs(source_dir)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
EB = ExampleBuilder(source_dir, target_dir,
execute_files=plot_gallery,
contents_file='contents.txt',
dir_info_file='README.rst',
sphinx_tag_base='book_fig',
template_example=RST_TEMPLATE)
EB.run()
|
TypeError
|
dataset/ETHPy150Open cigroup-ol/windml/doc/sphinxext/gen_figure_rst.py/main
|
5,021 |
def is_classic_task(tup):
"""
Takes (name, object) tuple, returns True if it's a non-Fab public callable.
"""
name, func = tup
try:
is_classic = (
callable(func) and (func not in _internals) and not
name.startswith('_')
)
# Handle poorly behaved __eq__ implementations
except (__HOLE__, TypeError):
is_classic = False
return is_classic
|
ValueError
|
dataset/ETHPy150Open prestodb/presto-admin/prestoadmin/main.py/is_classic_task
|
5,022 |
def run_tasks(task_list):
for name, args, kwargs, arg_hosts, arg_roles, arg_excl_hosts in task_list:
try:
if state.env.nodeps and name.strip() != 'package.install':
sys.stderr.write('Invalid argument --nodeps to task: %s\n'
% name)
display_command(name, 2)
return execute(
name,
hosts=state.env.hosts,
roles=arg_roles,
exclude_hosts=state.env.exclude_hosts,
*args, **kwargs
)
except __HOLE__ as e:
if is_arguments_error(e):
print("Incorrect number of arguments to task.\n")
_LOGGER.error('Incorrect number of arguments to task',
exc_info=True)
display_command(name, 2)
else:
raise
except BaseException as e:
raise
|
TypeError
|
dataset/ETHPy150Open prestodb/presto-admin/prestoadmin/main.py/run_tasks
|
5,023 |
def _handle_generic_set_env_vars(non_default_options):
if not hasattr(non_default_options, 'env_settings'):
return non_default_options
# Allow setting of arbitrary env keys.
# This comes *before* the "specific" env_options so that those may
# override these ones. Specific should override generic, if somebody
# was silly enough to specify the same key in both places.
# E.g. "fab --set shell=foo --shell=bar" should have env.shell set to
# 'bar', not 'foo'.
for pair in _escape_split(',', non_default_options.env_settings):
pair = _escape_split('=', pair)
# "--set x" => set env.x to True
# "--set x=" => set env.x to ""
key = pair[0]
value = True
if len(pair) == 2:
try:
value = _to_boolean(pair[1])
except __HOLE__:
value = pair[1]
state.env[key] = value
non_default_options_dict = vars(non_default_options)
del non_default_options_dict['env_settings']
return Values(non_default_options_dict)
|
ValueError
|
dataset/ETHPy150Open prestodb/presto-admin/prestoadmin/main.py/_handle_generic_set_env_vars
|
5,024 |
def _get_config_callback(commands_to_run):
config_callback = None
if len(commands_to_run) != 1:
raise Exception('Multiple commands are not supported')
c = commands_to_run[0][0]
module, command = c.split('.')
module_dict = state.commands[module]
command_callable = module_dict[command]
try:
config_callback = command_callable.pa_config_callback
except __HOLE__:
pass
return config_callback
|
AttributeError
|
dataset/ETHPy150Open prestodb/presto-admin/prestoadmin/main.py/_get_config_callback
|
5,025 |
def parse_and_validate_commands(args=sys.argv[1:]):
# Find local fabfile path or abort
fabfile = "prestoadmin"
# Store absolute path to fabfile in case anyone needs it
state.env.real_fabfile = fabfile
# Load fabfile (which calls its module-level code, including
# tweaks to env values) and put its commands in the shared commands
# dict
docstring, callables = load_fabfile(fabfile)
state.commands.update(callables)
# Parse command line options
parser = parser_for_options()
# Unless you pass in values, optparse fills in the default values for all
# of the options. We want to save the version of the options without
# default values, because that takes precedence over all other env vars.
non_default_options, arguments = parser.parse_args(args, values=Values())
options, arguments = parser.parse_args(args)
default_options = get_default_options(options, non_default_options)
# Handle regular args vs -- args
arguments = parser.largs
if len(parser.rargs) > 0:
warn("Arbitrary remote shell commands not supported.")
show_commands(None, 'normal', 2)
if options.extended_help:
parser.print_extended_help()
sys.exit(0)
# If user didn't specify any commands to run, show help
if not arguments:
parser.print_help()
sys.exit(0) # don't consider this an error
# Parse arguments into commands to run (plus args/kwargs/hosts)
commands_to_run = None
try:
commands_to_run = parse_arguments(arguments, state.commands)
except __HOLE__ as e:
warn(e.message)
_LOGGER.warn("Unable to parse arguments", exc_info=True)
parser.print_help()
sys.exit(2)
# Handle show (command-specific help) option
if options.display:
display_command(commands_to_run[0][0])
load_config_callback = _get_config_callback(commands_to_run)
_update_env(default_options, non_default_options, load_config_callback)
if not options.serial:
state.env.parallel = True
state.env.warn_only = False
# Initial password prompt, if requested
if options.initial_password_prompt:
prompt = "Initial value for env.password: "
state.env.password = getpass.getpass(prompt)
state.env['tasks'] = [x[0] for x in commands_to_run]
return commands_to_run
|
NameError
|
dataset/ETHPy150Open prestodb/presto-admin/prestoadmin/main.py/parse_and_validate_commands
|
5,026 |
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except __HOLE__:
return self._reverse[key]
|
KeyError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/util.py/ReverseDict.__getitem__
|
5,027 |
def get(self, key, default=None):
try:
return self[key]
except __HOLE__:
return default
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
|
KeyError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/util.py/ReverseDict.get
|
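Rows 5,026 and 5,027 implement reverse lookup on a dict: fall back to an inverse mapping when the forward lookup raises KeyError. A minimal runnable sketch (the constructor is an assumption; the sample only shows __getitem__ and get):

    class ReverseDict(dict):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self._reverse = {v: k for k, v in self.items()}

        def __getitem__(self, key):
            try:
                return dict.__getitem__(self, key)
            except KeyError:
                return self._reverse[key]

        def get(self, key, default=None):
            try:
                return self[key]
            except KeyError:
                return default

    d = ReverseDict({'a': 1})
    print(d['a'], d[1], d.get('missing'))  # 1 a None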
5,028 |
def validate_url(url):
"""validate a url for zeromq"""
if not isinstance(url, str):
raise TypeError("url must be a string, not %r"%type(url))
url = url.lower()
proto_addr = url.split('://')
assert len(proto_addr) == 2, 'Invalid url: %r'%url
proto, addr = proto_addr
assert proto in ['tcp','pgm','epgm','ipc','inproc'], "Invalid protocol: %r"%proto
# domain pattern adapted from http://www.regexlib.com/REDetails.aspx?regexp_id=391
# author: Remi Sabourin
pat = re.compile(r'^([\w\d]([\w\d\-]{0,61}[\w\d])?\.)*[\w\d]([\w\d\-]{0,61}[\w\d])?$')
if proto == 'tcp':
lis = addr.split(':')
assert len(lis) == 2, 'Invalid url: %r'%url
addr,s_port = lis
try:
port = int(s_port)
except __HOLE__:
        raise AssertionError("Invalid port %r in url: %r"%(s_port, url))
assert addr == '*' or pat.match(addr) is not None, 'Invalid url: %r'%url
else:
# only validate tcp urls currently
pass
return True
|
ValueError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/util.py/validate_url
|
5,029 |
def disambiguate_ip_address(ip, location=None):
"""turn multi-ip interfaces '0.0.0.0' and '*' into connectable
ones, based on the location (default interpretation of location is localhost)."""
if ip in ('0.0.0.0', '*'):
try:
external_ips = socket.gethostbyname_ex(socket.gethostname())[2]
except (socket.gaierror, __HOLE__):
# couldn't identify this machine, assume localhost
external_ips = []
if location is None or location in external_ips or not external_ips:
# If location is unspecified or cannot be determined, assume local
ip='127.0.0.1'
elif location:
return location
return ip
|
IndexError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/util.py/disambiguate_ip_address
|
5,030 |
def disambiguate_url(url, location=None):
"""turn multi-ip interfaces '0.0.0.0' and '*' into connectable
ones, based on the location (default interpretation is localhost).
This is for zeromq urls, such as tcp://*:10101."""
try:
proto,ip,port = split_url(url)
except __HOLE__:
# probably not tcp url; could be ipc, etc.
return url
ip = disambiguate_ip_address(ip,location)
return "%s://%s:%s"%(proto,ip,port)
|
AssertionError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/util.py/disambiguate_url
|
5,031 |
def integer_loglevel(loglevel):
try:
loglevel = int(loglevel)
except __HOLE__:
if isinstance(loglevel, str):
loglevel = getattr(logging, loglevel)
return loglevel
|
ValueError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/util.py/integer_loglevel
|
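Row 5,031 coerces a log level by trying int() first and falling back to logging's named levels when ValueError signals a non-numeric string. The same logic, runnable standalone:

    import logging

    def integer_loglevel(loglevel):
        try:
            loglevel = int(loglevel)
        except ValueError:
            if isinstance(loglevel, str):
                loglevel = getattr(logging, loglevel)
        return loglevel

    print(integer_loglevel('10'), integer_loglevel('DEBUG'))  # 10 10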
5,032 |
def api_request(self, **kwargs):
from gcloud.exceptions import NotFound
self._requested.append(kwargs)
try:
return self._responses.pop(0)
except __HOLE__:
raise NotFound('miss')
|
IndexError
|
dataset/ETHPy150Open GoogleCloudPlatform/gcloud-python/gcloud/monitoring/test_metric.py/_Connection.api_request
|
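Row 5,032 is a test double: canned responses are popped in FIFO order, and an exhausted queue turns IndexError into a domain-specific "not found" error. A sketch of the pattern, with LookupError standing in for gcloud's NotFound:

    class FakeConnection:
        def __init__(self, responses):
            self._responses = list(responses)
            self._requested = []

        def api_request(self, **kwargs):
            self._requested.append(kwargs)
            try:
                return self._responses.pop(0)
            except IndexError:
                raise LookupError('miss')

    conn = FakeConnection([{'ok': True}])
    print(conn.api_request(path='/metrics'))  # {'ok': True}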
5,033 |
def _get_commit_title(self):
# Check if we're inside a git checkout
try:
subp = subprocess.Popen( # nosec
['git', 'rev-parse', '--show-toplevel'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
gitdir = subp.communicate()[0].rstrip()
except __HOLE__:
# "git" was not found
return None
if not os.path.exists(gitdir):
return None
# Get title of most recent commit
subp = subprocess.Popen( # nosec
['git', 'log', '--no-merges', '--pretty=%s', '-1'],
stdout=subprocess.PIPE)
title = subp.communicate()[0]
if subp.returncode:
raise Exception("git log failed with code %s" % subp.returncode)
return title.decode('utf-8')
|
OSError
|
dataset/ETHPy150Open openstack/sahara/sahara/utils/hacking/commit_message.py/GitCheck._get_commit_title
|
5,034 |
def table_context(request,
table,
links=None,
paginate_by=None,
page=None,
extra_context=None,
context_processors=None,
paginator=None,
show_hits=False,
hit_label='Items'):
"""
:type table: Table
"""
if extra_context is None: # pragma: no cover
extra_context = {}
assert table.data is not None
grouped_links = {}
if links is not None:
links = evaluate_recursive(links, table=table)
links = [link for link in links if link.show and link.url]
grouped_links = groupby((link for link in links if link.group is not None), key=lambda l: l.group)
grouped_links = [(g, slugify(g), list(lg)) for g, lg in grouped_links] # because django templates are crap!
links = [link for link in links if link.group is None]
base_context = {
'links': links,
'grouped_links': grouped_links,
'table': table,
}
if paginate_by:
try:
paginate_by = int(request.GET.get('page_size', paginate_by))
except __HOLE__: # pragma: no cover
pass
if paginator is None:
paginator = Paginator(table.data, paginate_by)
object_list = None
else: # pragma: no cover
object_list = table.data
if not page:
page = request.GET.get('page', 1)
try:
page = int(page)
if page < 1: # pragma: no cover
page = 1
if page > paginator.num_pages: # pragma: no cover
page = paginator.num_pages
if object_list is None:
table.data = paginator.page(page).object_list
except (InvalidPage, ValueError): # pragma: no cover
if page == 1:
table.data = []
else:
raise Http404
base_context.update({
'request': request,
'is_paginated': paginator.num_pages > 1,
'results_per_page': paginate_by,
'has_next': paginator.num_pages > page,
'has_previous': page > 1,
'page_size': paginate_by,
'page': page,
'next': page + 1,
'previous': page - 1,
'pages': paginator.num_pages,
'hits': paginator.count,
'show_hits': show_hits,
'hit_label': hit_label})
else: # pragma: no cover
base_context.update({
'is_paginated': False})
base_context.update(extra_context)
return RequestContext(request, base_context, context_processors)
|
ValueError
|
dataset/ETHPy150Open TriOptima/tri.table/lib/tri/table/__init__.py/table_context
|
5,035 |
def send_message(source_jid, password, target_jid, body, subject = None,
message_type = "chat", message_thread = None, settings = None):
"""Star an XMPP session and send a message, then exit.
:Parameters:
- `source_jid`: sender JID
- `password`: sender password
- `target_jid`: recipient JID
- `body`: message body
- `subject`: message subject
- `message_type`: message type
- `message_thread`: message thread id
- `settings`: other settings
:Types:
- `source_jid`: `pyxmpp2.jid.JID` or `basestring`
- `password`: `basestring`
- `target_jid`: `pyxmpp.jid.JID` or `basestring`
- `body`: `basestring`
- `subject`: `basestring`
- `message_type`: `basestring`
- `settings`: `pyxmpp2.settings.XMPPSettings`
"""
# pylint: disable=R0913,R0912
if sys.version_info.major < 3:
# pylint: disable-msg=W0404
from locale import getpreferredencoding
encoding = getpreferredencoding()
if isinstance(source_jid, str):
source_jid = source_jid.decode(encoding)
if isinstance(password, str):
password = password.decode(encoding)
if isinstance(target_jid, str):
target_jid = target_jid.decode(encoding)
if isinstance(body, str):
body = body.decode(encoding)
if isinstance(message_type, str):
message_type = message_type.decode(encoding)
if isinstance(message_thread, str):
message_thread = message_thread.decode(encoding)
if not isinstance(source_jid, JID):
source_jid = JID(source_jid)
if not isinstance(target_jid, JID):
target_jid = JID(target_jid)
msg = Message(to_jid = target_jid, body = body, subject = subject,
stanza_type = message_type)
def action(client):
"""Send a mesage `msg` via a client."""
client.stream.send(msg)
if settings is None:
settings = XMPPSettings({"starttls": True, "tls_verify_peer": False})
if password is not None:
settings["password"] = password
handler = FireAndForget(source_jid, action, settings)
try:
handler.run()
except __HOLE__:
handler.disconnect()
raise
# vi: sts=4 et sw=4
|
KeyboardInterrupt
|
dataset/ETHPy150Open kuri65536/python-for-android/python3-alpha/python-libs/pyxmpp2/simple.py/send_message
|
5,036 |
def parse_hl_lines(expr):
"""Support our syntax for emphasizing certain lines of code.
expr should be like '1 2' to emphasize lines 1 and 2 of a code block.
Returns a list of ints, the line numbers to emphasize.
"""
if not expr:
return []
try:
return list(map(int, expr.split()))
except __HOLE__:
return []
# ------------------ The Main CodeHilite Class ----------------------
|
ValueError
|
dataset/ETHPy150Open dragondjf/QMarkdowner/markdown/extensions/codehilite.py/parse_hl_lines
|
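Row 5,036 returns [] both for an empty spec and for a malformed one, letting ValueError from int() act as the validity check. Usage sketch:

    def parse_hl_lines(expr):
        if not expr:
            return []
        try:
            return list(map(int, expr.split()))
        except ValueError:
            return []

    print(parse_hl_lines('1 3'))  # [1, 3]
    print(parse_hl_lines('1 x'))  # [] (malformed input)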
5,037 |
def hilite(self):
"""
        Pass code to the [Pygments](http://pygments.pocoo.org/) highlighter with
optional line numbers. The output should then be styled with css to
your liking. No styles are applied by default - only styling hooks
(i.e.: <span class="k">).
returns : A string of html.
"""
self.src = self.src.strip('\n')
if self.lang is None:
self._parseHeader()
if pygments and self.use_pygments:
try:
lexer = get_lexer_by_name(self.lang)
except ValueError:
try:
if self.guess_lang:
lexer = guess_lexer(self.src)
else:
lexer = get_lexer_by_name('text')
except __HOLE__:
lexer = get_lexer_by_name('text')
formatter = get_formatter_by_name('html',
linenos=self.linenums,
cssclass=self.css_class,
style=self.style,
noclasses=self.noclasses,
hl_lines=self.hl_lines)
return highlight(self.src, lexer, formatter)
else:
# just escape and build markup usable by JS highlighting libs
            txt = self.src.replace('&', '&amp;')
            txt = txt.replace('<', '&lt;')
            txt = txt.replace('>', '&gt;')
            txt = txt.replace('"', '&quot;')
classes = []
if self.lang:
classes.append('language-%s' % self.lang)
if self.linenums:
classes.append('linenums')
class_str = ''
if classes:
class_str = ' class="%s"' % ' '.join(classes)
return '<pre class="%s"><code%s>%s</code></pre>\n' % \
(self.css_class, class_str, txt)
|
ValueError
|
dataset/ETHPy150Open dragondjf/QMarkdowner/markdown/extensions/codehilite.py/CodeHilite.hilite
|
5,038 |
def _parseHeader(self):
"""
        Determines language of a code block from shebang line and whether said
        line should be removed or left in place. If the shebang line contains a
        path (even a single /) then it is assumed to be a real shebang line and
        left alone. However, if no path is given (i.e.: #!python or :::python)
        then it is assumed to be a mock shebang for language identification of
        a code fragment and removed from the code block prior to processing for
        code highlighting. When a mock shebang (i.e.: #!python) is found, line
        numbering is turned on. When colons are found in place of a shebang
        (i.e.: :::python), line numbering is left in the current state - off
        by default.
Also parses optional list of highlight lines, like:
:::python hl_lines="1 3"
"""
import re
# split text into lines
lines = self.src.split("\n")
# pull first line to examine
fl = lines.pop(0)
c = re.compile(r'''
(?:(?:^::+)|(?P<shebang>^[#]!)) # Shebang or 2 or more colons
(?P<path>(?:/\w+)*[/ ])? # Zero or 1 path
(?P<lang>[\w+-]*) # The language
\s* # Arbitrary whitespace
# Optional highlight lines, single- or double-quote-delimited
(hl_lines=(?P<quot>"|')(?P<hl_lines>.*?)(?P=quot))?
''', re.VERBOSE)
# search first line for shebang
m = c.search(fl)
if m:
# we have a match
try:
self.lang = m.group('lang').lower()
except __HOLE__:
self.lang = None
if m.group('path'):
# path exists - restore first line
lines.insert(0, fl)
if self.linenums is None and m.group('shebang'):
# Overridable and Shebang exists - use line numbers
self.linenums = True
self.hl_lines = parse_hl_lines(m.group('hl_lines'))
else:
# No match
lines.insert(0, fl)
self.src = "\n".join(lines).strip("\n")
# ------------------ The Markdown Extension -------------------------------
|
IndexError
|
dataset/ETHPy150Open dragondjf/QMarkdowner/markdown/extensions/codehilite.py/CodeHilite._parseHeader
|
5,039 |
def parse_authorization_header(value, charset='utf-8'):
'''Parse an HTTP basic/digest authorisation header.
:param value: the authorisation header to parse.
:return: either `None` if the header was invalid or
not given, otherwise an :class:`Auth` object.
'''
if not value:
return
try:
auth_type, auth_info = value.split(None, 1)
auth_type = auth_type.lower()
except __HOLE__:
return
if auth_type == 'basic':
try:
up = b64decode(auth_info.encode(DEFAULT_CHARSET)).decode(charset)
username, password = up.split(':', 1)
except Exception:
return
return BasicAuth(username, password)
elif auth_type == 'digest':
auth_map = parse_dict_header(auth_info)
if not digest_parameters.difference(auth_map):
return DigestAuth(auth_map.pop('username'), options=auth_map)
|
ValueError
|
dataset/ETHPy150Open quantmind/pulsar/pulsar/apps/wsgi/auth.py/parse_authorization_header
|
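Row 5,039 splits the header on the first run of whitespace, with ValueError marking a payload-less header; for the Basic scheme the payload is base64('user:password'). A self-contained round-trip sketch (credentials are made up):

    from base64 import b64decode, b64encode

    header = 'Basic ' + b64encode(b'alice:s3cret').decode('ascii')
    auth_type, auth_info = header.split(None, 1)
    username, password = b64decode(auth_info).decode('utf-8').split(':', 1)
    print(auth_type.lower(), username, password)  # basic alice s3cret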
5,040 |
def request(self, is_idempotent=False, **kwargs):
"""
Execute an HTTP request with the current client session.
Use the retry policy configured in the client when is_idempotent is True
"""
kwargs.setdefault('timeout', self.connection_timeout)
def invoke_request():
r = self.session.request(**kwargs)
try:
r.raise_for_status()
return r
except __HOLE__:
logger.debug('%s: %s', r, r.content)
raise
if is_idempotent:
return self.retrier.call(invoke_request)
else:
return invoke_request()
|
HTTPError
|
dataset/ETHPy150Open scrapinghub/python-hubstorage/hubstorage/client.py/HubstorageClient.request
|
5,041 |
def charset__set(self, charset):
if charset is None:
del self.charset
return
try:
header = self.headers.pop('content-type')
except __HOLE__:
raise AttributeError(
"You cannot set the charset when no content-type is defined")
match = _CHARSET_RE.search(header)
if match:
header = header[:match.start()] + header[match.end():]
header += '; charset=%s' % charset
self.headers['content-type'] = header
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/wsgiwrappers.py/WSGIResponse.charset__set
|
5,042 |
def charset__del(self):
try:
header = self.headers.pop('content-type')
except __HOLE__:
# Don't need to remove anything
return
match = _CHARSET_RE.search(header)
if match:
header = header[:match.start()] + header[match.end():]
self.headers['content-type'] = header
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/wsgiwrappers.py/WSGIResponse.charset__del
|
5,043 |
def content_type__del(self):
try:
del self.headers['content-type']
except __HOLE__:
pass
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/wsgiwrappers.py/WSGIResponse.content_type__del
|
5,044 |
def init():
"""
Some ORMs, e.g. Django, may require initialization to be performed, but only at some certain
point, e.g. after some variables are set and so on, this function provides an ability to run
this initialization for all supported ORMs automatically but only when needed by the caller
"""
import os
import pkgutil
for _, name, is_pkg in pkgutil.iter_modules([os.path.dirname(__file__)]):
if is_pkg:
try:
__import__(name, level=0) # try to import globally and see if ORM is installed
except __HOLE__:
pass
else:
getattr(__import__(name, globals(), level=1), 'init', lambda: None)() # if yes, run init() for this ORM
|
ImportError
|
dataset/ETHPy150Open maxtepkeev/architect/architect/orms/__init__.py/init
|
5,045 |
def __init__(self, spec, image_size=None):
super(Transform, self).__init__(spec)
self.flag = getattr(Image, self[0].upper())
try:
axis = (None, 0, 1) + TRANSFORM_AXIS[self.flag]
except __HOLE__:
raise ValueError('unknown transform %r' % self[0])
if len(self) != len(axis):
raise ValueError('expected %d transform values; got %d' % (len(axis), len(self)))
for i in xrange(1, len(self)):
v = self[i]
if isinstance(v, basestring):
if v[-1:] in ('%', 'p'): # Percentages.
if axis[i] is None:
raise ValueError('unknown dimension for %s value %d' % (self[0], i))
if image_size is None:
raise ValueError('no image size with relative transform')
self[i] = image_size[axis[i]] * float(v[:-1]) / 100
else:
self[i] = float(v)
# Finalize the size.
if not self[1] or not self[2]:
if not image_size:
                raise ValueError('no image size or transform size')
self[1] = int(self[1] or image_size[0])
self[2] = int(self[2] or image_size[1])
|
KeyError
|
dataset/ETHPy150Open mikeboers/Flask-Images/flask_images/transform.py/Transform.__init__
|
5,046 |
def put_state(self, request_dict):
registration = request_dict['params'].get('registration', None)
if registration:
s, created = ActivityState.objects.get_or_create(state_id=request_dict['params']['stateId'], agent=self.Agent,
activity_id=request_dict['params']['activityId'], registration_id=request_dict['params']['registration'])
else:
s, created = ActivityState.objects.get_or_create(state_id=request_dict['params']['stateId'], agent=self.Agent,
activity_id=request_dict['params']['activityId'])
if "application/json" not in request_dict['headers']['CONTENT_TYPE']:
try:
post_state = ContentFile(request_dict['state'].read())
except:
try:
post_state = ContentFile(request_dict['state'])
except:
post_state = ContentFile(str(request_dict['state']))
# If a state already existed with the profileId and activityId
if not created:
etag.check_preconditions(request_dict, s)
if s.state:
try:
s.state.delete()
except __HOLE__:
# probably was json before
s.json_state = {}
self.save_non_json_state(s, post_state, request_dict)
# State being PUT is json
else:
if not created:
etag.check_preconditions(request_dict, s)
the_state = request_dict['state']
s.json_state = the_state
s.content_type = request_dict['headers']['CONTENT_TYPE']
s.etag = etag.create_tag(the_state)
#Set updated
if 'updated' in request_dict['headers'] and request_dict['headers']['updated']:
s.updated = request_dict['headers']['updated']
else:
s.updated = datetime.datetime.utcnow().replace(tzinfo=utc)
s.save()
|
OSError
|
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/managers/ActivityStateManager.py/ActivityStateManager.put_state
|
5,047 |
def status(host=DEFAULT_HOST, port=DEFAULT_PORT):
'''
Get memcached status
CLI Example:
.. code-block:: bash
salt '*' memcached.status
'''
conn = _connect(host, port)
try:
stats = _check_stats(conn)[0]
except (CommandExecutionError, __HOLE__):
return False
else:
return {stats[0]: stats[1]}
|
IndexError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/memcached.py/status
|
5,048 |
def increment(key, delta=1, host=DEFAULT_HOST, port=DEFAULT_PORT):
'''
Increment the value of a key
CLI Example:
.. code-block:: bash
salt '*' memcached.increment <key>
salt '*' memcached.increment <key> 2
'''
conn = _connect(host, port)
_check_stats(conn)
cur = get(key)
if cur is None:
raise CommandExecutionError('Key \'{0}\' does not exist'.format(key))
elif not isinstance(cur, integer_types):
raise CommandExecutionError(
'Value for key \'{0}\' must be an integer to be '
'incremented'.format(key)
)
try:
return conn.incr(key, delta)
except __HOLE__:
raise SaltInvocationError('Delta value must be an integer')
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/memcached.py/increment
|
5,049 |
def decrement(key, delta=1, host=DEFAULT_HOST, port=DEFAULT_PORT):
'''
Decrement the value of a key
CLI Example:
.. code-block:: bash
salt '*' memcached.decrement <key>
salt '*' memcached.decrement <key> 2
'''
conn = _connect(host, port)
_check_stats(conn)
cur = get(key)
if cur is None:
raise CommandExecutionError('Key \'{0}\' does not exist'.format(key))
elif not isinstance(cur, integer_types):
raise CommandExecutionError(
'Value for key \'{0}\' must be an integer to be '
'decremented'.format(key)
)
try:
return conn.decr(key, delta)
except __HOLE__:
raise SaltInvocationError('Delta value must be an integer')
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/modules/memcached.py/decrement
|
5,050 |
def info_prep(r):
"""
Preprocessor for CAP Info segments
- whether accessed via /eden/info or /eden/alert/x/info
"""
if s3.debug:
s3.scripts.append("/%s/static/scripts/S3/s3.cap.js" % appname)
else:
s3.scripts.append("/%s/static/scripts/S3/s3.cap.min.js" % appname)
s3.stylesheets.append("S3/cap.css")
table = db.cap_info
post_vars = request.post_vars
template_id = None
if post_vars.get("language", False):
if r.tablename == "cap_info":
# cap/info controller
try:
template_id = db(table.id == r.id).select(table.template_info_id,
limitby=(0, 1)
).first().template_info_id
            except (__HOLE__, KeyError):
pass
elif r.component_name == "info":
# cap/x/info component tab
try:
template_id = r.component.get_id()
# this will error out if component is not yet saved
except:
pass
if template_id:
# Read template and copy locked fields to post_vars
template = db(table.id == template_id).select(limitby=(0, 1)).first()
settings = json.loads(template.template_settings)
if isinstance(settings.get("locked", False), dict):
locked_fields = [lf for lf in settings["locked"] if settings["locked"]]
for lf in locked_fields:
post_vars[lf] = template[lf]
return True
# -----------------------------------------------------------------------------
|
AttributeError
|
dataset/ETHPy150Open sahana/eden/controllers/cap.py/info_prep
|
5,051 |
def alert():
""" REST controller for CAP Alerts and Components """
tablename = "cap_alert"
def prep(r):
from s3 import S3OptionsFilter
itable = s3db.cap_info
rows = db(itable.expires < request.utcnow).select(itable.id,
orderby=itable.id)
if rows:
expired_ids = ",".join([str(row.id) for row in rows])
else:
expired_ids = "*"
rows = db(itable.expires >= request.utcnow).select(itable.id,
orderby=itable.id)
if rows:
unexpired_ids = ",".join([str(row.id) for row in rows])
else:
unexpired_ids = "*"
filter_widgets = s3db.get_config(tablename, "filter_widgets")
filter_widgets.insert(0, S3OptionsFilter("info.id",
label = T("Expiration"),
options = OrderedDict(
[(expired_ids, T("Expired")),
(unexpired_ids, T("Unexpired")),
("*", T("All")),
]),
cols = 3,
multiple = False,
))
# No need to put them back - the edit happens in-place
#s3db.configure(tablename,
# filter_widgets = filter_widgets,
# )
if r.representation == "dl":
# DataList: match list_layout
list_fields = ["msg_type",
"info.headline",
"area.name",
"info.priority",
"status",
"scope",
"info.event_type_id",
"info.certainty",
"info.severity",
"info.urgency",
"info.sender_name",
"sent",
]
s3db.configure(tablename,
list_fields = list_fields,
)
elif r.representation == "json":
# @ToDo: fix JSON representation's ability to use component list_fields
list_fields = ["id",
"identifier",
"msg_type",
"sender",
"sent",
"scope",
"status",
"template_id",
"restriction",
"info.description",
"info.category",
"info.certainty",
"info.effective",
"info.event_type_id",
"info.event_type_id$name",
"info.expires",
"info.headline",
"info.onset",
"info.priority",
"info.response_type",
"info.severity",
"info.urgency",
"area.name",
]
s3db.configure(tablename,
list_fields = list_fields,
)
#elif r.representation == "cap":
# # This is either importing from or exporting to cap format. Set both
# # postprocessing hooks so we don't have to enumerate methods.
# s3db.configure("gis_location",
# xml_post_parse = s3db.cap_gis_location_xml_post_parse,
# xml_post_render = s3db.cap_gis_location_xml_post_render,
# )
if r.id:
if r.record.is_template:
redirect(URL(c="cap", f="template",
args = request.args,
vars = request.vars))
if r.record.approved_by is not None:
# Once approved, don't allow to edit
# Don't allow to delete
s3db.configure(tablename,
editable=False,
deletable=False,
insertable=False,
)
if r.record.reference is not None:
# Don't show template_id for Updated/Cancelled/Error/Relay Alert
r.table.template_id.readable = False
r.table.template_id.writable = False
if settings.get_cap_restrict_fields():
if r.record.msg_type in ("Update", "Cancel", "Error"):
# Use case for change in msg_type
atable = r.table
for f in ("template_id",
"sender",
"status",
"msg_type",
"source",
"scope",
"restriction",
"addresses",
"codes",
"note",
"reference",
"incidents",
):
atable[f].writable = False
else:
r.resource.add_filter(r.table.is_template == False)
s3.formats["cap"] = r.url() # .have added by JS
if r.interactive:
if not r.component:
if r.method == "profile":
# Provide a nice display of the Alert details
# Hide the side menu
current.menu.options = None
# Header
record = r.record
profile_header = DIV(SPAN(SPAN("%s :: " % T("Message ID"),
_class="cap-label upper"
),
SPAN(record.identifier,
_class="cap-strong"
),
_class="medium-6 columns",
),
SPAN(SPAN("%s :: " % T("Source"),
_class="cap-label upper"
),
SPAN(record.source,
_class="cap-strong"
),
_class="medium-6 columns",
),
_class="row"
)
# Read the Components
alert_id = record.id
# Info
# @ToDo: handle multiple languages
itable = s3db.cap_info
info = db(itable.alert_id == alert_id).select(itable.language,
itable.category,
itable.event_type_id,
itable.response_type,
itable.urgency,
itable.severity,
itable.certainty,
itable.audience,
itable.effective,
itable.onset,
itable.expires,
itable.sender_name,
itable.headline,
itable.description,
itable.instruction,
itable.contact,
itable.web,
itable.parameter,
limitby=(0, 1)
).first()
# Area
# @ToDo: handle multiple areas
atable = s3db.cap_area
area = db(atable.alert_id == alert_id).select(atable.name,
limitby=(0, 1)).first()
# Map
ftable = s3db.gis_layer_feature
if auth.s3_logged_in():
fn = "alert"
else:
fn = "public"
query = (ftable.controller == "cap") & \
(ftable.function == fn)
layer = db(query).select(ftable.layer_id,
limitby=(0, 1)
).first()
try:
layer = dict(active = True,
layer_id = layer.layer_id,
filter = "~.id=%s" % alert_id,
name = record.identifier,
id = "profile-header-%s-%s" % (tablename, alert_id),
)
except:
# No suitable prepop found
layer = None
# Location
# @ToDo: Support multiple Locations
gtable = db.gis_location
ltable = db.cap_area_location
query = (ltable.alert_id == alert_id) & \
(ltable.location_id == gtable.id)
location = db(query).select(gtable.lat_max,
gtable.lon_max,
gtable.lat_min,
gtable.lon_min,
limitby=(0, 1)).first()
if location:
bbox = {"lat_max" : location.lat_max,
"lon_max" : location.lon_max,
"lat_min" : location.lat_min,
"lon_min" : location.lon_min
}
else:
# Default bounds
bbox = {}
label = TAG[""](SPAN("%s :: " % T("Area"),
_class="cap-label upper"
),
SPAN(area.name,
_class="cap-value"
),
)
map_widget = dict(label = label,
type = "map",
#context = "alert",
icon = "icon-map",
#height = 383,
#width = 568,
bbox = bbox,
)
table = r.table
def custom_widget_fn_1(r, **attr):
return DIV(DIV(SPAN("%s :: " % T("Headline"),
_class="cap-label upper"
),
SPAN(info.headline,
_class="cap-value"
),
),
DIV(" "),
DIV(SPAN("%s :: " % T("Description"),
_class="cap-label upper"
),
SPAN(info.description,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Response Type"),
_class="cap-label upper"
),
SPAN(" ".join(info.response_type) if info.response_type is not None else None,
_class="cap-strong"
),
),
DIV(SPAN("%s :: " % T("Instructions"),
_class="cap-label upper"
),
SPAN(info.instruction,
_class="cap-value"
),
),
)
custom_widget_1 = dict(type = "custom",
fn = custom_widget_fn_1,
)
def custom_widget_fn_2(r, **attr):
return DIV(DIV(SPAN("%s " % T("Information"),
_class="cap-value upper"
),
SPAN("%s :: " % T("Event"),
_class="cap-label upper"
),
SPAN(itable.event_type_id.represent(info.event_type_id),
_class="cap-strong"
),
),
DIV(_class="cap-label underline"
),
DIV(SPAN("%s :: " % T("Language"),
_class="cap-label"
),
SPAN(itable.language.represent(info.language),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Category"),
_class="cap-label"
),
SPAN(itable.category.represent(info.category),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Urgency"),
_class="cap-label"
),
SPAN(itable.urgency.represent(info.urgency),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Severity"),
_class="cap-label"
),
SPAN(itable.severity.represent(info.severity),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Certainty"),
_class="cap-label"
),
SPAN(itable.certainty.represent(info.certainty),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Audience"),
_class="cap-label"
),
SPAN(info.audience,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Effective Date"),
_class="cap-label"
),
SPAN(itable.effective.represent(info.effective),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Onset Date"),
_class="cap-label"
),
SPAN(itable.onset.represent(info.onset),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Expiry Date"),
_class="cap-label"
),
SPAN(itable.expires.represent(info.expires),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Sender"),
_class="cap-label"
),
SPAN(info.sender_name,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Information URL"),
_class="cap-label"
),
SPAN(info.web,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Contact Info"),
_class="cap-label"
),
SPAN(info.contact,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Parameters"),
_class="cap-label"
),
SPAN(itable.parameter.represent(info.parameter),
_class="cap-value"
),
),
)
custom_widget_2 = dict(type = "custom",
fn = custom_widget_fn_2,
)
def custom_widget_fn_3(r, **attr):
return DIV(DIV(SPAN(T("Alert Qualifiers"),
_class="cap-value upper"
),
),
DIV(_class="underline"
),
DIV(SPAN("%s :: " % T("Sender ID"),
_class="cap-label"
),
SPAN(record.sender,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Sent Date/Time"),
_class="cap-label"
),
SPAN(table.sent.represent(record.sent),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Message Status"),
_class="cap-label"
),
SPAN(table.status.represent(record.status),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Message Type"),
_class="cap-label"
),
SPAN(table.msg_type.represent(record.msg_type),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Scope"),
_class="cap-label"
),
SPAN(table.scope.represent(record.scope),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Handling Code"),
_class="cap-label"
),
SPAN(table.codes.represent(record.codes),
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Note"),
_class="cap-label"
),
SPAN(record.note,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Reference ID"),
_class="cap-label"
),
SPAN(record.reference,
_class="cap-value"
),
),
DIV(SPAN("%s :: " % T("Incident IDs"),
_class="cap-label"
),
SPAN(table.incidents.represent(record.incidents),
_class="cap-value"
),
),
DIV(_class="underline"
),
DIV(SPAN(T("Resources"),
_class="cap-value upper"
),
),
)
custom_widget_3 = dict(type = "custom",
fn = custom_widget_fn_3,
)
s3db.configure(tablename,
profile_header = profile_header,
profile_layers = (layer,),
profile_widgets = (custom_widget_1,
map_widget,
custom_widget_2,
custom_widget_3,
),
)
response.s3.stylesheets.append("../themes/default/cap.css")
elif r.method == "assign":
translate = settings.get_L10n_translate_cap_area()
if translate:
if session.s3.language == settings.get_L10n_default_language():
translate = False
if translate:
# Represent each row with local name if available
from s3 import S3Represent
atable = s3db.cap_area
cap_area_options = cap_AreaRowOptionsBuilder(r.id,
caller=r.method)
atable.name.represent = S3Represent(options=cap_area_options)
elif r.method != "import" and not get_vars.get("_next"):
s3.crud.submit_style = "hide"
s3.crud.custom_submit = (("edit_info",
T("Save and edit information"),
"button small",
),)
elif r.component_name == "info":
itable = r.component.table
# Do not show this as overwritten in onaccept
itable.web.readable = False
itable.web.writable = False
alert_id = request.args(0)
# Check for prepopulate
if alert_id:
atable = r.table
itable.web.default = settings.get_base_public_url()+\
URL(c="cap", f="alert", args=alert_id)
row = db(atable.id == alert_id).select(atable.event_type_id,
limitby=(0, 1)).first()
itable.event_type_id.default = row.event_type_id
if r.record.approved_by is not None:
# Once approved, don't allow info segment to edit
# Don't allow to delete
s3db.configure("cap_info",
deletable = False,
editable = False,
insertable = False,
)
if settings.get_cap_restrict_fields():
if r.record.msg_type in ("Update", "Cancel", "Error"):
# Use case for change in msg_type
for f in ("language",
"category",
"event",
"event_type_id",
"audience",
"event_code",
"sender_name",
"parameter",
):
itable[f].writable = False
elif r.component_name == "area":
atable = r.component.table
list_fields = ["name",
"altitude",
"ceiling",
"location.location_id",
]
s3db.configure("cap_area",
list_fields = list_fields,
)
for f in ("event_type_id", "priority"):
# Do not show for the actual area
field = atable[f]
field.writable = field.readable = False
translate = settings.get_L10n_translate_cap_area()
if translate:
if session.s3.language == settings.get_L10n_default_language():
translate = False
if translate:
# Represent each row with local name if available
from s3 import S3Represent
cap_area_options = cap_AreaRowOptionsBuilder(r.id)
atable.name.represent = S3Represent(options=cap_area_options)
if r.record.approved_by is not None:
# Once approved, don't allow the area segment to be edited
# Don't allow it to be deleted either
s3db.configure("cap_area",
deletable = False,
editable = False,
insertable = False,
)
elif r.component_name == "resource":
if r.record.approved_by is not None:
# Once approved, don't allow the resource segment to be edited
# Don't allow it to be deleted either
s3db.configure("cap_resource",
deletable = False,
editable = False,
insertable = False,
)
# @ToDo: Move inside correct component context (None?)
post_vars = request.post_vars
if post_vars.get("edit_info", False):
tid = post_vars["template_id"]
if tid:
# Read template and copy locked fields to post_vars
table = db.cap_alert
template = db(table.id == tid).select(table.template_settings,
limitby=(0, 1)).first()
try:
tsettings = json.loads(template.template_settings)
except __HOLE__:
tsettings = dict()
if isinstance(tsettings.get("locked", False), dict):
locked_fields = [lf for lf in tsettings["locked"] if tsettings["locked"]]
for lf in locked_fields:
post_vars[lf] = template[lf]
info_prep(r)
return True
s3.prep = prep
def postp(r, output):
# Check to see if "Save and edit information" was pressed
lastid = r.resource.lastid
if lastid and request.post_vars.get("edit_info", False):
table = db.cap_alert
itable = s3db.cap_info
alert = db(table.id == lastid).select(table.template_id,
limitby=(0, 1)).first()
iquery = (itable.alert_id == alert.template_id) & \
(itable.deleted != True)
irows = db(iquery).select(itable.id)
iquery_ = (itable.alert_id == lastid) & \
(itable.deleted != True)
irows_ = db(iquery_).select(itable.template_info_id)
if alert and not \
(set([irow.id for irow in irows]) == set([irow_.template_info_id for irow_ in irows_])):
# Clone all cap_info entries from the alert template
# If already created, don't copy again
unwanted_fields = set(("deleted_rb",
"owned_by_user",
"approved_by",
"mci",
"deleted",
"modified_on",
"realm_entity",
"uuid",
"created_on",
"deleted_fk",
# Don't copy this: make an
# Ajax call instead
"template_settings",
))
fields = [itable[f] for f in itable.fields
if f not in unwanted_fields]
rows = db(itable.alert_id == alert.template_id).select(*fields)
for row in rows:
row_clone = row.as_dict()
del row_clone["id"]
row_clone["alert_id"] = lastid
row_clone["template_info_id"] = row.id
row_clone["is_template"] = False
row_clone["effective"] = request.utcnow
row_clone["expires"] = s3db.cap_expiry_date()
row_clone["sender_name"] = s3db.cap_sender_name()
itable.insert(**row_clone)
# Clone all cap_resource entries from the alert template
# First get the info_id
rows = db(itable.alert_id == lastid).select(itable.id)
rtable = s3db.cap_resource
r_unwanted_fields = set(s3base.s3_all_meta_field_names())
rfields = [rtable[f] for f in rtable.fields
if f not in r_unwanted_fields]
rows_ = db(rtable.alert_id == alert.template_id).select(*rfields)
for row in rows_:
row_clone = row.as_dict()
del row_clone["id"]
row_clone["alert_id"] = lastid
row_clone["is_template"] = False
rtable.insert(**row_clone)
rows = db(itable.alert_id == lastid).select(itable.id)
if len(rows) == 1:
r.next = URL(c="cap", f="alert", args=[lastid, "info", rows.first().id, "update"])
elif len(rows) > 1:
r.next = URL(c="cap", f="alert", args=[lastid, "info"])
else:
r.next = URL(c="cap", f="alert", args=[lastid, "info", "create"])
if r.interactive:
if get_vars.get("_next"):
r.next = get_vars.get("_next")
if isinstance(output, dict) and "form" in output:
if not r.component and \
r.method not in ("import", "import_feed", "profile"):
form = output["form"]
form.update(_class="cap_alert_form")
set_priority_js()
elif r.representation == "plain":
# Map Popup: style like the dataList
list_fields = ["info.headline",
"area.name",
"info.priority",
"status",
"scope",
"info.event_type_id",
"info.description",
"info.response_type",
"info.sender_name",
]
record = r.resource.select(list_fields,
as_rows=True,
#represent=True,
#show_links=False,
).first()
output = s3db.cap_alert_list_layout("map_popup", # list_id
"map_popup", # item_id
None, #r.resource,
None, # rfields
record
)
return output
s3.postp = postp
output = s3_rest_controller("cap", "alert",
rheader = s3db.cap_rheader,
)
return output
# -----------------------------------------------------------------------------
|
ValueError
|
dataset/ETHPy150Open sahana/eden/controllers/cap.py/alert
|
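The hole above masks the ValueError that json.loads raises on a malformed template_settings payload; the controller falls back to an empty dict. A minimal sketch of that fallback (the function name is hypothetical; on Python 2, where this codebase runs, json has no JSONDecodeError, and on Python 3 it is a ValueError subclass):

import json

def load_template_settings(raw):
    # Return the decoded settings dict, or an empty dict on bad JSON
    try:
        return json.loads(raw)
    except ValueError:
        return {}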
5,052 |
def notify_approver():
"""
Send message to the people with role of Alert Approval
"""
if settings.has_module("msg"):
# Notify People with the role of Alert Approval via email and SMS
alert_id = get_vars.get("cap_alert.id")
atable = s3db.cap_alert
if not alert_id and not auth.s3_has_permission("update", atable,
record_id=alert_id):
auth.permission.fail()
row = db(atable.id == alert_id).select(atable.approved_by,
limitby=(0, 1)).first()
if not row.approved_by:
# Get the user ids for the "Alert Approver" role
agtable = db.auth_group
group_row = db(agtable.role == "Alert Approver").select(\
agtable.id,
limitby=(0, 1)).first()
if group_row:
user_pe_id = auth.s3_user_pe_id
user_ids = auth.s3_group_members(group_row.id) # List of user_ids
pe_ids = [] # List of pe_ids
pe_append = pe_ids.append
for user_id in user_ids:
pe_append(user_pe_id(int(user_id)))
subject = "%s: Alert Approval Required" % settings.get_system_name_short()
url = "%s%s" % (settings.get_base_public_url(),
URL(c="cap", f="alert", args=[alert_id, "review"]))
message = "You are requested to take action on this alert:\n\n%s" % url
msg.send_by_pe_id(pe_ids, subject, message)
try:
msg.send_by_pe_id(pe_ids, subject, message, contact_method = "SMS")
except __HOLE__:
current.log.error("No SMS Handler defined!")
session.confirmation = T("Alert Approval Notified")
else:
session.error = T("Alert already approved")
redirect(URL(c="cap", f="alert"))
# -----------------------------------------------------------------------------
|
ValueError
|
dataset/ETHPy150Open sahana/eden/controllers/cap.py/notify_approver
|
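The masked exception here is the ValueError the messaging layer raises when no SMS handler is configured; email has already been sent, so the failure is only logged. A minimal sketch of the same best-effort secondary channel, with send_email and send_sms hypothetical stand-ins:

def notify(pe_ids, subject, message, send_email, send_sms):
    send_email(pe_ids, subject, message)    # primary channel, may raise
    try:
        send_sms(pe_ids, subject, message)  # best-effort secondary channel
    except ValueError:                      # e.g. "No SMS Handler defined!"
        pass                                # log and carry on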
5,053 |
@classmethod
def setupClass(cls):
global numpy
try:
import numpy.linalg
import scipy.sparse
except __HOLE__:
raise SkipTest('SciPy not available.')
|
ImportError
|
dataset/ETHPy150Open networkx/networkx/networkx/linalg/tests/test_algebraic_connectivity.py/TestAlgebraicConnectivity.setupClass
|
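This entry and the next mask the same ImportError guard: skip an entire test class when an optional dependency is missing. A self-contained version using unittest's own SkipTest (the original uses nose's, which behaves the same way):

import unittest

class TestNeedsSciPy(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        try:
            import scipy.sparse  # optional dependency, only probed here
        except ImportError:
            raise unittest.SkipTest('SciPy not available.')

    def test_something(self):
        pass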
5,054 |
@classmethod
def setupClass(cls):
global numpy
try:
import numpy.linalg
import scipy.sparse
except __HOLE__:
raise SkipTest('SciPy not available.')
|
ImportError
|
dataset/ETHPy150Open networkx/networkx/networkx/linalg/tests/test_algebraic_connectivity.py/TestSpectralOrdering.setupClass
|
5,055 |
@defer.inlineCallbacks
def getPath(netstatus, guard_manager, exit_request=None, fast=True,
stable=True, internal=False):
# Raises:
# - NoUsableGuardsException (i.e. we need to find a new guard)
# - PathSelectionFailedException
consensus = yield netstatus.getMicroconsensus()
descriptors = yield netstatus.getMicrodescriptorsForCircuit()
guards = yield guard_manager.getUsableGuards()
cons_rel_stats = consensus.routers
cons_bw_weights = consensus.bandwidth_weights
port = exit_request.port if exit_request else None
cons_bwweightscale = DEFAULT_BWWEIGHTSCALE
try:
exit_fprint = selectExitNode(cons_bw_weights, cons_bwweightscale,
cons_rel_stats, descriptors, fast, stable,
internal, port)
exit_status_entry = consensus.routers[exit_fprint]
exit_desc = descriptors[exit_status_entry.digest]
guard_fprint = selectGuardNode(cons_rel_stats, descriptors, guards,
fast, stable, exit_desc,
exit_status_entry)
guard_status_entry = consensus.routers[guard_fprint]
guard_desc = descriptors[guard_status_entry.digest]
middle_fprint = selectMiddleNode(cons_bw_weights, cons_bwweightscale,
cons_rel_stats, descriptors, fast,
stable, exit_desc, exit_status_entry,
guard_desc, guard_status_entry)
middle_status_entry = consensus.routers[middle_fprint]
middle_desc = descriptors[middle_status_entry.digest]
path = Path(PathNode(guard_desc, guard_status_entry),
PathNode(middle_desc, middle_status_entry),
PathNode(exit_desc, exit_status_entry))
defer.returnValue(path)
except __HOLE__ as e:
raise PathSelectionFailedException("Unable to select a valid path. "
"Reason: {}".format(e))
|
ValueError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/path/path.py/getPath
|
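The hole wraps the whole three-hop selection: a ValueError from any of the select* helpers becomes a single domain-level PathSelectionFailedException. The rewrapping idiom on its own, with the selector callable hypothetical:

class PathSelectionFailedException(Exception):
    pass

def select_or_fail(select, *args):
    # Translate a low-level ValueError into a domain-specific failure
    try:
        return select(*args)
    except ValueError as e:
        raise PathSelectionFailedException(
            "Unable to select a valid path. Reason: {}".format(e))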
5,056 |
def selectGuardNode(cons_rel_stats, descriptors, guards, fast, stable,
exit_desc, exit_status_entry):
try:
guard_candidates = [g for g in guards
if guardFilter(g, cons_rel_stats, descriptors,
fast, stable, exit_desc,
exit_status_entry)]
return random.choice(guard_candidates)
except __HOLE__:
raise NoUsableGuardsException("No usable guard nodes for requested "
"path.")
|
IndexError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/path/path.py/selectGuardNode
|
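random.choice raises IndexError on an empty sequence, so an over-strict filter surfaces here as "no usable guards". The trap in isolation:

import random

def pick_one(candidates):
    try:
        return random.choice(candidates)
    except IndexError:  # candidates was empty
        raise LookupError('no usable candidates for requested path')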
5,057 |
def exitFilter(exit_fprint, cons_rel_stats, descriptors, fast, stable,
internal, port):
try:
rel_stat = cons_rel_stats[exit_fprint]
desc = descriptors[rel_stat.digest]
except __HOLE__:
return False
if desc.ntor_onion_key is None:
return False
if Flag.BADEXIT in rel_stat.flags:
return False
if Flag.RUNNING not in rel_stat.flags:
return False
if Flag.VALID not in rel_stat.flags:
return False
if (fast is True) and (Flag.FAST not in rel_stat.flags):
return False
if (stable is True) and (Flag.STABLE not in rel_stat.flags):
return False
# we don't care about the exit policy if exit is for an internal circuit
if internal is True:
return True
elif port is not None:
return desc.exit_policy.can_exit_to(port=port)
else:
return desc.exit_policy.is_exiting_allowed
|
KeyError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/path/path.py/exitFilter
|
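This filter and the two that follow (guardFilter, middleFilter) mask the same KeyError: a fingerprint or digest absent from the consensus or descriptor maps simply disqualifies the node. The lookup-as-predicate idiom, reduced:

def is_known(fprint, cons_rel_stats, descriptors):
    try:
        rel_stat = cons_rel_stats[fprint]
        descriptors[rel_stat.digest]
    except KeyError:  # unknown router: filter it out
        return False
    return True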
5,058 |
def guardFilter(guard, cons_rel_stats, descriptors, fast, stable, exit_desc,
exit_status_entry):
try:
rel_stat = cons_rel_stats[guard]
guard_desc = descriptors[rel_stat.digest]
except __HOLE__:
return False
if (fast is True) and (Flag.FAST not in rel_stat.flags):
return False
if (stable is True) and (Flag.STABLE not in rel_stat.flags):
return False
return path_util.nodeUsableWithOther(guard_desc, rel_stat,
exit_desc, exit_status_entry)
|
KeyError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/path/path.py/guardFilter
|
5,059 |
def middleFilter(node, cons_rel_stats, descriptors, exit_desc,
exit_status_entry, guard_desc, guard_status_entry,
fast=False, stable=False):
try:
rel_stat = cons_rel_stats[node]
node_desc = descriptors[rel_stat.digest]
except __HOLE__:
return False
if node_desc.ntor_onion_key is None:
return False
# Note that we intentionally allow non-Valid routers for middle
# as per path-spec.txt default config
if Flag.RUNNING not in rel_stat.flags:
return False
if (fast is True) and (Flag.FAST not in rel_stat.flags):
return False
if (stable is True) and (Flag.STABLE not in rel_stat.flags):
return False
if path_util.nodeUsableWithOther(exit_desc, exit_status_entry, node_desc,
rel_stat) is False:
return False
return path_util.nodeUsableWithOther(guard_desc, guard_status_entry,
node_desc, rel_stat)
|
KeyError
|
dataset/ETHPy150Open nskinkel/oppy/oppy/path/path.py/middleFilter
|
5,060 |
def coerce( self, value ):
"""Coerce the value to one of our types"""
if self.check( value ):
return value
best = self.bestMatch( value )
if best is not None:
return best.coerce( value )
else:
err = None
for typ in self:
try:
return typ.coerce( value )
except (ValueError,__HOLE__), err:
pass
raise TypeError( """Couldn't convert %r value to any of %r (%s)"""%(
value, tuple(self), err
))
|
TypeError
|
dataset/ETHPy150Open correl/Transmission-XBMC/resources/lib/basictypes/typeunion.py/TypeUnion.coerce
|
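The union tries each member type in turn, remembering the last failure; the hole adds TypeError alongside ValueError. A Python 3 rendering (note that `except ... as exc` unbinds exc when the block ends, so the last error must be stashed explicitly):

def coerce_any(value, types):
    last_err = None
    for typ in types:
        try:
            return typ(value)
        except (ValueError, TypeError) as exc:
            last_err = exc
    raise TypeError("Couldn't convert %r value to any of %r (%s)"
                    % (value, tuple(types), last_err))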
5,061 |
def call_with_stack(self, glob, start, stack):
old_sep = self.next.separator
self.next.separator = self.separator
self.next.call(glob, start)
self.next.separator = old_sep
while stack:
path = stack.pop()
try:
entries = os.listdir(path)
except __HOLE__:
continue
for ent in entries:
full = self.path_join(path, ent)
if isdir(full) and (self.allow_dots() or ent[0] != "."):
stack.append(full)
self.next.call(glob, full)
|
OSError
|
dataset/ETHPy150Open topazproject/topaz/topaz/utils/glob.py/RecursiveDirectories.call_with_stack
|
5,062 |
def call(self, glob, path):
if path and not os.path.exists(path + "/."):
return
try:
entries = [".", ".."] + os.listdir(path if path else ".")
except __HOLE__:
return
for ent in entries:
if self.ismatch(glob.cache, ent):
glob.append_match(self.path_join(path, ent))
|
OSError
|
dataset/ETHPy150Open topazproject/topaz/topaz/utils/glob.py/EntryMatch.call
|
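The two entries above both mask the OSError from os.listdir: directories that are unreadable, not actually directories, or removed mid-walk are silently skipped. In isolation:

import os

def safe_listdir(path):
    try:
        return os.listdir(path)
    except OSError:  # permission denied, ENOENT, ENOTDIR, ...
        return []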
5,063 |
def test(_platform=None):
failed = False
libs = 'cublas cusparse cufft curand nvvm'.split()
for lib in libs:
path = get_cudalib(lib, _platform)
print('Finding', lib)
if path:
print('\tlocated at', path)
else:
print('\tERROR: can\'t locate lib')
failed = True
if not failed and _platform in (None, sys.platform):
try:
print('\ttrying to open library', end='...')
open_cudalib(lib, ccc=True)
print('\tok')
except __HOLE__ as e:
print('\tERROR: failed to open %s:\n%s' % (lib, e))
# NOTE: ignore failure of dlopen on cuBlas on OSX 10.5
failed = not _if_osx_10_5()
archs = 'compute_20', 'compute_30', 'compute_35'
for arch in archs:
print('\tfinding libdevice for', arch, end='...')
path = get_libdevice(arch)
if path:
print('\tok')
else:
print('\tERROR: can\'t open libdevice for %s' % arch)
failed = True
return not failed
|
OSError
|
dataset/ETHPy150Open numba/numba/numba/cuda/cudadrv/libs.py/test
|
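open_cudalib ultimately goes through ctypes, and loading a shared library that is missing or has unresolved symbols raises OSError, which is what the hole reports. A minimal probe, with the library path hypothetical:

import ctypes

def can_dlopen(libpath):
    try:
        ctypes.CDLL(libpath)
        return True
    except OSError as e:  # file not found or dlopen failure
        print('failed to open %s: %s' % (libpath, e))
        return False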
5,064 |
def _load_module(self, module):
try:
importlib.import_module(module)
except __HOLE__:
raise ImportError("Failed to import hook module '%s'. "
"Verify it exists in PYTHONPATH" % (module))
|
ImportError
|
dataset/ETHPy150Open softlayer/jumpgate/jumpgate/common/hooks/__init__.py/APIHooks.__APIHooks._load_module
|
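The hole swallows the original ImportError and re-raises with a friendlier message, which loses the root cause on Python 2. On Python 3 the cause can be chained explicitly:

import importlib

def load_module(module):
    try:
        importlib.import_module(module)
    except ImportError as exc:
        raise ImportError("Failed to import hook module '%s'. "
                          "Verify it exists in PYTHONPATH" % module) from exc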
5,065 |
def __new__(mcs, cls, bases, attrs):
def make_test(fname):
fpath = osp.join(mcs.dpath, fname)
module = osp.splitext(fname)[0]
def test(self):
try:
load_source(module, fpath)
except __HOLE__:
# Unmet dependency.
raise SkipTest
test.__name__ = 'test_%s' % (module, )
test.__doc__ = 'Test for example %s.' % (fpath, )
return test
for fname in os.listdir(mcs.dpath):
if osp.splitext(fname)[1] == '.py':
test = make_test(fname)
attrs[test.__name__] = test
return super(_ExamplesType, mcs).__new__(mcs, cls, bases, attrs)
|
ImportError
|
dataset/ETHPy150Open mtth/hdfs/test/test_examples.py/_ExamplesType.__new__
|
5,066 |
@lib.api_call
def rotate(self, rate):
"""
Pass (angular) rate as -100 to +100
(positive is counterclockwise).
"""
# NOTE(Vijay): to be tested
# Validate params
self.logger.debug("Rotating with angular rate: {}".format(rate))
try:
assert OmniDriver.min_angular_rate <= rate <= \
OmniDriver.max_angular_rate
except __HOLE__:
raise AssertionError("Angular rate is out of bounds")
self.set_motor("north", -rate)
self.set_motor("south", rate)
self.set_motor("west", -rate)
self.set_motor("east", rate)
|
AssertionError
|
dataset/ETHPy150Open IEEERobotics/bot/bot/driver/omni_driver.py/OmniDriver.rotate
|
5,067 |
@lib.api_call
def move(self, speed, angle=0):
"""Move holonomically without rotation.
:param speed: Magnitude of robot's translation speed (% of max).
:type speed: float
:param angle: Angle of translation in degrees (90=left, 270=right).
:type angle: float
"""
# TODO(Vijay): Test this functionality
# Validate params
self.logger.debug("speed: {}, angle: {}".format(speed, angle))
try:
assert OmniDriver.min_speed <= speed <= OmniDriver.max_speed
except AssertionError:
raise AssertionError("Speed is out of bounds")
# Angle bounds may be unnecessary
try:
assert OmniDriver.min_angle <= angle <= OmniDriver.max_angle
except __HOLE__:
raise AssertionError("Angle is out of bounds")
# Handle zero speed, prevent divide-by-zero error
if speed == 0: # TODO deadband (epsilon) check?
self.logger.debug("Special case for speed == 0")
self.set_motor("north", 0)
self.set_motor("south", 0)
self.set_motor("east", 0)
self.set_motor("west", 0)
return
# Calculate motor speeds
north = speed * -sin(angle * pi / 180)
south = speed * -sin(angle * pi / 180)
west = speed * cos(angle * pi / 180)
east = speed * cos(angle * pi / 180)
self.logger.debug((
"pre-scale : north: {:6.2f}, south: {:6.2f},"
" east: {:6.2f}, west: {:6.2f}").format(
north, south, east, west))
# Find largest motor speed,
# use that to normalize multipliers and maintain maximum efficiency
max_wheel_speed = max(
[fabs(north), fabs(south),
fabs(east), fabs(west)]
)
north = north * speed / max_wheel_speed
south = south * speed / max_wheel_speed
west = west * speed / max_wheel_speed
east = east * speed / max_wheel_speed
self.logger.debug(
("post-scale: north: {:6.2f}, south: {:6.2f},"
" west: {:6.2f}, east: {:6.2f}").format(
north, south, west, east))
# Set motor speeds
self.set_motor("north", north)
self.set_motor("south", south)
self.set_motor("west", west)
self.set_motor("east", east)
# @lib.api_call
# NOTE(Vijay): Not yet implemented
|
AssertionError
|
dataset/ETHPy150Open IEEERobotics/bot/bot/driver/omni_driver.py/OmniDriver.move
|
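Both driver methods above validate ranges with assert and rewrap the AssertionError with a message. Since asserts are stripped under python -O, an explicit check is a more robust variant of the same guard (note it deliberately changes the exception type):

def check_range(name, value, lo, hi):
    if not lo <= value <= hi:
        raise ValueError('%s is out of bounds [%s, %s]' % (name, lo, hi))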
5,068 |
def add(self, command):
"""
Adds a command object.
If a command with the same name already exists, it will be overridden.
:param command: A Command object or a dictionary defining the command
:type command: Command or dict
:return: The registered command
:rtype: Command
"""
if isinstance(command, dict):
command = Command.from_dict(command)
command.set_application(self)
if not command.is_enabled():
command.set_application(None)
return
try:
command.get_definition()
except __HOLE__:
raise Exception(
'Command class "%s" is not correctly initialized.'
'You probably forgot to call the parent constructor.'
% command.__class__.__name__
)
self._commands[command.get_name()] = command
for alias in command.get_aliases():
self._commands[alias] = command
return command
|
AttributeError
|
dataset/ETHPy150Open sdispater/cleo/cleo/application.py/Application.add
|
5,069 |
def configure_io(self, input_, output_):
"""
Configures the input and output instances based on the user arguments and options.
:param input_: An Input instance
:type input_: Input
:param output_: An Output instance
:type output_: Output
"""
if input_.has_parameter_option('--ansi'):
output_.set_decorated(True)
elif input_.has_parameter_option('--no-ansi'):
output_.set_decorated(False)
if input_.has_parameter_option(['--no-interaction', '-n']):
input_.set_interactive(False)
elif self.get_helper_set().has('question'):
input_stream = self.get_helper_set().get('question').input_stream
try:
is_atty = hasattr(input_stream, 'fileno')
if hasattr(input_stream, 'isatty'):
is_atty = is_atty and input_stream.isatty()
else:
is_atty = is_atty and os.isatty(input_stream)
except (UnsupportedOperation, __HOLE__):
is_atty = False
if not is_atty:
input_.set_interactive(False)
if input_.has_parameter_option(['--quiet', '-q']):
output_.set_verbosity(Output.VERBOSITY_QUIET)
elif input_.has_parameter_option('-vvv')\
or input_.has_parameter_option('--verbose=3')\
or input_.get_parameter_option('--verbose') == 3:
output_.set_verbosity(Output.VERBOSITY_DEBUG)
elif input_.has_parameter_option('-vv')\
or input_.has_parameter_option('--verbose=2')\
or input_.get_parameter_option('--verbose') == 2:
output_.set_verbosity(Output.VERBOSITY_VERY_VERBOSE)
elif input_.has_parameter_option('-v')\
or input_.has_parameter_option('--verbose=1')\
or input_.get_parameter_option('--verbose') == 1:
output_.set_verbosity(Output.VERBOSITY_VERBOSE)
|
TypeError
|
dataset/ETHPy150Open sdispater/cleo/cleo/application.py/Application.configure_io
|
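os.isatty expects an integer file descriptor, so passing a file-like object raises TypeError, and exotic streams can raise io.UnsupportedOperation from fileno(); the hole treats either as "not a tty". In isolation:

import io
import os

def stream_is_tty(stream):
    try:
        if hasattr(stream, 'isatty'):
            return bool(stream.isatty())
        return os.isatty(stream)
    except (io.UnsupportedOperation, TypeError):
        return False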
5,070 |
def create(vm_):
'''
Create a single VM from a data dict
'''
try:
# Check for required profile parameters before sending any API calls.
if vm_['profile'] and config.is_profile_configured(__opts__,
__active_provider_name__ or 'azure',
vm_['profile'],
vm_=vm_) is False:
return False
except __HOLE__:
pass
# Since using "provider: <provider-engine>" is deprecated, alias provider
# to use driver: "driver: <provider-engine>"
if 'provider' in vm_:
vm_['driver'] = vm_.pop('provider')
salt.utils.cloud.fire_event(
'event',
'starting create',
'salt/cloud/{0}/creating'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['driver'],
},
transport=__opts__['transport']
)
log.info('Creating Cloud VM {0}'.format(vm_['name']))
conn = get_conn()
label = vm_.get('label', vm_['name'])
service_name = vm_.get('service_name', vm_['name'])
service_kwargs = {
'service_name': service_name,
'label': label,
'description': vm_.get('desc', vm_['name']),
}
loc_error = False
if 'location' in vm_:
if 'affinity_group' in vm_:
loc_error = True
else:
service_kwargs['location'] = vm_['location']
elif 'affinity_group' in vm_:
service_kwargs['affinity_group'] = vm_['affinity_group']
else:
loc_error = True
if loc_error:
raise SaltCloudSystemExit(
'Either a location or affinity group must be specified, but not both'
)
ssh_port = config.get_cloud_config_value('port', vm_, __opts__,
default='22', search_global=True)
ssh_endpoint = azure.servicemanagement.ConfigurationSetInputEndpoint(
name='SSH',
protocol='TCP',
port=ssh_port,
local_port='22',
)
network_config = azure.servicemanagement.ConfigurationSet()
network_config.input_endpoints.input_endpoints.append(ssh_endpoint)
network_config.configuration_set_type = 'NetworkConfiguration'
if 'win_username' in vm_:
system_config = azure.servicemanagement.WindowsConfigurationSet(
computer_name=vm_['name'],
admin_username=vm_['win_username'],
admin_password=vm_['win_password'],
)
smb_port = '445'
if 'smb_port' in vm_:
smb_port = vm_['smb_port']
smb_endpoint = azure.servicemanagement.ConfigurationSetInputEndpoint(
name='SMB',
protocol='TCP',
port=smb_port,
local_port=smb_port,
)
network_config.input_endpoints.input_endpoints.append(smb_endpoint)
# Domain and WinRM configuration not yet supported by Salt Cloud
system_config.domain_join = None
system_config.win_rm = None
else:
system_config = azure.servicemanagement.LinuxConfigurationSet(
host_name=vm_['name'],
user_name=vm_['ssh_username'],
user_password=vm_['ssh_password'],
disable_ssh_password_authentication=False,
)
# TODO: Might need to create a storage account
media_link = vm_['media_link']
# TODO: Probably better to use more than just the name in the media_link
media_link += '/{0}.vhd'.format(vm_['name'])
os_hd = azure.servicemanagement.OSVirtualHardDisk(vm_['image'], media_link)
vm_kwargs = {
'service_name': service_name,
'deployment_name': service_name,
'deployment_slot': vm_['slot'],
'label': label,
'role_name': vm_['name'],
'system_config': system_config,
'os_virtual_hard_disk': os_hd,
'role_size': vm_['size'],
'network_config': network_config,
}
if 'virtual_network_name' in vm_:
vm_kwargs['virtual_network_name'] = vm_['virtual_network_name']
if 'subnet_name' in vm_:
network_config.subnet_names.append(vm_['subnet_name'])
log.debug('vm_kwargs: {0}'.format(vm_kwargs))
event_kwargs = {'service_kwargs': service_kwargs.copy(),
'vm_kwargs': vm_kwargs.copy()}
del event_kwargs['vm_kwargs']['system_config']
del event_kwargs['vm_kwargs']['os_virtual_hard_disk']
del event_kwargs['vm_kwargs']['network_config']
salt.utils.cloud.fire_event(
'event',
'requesting instance',
'salt/cloud/{0}/requesting'.format(vm_['name']),
event_kwargs,
transport=__opts__['transport']
)
log.debug('vm_kwargs: {0}'.format(vm_kwargs))
# Azure lets you open WinRM on a new VM
# Specific ports can be opened up in Azure, but not on Windows
try:
conn.create_hosted_service(**service_kwargs)
except AzureConflictHttpError:
log.debug('Cloud service already exists')
except Exception as exc:
error = 'The hosted service name is invalid.'
if error in str(exc):
log.error(
'Error creating {0} on Azure.\n\n'
'The hosted service name is invalid. The name can contain '
'only letters, numbers, and hyphens. The name must start with '
'a letter and must end with a letter or a number.'.format(
vm_['name']
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
else:
log.error(
'Error creating {0} on Azure\n\n'
'The following exception was thrown when trying to '
'run the initial deployment: \n{1}'.format(
vm_['name'], str(exc)
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
try:
result = conn.create_virtual_machine_deployment(**vm_kwargs)
log.debug('Request ID for machine: {0}'.format(result.request_id))
_wait_for_async(conn, result.request_id)
except AzureConflictHttpError:
log.debug('Conflict error. The deployment may already exist, trying add_role')
# Delete the keywords not used by add_role
del vm_kwargs['deployment_slot']
del vm_kwargs['label']
del vm_kwargs['virtual_network_name']
result = conn.add_role(**vm_kwargs)
_wait_for_async(conn, result.request_id)
except Exception as exc:
error = 'The hosted service name is invalid.'
if error in str(exc):
log.error(
'Error creating {0} on Azure.\n\n'
'The VM name is invalid. The name can contain '
'only letters, numbers, and hyphens. The name must start with '
'a letter and must end with a letter or a number.'.format(
vm_['name']
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
else:
log.error(
'Error creating {0} on Azure.\n\n'
'The Virtual Machine could not be created. If you '
'are using an already existing Cloud Service, '
'make sure the `port` variable corresponds to an SSH '
'port that exists and that the port number is not '
'already in use.\nThe following exception was thrown when trying to '
'run the initial deployment: \n{1}'.format(
vm_['name'], str(exc)
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
def wait_for_hostname():
'''
Wait for the hostname to become available
'''
try:
conn.get_role(service_name, service_name, vm_['name'])
data = show_instance(vm_['name'], call='action')
if 'url' in data and data['url'] != '':
return data['url']
except AzureMissingResourceHttpError:
pass
time.sleep(1)
return False
hostname = salt.utils.cloud.wait_for_fun(
wait_for_hostname,
timeout=config.get_cloud_config_value(
'wait_for_fun_timeout', vm_, __opts__, default=15 * 60),
)
if not hostname:
log.error('Failed to get a value for the hostname.')
return False
vm_['ssh_host'] = hostname.replace('http://', '').replace('/', '')
vm_['password'] = config.get_cloud_config_value(
'ssh_password', vm_, __opts__
)
ret = salt.utils.cloud.bootstrap(vm_, __opts__)
# Attaching volumes
volumes = config.get_cloud_config_value(
'volumes', vm_, __opts__, search_global=True
)
if volumes:
salt.utils.cloud.fire_event(
'event',
'attaching volumes',
'salt/cloud/{0}/attaching_volumes'.format(vm_['name']),
{'volumes': volumes},
transport=__opts__['transport']
)
log.info('Create and attach volumes to node {0}'.format(vm_['name']))
created = create_attach_volumes(
vm_['name'],
{
'volumes': volumes,
'service_name': service_name,
'deployment_name': vm_['name'],
'media_link': media_link,
'role_name': vm_['name'],
'del_all_vols_on_destroy': vm_.get('set_del_all_vols_on_destroy', False)
},
call='action'
)
ret['Attached Volumes'] = created
data = show_instance(vm_['name'], call='action')
log.info('Created Cloud VM \'{0[name]}\''.format(vm_))
log.debug(
'\'{0[name]}\' VM creation details:\n{1}'.format(
vm_, pprint.pformat(data)
)
)
ret.update(data)
salt.utils.cloud.fire_event(
'event',
'created instance',
'salt/cloud/{0}/created'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['driver'],
},
transport=__opts__['transport']
)
return ret
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/salt/cloud/clouds/msazure.py/create
|
5,071 |
def HandleCommand(self, chan, user, cmd, args=None):
client = self.client._protocol.clientFromUsername(user)
cmd = cmd.lower()
if chan in self._root.channels:
channel = self._root.channels[chan]
access = channel.getAccess(client)
if cmd == 'info':
founder = self.client._protocol.clientFromID(channel.owner)
if founder: founder = 'Founder is <%s>' % founder.username
else: founder = 'No founder is registered'
admins = []
for admin in channel.admins:
client = self.client._protocol.clientFromID(admin)
if client: admins.append(client.username)
users = channel.users
antispam = 'on' if channel.antispam else 'off'
if not admins: mods = 'no operators are registered'
else: mods = '%i registered operator(s) are <%s>' % (len(admins), '>, <'.join(admins))
if len(users) == 1: users = '1 user is'
else: users = '%i users are' % len(users)
return '#%s info: Anti-spam protection is %s. %s, %s. %s currently in the channel.' % (chan, antispam, founder, mods, users)
elif cmd == 'topic':
if access in ['mod', 'founder', 'op']:
args = args or ''
channel.setTopic(client, args)
return '#%s: Topic changed' % chan
else:
return '#%s: You do not have permission to set the topic' % chan
elif cmd == 'unregister':
if access in ['mod', 'founder']:
channel.owner = ''
channel.channelMessage('#%s has been unregistered'%chan)
self.Send('LEAVE %s' % chan)
return '#%s: Successfully unregistered.' % chan
else:
return '#%s: You must contact one of the server moderators or the owner of the channel to unregister a channel' % chan
elif cmd == 'changefounder':
if access in ['mod', 'founder']:
if not args: return '#%s: You must specify a new founder' % chan
target = self.client._protocol.clientFromUsername(args)
if not target: return '#%s: cannot assign founder status to a user who does not exist' % chan
channel.changeFounder(client, target)
channel.channelMessage('%s Founder has been changed to <%s>' % (chan, args))
return '#%s: Successfully changed founder to <%s>' % (chan, args)
else:
return '#%s: You must contact one of the server moderators or the owner of the channel to change the founder' % chan
elif cmd == 'spamprotection':
if access in ['mod', 'founder']:
if args == 'on':
channel.antispam = True
channel.channelMessage('%s Anti-spam protection was enabled by <%s>' % (chan, user))
return '#%s: Anti-spam protection is on.' % chan
elif args == 'off':
channel.antispam = False
channel.channelMessage('%s Anti-spam protection was disabled by <%s>' % (chan, user))
return '#%s: Anti-spam protection is off.' % chan
status = 'off'
if channel.antispam: status = 'on'
return '#%s: Anti-spam protection is %s' % (chan, status)
elif cmd == 'op':
if access in ['mod', 'founder']:
if not args: return '#%s: You must specify a user to op' % chan
target = self.client._protocol.clientFromUsername(args)
if target and channel.isOp(target): return '#%s: <%s> was already an op' % (chan, args)
channel.opUser(client, target)
else:
return '#%s: You do not have permission to op users' % chan
elif cmd == 'deop':
if access in ['mod', 'founder']:
if not args: return '#%s: You must specify a user to deop' % chan
target = self.client._protocol.clientFromUsername(args)
if target and not channel.isOp(target): return '#%s: <%s> was not an op' % (chan, args)
channel.deopUser(client, target)
else:
return '#%s: You do not have permission to deop users' % chan
elif cmd == 'chanmsg':
if access in ['mod', 'founder', 'op']:
if not args: return '#%s: You must specify a channel message' % chan
target = self.client._protocol.clientFromUsername(args)
if target and channel.isOp(target): args = 'issued by <%s>: %s' % (user, args)
channel.channelMessage(args)
return #return '#%s: insert chanmsg here'
else:
return '#%s: You do not have permission to issue a channel message' % chan
elif cmd == 'lock':
if access in ['mod', 'founder', 'op']:
if not args: return '#%s: You must specify a channel key to lock a channel' % chan
channel.setKey(client, args)
## STUBS ARE BELOW
return '#%s: Locked' % chan
else:
return '#%s: You do not have permission to lock the channel' % chan
elif cmd == 'unlock':
if access in ['mod', 'founder', 'op']:
channel.setKey(client, '*')
return '#%s: Unlocked' % chan
else:
return '#%s: You do not have permission to unlock the channel' % chan
elif cmd == 'kick':
if access in ['mod', 'founder', 'op']:
if not args: return '#%s: You must specify a user to kick from the channel' % chan
if args.count(' '):
target, reason = args.split(' ', 1)
else:
target = args
reason = None
if target in channel.users:
target = self.client._protocol.clientFromUsername(target)
channel.kickUser(client, target, reason)
return '#%s: <%s> kicked' % (chan, target.username)
else: return '#%s: <%s> not in channel' % (chan, target)
else:
return '#%s: You do not have permission to kick users from the channel' % chan
elif cmd == 'mute':
if access in ['mod', 'founder', 'op']:
if not args: return '#%s: You must specify a user to mute' % chan
else:
if args.count(' '): target, duration = args.split(' ', 1)
else:
target = args
duration = -1
try:
duration = float(duration)
except __HOLE__:
return '#%s: Duration must be a number!' % chan
target = self.client._protocol.clientFromUsername(target)
channel.muteUser(client, target, duration)
else:
return '#%s: You do not have permission to mute users' % chan
elif cmd == 'unmute':
if access in ['mod', 'founder', 'op']:
if not args: return '#%s: You must specify a user to unmute' % chan
target = self.client._protocol.clientFromUsername(args)
channel.unmuteUser(client, target)
else:
return '#%s: You do not have permission to unmute users' % chan
elif cmd == 'mutelist':
if channel.mutelist:
mutelist = dict(channel.mutelist)
muted = ['#%s: Mute list (%i entries): '%(chan, len(mutelist))]
for user in mutelist:
m = mutelist[user].copy()
client = self.client._protocol.clientFromID(user)
if not client:
del mutelist[user]
continue
message = self.client._protocol._format_time(m['expires']) + (' by IP.' if m['ip'] else '.')
muted.append('%s, %s' % (client.username, message))
return muted
else:
return '#%s: Mute list is empty!' % chan
if cmd == 'register':
if client.isMod():
if not args: args = user
self.Send('JOIN %s' % chan)
channel = self._root.channels[chan]
target = self.client._protocol.clientFromUsername(args)
if target:
channel.setFounder(client, target)
return '#%s: Successfully registered to <%s>' % (chan, args.split(' ',1)[0])
else:
return '#%s: User <%s> does not exist.' % (chan, args)
elif not chan in self._root.channels:
return '#%s: You must contact one of the server moderators or the owner of the channel to register a channel' % chan
return ''
|
ValueError
|
dataset/ETHPy150Open lunixbochs/uberserver/ChanServ.py/ChanServ.HandleCommand
|
5,072 |
def _instantiate(self, cmd):
"""
Checks that the object is an instantiated command and not, say,
a command class. If it is a class, instantiate it. Other types,
like strings, are passed through.
Args:
cmd (any): Entity to analyze.
Returns:
result (any): An instantiated Command or the input unmodified.
"""
try:
return cmd()
except __HOLE__:
return cmd
|
TypeError
|
dataset/ETHPy150Open evennia/evennia/evennia/commands/cmdset.py/CmdSet._instantiate
|
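Calling cmd() succeeds when cmd is a class (producing an instance) and raises TypeError when cmd is a non-callable instance or another plain value, which is exactly how the hole separates the cases:

def ensure_instance(obj):
    try:
        return obj()  # a class: instantiate it
    except TypeError:
        return obj    # already an instance, or a plain value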
5,073 |
def add(self, cmd):
"""
Add a new command or commands to this CmdSetcommand, a list of
commands or a cmdset to this cmdset. Note that this is *not*
a merge operation (that is handled by the + operator).
Args:
cmd (Command, list, Cmdset): This allows for adding one or
more commands to this Cmdset in one go. If another Cmdset
is given, all its commands will be added.
Notes:
If cmd already exists in set, it will replace the old one
(no priority checking etc happens here). This is very useful
when overloading default commands.
If cmd is another cmdset class or -instance, the commands of
that command set is added to this one, as if they were part of
the original cmdset definition. No merging or priority checks
are made, rather later added commands will simply replace
existing ones to make a unique set.
"""
if inherits_from(cmd, "evennia.commands.cmdset.CmdSet"):
# cmd is a command set so merge all commands in that set
# to this one. We raise a visible error if we created
# an infinite loop (adding cmdset to itself somehow)
try:
cmd = self._instantiate(cmd)
except __HOLE__:
string = "Adding cmdset %(cmd)s to %(class)s lead to an "
string += "infinite loop. When adding a cmdset to another, "
string += "make sure they are not themself cyclically added to "
string += "the new cmdset somewhere in the chain."
raise RuntimeError(_(string) % {"cmd": cmd,
"class": self.__class__})
cmds = cmd.commands
elif is_iter(cmd):
cmds = [self._instantiate(c) for c in cmd]
else:
cmds = [self._instantiate(cmd)]
commands = self.commands
system_commands = self.system_commands
for cmd in cmds:
# add all commands
if not hasattr(cmd, 'obj'):
cmd.obj = self.cmdsetobj
try:
ic = commands.index(cmd)
commands[ic] = cmd # replace
except ValueError:
commands.append(cmd)
# extra run to make sure to avoid duplicates
self.commands = list(set(commands))
# add system_command to separate list as well,
# for quick look-up
if cmd.key.startswith("__"):
try:
ic = system_commands.index(cmd)
system_commands[ic] = cmd # replace
except ValueError:
system_commands.append(cmd)
|
RuntimeError
|
dataset/ETHPy150Open evennia/evennia/evennia/commands/cmdset.py/CmdSet.add
|
5,074 |
def remove(self, cmd):
"""
Remove a command instance from the cmdset.
Args:
cmd (Command or str): Either the Command object to remove
or the key of such a command.
"""
cmd = self._instantiate(cmd)
if cmd.key.startswith("__"):
try:
ic = self.system_commands.index(cmd)
del self.system_commands[ic]
except __HOLE__:
pass
else:
self.commands = [oldcmd for oldcmd in self.commands if oldcmd != cmd]
|
ValueError
|
dataset/ETHPy150Open evennia/evennia/evennia/commands/cmdset.py/CmdSet.remove
|
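list.index raises ValueError when the element is absent, so the hole turns removal of an unknown system command into a no-op. The remove-if-present idiom:

def remove_if_present(seq, item):
    try:
        del seq[seq.index(item)]
    except ValueError:  # item not in seq
        pass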
5,075 |
def TestFindFileInDirectoryOrAncestors():
""" Test FindFileInDirectoryOrAncestors"""
# Create an empty temp directory
root_dir = tempfile.mkdtemp()
a_dir = os.path.join(root_dir, "a")
b_dir = os.path.join(a_dir, "b")
# Create subdirectories
os.mkdir(a_dir)
os.mkdir(b_dir)
# Create temporary file
filepath = os.path.join(root_dir, "test_file")
fp = open(filepath, "w")
fp.close()
print(root_dir, a_dir, b_dir, filepath)
# Check if test_file can be found
assert(U.FindFileInDirectoryOrAncestors("test_file", b_dir) ==
filepath)
# Ensure that adding /. to the path does not
# change the result of the test
c_dir = os.path.join(b_dir, os.path.curdir)
assert(U.FindFileInDirectoryOrAncestors("test_file", c_dir) ==
filepath)
# Change Permissions to 000 and ensure that an
# IOError is thrown
os.chmod(filepath, 0)
raised = False
try:
U.FindFileInDirectoryOrAncestors("test_file", c_dir)
except __HOLE__ as e:
raised = True
assert(raised)
# Remove the file and temporary directories
os.remove(filepath)
assert(U.FindFileInDirectoryOrAncestors("test_file", b_dir) ==
None)
os.removedirs(b_dir)
return
|
IOError
|
dataset/ETHPy150Open spranesh/Redhawk/redhawk/test/test_utils_util.py/TestFindFileInDirectoryOrAncestors
|
5,076 |
def onClickOpen(self, sender):
global has_getAsText
data = None
filename = None
if self.files:
if self.files.length == 0:
return
if self.files.length > 1:
alert("Cannot open more than one file")
return
file = self.files.item(0)
filename = file.fileName
try:
data = file.getAsText("")
except __HOLE__, e:
has_getAsText = False
alert("Sorry. cannot retrieve file in this browser.\nTry again.")
else:
elem = self.iframe.getElement()
# On firefox, this runs into:
# Permission denied to get property Window.document
# when the file is not in the current domain
body = elem.contentWindow.document.body
try:
filename = '' + elem.contentWindow.location
except:
filename = None
if body.childNodes.length == 1:
data = '' + body.childNodes.item(0).innerHTML
else:
data = '' + body.innerHTML
self.hide()
if data:
self.data = data
self.filename = filename
|
AttributeError
|
dataset/ETHPy150Open pyjs/pyjs/examples/timesheet/libtimesheet/view/components/FileOpenDlg.py/FileOpenDlg.onClickOpen
|
5,077 |
def test_expect_setecho_off(self):
'''This tests that echo may be toggled off.
'''
p = pexpect.spawn('cat', echo=True, timeout=5)
try:
self._expect_echo_toggle(p)
except __HOLE__:
if sys.platform.lower().startswith('sunos'):
if hasattr(unittest, 'SkipTest'):
raise unittest.SkipTest("Not supported on this platform.")
return 'skip'
raise
|
IOError
|
dataset/ETHPy150Open Calysto/metakernel/metakernel/tests/test_expect.py/ExpectTestCase.test_expect_setecho_off
|
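This test and the three that follow share one guard: on SunOS the pty echo ioctls raise IOError, so the test is skipped instead of failed; any other platform re-raises. The platform-gated skip, factored out:

import sys
import unittest

def run_or_skip_on_sunos(fn):
    try:
        fn()
    except IOError:
        if sys.platform.lower().startswith('sunos'):
            raise unittest.SkipTest('Not supported on this platform.')
        raise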
5,078 |
def test_expect_setecho_off_exact(self):
p = pexpect.spawn('cat', echo=True, timeout=5)
p.expect = p.expect_exact
try:
self._expect_echo_toggle(p)
except __HOLE__:
if sys.platform.lower().startswith('sunos'):
if hasattr(unittest, 'SkipTest'):
raise unittest.SkipTest("Not supported on this platform.")
return 'skip'
raise
|
IOError
|
dataset/ETHPy150Open Calysto/metakernel/metakernel/tests/test_expect.py/ExpectTestCase.test_expect_setecho_off_exact
|
5,079 |
def test_waitnoecho(self):
" Tests setecho(False) followed by waitnoecho() "
p = pexpect.spawn('cat', echo=False, timeout=5)
try:
p.setecho(False)
p.waitnoecho()
except __HOLE__:
if sys.platform.lower().startswith('sunos'):
if hasattr(unittest, 'SkipTest'):
raise unittest.SkipTest("Not supported on this platform.")
return 'skip'
raise
|
IOError
|
dataset/ETHPy150Open Calysto/metakernel/metakernel/tests/test_expect.py/ExpectTestCase.test_waitnoecho
|
5,080 |
def test_waitnoecho_order(self):
''' This tests that we can wait on a child process to set echo mode.
For example, this tests that we could wait for SSH to set ECHO False
when asking for a password. This makes use of an external script
_echo_wait.py. '''
p1 = pexpect.spawn('%s _echo_wait.py' % self.PYTHONBIN)
start = time.time()
try:
p1.waitnoecho(timeout=10)
except __HOLE__:
if sys.platform.lower().startswith('sunos'):
if hasattr(unittest, 'SkipTest'):
raise unittest.SkipTest("Not supported on this platform.")
return 'skip'
raise
end_time = time.time() - start
assert end_time < 10 and end_time > 2, "waitnoecho did not set ECHO off in the expected window of time."
# test that we actually timeout and return False if ECHO is never set off.
p1 = pexpect.spawn('cat')
start = time.time()
retval = p1.waitnoecho(timeout=4)
end_time = time.time() - start
assert end_time > 3, "waitnoecho should have waited longer than 3 seconds. retval should be False, retval=%d"%retval
assert retval==False, "retval should be False, retval=%d"%retval
# This one is mainly here to test default timeout for code coverage.
p1 = pexpect.spawn('%s _echo_wait.py' % self.PYTHONBIN)
start = time.time()
p1.waitnoecho()
end_time = time.time() - start
assert end_time < 10, "waitnoecho did not set ECHO off in the expected window of time."
|
IOError
|
dataset/ETHPy150Open Calysto/metakernel/metakernel/tests/test_expect.py/ExpectTestCase.test_waitnoecho_order
|
5,081 |
def takeSomeFromQueue(self):
"""Use self.queue, which is a collections.deque, to pop up to
settings.MAX_DATAPOINTS_PER_MESSAGE items from the left of the
queue.
"""
def yield_max_datapoints():
for count in range(settings.MAX_DATAPOINTS_PER_MESSAGE):
try:
yield self.queue.popleft()
except __HOLE__:
raise StopIteration
return list(yield_max_datapoints())
|
IndexError
|
dataset/ETHPy150Open graphite-project/carbon/lib/carbon/client.py/CarbonClientFactory.takeSomeFromQueue
|
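deque.popleft raises IndexError when the queue is empty, and the inner generator converts that into StopIteration. Since PEP 479 (enforced from Python 3.7), raising StopIteration inside a generator is a RuntimeError, so a modern equivalent breaks out of a loop instead:

from collections import deque

def take_up_to(queue, n):
    out = []
    for _ in range(n):
        try:
            out.append(queue.popleft())
        except IndexError:  # queue drained early
            break
    return out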
5,082 |
def process_request(self, request_items):
# set all required simpleapi arguments
access_key = request_items.pop('_access_key', None)
method = request_items.pop('_call', None)
if self.restful:
method = self.sapi_request.method.lower()
data = request_items.pop('_data', None)
# update session
self.session.request = self.sapi_request
self.session.mimetype = self.mimetype
self.session.callback = self.callback
self.session.access_key = access_key
# make all uploaded files available
if self.route.is_django():
self.session.files = self.sapi_request.FILES
# instantiate namespace
local_namespace = self.namespace['class'](self)
self.session._internal.namespace = {
'nmap': self.namespace,
'instance': local_namespace
}
# check the method
if not method:
raise RequestException(u'Method must be provided.')
# check whether method exists
if not self.namespace['functions'].has_key(method):
raise RequestException(u'Method %s does not exist.' % method)
# check authentication
if not self.namespace['authentication'](local_namespace, access_key):
raise RequestException(u'Authentication failed.')
# check ip address
if not self.namespace['ip_restriction'](local_namespace, \
self.sapi_request.remote_addr):
raise RequestException(u'You are not allowed to access.')
function = self.namespace['functions'][method]
self.session.function = function
# check allowed HTTP methods
if not function['methods']['function'](self.sapi_request.method, function['methods']['allowed_methods']):
raise RequestException(u'Method not allowed: %s' % self.sapi_request.method)
# if data is set, make sure input formatter is not ValueFormatter
if data:
if isinstance(self.input_formatter, formatters['value']):
raise RequestException(u'If you\'re using _data please make ' \
'sure you set _input and _input is not ' \
'\'value\'.')
try:
request_items = self.input_formatter.kwargs(data, 'parse')
except ValueError, _:
raise RequestException(u'Data couldn\'t be decoded. ' \
'Please check _input and your _data')
else:
if not isinstance(request_items, dict):
raise RequestException(u'_data must be an array/dictionary')
# check whether all obligatory arguments are given
ungiven_obligatory_args = list(set(function['args']['obligatory']) - \
set(request_items.keys()))
if ungiven_obligatory_args:
raise RequestException(u'Obligatory argument(s) missing: %s' % \
", ".join(ungiven_obligatory_args))
# check whether there are more arguments than needed
if not function['args']['kwargs_allowed']:
unused_arguments = list(set(request_items.keys()) - \
set(function['args']['all']))
if unused_arguments:
if not self.ignore_unused_args:
raise RequestException(u'Unused arguments: %s' % \
", ".join(unused_arguments))
else:
for key in unused_arguments:
del request_items[key]
# decode incoming variables (only if _data is not set!)
if not data:
new_request_items = {}
for key, value in request_items.iteritems():
try:
new_request_items[str(key)] = self.input_formatter.kwargs(value, 'parse')
except ValueError, _:
raise RequestException(u'Value for %s couldn\'t be decoded.' % \
key)
request_items = new_request_items
else:
# make sure all keys are strings, not unicodes (for compatibility
# issues: Python < 2.6.5)
new_request_items = {}
for key, value in request_items.iteritems():
new_request_items[str(key)] = value
request_items = new_request_items
# check constraints
for key, value in request_items.iteritems():
try:
request_items[key] = function['constraints']['function'](
local_namespace, key, value)
except (__HOLE__,):
raise RequestException(u'Constraint failed for argument: %s' % key)
# we're done working on arguments, pass it to the session
self.session.arguments = request_items
# call feature: handle_request
try:
for feature in self.namespace['features']:
feature._handle_request(self)
except FeatureContentResponse, e:
result = e
else:
# call before_request
if hasattr(local_namespace, 'before_request'):
getattr(local_namespace, 'before_request')(self, self.session)
# make the call
try:
if self.debug:
_, fname = tempfile.mkstemp()
self.route.logger.debug(u"Profiling call '%s': %s" % \
(method, fname))
self.route.logger.debug(u"Calling parameters: %s" % \
pprint.pformat(request_items))
profile = cProfile.Profile()
result = profile.runcall(getattr(local_namespace, method),
**request_items)
profile.dump_stats(fname)
self.route.logger.debug(u"Loading stats...")
stats = pstats.Stats(fname)
stats.strip_dirs().sort_stats('time', 'calls') \
.print_stats(25)
else:
result = getattr(local_namespace, method)(**request_items)
except Exception, e:
if has_django and isinstance(e, django_notexist):
raise RequestException(e)
elif has_mongoengine and isinstance(e, mongoengine_notexist):
raise RequestException(e)
else:
raise
# if result is not a Response, create one
if not isinstance(result, Response):
response = Response(
sapi_request=self.sapi_request,
namespace=self.namespace,
result=result,
output_formatter=self.output_formatter,
wrapper=self.wrapper,
mimetype=self.mimetype,
function=function
)
else:
response = result
return response
|
ValueError
|
dataset/ETHPy150Open flosch/simpleapi/simpleapi/server/request.py/Request.process_request
|
5,083 |
def suggestFile(filename, folder='.', threshold=2, includeIdenticalFilename=False):
'''Returns the closest matching filename to the filenames in the folder. If
there are multiple files with the same edit distance, the one returned is
undefined. Returns None if there are no suggestions within the threshold.
Args:
filename (str) - The typo'd filename the user supplied.
folder (str, optional) - The folder with filenames the user could have
meant. Default is the . current folder.
threshold (int, optional) - A levenshtein edit distance above this
threshold will exclude a file from being suggested. Default is 2.
includeIdenticalFilename (bool, optional) - If True, a filename identical
to the filename arg will be included in the suggestions. Default is
False.'''
try:
genObj = suggestAllFiles(filename, folder, threshold, includeIdenticalFilename)
if runningOnPython2:
return genObj.next() # Python 2 code
else:
return genObj.__next__() # Python 3 code
except __HOLE__:
return None
|
StopIteration
|
dataset/ETHPy150Open asweigart/pydidyoumean/pydidyoumean/__init__.py/suggestFile
|
5,084 |
def suggest(name, possibleSuggestions=None, threshold=2, includeIdenticalName=False):
'''Returns the closest matching name to the suggestions in
possibleSuggestions. Pass a list of all possible matches for the
possibleSuggestions parameter. If there are multiple names with the same edit
distance, the one returned is undefined. Returns None if there are no
suggestions within the threshold.
Args:
name (str) - The typo'd name the user supplied.
possibleSuggestions (sequence of str, optional) - A sequence of strings
for all possible suggestions that the name could match to. This
function will pull suggestions from this list.
threshold (int, optional) - A levenshtein edit distance above this
threshold will exclude a name from being suggested. Default is 2.
includeIdenticalName (bool, optional) - If True, a name identical to
the name arg will be included in the suggestions. Default is False.'''
try:
genObj = suggestAll(name, possibleSuggestions, threshold, includeIdenticalName)
if runningOnPython2:
return genObj.next() # Python 2 code
else:
return genObj.__next__() # Python 3 code
except __HOLE__:
return None
|
StopIteration
|
dataset/ETHPy150Open asweigart/pydidyoumean/pydidyoumean/__init__.py/suggest
|
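Both suggest helpers above mask the StopIteration raised when the generator yields nothing within the threshold. next() with a default expresses the same thing without the try block, and works on both Python 2 and 3:

def first_or_none(gen):
    return next(gen, None)  # None when the generator is exhausted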
5,085 |
def intersection(*entities):
"""The intersection of a collection of GeometryEntity instances.
Parameters
==========
entities : sequence of GeometryEntity
Returns
=======
intersection : list of GeometryEntity
Raises
======
NotImplementedError
When unable to calculate intersection.
Notes
=====
The intersection of any geometrical entity with itself should return
a list with one item: the entity in question.
An intersection requires two or more entities. If only a single
entity is given then the function will return an empty list.
It is possible for `intersection` to miss intersections that one
knows exists because the required quantities were not fully
simplified internally.
Reals should be converted to Rationals, e.g. Rational(str(real_num))
or else failures due to floating point issues may result.
See Also
========
sympy.geometry.entity.GeometryEntity.intersection
Examples
========
>>> from sympy.geometry import Point, Line, Circle, intersection
>>> p1, p2, p3 = Point(0, 0), Point(1, 1), Point(-1, 5)
>>> l1, l2 = Line(p1, p2), Line(p3, p2)
>>> c = Circle(p2, 1)
>>> intersection(l1, p2)
[Point2D(1, 1)]
>>> intersection(l1, l2)
[Point2D(1, 1)]
>>> intersection(c, p2)
[]
>>> intersection(c, Point(1, 0))
[Point2D(1, 0)]
>>> intersection(c, l2)
[Point2D(-sqrt(5)/5 + 1, 2*sqrt(5)/5 + 1),
Point2D(sqrt(5)/5 + 1, -2*sqrt(5)/5 + 1)]
"""
from .entity import GeometryEntity
from .point import Point
if len(entities) <= 1:
return []
# entities may be an immutable tuple
entities = list(entities)
for i, e in enumerate(entities):
if not isinstance(e, GeometryEntity):
try:
entities[i] = Point(e)
except __HOLE__:
raise ValueError('%s is not a GeometryEntity and cannot be made into Point' % str(e))
res = entities[0].intersection(entities[1])
for entity in entities[2:]:
newres = []
for x in res:
newres.extend(x.intersection(entity))
res = newres
return res
|
NotImplementedError
|
dataset/ETHPy150Open sympy/sympy/sympy/geometry/util.py/intersection
|
5,086 |
def convex_hull(*args, **kwargs):
"""The convex hull surrounding the Points contained in the list of entities.
Parameters
==========
args : a collection of Points, Segments and/or Polygons
Returns
=======
convex_hull : Polygon if ``polygon`` is True else as a tuple `(U, L)` where ``L`` and ``U`` are the lower and upper hulls, respectively.
Notes
=====
This can only be performed on a set of points whose coordinates can
be ordered on the number line.
References
==========
[1] http://en.wikipedia.org/wiki/Graham_scan
[2] Andrew's Monotone Chain Algorithm
(A.M. Andrew,
"Another Efficient Algorithm for Convex Hulls in Two Dimensions", 1979)
http://geomalgorithms.com/a10-_hull-1.html
See Also
========
sympy.geometry.point.Point, sympy.geometry.polygon.Polygon
Examples
========
>>> from sympy.geometry import Point, convex_hull
>>> points = [(1, 1), (1, 2), (3, 1), (-5, 2), (15, 4)]
>>> convex_hull(*points)
Polygon(Point2D(-5, 2), Point2D(1, 1), Point2D(3, 1), Point2D(15, 4))
>>> convex_hull(*points, **dict(polygon=False))
([Point2D(-5, 2), Point2D(15, 4)],
[Point2D(-5, 2), Point2D(1, 1), Point2D(3, 1), Point2D(15, 4)])
"""
from .entity import GeometryEntity
from .point import Point
from .line import Segment
from .polygon import Polygon
polygon = kwargs.get('polygon', True)
p = set()
for e in args:
if not isinstance(e, GeometryEntity):
try:
e = Point(e)
except __HOLE__:
raise ValueError('%s is not a GeometryEntity and cannot be made into Point' % str(e))
if isinstance(e, Point):
p.add(e)
elif isinstance(e, Segment):
p.update(e.points)
elif isinstance(e, Polygon):
p.update(e.vertices)
else:
raise NotImplementedError(
'Convex hull for %s not implemented.' % type(e))
# make sure all our points are of the same dimension
if any(len(x) != 2 for x in p):
raise ValueError('Can only compute the convex hull in two dimensions')
p = list(p)
if len(p) == 1:
return p[0] if polygon else (p[0], None)
elif len(p) == 2:
s = Segment(p[0], p[1])
return s if polygon else (s, None)
def _orientation(p, q, r):
'''Return positive if p-q-r are clockwise, neg if ccw, zero if
collinear.'''
return (q.y - p.y)*(r.x - p.x) - (q.x - p.x)*(r.y - p.y)
# scan to find upper and lower convex hulls of a set of 2d points.
U = []
L = []
try:
p.sort(key=lambda x: x.args)
except TypeError:
raise ValueError("The points could not be sorted.")
for p_i in p:
while len(U) > 1 and _orientation(U[-2], U[-1], p_i) <= 0:
U.pop()
while len(L) > 1 and _orientation(L[-2], L[-1], p_i) >= 0:
L.pop()
U.append(p_i)
L.append(p_i)
U.reverse()
convexHull = tuple(L + U[1:-1])
if len(convexHull) == 2:
s = Segment(convexHull[0], convexHull[1])
return s if polygon else (s, None)
if polygon:
return Polygon(*convexHull)
else:
U.reverse()
return (U, L)
|
NotImplementedError
|
dataset/ETHPy150Open sympy/sympy/sympy/geometry/util.py/convex_hull
|
5,087 |
def closest_points(*args):
"""Return the subset of points from a set of points that were
the closest to each other in the 2D plane.
Parameters
==========
args : a collection of Points on 2D plane.
Notes
=====
This can only be performed on a set of points whose coordinates can
be ordered on the number line. If there are no ties then a single
pair of Points will be in the set.
References
==========
[1] http://www.cs.mcgill.ca/~cs251/ClosestPair/ClosestPairPS.html
[2] Sweep line algorithm
https://en.wikipedia.org/wiki/Sweep_line_algorithm
Examples
========
>>> from sympy.geometry import closest_points, Point2D, Triangle
>>> Triangle(sss=(3, 4, 5)).args
(Point2D(0, 0), Point2D(3, 0), Point2D(3, 4))
>>> closest_points(*_)
set([(Point2D(0, 0), Point2D(3, 0))])
"""
from collections import deque
from math import hypot, sqrt as _sqrt
from sympy.functions.elementary.miscellaneous import sqrt
p = [Point2D(i) for i in set(args)]
if len(p) < 2:
raise ValueError('At least 2 distinct points must be given.')
try:
p.sort(key=lambda x: x.args)
except __HOLE__:
raise ValueError("The points could not be sorted.")
if any(not i.is_Rational for j in p for i in j.args):
def hypot(x, y):
arg = x*x + y*y
if arg.is_Rational:
return _sqrt(arg)
return sqrt(arg)
rv = [(0, 1)]
best_dist = hypot(p[1].x - p[0].x, p[1].y - p[0].y)
i = 2
left = 0
box = deque([0, 1])
while i < len(p):
while left < i and p[i][0] - p[left][0] > best_dist:
box.popleft()
left += 1
for j in box:
d = hypot(p[i].x - p[j].x, p[i].y - p[j].y)
if d < best_dist:
rv = [(j, i)]
elif d == best_dist:
rv.append((j, i))
else:
continue
best_dist = d
box.append(i)
i += 1
return {tuple([p[i] for i in pair]) for pair in rv}
|
TypeError
|
dataset/ETHPy150Open sympy/sympy/sympy/geometry/util.py/closest_points
|
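The record above is a simplified sweep line: points are sorted by x, and a deque of recent candidates is pruned whenever its leftmost member is farther away in x than the best distance found so far. The same idea on plain tuples with math.hypot (illustrative names, no sympy dependency):

from collections import deque
from math import hypot

def closest_pair(points):
    pts = sorted(set(points))
    if len(pts) < 2:
        raise ValueError('At least 2 distinct points must be given.')
    best = hypot(pts[1][0] - pts[0][0], pts[1][1] - pts[0][1])
    pairs = [(pts[0], pts[1])]
    box = deque([0, 1])
    for i in range(2, len(pts)):
        # candidates farther away in x than best can never beat it
        while box and pts[i][0] - pts[box[0]][0] > best:
            box.popleft()
        for j in box:
            d = hypot(pts[i][0] - pts[j][0], pts[i][1] - pts[j][1])
            if d < best:
                best, pairs = d, [(pts[j], pts[i])]
            elif d == best:
                pairs.append((pts[j], pts[i]))
        box.append(i)
    return best, pairs

print(closest_pair([(0, 0), (3, 0), (3, 4)]))
# expected: (3.0, [((0, 0), (3, 0))])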
5,088 |
def are_similar(e1, e2):
"""Are two geometrical entities similar.
Can one geometrical entity be uniformly scaled to the other?
Parameters
==========
e1 : GeometryEntity
e2 : GeometryEntity
Returns
=======
are_similar : boolean
Raises
======
GeometryError
When `e1` and `e2` cannot be compared.
Notes
=====
If the two objects are equal then they are similar.
See Also
========
sympy.geometry.entity.GeometryEntity.is_similar
Examples
========
>>> from sympy import Point, Circle, Triangle, are_similar
>>> c1, c2 = Circle(Point(0, 0), 4), Circle(Point(1, 4), 3)
>>> t1 = Triangle(Point(0, 0), Point(1, 0), Point(0, 1))
>>> t2 = Triangle(Point(0, 0), Point(2, 0), Point(0, 2))
>>> t3 = Triangle(Point(0, 0), Point(3, 0), Point(0, 1))
>>> are_similar(t1, t2)
True
>>> are_similar(t1, t3)
False
"""
from .exceptions import GeometryError
if e1 == e2:
return True
try:
return e1.is_similar(e2)
except __HOLE__:
try:
return e2.is_similar(e1)
except AttributeError:
n1 = e1.__class__.__name__
n2 = e2.__class__.__name__
raise GeometryError(
"Cannot test similarity between %s and %s" % (n1, n2))
|
AttributeError
|
dataset/ETHPy150Open sympy/sympy/sympy/geometry/util.py/are_similar
|
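The try/except AttributeError cascade above, attempting e1.is_similar(e2) and then e2.is_similar(e1), is a reusable symmetric-dispatch pattern. A generic sketch (hypothetical helper, not part of sympy):

def symmetric_dispatch(a, b, method_name, error_cls=TypeError):
    # Try a.<method>(b), then b.<method>(a); fail only if neither side has it.
    for x, y in ((a, b), (b, a)):
        method = getattr(x, method_name, None)
        if method is not None:
            return method(y)
    raise error_cls('Cannot test %s between %s and %s'
                    % (method_name, type(a).__name__, type(b).__name__))

# usage sketch: symmetric_dispatch(t1, t2, 'is_similar', GeometryError)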
5,089 |
def main():
flock = FlockManager(
match_found,
config["BLEET_TIMEOUT"],
config["SERVICE_TIMEOUT"]
)
logger.info("Shepherd starting.")
while True:
# Wait until either the public or sheep socket has messages waiting
zmq.select([public, sheep], [], [], timeout = 5)
# Will grab all of the outstanding messages from the outside and place them
# in the request queue
while public.getsockopt(zmq.EVENTS) & zmq.POLLIN != 0:
request = public.recv_json()
logger.debug("Raw test request: %s", str(request))
request = TestRequest.from_dict(request)
try:
submission = \
Submission.objects.get(id = ObjectId(request.submission_id))
except Submission.DoesNotExist as e:
logger.warning(
"Received test request for non-existant submission [%s].",
str(request.submission_id)
)
continue
except bson.errors.InvalidId as e:
logger.warning("Received malformed test request. %s", str(e))
continue
try:
assignment = Assignment.objects.get(id = submission.assignment)
except Assignment.DoesNotExist as e:
logger.error(
"Received test request for a submission [%s] referencing "
"an invalid assignment [%s].",
str(submission.id),
str(submission.assignment)
)
continue
if not assignment.test_harness:
logger.warning(
"Received test request for a submission [%s] referencing "
"an assignment [%s] that does not have a test harness "
"associated with it.",
str(submission.id),
str(submission.assignment)
)
continue
try:
test_harness = \
TestHarness.objects.get(id = assignment.test_harness)
            except TestHarness.DoesNotExist as e:
logger.error(
"Received test request for a submission [%s] referencing "
"an assignment [%s] that references a non-existant test "
"harness [%s].",
str(submission.id),
str(submission.assignment),
str(assignment.test_harness)
)
continue
# Gather all the necessary information from the test request
# received from the outside.
processed_request = InternalTestRequest(
submission.id,
test_harness.config.get("galah/timeout",
config["BLEET_TIMEOUT"].seconds),
test_harness.config.get("galah/environment", {})
)
logger.info("Received test request.")
flock.received_request(processed_request)
# Will grab all of the outstanding messages from the sheep and process them
while sheep.getsockopt(zmq.EVENTS) & zmq.POLLIN != 0:
try:
sheep_identity, sheep_message = router_recv_json(sheep)
sheep_message = FlockMessage.from_dict(sheep_message)
logger.debug(
"Received message from sheep: %s",
str(sheep_message)
)
except __HOLE__ as e:
logger.error("Could not decode sheep's message: %s", str(e))
logger.debug(
"Exception thrown while decoding sheep's message...",
exc_info = sys.exc_info()
)
continue
if sheep_message.type == "distress":
logger.warn("Received distress message. Sending bloot.")
router_send_json(
sheep, sheep_identity, FlockMessage("bloot", "").to_dict()
)
elif sheep_message.type == "bleet":
logger.debug(
"Sheep [%s] bleeted. Sending bloot.",
repr(sheep_identity)
)
result = flock.sheep_bleeted(sheep_identity)
# Under certain circumstances we want to completely ignore a
# bleet (see FlockManager.sheep_bleeted() for more details)
if result is FlockManager.IGNORE:
logger.debug("Ignoring bleet.")
continue
if not result:
router_send_json(
sheep,
sheep_identity,
FlockMessage("identify", "").to_dict()
)
logger.info(
"Unrecognized sheep [%s] connected, identify sent.",
repr(sheep_identity)
)
continue
router_send_json(
sheep,
sheep_identity,
FlockMessage("bloot", "").to_dict()
)
elif sheep_message.type == "environment":
if not flock.manage_sheep(sheep_identity, sheep_message.body):
logger.warn(
"Received environment from an already-recognized sheep."
)
elif sheep_message.type == "result":
logger.info("Received test result from sheep.")
logger.debug(
"Received test result from sheep: %s",
str(sheep_message.body)
)
try:
submission_id = ObjectId(sheep_message.body["id"])
submission = Submission.objects.get(id = submission_id)
test_result = TestResult.from_dict(sheep_message.body)
try:
test_result.save()
except InvalidDocument:
logger.warn(
"Test result is too large for the database.",
exc_info = True
)
test_result = TestResult(failed = True)
test_result.save()
submission.test_results = test_result.id
submission.save()
except (InvalidId, Submission.DoesNotExist) as e:
logger.warn(
"Could not retrieve submission [%s] for test result "
"received from sheep [%s].",
str(submission_id),
repr(sheep_identity)
)
continue
router_send_json(
sheep,
sheep_identity,
FlockMessage(
"bloot", sheep_message.body["id"]
).to_dict()
)
if not flock.sheep_finished(sheep_identity):
logger.info(
"Got result from sheep [%s] who was not processing "
"a test request.",
repr(sheep_identity)
)
# Let the flock manager get rid of any dead or killed sheep.
lost_sheep, killed_sheep = flock.cleanup()
if lost_sheep:
logger.warn(
"%d sheep lost due to bleet timeout: %s",
len(lost_sheep),
str([repr(i) for i in lost_sheep])
)
if killed_sheep:
logger.warn(
"%d sheep lost due to request timeout: %s",
len(killed_sheep),
str([repr(i) for i in killed_sheep])
)
|
ValueError
|
dataset/ETHPy150Open ucrcsedept/galah/galah/shepherd/shepherd.py/main
|
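The record assumes router_recv_json/router_send_json helpers for ZeroMQ ROUTER sockets. A plausible sketch under the assumption that the peers are DEALER sockets, so each message is framed as [identity, payload]; the framing and JSON handling here are assumptions, not taken from the galah source:

import json

def router_recv_json(socket):
    # ROUTER sockets prefix every message with the sender's identity frame.
    identity, payload = socket.recv_multipart()
    # json.loads raises ValueError on malformed JSON, which is what the
    # masked handler in the record catches.
    return identity, json.loads(payload)

def router_send_json(socket, identity, obj):
    socket.send_multipart([identity, json.dumps(obj).encode('utf-8')])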
5,090 |
@cache_page(LONG_CACHE_TIME)
def committee_search_html(request):
params = request.GET
committees = None
try:
committee_name_fragment = params['name']
if len(committee_name_fragment) > 3:
print committee_name_fragment
committees = Committee_Overlay.objects.filter(Q(name__icontains=committee_name_fragment) | Q(curated_candidate__name__icontains=committee_name_fragment)).select_related('curated_candidate')
else:
committees = None
except __HOLE__:
committees = None
return render_to_response('datapages/committee_search.html',
{
'committees':committees,
}
)
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/public_views/datapages/views.py/committee_search_html
|
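The masked exception fires when the 'name' parameter is absent from request.GET. An equivalent sketch that expresses the optional parameter with .get() instead of a KeyError handler (hypothetical helper name; the original's len(...) > 3 check is the same min_length=4 threshold):

def committee_name_fragment(params, min_length=4):
    # Returns the fragment only when present and long enough to search on.
    fragment = params.get('name', '')
    return fragment if len(fragment) >= min_length else None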
5,091 |
def weekly_comparison(request, race_list, blog_or_feature):
print "weekly comparison"
if not (blog_or_feature in ['feature', 'blog', 'narrow']):
raise Http404
race_ids = race_list.split('-')
if len(race_ids) == 0 or len(race_ids) > 6:
raise Http404
race_id_text = ",".join(race_ids)
chart_title = ""
partisan_colors = 'false'
try:
chart_data = chart_name_reference[race_list]
chart_title = chart_data['name']
partisan_colors = chart_data['partisan']
except KeyError:
for i,id in enumerate(race_ids):
try:
series_name = weekly_dump_data_series[int(id)]['data_series_name']
if i>0:
chart_title = chart_title + " and "
chart_title = chart_title + series_name
except __HOLE__:
continue
chart_title = chart_title + ", weekly"
return render_to_response('datapages/comparisons_chart.html',
{
'race_id_text':race_id_text,
'chart_title': chart_title,
'blog_or_feature':blog_or_feature,
'partisan_colors':partisan_colors,
'data_source': '/static/data/weekly_ies.csv',
#'data_source': '/static/realtimefec/js/weekly_ies.csv',
'period_description':'previous seven days',
'start_month':5,
'start_year':2014,
},
context_instance=RequestContext(request)
)
|
IndexError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/public_views/datapages/views.py/weekly_comparison
|
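Records 5,092 through 5,094 repeat this lookup-then-fallback title construction verbatim, differing only in the name-reference dict and the CSV path. A sketch of a shared helper (an editorial consolidation; the parameter names are illustrative):

def build_chart_title(race_list, race_ids, name_reference, series_reference):
    # Prefer a curated chart name; otherwise join the individual series names.
    try:
        data = name_reference[race_list]
        return data['name'], data['partisan']
    except KeyError:
        names = []
        for race_id in race_ids:
            try:
                names.append(series_reference[int(race_id)]['data_series_name'])
            except IndexError:
                continue
        return ' and '.join(names) + ', weekly', 'false'

# e.g. build_chart_title(race_list, race_ids,
#                        chart_name_reference, weekly_dump_data_series)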
5,092 |
def weekly_comparison_cumulative(request, race_list, blog_or_feature):
print "weekly comparison"
if not (blog_or_feature in ['feature', 'blog', 'narrow']):
raise Http404
race_ids = race_list.split('-')
if len(race_ids) == 0 or len(race_ids) > 6:
raise Http404
race_id_text = ",".join(race_ids)
chart_title = ""
partisan_colors = 'false'
try:
chart_data = chart_name_reference[race_list]
chart_title = chart_data['name']
partisan_colors = chart_data['partisan']
except __HOLE__:
for i,id in enumerate(race_ids):
try:
series_name = weekly_dump_data_series[int(id)]['data_series_name']
if i>0:
chart_title = chart_title + " and "
chart_title = chart_title + series_name
except IndexError:
continue
chart_title = chart_title + ", weekly"
return render_to_response('datapages/comparisons_chart.html',
{
'race_id_text':race_id_text,
'chart_title': chart_title,
'blog_or_feature':blog_or_feature,
'partisan_colors':partisan_colors,
'data_source': '/static/data/weekly_ies_cumulative.csv',
#'data_source': '/static/realtimefec/js/weekly_ies_cumulative.csv',
'period_description':'cycle through date shown',
'start_month':5,
'start_year':2014,
},
context_instance=RequestContext(request)
)
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/public_views/datapages/views.py/weekly_comparison_cumulative
|
5,093 |
def contrib_comparison(request, race_list, blog_or_feature):
print "weekly comparison"
if not (blog_or_feature in ['feature', 'blog', 'narrow']):
raise Http404
race_ids = race_list.split('-')
if len(race_ids) == 0 or len(race_ids) > 6:
raise Http404
race_id_text = ",".join(race_ids)
chart_title = ""
partisan_colors = 'false'
try:
chart_data = chart_donor_name_reference[race_list]
chart_title = chart_data['name']
partisan_colors = chart_data['partisan']
except __HOLE__:
for i,id in enumerate(race_ids):
try:
series_name = weekly_dump_data_series[int(id)]['data_series_name']
if i>0:
chart_title = chart_title + " and "
chart_title = chart_title + series_name
except IndexError:
continue
chart_title = chart_title + ", weekly"
return render_to_response('datapages/comparisons_chart.html',
{
'race_id_text':race_id_text,
'chart_title': chart_title,
'blog_or_feature':blog_or_feature,
'partisan_colors':partisan_colors,
'data_source': '/static/data/weekly_superpac_donations.csv',
'period_description':'previous seven days',
'start_month':5,
'start_year':2014,
},
context_instance=RequestContext(request)
)
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/public_views/datapages/views.py/contrib_comparison
|
5,094 |
def contrib_comparison_cumulative(request, race_list, blog_or_feature):
print "weekly comparison"
if not (blog_or_feature in ['feature', 'blog', 'narrow']):
raise Http404
race_ids = race_list.split('-')
if len(race_ids) == 0 or len(race_ids) > 6:
raise Http404
race_id_text = ",".join(race_ids)
chart_title = ""
partisan_colors = 'false'
try:
chart_data = chart_donor_name_reference[race_list]
chart_title = chart_data['name']
partisan_colors = chart_data['partisan']
except __HOLE__:
for i,id in enumerate(race_ids):
try:
series_name = weekly_dump_data_series[int(id)]['data_series_name']
if i>0:
chart_title = chart_title + " and "
chart_title = chart_title + series_name
except IndexError:
continue
chart_title = chart_title + ", weekly"
return render_to_response('datapages/comparisons_chart.html',
{
'race_id_text':race_id_text,
'chart_title': chart_title,
'blog_or_feature':blog_or_feature,
'partisan_colors':partisan_colors,
'data_source': '/static/data/weekly_superpac_donations_cumulative.csv',
'period_description':'cycle through date shown',
'start_month':5,
'start_year':2014,
},
context_instance=RequestContext(request)
)
|
KeyError
|
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/public_views/datapages/views.py/contrib_comparison_cumulative
|
5,095 |
def run(self, meth, args=(), kwargs={}, argiterator=None):
res = FunctionTimerResult(_form_name(meth, args, kwargs))
if argiterator:
iterator = itertools.cycle(argiterator)
else:
iterator = itertools.repeat(args)
start = now()
try:
_next = iterator.__next__
except __HOLE__:
_next = iterator.next
for i in range(self._iter):
args = _next()
rv = meth(*args, **kwargs)
end = now()
res.runtime = ((end - start)/self._iter) - self._overhead
res.overhead = self._overhead
res.returnvalue = rv
return res
|
AttributeError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/benchmarks.py/FunctionTimer.run
|
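The masked probe is the Python 2/3 iterator-protocol split: iterators expose next() on Python 2 and __next__() on Python 3. The builtin next(), available since Python 2.6, dispatches correctly on both and removes the try/except entirely:

import itertools

iterator = itertools.cycle([(1,), (2,), (3,)])
for _ in range(5):
    args = next(iterator)   # no __next__/next probing needed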
5,096 |
@public
def field_isomorphism(a, b, **args):
"""Construct an isomorphism between two number fields. """
a, b = sympify(a), sympify(b)
if not a.is_AlgebraicNumber:
a = AlgebraicNumber(a)
if not b.is_AlgebraicNumber:
b = AlgebraicNumber(b)
if a == b:
return a.coeffs()
n = a.minpoly.degree()
m = b.minpoly.degree()
if n == 1:
return [a.root]
if m % n != 0:
return None
if args.get('fast', True):
try:
result = field_isomorphism_pslq(a, b)
if result is not None:
return result
except __HOLE__:
pass
return field_isomorphism_factor(a, b)
|
NotImplementedError
|
dataset/ETHPy150Open sympy/sympy/sympy/polys/numberfields.py/field_isomorphism
|
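A small usage sketch of the public API above. The expected outputs follow from the code shown (the degree check returns None when m % n != 0) and from sqrt(2) being the negation of -sqrt(2); treat the exact coefficient list as indicative:

from sympy import sqrt, Rational
from sympy.polys.numberfields import field_isomorphism

# Embed Q(sqrt(2)) into Q(-sqrt(2)): sqrt(2) == -1*(-sqrt(2)) + 0
print(field_isomorphism(sqrt(2), -sqrt(2)))           # expected: [-1, 0]

# Degree 2 does not divide degree 3, so no embedding can exist
print(field_isomorphism(sqrt(2), 2**Rational(1, 3)))  # expected: None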
5,097 |
def loadAllMayaPlugins():
'''will load all maya-installed plugins
WARNING: tthe act of loading all the plugins may crash maya, especially if
done from a non-GUI session
'''
import logging
logger = logging.getLogger('pymel')
logger.debug("loading all maya plugins...")
for plugin in mayaPlugins():
try:
maya.cmds.loadPlugin( plugin, quiet=1 )
except __HOLE__: pass
logger.debug("...done loading all maya plugins")
|
RuntimeError
|
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.5/pymel/api/plugins.py/loadAllMayaPlugins
|
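The bare `except RuntimeError: pass` above drops load failures on the floor. A fragment that collects them for the log instead, meant to stand in for the loop inside the function (mayaPlugins, maya.cmds, and logger come from the record's own context):

failed = []
for plugin in mayaPlugins():
    try:
        maya.cmds.loadPlugin(plugin, quiet=1)
    except RuntimeError as e:
        failed.append((plugin, str(e)))
if failed:
    logger.warning('%d plugins failed to load: %s', len(failed), failed)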
5,098 |
def pluginCommands(pluginName, reportedOnly=False):
'''Returns the list of all commands that the plugin provides, to the best
of our knowledge.
Note that depending on your version of maya, this may not actually be the
list of all commands provided.
'''
import logging
logger = logging.getLogger('pymel')
commands = []
for cmdType, pluginToCmds in UNREPORTED_COMMANDS.iteritems():
try:
moreCmds = maya.cmds.pluginInfo(pluginName, query=1, **{cmdType:1})
except __HOLE__: # will get this if it's a flag pluginInfo doesn't know
if reportedOnly:
moreCmds = []
else:
moreCmds = pluginToCmds.get(pluginName, [])
except Exception:
logger.error("Failed to get %s list from %s" % (cmdType, pluginName))
moreCmds = []
# moreCmds may be None, as pluginInfo will return None
if moreCmds:
commands.extend(moreCmds)
return commands
|
TypeError
|
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.5/pymel/api/plugins.py/pluginCommands
|
5,099 |
def __call__(self, method):
def wrapped_method(*args, **kwargs):
try:
for data in self.neede_data:
data_value = args[1][data]
if self.options.get('force_int', False):
try:
args[1][data] = int(data_value)
except (ValueError, TypeError):
raise HTTPException(log_message='field "%s" for check_for_data decorated method "%s" with option "force_int" needs to be an integer' % (data, method.__name__))
except (KeyError, __HOLE__):
raise HTTPException(log_message='check_for_data decorated method "%s" needs "%s" data' % (method.__name__, data))
method(*args, **kwargs)
return wrapped_method
|
IndexError
|
dataset/ETHPy150Open ierror/BeautifulMind.io/beautifulmind/mindmaptornado/decorators.py/check_for_data.__call__
|
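The decorator's constructor is not part of the record; the attributes used in __call__ suggest it stores the required field names as neede_data (sic) and keyword flags as options. A plausible reconstruction plus hypothetical usage (both are assumptions, not the project's actual code):

class check_for_data(object):
    def __init__(self, *neede_data, **options):
        # attribute names chosen to match the __call__ body above
        self.neede_data = neede_data
        self.options = options

# Hypothetical Tornado-style handler; args[1] is the decoded message dict:
# @check_for_data('mindmap_id', 'x', 'y', force_int=True)
# def on_node_move(self, message): ...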