Unnamed: 0 (int64, 0-10k) | function (string, lengths 79-138k) | label (string, 20 classes) | info (string, lengths 42-261) |
---|---|---|---|
7,000 | def _test_tags_read(filename=None):
    import sys
    if filename is None:
        if len(sys.argv) != 2:
            filename = '/tmp/libtiff_tags_write.tiff'
            if not os.path.isfile (filename):
                print 'Run `%s <filename>` for testing.' % (__file__)
                return
        else:
            filename = sys.argv[1]
    tiff = TIFF.open(filename)
    tmp = tiff.GetField("Artist")
    assert tmp=="A Name","Tag 'Artist' did not read the correct value (Got '%s'; Expected 'A Name')" % (tmp,)
    tmp = tiff.GetField("DocumentName")
    assert tmp=="","Tag 'DocumentName' did not read the correct value (Got '%s'; Expected empty string)" % (tmp,)
    tmp = tiff.GetField("PrimaryChromaticities")
    assert tmp==[1,2,3,4,5,6],"Tag 'PrimaryChromaticities' did not read the correct value (Got '%r'; Expected '[1,2,3,4,5,6]'" % (tmp,)
    tmp = tiff.GetField("BitsPerSample")
    assert tmp==8,"Tag 'BitsPerSample' did not read the correct value (Got %s; Expected 8)" % (str(tmp),)
    tmp = tiff.GetField("ColorMap")
    try:
        assert len(tmp) == 3,"Tag 'ColorMap' should be three arrays, found %d" % len(tmp)
        assert len(tmp[0])==256,"Tag 'ColorMap' should be three arrays of 256 elements, found %d elements" % len(tmp[0])
        assert len(tmp[1])==256,"Tag 'ColorMap' should be three arrays of 256 elements, found %d elements" % len(tmp[1])
        assert len(tmp[2])==256,"Tag 'ColorMap' should be three arrays of 256 elements, found %d elements" % len(tmp[2])
    except __HOLE__:
        print "Tag 'ColorMap' has the wrong shape of 3 arrays of 256 elements each"
        return
    print "Tag Read: SUCCESS" | TypeError | dataset/ETHPy150Open pearu/pylibtiff/libtiff/libtiff_ctypes.py/_test_tags_read |
7,001 | @record
def test_message_bytes_fails(self):
    # Arrange
    queue_name = self._get_queue_reference()
    # Action.
    try:
        message = b'xyz'
        self.qs.put_message(queue_name, message)
        self.fail('Passing binary to text encoder should fail.')
    except __HOLE__ as e:
        self.assertTrue(str(e).startswith('message should be of type'))
    # Asserts | TypeError | dataset/ETHPy150Open Azure/azure-storage-python/tests/test_queue_encodings.py/StorageQueueEncodingTest.test_message_bytes_fails |
7,002 | @record
def test_message_text_fails(self):
    # Arrange
    qs2 = self._create_storage_service(QueueService, self.settings)
    qs2.encode_function = QueueMessageFormat.binary_base64encode
    qs2.decode_function = QueueMessageFormat.binary_base64decode
    queue_name = self._get_queue_reference()
    # Action.
    try:
        message = u'xyz'
        qs2.put_message(queue_name, message)
        self.fail('Passing text to binary encoder should fail.')
    except __HOLE__ as e:
        self.assertEqual(str(e), 'message should be of type bytes.')
    # Asserts | TypeError | dataset/ETHPy150Open Azure/azure-storage-python/tests/test_queue_encodings.py/StorageQueueEncodingTest.test_message_text_fails |
7,003 | @record
def test_message_base64_decode_fails(self):
    # Arrange
    qs2 = self._create_storage_service(QueueService, self.settings)
    qs2.encode_function = QueueMessageFormat.text_xmlencode
    qs2.decode_function = QueueMessageFormat.binary_base64decode
    queue_name = self._create_queue()
    message = u'xyz'
    qs2.put_message(queue_name, message)
    # Action.
    try:
        qs2.get_messages(queue_name)
        self.fail('Decoding unicode string as base64 should fail.')
    except __HOLE__ as e:
        self.assertEqual(str(e), 'message is not a valid base64 value.')
    # Asserts
#------------------------------------------------------------------------------ | ValueError | dataset/ETHPy150Open Azure/azure-storage-python/tests/test_queue_encodings.py/StorageQueueEncodingTest.test_message_base64_decode_fails |
7,004 | def read_hdf(path_or_buf, key=None, **kwargs):
    """ read from the store, close it if we opened it
    Retrieve pandas object stored in file, optionally based on where
    criteria
    Parameters
    ----------
    path_or_buf : path (string), or buffer to read from
    key : group identifier in the store. Can be omitted a HDF file contains
        a single pandas object.
    where : list of Term (or convertable) objects, optional
    start : optional, integer (defaults to None), row number to start
        selection
    stop : optional, integer (defaults to None), row number to stop
        selection
    columns : optional, a list of columns that if not None, will limit the
        return columns
    iterator : optional, boolean, return an iterator, default False
    chunksize : optional, nrows to include in iteration, return an iterator
    Returns
    -------
    The selected object
    """
    # grab the scope
    if 'where' in kwargs:
        kwargs['where'] = _ensure_term(kwargs['where'], scope_level=1)
    if isinstance(path_or_buf, string_types):
        try:
            exists = os.path.exists(path_or_buf)
        # if filepath is too long
        except (TypeError, __HOLE__):
            exists = False
        if not exists:
            raise IOError('File %s does not exist' % path_or_buf)
        # can't auto open/close if we are using an iterator
        # so delegate to the iterator
        store = HDFStore(path_or_buf, **kwargs)
        auto_close = True
    elif isinstance(path_or_buf, HDFStore):
        if not path_or_buf.is_open:
            raise IOError('The HDFStore must be open for reading.')
        store = path_or_buf
        auto_close = False
    else:
        raise NotImplementedError('Support for generic buffers has not been '
                                  'implemented.')
    try:
        if key is None:
            keys = store.keys()
            if len(keys) != 1:
                raise ValueError('key must be provided when HDF file contains '
                                 'multiple datasets.')
            key = keys[0]
        return store.select(key, auto_close=auto_close, **kwargs)
    except:
        # if there is an error, close the store
        try:
            store.close()
        except:
            pass
        raise | ValueError | dataset/ETHPy150Open pydata/pandas/pandas/io/pytables.py/read_hdf |
7,005 | def __init__(self, path, mode=None, complevel=None, complib=None,
             fletcher32=False, **kwargs):
    try:
        import tables # noqa
    except __HOLE__ as ex: # pragma: no cover
        raise ImportError('HDFStore requires PyTables, "{ex}" problem '
                          'importing'.format(ex=str(ex)))
    if complib not in (None, 'blosc', 'bzip2', 'lzo', 'zlib'):
        raise ValueError("complib only supports 'blosc', 'bzip2', lzo' "
                         "or 'zlib' compression.")
    self._path = path
    if mode is None:
        mode = 'a'
    self._mode = mode
    self._handle = None
    self._complevel = complevel
    self._complib = complib
    self._fletcher32 = fletcher32
    self._filters = None
    self.open(mode=mode, **kwargs) | ImportError | dataset/ETHPy150Open pydata/pandas/pandas/io/pytables.py/HDFStore.__init__ |
7,006 | def convert(self, values, nan_rep, encoding):
    """set the data from this selection (and convert to the correct dtype
    if we can)
    """
    # values is a recarray
    if values.dtype.fields is not None:
        values = values[self.cname]
    self.set_data(values)
    # use the meta if needed
    meta = _ensure_decoded(self.meta)
    # convert to the correct dtype
    if self.dtype is not None:
        dtype = _ensure_decoded(self.dtype)
        # reverse converts
        if dtype == u('datetime64'):
            # recreate with tz if indicated
            self.data = _set_tz(self.data, self.tz, coerce=True)
        elif dtype == u('timedelta64'):
            self.data = np.asarray(self.data, dtype='m8[ns]')
        elif dtype == u('date'):
            try:
                self.data = np.asarray(
                    [date.fromordinal(v) for v in self.data], dtype=object)
            except __HOLE__:
                self.data = np.asarray(
                    [date.fromtimestamp(v) for v in self.data],
                    dtype=object)
        elif dtype == u('datetime'):
            self.data = np.asarray(
                [datetime.fromtimestamp(v) for v in self.data],
                dtype=object)
        elif meta == u('category'):
            # we have a categorical
            categories = self.metadata
            self.data = Categorical.from_codes(self.data.ravel(),
                                               categories=categories,
                                               ordered=self.ordered)
        else:
            try:
                self.data = self.data.astype(dtype, copy=False)
            except:
                self.data = self.data.astype('O', copy=False)
    # convert nans / decode
    if _ensure_decoded(self.kind) == u('string'):
        self.data = _unconvert_string_array(
            self.data, nan_rep=nan_rep, encoding=encoding)
    return self | ValueError | dataset/ETHPy150Open pydata/pandas/pandas/io/pytables.py/DataCol.convert |
7,007 | def write_array(self, key, value, items=None):
    if key in self.group:
        self._handle.remove_node(self.group, key)
    # Transform needed to interface with pytables row/col notation
    empty_array = self._is_empty_array(value.shape)
    transposed = False
    if com.is_categorical_dtype(value):
        raise NotImplementedError("cannot store a category dtype")
    if not empty_array:
        value = value.T
        transposed = True
    if self._filters is not None:
        atom = None
        try:
            # get the atom for this datatype
            atom = _tables().Atom.from_dtype(value.dtype)
        except __HOLE__:
            pass
        if atom is not None:
            # create an empty chunked array and fill it from value
            if not empty_array:
                ca = self._handle.create_carray(self.group, key, atom,
                                                value.shape,
                                                filters=self._filters)
                ca[:] = value
                getattr(self.group, key)._v_attrs.transposed = transposed
            else:
                self.write_array_empty(key, value)
            return
    if value.dtype.type == np.object_:
        # infer the type, warn if we have a non-string type here (for
        # performance)
        inferred_type = lib.infer_dtype(value.ravel())
        if empty_array:
            pass
        elif inferred_type == 'string':
            pass
        else:
            try:
                items = list(items)
            except:
                pass
            ws = performance_doc % (inferred_type, key, items)
            warnings.warn(ws, PerformanceWarning, stacklevel=7)
        vlarr = self._handle.create_vlarray(self.group, key,
                                            _tables().ObjectAtom())
        vlarr.append(value)
    else:
        if empty_array:
            self.write_array_empty(key, value)
        else:
            if com.is_datetime64_dtype(value.dtype):
                self._handle.create_array(
                    self.group, key, value.view('i8'))
                getattr(
                    self.group, key)._v_attrs.value_type = 'datetime64'
            elif com.is_datetime64tz_dtype(value.dtype):
                # store as UTC
                # with a zone
                self._handle.create_array(self.group, key,
                                          value.asi8)
                node = getattr(self.group, key)
                node._v_attrs.tz = _get_tz(value.tz)
                node._v_attrs.value_type = 'datetime64'
            elif com.is_timedelta64_dtype(value.dtype):
                self._handle.create_array(
                    self.group, key, value.view('i8'))
                getattr(
                    self.group, key)._v_attrs.value_type = 'timedelta64'
            else:
                self._handle.create_array(self.group, key, value)
    getattr(self.group, key)._v_attrs.transposed = transposed | ValueError | dataset/ETHPy150Open pydata/pandas/pandas/io/pytables.py/GenericFixed.write_array |
7,008 | def validate_multiindex(self, obj):
    """validate that we can store the multi-index; reset and return the
    new object
    """
    levels = [l if l is not None else "level_{0}".format(i)
              for i, l in enumerate(obj.index.names)]
    try:
        return obj.reset_index(), levels
    except __HOLE__:
        raise ValueError("duplicate names/columns in the multi-index when "
                         "storing as a table") | ValueError | dataset/ETHPy150Open pydata/pandas/pandas/io/pytables.py/Table.validate_multiindex |
7,009 | def create_axes(self, axes, obj, validate=True, nan_rep=None,
                data_columns=None, min_itemsize=None, **kwargs):
    """ create and return the axes
    leagcy tables create an indexable column, indexable index,
    non-indexable fields
    Parameters:
    -----------
    axes: a list of the axes in order to create (names or numbers of
        the axes)
    obj : the object to create axes on
    validate: validate the obj against an existing object already
        written
    min_itemsize: a dict of the min size for a column in bytes
    nan_rep : a values to use for string column nan_rep
    encoding : the encoding for string values
    data_columns : a list of columns that we want to create separate to
        allow indexing (or True will force all columns)
    """
    # set the default axes if needed
    if axes is None:
        try:
            axes = _AXES_MAP[type(obj)]
        except:
            raise TypeError("cannot properly create the storer for: "
                            "[group->%s,value->%s]"
                            % (self.group._v_name, type(obj)))
    # map axes to numbers
    axes = [obj._get_axis_number(a) for a in axes]
    # do we have an existing table (if so, use its axes & data_columns)
    if self.infer_axes():
        existing_table = self.copy()
        existing_table.infer_axes()
        axes = [a.axis for a in existing_table.index_axes]
        data_columns = existing_table.data_columns
        nan_rep = existing_table.nan_rep
        self.encoding = existing_table.encoding
        self.info = copy.copy(existing_table.info)
    else:
        existing_table = None
    # currently support on ndim-1 axes
    if len(axes) != self.ndim - 1:
        raise ValueError(
            "currently only support ndim-1 indexers in an AppendableTable")
    # create according to the new data
    self.non_index_axes = []
    self.data_columns = []
    # nan_representation
    if nan_rep is None:
        nan_rep = 'nan'
    self.nan_rep = nan_rep
    # create axes to index and non_index
    index_axes_map = dict()
    for i, a in enumerate(obj.axes):
        if i in axes:
            name = obj._AXIS_NAMES[i]
            index_axes_map[i] = _convert_index(
                a, self.encoding, self.format_type
            ).set_name(name).set_axis(i)
        else:
            # we might be able to change the axes on the appending data if
            # necessary
            append_axis = list(a)
            if existing_table is not None:
                indexer = len(self.non_index_axes)
                exist_axis = existing_table.non_index_axes[indexer][1]
                if append_axis != exist_axis:
                    # ahah! -> reindex
                    if sorted(append_axis) == sorted(exist_axis):
                        append_axis = exist_axis
            # the non_index_axes info
            info = _get_info(self.info, i)
            info['names'] = list(a.names)
            info['type'] = a.__class__.__name__
            self.non_index_axes.append((i, append_axis))
    # set axis positions (based on the axes)
    self.index_axes = [
        index_axes_map[a].set_pos(j).update_info(self.info)
        for j, a in enumerate(axes)
    ]
    j = len(self.index_axes)
    # check for column conflicts
    if validate:
        for a in self.axes:
            a.maybe_set_size(min_itemsize=min_itemsize)
    # reindex by our non_index_axes & compute data_columns
    for a in self.non_index_axes:
        obj = _reindex_axis(obj, a[0], a[1])
    def get_blk_items(mgr, blocks):
        return [mgr.items.take(blk.mgr_locs) for blk in blocks]
    # figure out data_columns and get out blocks
    block_obj = self.get_object(obj).consolidate()
    blocks = block_obj._data.blocks
    blk_items = get_blk_items(block_obj._data, blocks)
    if len(self.non_index_axes):
        axis, axis_labels = self.non_index_axes[0]
        data_columns = self.validate_data_columns(
            data_columns, min_itemsize)
        if len(data_columns):
            mgr = block_obj.reindex_axis(
                Index(axis_labels).difference(Index(data_columns)),
                axis=axis
            )._data
            blocks = list(mgr.blocks)
            blk_items = get_blk_items(mgr, blocks)
            for c in data_columns:
                mgr = block_obj.reindex_axis([c], axis=axis)._data
                blocks.extend(mgr.blocks)
                blk_items.extend(get_blk_items(mgr, mgr.blocks))
    # reorder the blocks in the same order as the existing_table if we can
    if existing_table is not None:
        by_items = dict([(tuple(b_items.tolist()), (b, b_items))
                         for b, b_items in zip(blocks, blk_items)])
        new_blocks = []
        new_blk_items = []
        for ea in existing_table.values_axes:
            items = tuple(ea.values)
            try:
                b, b_items = by_items.pop(items)
                new_blocks.append(b)
                new_blk_items.append(b_items)
            except:
                raise ValueError(
                    "cannot match existing table structure for [%s] on "
                    "appending data" % ','.join(pprint_thing(item) for
                                                item in items))
        blocks = new_blocks
        blk_items = new_blk_items
    # add my values
    self.values_axes = []
    for i, (b, b_items) in enumerate(zip(blocks, blk_items)):
        # shape of the data column are the indexable axes
        klass = DataCol
        name = None
        # we have a data_column
        if (data_columns and len(b_items) == 1 and
                b_items[0] in data_columns):
            klass = DataIndexableCol
            name = b_items[0]
            self.data_columns.append(name)
        # make sure that we match up the existing columns
        # if we have an existing table
        if existing_table is not None and validate:
            try:
                existing_col = existing_table.values_axes[i]
            except:
                raise ValueError("Incompatible appended table [%s] with "
                                 "existing table [%s]"
                                 % (blocks, existing_table.values_axes))
        else:
            existing_col = None
        try:
            col = klass.create_for_block(
                i=i, name=name, version=self.version)
            col.set_atom(block=b, block_items=b_items,
                         existing_col=existing_col,
                         min_itemsize=min_itemsize,
                         nan_rep=nan_rep,
                         encoding=self.encoding,
                         info=self.info,
                         **kwargs)
            col.set_pos(j)
            self.values_axes.append(col)
        except (__HOLE__, ValueError, TypeError) as e:
            raise e
        except Exception as detail:
            raise Exception(
                "cannot find the correct atom type -> "
                "[dtype->%s,items->%s] %s"
                % (b.dtype.name, b_items, str(detail))
            )
        j += 1
    # validate our min_itemsize
    self.validate_min_itemsize(min_itemsize)
    # validate our metadata
    self.validate_metadata(existing_table)
    # validate the axes if we have an existing table
    if validate:
        self.validate(existing_table) | NotImplementedError | dataset/ETHPy150Open pydata/pandas/pandas/io/pytables.py/Table.create_axes |
7,010 | def generate(self, where):
    """ where can be a : dict,list,tuple,string """
    if where is None:
        return None
    q = self.table.queryables()
    try:
        return Expr(where, queryables=q, encoding=self.table.encoding)
    except __HOLE__:
        # raise a nice message, suggesting that the user should use
        # data_columns
        raise ValueError(
            "The passed where expression: {0}\n"
            " contains an invalid variable reference\n"
            " all of the variable refrences must be a "
            "reference to\n"
            " an axis (e.g. 'index' or 'columns'), or a "
            "data_column\n"
            " The currently defined references are: {1}\n"
            .format(where, ','.join(q.keys()))
        ) | NameError | dataset/ETHPy150Open pydata/pandas/pandas/io/pytables.py/Selection.generate |
7,011 | def process_response(self, request, response):
    """
    If request.session was modified, or if the configuration is to save the
    session every time, save the changes and set a session cookie or delete
    the session cookie if the session has been emptied.
    """
    try:
        accessed = request.session.accessed
        modified = request.session.modified
        empty = request.session.is_empty()
    except __HOLE__:
        pass
    else:
        # First check if we need to delete this cookie.
        # The session should be deleted only if the session is entirely empty
        if settings.SESSION_COOKIE_NAME in request.COOKIES and empty:
            response.delete_cookie(settings.SESSION_COOKIE_NAME, domain=settings.SESSION_COOKIE_DOMAIN)
        else:
            if accessed:
                patch_vary_headers(response, ('Cookie',))
            if (modified or settings.SESSION_SAVE_EVERY_REQUEST) and not empty:
                if request.session.get_expire_at_browser_close():
                    max_age = None
                    expires = None
                else:
                    max_age = request.session.get_expiry_age()
                    expires_time = time.time() + max_age
                    expires = cookie_date(expires_time)
                # Save the session data and refresh the client cookie.
                # Skip session save for 500 responses, refs #3881.
                if response.status_code != 500:
                    try:
                        request.session.save()
                    except UpdateError:
                        # The user is now logged out; redirecting to same
                        # page will result in a redirect to the login page
                        # if required.
                        return redirect(request.path)
                    response.set_cookie(
                        settings.SESSION_COOKIE_NAME,
                        request.session.session_key, max_age=max_age,
                        expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
                        path=settings.SESSION_COOKIE_PATH,
                        secure=settings.SESSION_COOKIE_SECURE or None,
                        httponly=settings.SESSION_COOKIE_HTTPONLY or None,
                    )
    return response | AttributeError | dataset/ETHPy150Open django/django/django/contrib/sessions/middleware.py/SessionMiddleware.process_response |
7,012 | def ajax_list(self):
    field_name = request.args.get('field')
    prev_page = 0
    next_page = 0
    try:
        models = path_to_models(self.model, field_name)
    except __HOLE__:
        data = []
    else:
        field = self.model._meta.fields[field_name]
        rel_model = models.pop()
        rel_field = rel_model._meta.fields[self.foreign_key_lookups[field_name]]
        query = rel_model.select().order_by(rel_field)
        query_string = request.args.get('query')
        if query_string:
            query = query.where(rel_field ** ('%%%s%%' % query_string))
        pq = PaginatedQuery(query, self.filter_paginate_by)
        current_page = pq.get_page()
        if current_page > 1:
            prev_page = current_page - 1
        if current_page < pq.get_pages():
            next_page = current_page + 1
        data = []
        # if the field is nullable, include the "None" option at the top of the list
        if field.null:
            data.append({'id': '__None', 'repr': 'None'})
        data.extend([{'id': obj.get_id(), 'repr': unicode(obj)} for obj in pq.get_list()])
    json_data = json.dumps({'prev_page': prev_page, 'next_page': next_page, 'object_list': data})
    return Response(json_data, mimetype='application/json') | AttributeError | dataset/ETHPy150Open coleifer/flask-peewee/flask_peewee/admin.py/ModelAdmin.ajax_list |
7,013 | def get_model_field(self, model, field):
    try:
        attr = getattr(model, field)
    except AttributeError:
        model_admin = self.admin[type(model)]
        try:
            attr = getattr(model_admin, field)
        except __HOLE__:
            raise AttributeError('Could not find attribute or method '
                                 'named "%s".' % field)
        else:
            return attr(model)
    else:
        if callable(attr):
            attr = attr()
        return attr | AttributeError | dataset/ETHPy150Open coleifer/flask-peewee/flask_peewee/admin.py/AdminTemplateHelper.get_model_field |
7,014 | def get_verbose_name(self, model, column_name):
    try:
        field = model._meta.fields[column_name]
    except __HOLE__:
        return self.fix_underscores(column_name)
    else:
        return field.verbose_name | KeyError | dataset/ETHPy150Open coleifer/flask-peewee/flask_peewee/admin.py/AdminTemplateHelper.get_verbose_name |
7,015 | def uuid_or_400(f):
    @wraps(f)
    def wrapper(request, *args, **kwds):
        try:
            uuid.UUID(args[0])
        except __HOLE__:
            return HttpResponseBadRequest()
        return f(request, *args, **kwds)
    return wrapper | ValueError | dataset/ETHPy150Open healthchecks/healthchecks/hc/api/decorators.py/uuid_or_400 |
7,016 | def check_api_key(f):
    @wraps(f)
    def wrapper(request, *args, **kwds):
        try:
            data = json.loads(request.body.decode("utf-8"))
        except __HOLE__:
            return make_error("could not parse request body")
        api_key = str(data.get("api_key", ""))
        if api_key == "":
            return make_error("wrong api_key")
        try:
            user = User.objects.get(profile__api_key=api_key)
        except User.DoesNotExist:
            return make_error("wrong api_key")
        request.json = data
        request.user = user
        return f(request, *args, **kwds)
    return wrapper | ValueError | dataset/ETHPy150Open healthchecks/healthchecks/hc/api/decorators.py/check_api_key |
7,017 | def _get_repo_names(project_list):
    # take a list of project names, like ['anchor', 'barbican'], get the
    # corresponding repos for each. Return a dictionary with the project
    # as the key and the repo as the value.
    project_repos = {key: None for key in project_list}
    yaml_data = get_yaml("{0}{1}".format(BASE_URL, PATH_PROJECT_LIST))
    for project in yaml_data:
        try:
            # if one of the projects we're looking for is listed as a
            # deliverable for this project, look for the first listed repo
            # for that deliverable
            for deliverable in yaml_data[project]['deliverables']:
                if deliverable in project_list:
                    # the deliverable name is the project we're looking for,
                    # store the listed repo name for it
                    project_repos[deliverable] = (yaml_data[project]
                                                  ['deliverables']
                                                  [deliverable]['repos'][0])
        except (KeyError, __HOLE__):
            # improperly formatted entry, keep going
            pass
    return project_repos | IndexError | dataset/ETHPy150Open openstack/bandit/tools/openstack_coverage.py/_get_repo_names |
7,018 | def clone_projects(project_list):
    # clone all of the projects, return the directory name they are cloned in
    project_locations = _get_repo_names(project_list)
    orig_dir = os.path.abspath(os.getcwd())
    # create directory for projects
    try:
        dir_name = 'project-source-{}'.format(datetime.datetime.utcnow().
                                              strftime('%Y-%m-%d-%H-%M-%S'))
        os.mkdir(dir_name)
        os.chdir(dir_name)
    except __HOLE__:
        print("Unable to create directory for cloning projects")
        return None
    for project in project_locations:
        print '=' * len(TITLE)
        print("Cloning project: {} from repo {} into {}".
              format(project, project_locations[project], dir_name))
        try:
            subprocess.check_call(['git', 'clone',
                                   GIT_BASE + project_locations[project]])
        except subprocess.CalledProcessError:
            print("Unable to clone project from repo: {}".
                  format(project_locations[project]))
    os.chdir(orig_dir)
    return os.path.abspath(dir_name) | OSError | dataset/ETHPy150Open openstack/bandit/tools/openstack_coverage.py/clone_projects |
7,019 | def run_bandit(source_dir):
    # go through each source directory in the directory which contains source,
    # run Bandit with the established tox job, save results
    orig_dir = os.path.abspath(os.getcwd())
    try:
        fail_results_dir = os.path.abspath('fail_results')
        os.mkdir(fail_results_dir)
    except __HOLE__:
        print ("Unable to make results directory")
    os.chdir(source_dir)
    run_success = {}
    for d in os.listdir(os.getcwd()):
        os.chdir(d)
        print '=' * len(TITLE)
        print 'Running tox Bandit in directory {}'.format(d)
        try:
            subprocess.check_output(['tox', '-e', 'bandit'],
                                    stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as exc:
            run_success[d] = False
            # write log containing the process output
            fail_log_path = fail_results_dir + '/' + d
            with open(fail_log_path, 'w') as f:
                f.write(exc.output)
            print("Bandit tox failed, wrote failure log to {}".
                  format(fail_log_path))
        else:
            run_success[d] = True
        os.chdir(source_dir)
    os.chdir(orig_dir)
    return run_success | OSError | dataset/ETHPy150Open openstack/bandit/tools/openstack_coverage.py/run_bandit |
7,020 | def principal():
    print('(principal) iniciando...')
    co = corrotina()
    print('(principal) invocando next(co)...')
    next(co)
    print('(principal) invocando co.send(88)...')
    co.send(88)
    try:
        print('(principal) invocando co.send(99)...')
        co.send(99)
        # o print a seguir nunca vai acontecer
        print('(principal) invocado co.send(99)')
    except __HOLE__:
        print('(principal) a corotina nao tem mais valores a produzir') | StopIteration | dataset/ETHPy150Open fluentpython/example-code/attic/control/exemplo0.py/principal |
7,021 | def handle(self, *args, **options):
    quiet = options.get('quiet', False)
    if not quiet:
        self.stdout.write("Export of DMS MDT's command called.\n")
    # Try and Fetch all mdts
    manager = MetaDataTemplateManager()
    mdts = manager.get_all_mdts()
    # Validating all OK
    if not mdts:
        if not quiet:
            self.stderr.write('DMS has no MDT-s.')
        return
    for mdt_link in mdts.itervalues():
        filename = mdt_link['mdt_id']
        mdt_instance = manager.get_mdts_by_name([filename, ])
        filename += '.json'
        file_obj = open(os.path.join(filename), 'w+')
        file_obj.seek(0)
        mdt_obj = mdt_instance['1']
        try:
            del mdt_obj["doc_type"]
        except __HOLE__:
            pass
        mdt_jsoned = json.dumps(mdt_obj, sort_keys=True, indent=4)
        file_obj.write(mdt_jsoned)
        if not quiet:
            self.stderr.write('Exported to file: %s\n'% filename)
    if not quiet:
        self.stdout.write("Exporting %s MDT's" % str(len(mdts)))
        self.stdout.write('\n') | KeyError | dataset/ETHPy150Open adlibre/Adlibre-DMS/adlibre_dms/couchapps/mdtcouch/management/commands/export_mdts.py/Command.handle |
7,022 | def tearDown(self):
    os.chdir(self.test_dir)
    try:
        self._rmrf('locale/%s' % LOCALE)
    except __HOLE__:
        pass
    os.chdir(self._cwd) | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/regressiontests/makemessages/extraction.py/ExtractorTests.tearDown |
7,023 | def tearDown(self):
    super(SymlinkExtractorTests, self).tearDown()
    os.chdir(self.test_dir)
    try:
        os.remove(self.symlinked_dir)
    except __HOLE__:
        pass
    os.chdir(self._cwd) | OSError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/regressiontests/makemessages/extraction.py/SymlinkExtractorTests.tearDown |
7,024 | def _url_for_fetch(self, mapping):
    try:
        return mapping['pre_processed_url']
    except __HOLE__:
        return mapping['raw_url'] | KeyError | dataset/ETHPy150Open openelections/openelections-core/openelex/us/az/datasource.py/Datasource._url_for_fetch |
7,025 | def unplug_device(conf, device):
    orig_log_fail_as_error = device.get_log_fail_as_error()
    device.set_log_fail_as_error(False)
    try:
        device.link.delete()
    except __HOLE__:
        device.set_log_fail_as_error(orig_log_fail_as_error)
        # Maybe the device is OVS port, so try to delete
        ovs = ovs_lib.BaseOVS()
        bridge_name = ovs.get_bridge_for_iface(device.name)
        if bridge_name:
            bridge = ovs_lib.OVSBridge(bridge_name)
            bridge.delete_port(device.name)
        else:
            LOG.debug('Unable to find bridge for device: %s', device.name)
    finally:
        device.set_log_fail_as_error(orig_log_fail_as_error) | RuntimeError | dataset/ETHPy150Open openstack/neutron/neutron/cmd/netns_cleanup.py/unplug_device |
7,026 | @staticmethod
def create_repo(cwd=TEMP_DIR):
    try:
        shell_out(["git", "init"], cwd=cwd)
        return True
    except (__HOLE__, CalledProcessError):
        return False | OSError | dataset/ETHPy150Open hayd/pep8radius/tests/util_vcs.py/MixinGit.create_repo |
7,027 | @staticmethod
def successfully_commit_files(file_names,
                              commit="initial_commit",
                              cwd=TEMP_DIR):
    try:
        shell_out(["git", "add"] + file_names, cwd=cwd)
        shell_out(["git", "commit", "-m", commit], cwd=cwd)
        with from_dir(cwd):
            return Git().current_branch()
    except (__HOLE__, CalledProcessError):
        return False | OSError | dataset/ETHPy150Open hayd/pep8radius/tests/util_vcs.py/MixinGit.successfully_commit_files |
7,028 | @staticmethod
def create_repo(cwd=TEMP_DIR):
    try:
        shell_out(["hg", "init"], cwd=cwd)
        return True
    except (__HOLE__, CalledProcessError):
        return False | OSError | dataset/ETHPy150Open hayd/pep8radius/tests/util_vcs.py/MixinHg.create_repo |
7,029 | @staticmethod
def successfully_commit_files(file_names,
                              commit="initial_commit",
                              cwd=TEMP_DIR):
    try:
        shell_out(["hg", "add"] + file_names, cwd=cwd)
        shell_out(["hg", "commit", "-m", commit], cwd=cwd)
        with from_dir(cwd):
            return Hg().current_branch()
    except (__HOLE__, CalledProcessError):
        return False | OSError | dataset/ETHPy150Open hayd/pep8radius/tests/util_vcs.py/MixinHg.successfully_commit_files |
7,030 | @staticmethod
def create_repo(cwd=TEMP_DIR):
    try:
        shell_out(["bzr", "init"], cwd=cwd)
        return True
    except (__HOLE__, CalledProcessError):
        return False | OSError | dataset/ETHPy150Open hayd/pep8radius/tests/util_vcs.py/MixinBzr.create_repo |
7,031 | @staticmethod
def successfully_commit_files(file_names,
                              commit="initial_commit",
                              cwd=TEMP_DIR):
    try:
        shell_out(["bzr", "add"] + file_names, cwd=cwd)
        shell_out(["bzr", "commit", "-m", commit], cwd=cwd)
        with from_dir(cwd):
            return Bzr().current_branch()
    except (__HOLE__, CalledProcessError):
        return False | OSError | dataset/ETHPy150Open hayd/pep8radius/tests/util_vcs.py/MixinBzr.successfully_commit_files |
7,032 | def delete_topic(topic):
    if topic == 'General':
        print('Default topic "General" cannot be removed.')
        exit()
    try:
        del d[topic]
        print('Removed "{}" from your library.'.format(topic))
    except __HOLE__:
        print('"{}" is not in your library!'.format(topic))
        exit()
# if __name__ == '__main__':
#     for l in read('News'):
#         print(l)
#     remove_link('http://rt.com/rss/')
#     add_link('http://rt.com/rss/', 'News')
#     for l in read('News'):
#         print(l) | KeyError | dataset/ETHPy150Open iamaziz/TermFeed/termfeed/dbop.py/delete_topic |
7,033 | def generate_doc(config):
    docdir = os.path.join(cwd,'documentation')
    if not os.path.exists(docdir):
        docdir = os.path.join(cwd,'..','documentation')
    if not os.path.exists(docdir):
        print "Couldn't find documentation file at: %s" % docdir
        return None
    try:
        import markdown2 as markdown
    except __HOLE__:
        import markdown
    documentation = []
    for file in os.listdir(docdir):
        if file in ignoreFiles or os.path.isdir(os.path.join(docdir, file)):
            continue
        md = open(os.path.join(docdir,file)).read()
        html = markdown.markdown(md)
        documentation.append({file:html});
    return documentation | ImportError | dataset/ETHPy150Open jaraen/TiAccurateTimer/iphone/build.py/generate_doc |
7,034 | def _format_prefixes(subnetpool):
    try:
        return '\n'.join(pool for pool in subnetpool['prefixes'])
    except (TypeError, __HOLE__):
        return subnetpool['prefixes'] | KeyError | dataset/ETHPy150Open openstack/python-neutronclient/neutronclient/neutron/v2_0/subnetpool.py/_format_prefixes |
7,035 | def _posix_shell(self, chan, raw=True, initial_input=None):
    """
    Create a loop which redirects sys.stdin/stdout into this channel.
    The loop ends when channel.recv() returns 0.
    Code inspired by the Paramiko interactive demo.
    """
    result = []
    password_sent = False
    # Set terminal in raw mode
    if raw:
        context = raw_mode(self.pty.stdin)
    else:
        context = contextlib.nested()
    assert self.pty.set_ssh_channel_size
    with context:
        # Make channel non blocking.
        chan.settimeout(0.0)
        # When initial input has been given, send this first
        if initial_input:
            time.sleep(0.2) # Wait a very short while for the channel to be initialized, before sending.
            chan.send(initial_input)
        reading_from_stdin = True
        # Read/write loop
        while True:
            # Don't wait for any input when an exit status code has been
            # set already. (But still wait for the output to finish.)
            if chan.status_event.isSet():
                reading_from_stdin = False
                # When the channel is closed, and there's nothing to read
                # anymore. We can return what we got from Paramiko. (Not
                # sure why this happens. Most of the time, select() still
                # returns and chan.recv() returns an empty string, but when
                # read_ready is False, select() doesn't return anymore.)
                if chan.closed and not chan.in_buffer.read_ready():
                    break
            channels = [self.pty.stdin, chan] if reading_from_stdin else [chan]
            r, w, e = select(channels, [], [], 1)
            # Note the select-timeout. That is required in order to
            # check for the status_event every second.
            # Receive stream
            if chan in r:
                try:
                    x = chan.recv(1024)
                    # Received length 0 -> end of stream
                    if len(x) == 0:
                        break
                    # Write received characters to stdout and flush
                    while True:
                        try:
                            self.pty.stdout.write(x)
                            break
                        except IOError as e:
                            # Sometimes, when we have a lot of output, we get here:
                            # IOError: [Errno 11] Resource temporarily unavailable
                            # Just waiting a little, and retrying seems to work.
                            # See also: deployer.run.socket_client for a similar issue.
                            time.sleep(0.2)
                    self.pty.stdout.flush()
                    # Also remember received output.
                    # We want to return what's written to stdout.
                    result.append(x)
                    # Do we need to send the sudo password? (It's when the
                    # magic prompt has been detected in the stream) Note
                    # that we only monitor the last part of 'result', it's
                    # a bit fuzzy, but works.
                    if not password_sent and self.magic_sudo_prompt in ''.join(result[-32:]):
                        chan.send(self.password)
                        chan.send('\n')
                        password_sent = True
                except socket.timeout:
                    pass
            # Send stream (one by one character)
            # (use 'elif', read stdin only when there is no more output to be received.)
            elif self.pty.stdin in r:
                try:
                    # Make stdin non-blocking. (The select call already
                    # blocked for us, we want sys.stdin.read() to read
                    # as many bytes as possible without blocking.)
                    try:
                        fdesc.setNonBlocking(self.pty.stdin)
                        x = self.pty.stdin.read(1024)
                    finally:
                        # Set stdin blocking again
                        # (Writing works better in blocking mode.
                        # Especially OS X seems to be very sensitive if we
                        # write lange amounts [>1000 bytes] nonblocking to
                        # stdout. That causes a lot of IOErrors.)
                        fdesc.setBlocking(self.pty.stdin)
                    # We receive \n from stdin, but \r is required to
                    # send. (Until now, the only place where the
                    # difference became clear is in redis-cli, which
                    # only accepts \r as confirmation.)
                    x = x.replace('\n', '\r')
                except __HOLE__ as e:
                    # What to do with IOError exceptions?
                    # (we see what happens in the next select-call.)
                    continue
                # Received length 0
                # There's no more at stdin to read.
                if len(x) == 0:
                    # However, we should go on processing the input
                    # from the remote end, until the process finishes
                    # there (because it was done or processed Ctrl-C or
                    # Ctrl-D/EOF.)
                    #
                    # The end of the input stream happens when we are
                    # using StringIO at the client side, and we're not
                    # attached to a real pseudo terminal. (For
                    # unit-testing, or background commands.)
                    reading_from_stdin = False
                    continue
                # Write to channel
                chan.send(x)
                # Not sure about this. Sometimes, when pasting large data
                # in the command line, the many concecutive read or write
                # commands will make Paramiko hang somehow... (This was
                # the case, when still using a blocking pty.stdin.read(1)
                # instead of a non-blocking readmany.
                time.sleep(0.01)
    return ''.join(result)
# =====[ SFTP operations ]==== | IOError | dataset/ETHPy150Open jonathanslenders/python-deployer/deployer/host/base.py/Host._posix_shell |
7,036 | def specification(self, of, defaults=True, via=None):
    """Build our internal specification of the target callable.
    Optionally, pre-populate the callbacks for --help/-h and --version/-V.
    """
    cmd = Bunch(target=of)
    cmd.doc = partitionhelp(getdoc(cmd.target)) if getdoc(cmd.target) else None
    cmd.cls = isclass(cmd.target)
    cmd.fn = isfunction(cmd.target)
    cmd.method = ismethod(cmd.target)
    cmd.callable = cmd.target.__init__ if cmd.cls else cmd.target
    try:
        cmd.spec = getargspec(cmd.callable)
    except __HOLE__:
        # __init__ of built-in class, such as object
        cmd.spec = Bunch(args=['self'], varargs=None, keywords=None, defaults=None)
    cmd.trans = dict((i.replace('_', '-'), i) for i in cmd.spec.args)
    cmd.positional = cmd.spec.args[:len(cmd.spec.args)-len(cmd.spec.defaults or [])]
    cmd.named = cmd.spec.args[len(cmd.spec.args)-len(cmd.spec.defaults or []):]
    cmd.defaults = dict((i, j) for i, j in zip(reversed(cmd.spec.args), reversed(cmd.spec.defaults or [])))
    cmd.instance = None
    cmd.indexed = cmd.spec.varargs
    cmd.keyed = cmd.spec.keywords
    cmd.docs = dict()
    cmd.parent = via
    if cmd.cls or cmd.method:
        cmd.positional = cmd.positional[1:]
    cmd.range = (len(cmd.positional), 65535 if cmd.spec.varargs else len(cmd.positional))
    cast = dict()
    short = dict()
    callbacks = dict()
    if defaults:
        cmd.named.extend(('help', 'version'))
        callbacks.update(help=self.help, version=self.version)
        short.update(h='help', V='version')
        cast.update(help=boolean, version=boolean)
    for name in sorted(cmd.defaults):
        # Determine typecasting information.
        if isinstance(cmd.defaults[name], bool): cast[name] = boolean
        elif isinstance(cmd.defaults[name], (list, tuple)): cast[name] = array
        elif cmd.defaults[name] is not None: cast[name] = type(cmd.defaults[name])
        # Determine abbreviations.
        for char in "".join(i for j in zip(name, name.upper()) for i in j):
            if char in short: continue
            short[char] = name
            break
    cmd.cast = cast
    cmd.short = short
    cmd.callbacks = callbacks
    return cmd | TypeError | dataset/ETHPy150Open marrow/script/marrow/script/core.py/Parser.specification |
7,037 | def update (self, pairs):
    try:
        for key, value in pairs.iteritems ():
            if not self.has_key (key):
                self._keys.append (key)
    except __HOLE__:
        for pair in pairs:
            if not self.has_key (pair[0]):
                self._keys.append (pair[0])
    dict.update (self, pairs) | AttributeError | dataset/ETHPy150Open dotskapes/dotSkapes/modules/geo/utils.py/ordered_dict.update |
7,038 | @staticmethod
def chooseBoard(dap_class=DAPAccess, blocking=True, return_first=False,
                board_id=None, target_override=None, frequency=1000000,
                init_board=True):
    """
    Allow you to select a board among all boards connected
    """
    all_mbeds = MbedBoard.getAllConnectedBoards(dap_class, False, blocking,
                                                target_override, frequency)
    # If a board ID is specified close all other boards
    if board_id != None:
        new_mbed_list = []
        for mbed in all_mbeds:
            if mbed.unique_id == (board_id):
                new_mbed_list.append(mbed)
            else:
                mbed.link.close()
        assert len(new_mbed_list) <= 1
        all_mbeds = new_mbed_list
    # Return if no boards are connected
    if all_mbeds == None or len(all_mbeds) <= 0:
        if board_id is None:
            print("No connected boards")
        else:
            print("Board %s is not connected" % board_id)
        return None # No boards to close so it is safe to return
    # Select first board and close others if True
    if return_first:
        for i in range(1, len(all_mbeds)):
            all_mbeds[i].link.close()
        all_mbeds = all_mbeds[0:1]
    # Ask use to select boards if there is more than 1 left
    if len(all_mbeds) > 1:
        index = 0
        print "id => usbinfo | boardname"
        for mbed in all_mbeds:
            print "%d => %s" % (index, mbed.getInfo().encode('ascii', 'ignore'))
            index += 1
        while True:
            print "input id num to choice your board want to connect"
            line = sys.stdin.readline()
            valid = False
            try:
                ch = int(line)
                valid = 0 <= ch < len(all_mbeds)
            except __HOLE__:
                pass
            if not valid:
                logging.info("BAD CHOICE: %s", line)
                index = 0
                for mbed in all_mbeds:
                    print "%d => %s" % (index, mbed.getInfo())
                    index += 1
            else:
                break
        # close all others mbed connected
        for mbed in all_mbeds:
            if mbed != all_mbeds[ch]:
                mbed.link.close()
        all_mbeds = all_mbeds[ch:ch + 1]
    assert len(all_mbeds) == 1
    mbed = all_mbeds[0]
    if init_board:
        try:
            mbed.init()
        except:
            mbed.link.close()
            raise
    return mbed | ValueError | dataset/ETHPy150Open mbedmicro/pyOCD/pyOCD/board/mbed_board.py/MbedBoard.chooseBoard |
7,039 | def parse(self, response):
    if 'news.ycombinator.com' in response.url:
        soup = bs(response.body)
        items = [(x[0].text, x[0].get('href')) for x in
                 filter(None, [
                     x.findChildren() for x in
                     soup.findAll('td', {'class':'title'})
                 ])]
        for item in items:
            print item
            news_item = NewsItem()
            news_item['title'] = item[0]
            news_item['url'] = item[1]
            try:
                yield Request(item[1], callback=self.parse)
            except __HOLE__:
                yield Request('http://news.ycombinator.com/' + item[1], callback=self.parse)
            yield news_item
    else:
        sha1_response = hashlib.sha1(response.url).hexdigest()
        folder = PATH + '/' + sha1_response
        if not os.path.exists(folder):
            os.makedirs(folder)
        with open(folder + '/index.html', 'w+') as file_obj:
            file_obj.write(response.body) | ValueError | dataset/ETHPy150Open mvanveen/hncrawl/news/spiders/hnspider.py/HnspiderSpider.parse |
7,040 | def mangle_test_address(address):
    path, possible_open_bracket, params = address.partition('[')
    names = path.split("::")
    try:
        names.remove('()')
    except __HOLE__:
        pass
    # convert file path to dotted path
    names[0] = names[0].replace("/", '.')
    names[0] = _py_ext_re.sub("", names[0])
    # put any params back
    names[-1] += possible_open_bracket + params
    return names | ValueError | dataset/ETHPy150Open pytest-dev/pytest/_pytest/junitxml.py/mangle_test_address |
7,041 | def capture(self, webcam):
    """ Capture an image from a webcam
    Given a webcam, this attempts to capture an image using the subprocess
    command. Also creates a thumbnail of the image
    Args:
        webcam (dict): Entry for the webcam. Example::
            {
                'name': 'Pier West',
                'port': '/dev/video0',
                'params': {
                    'rotate': 270
                },
            }
        The values for the `params` key will be passed directly to fswebcam
    """
    assert isinstance(webcam, dict)
    self.logger.debug("Capturing image for {}...".format(webcam.get('name')))
    # Filename to save
    camera_name = webcam.get('port').split('/')[-1]
    # Create the directory for storing images
    webcam_dir = self.config['directories'].get('webcam')
    timestamp = current_time().isot
    date_dir = timestamp.split('T')[0].replace('-', '')
    try:
        os.makedirs("{}/{}".format(webcam_dir, date_dir), exist_ok=True)
    except __HOLE__ as err:
        self.logger.warning("Cannot create new dir: {} \t {}".format(date_dir, err))
    # Output file names
    out_file = '{}/{}/{}_{}.jpeg'.format(webcam_dir, date_dir, camera_name, timestamp)
    # We also create a thumbnail and always link it to the same image
    # name so that it is always current.
    thumbnail_file = '{}/tn_{}.jpeg'.format(webcam_dir, camera_name)
    options = self.base_params
    if 'params' in webcam:
        for opt, val in webcam.get('params').items():
            options += "--{}={}".format(opt, val)
    # Assemble all the parameters
    params = " -d {} --title \"{}\" {} --save {} --scale {} {}".format(
        webcam.get('port'),
        webcam.get('name'),
        options,
        out_file,
        self._thumbnail_resolution,
        thumbnail_file
    )
    # Actually call the command.
    # NOTE: This is a blocking call (within this process). See `start_capturing`
    try:
        self.logger.debug("Webcam subproccess command: {} {}".format(self.cmd, params))
        with open(os.devnull, 'w') as devnull:
            retcode = subprocess.call(self.cmd + params, shell=True, stdout=devnull, stderr=devnull)
        if retcode < 0:
            self.logger.warning(
                "Image captured terminated for {}. Return code: {} \t Error: {}".format(
                    webcam.get('name'),
                    retcode,
                    sys.stderr
                )
            )
        else:
            self.logger.debug("Image captured for {}".format(webcam.get('name')))
            # Static files (always points to most recent)
            static_out_file = '{}/{}.jpeg'.format(webcam_dir, camera_name)
            # Symlink the latest image
            if os.path.lexists(static_out_file):
                os.remove(static_out_file)
            os.symlink(out_file, static_out_file)
        return retcode
    except OSError as e:
        self.logger.warning("Execution failed:".format(e, file=sys.stderr)) | OSError | dataset/ETHPy150Open panoptes/POCS/panoptes/environment/webcams.py/Webcams.capture |
7,042 | def start_capturing(self):
    """ Starts the capturing loop for all cameras
    Depending on the number of frames taken for an individual image, capturing can
    take up to ~30 sec.
    """
    self.is_capturing = True
    for process in self._processes:
        self.logger.info("Staring webcam capture loop for process {}".format(process.name))
        try:
            process.start()
        except __HOLE__:
            self.logger.info("Can't start, trying to run")
            process.run() | AssertionError | dataset/ETHPy150Open panoptes/POCS/panoptes/environment/webcams.py/Webcams.start_capturing |
7,043 | @property
def nicedir(self):
    try:
        from os.path import relpath, isabs
    except __HOLE__:
        return self.dir
    else:
        a = self.dir
        r = relpath(self.dir)
        if isabs(a):
            home = os.environ["HOME"]
            if a == home:
                a = "~"
            elif a.startswith(home + os.sep):
                a = "~" + a[len(home):]
        if len(r) < len(a):
            return r
        else:
            return a | ImportError | dataset/ETHPy150Open trentm/sources/lib/sources.py/Source.nicedir |
7,044 | def _run(argv, cwd=None):
    log.debug("run '%s'", ' '.join(argv))
    try:
        return subprocess.check_call(argv, cwd=cwd)
    except __HOLE__:
        _, err, _ = sys.exc_info()
        import errno
        if err.errno == errno.ENOENT:
            raise OSError(errno.ENOENT, "'%s' not found" % argv[0])
        else:
            raise | OSError | dataset/ETHPy150Open trentm/sources/lib/sources.py/_run |
7,045 | def _parse_time(self, time):
    try:
        return int(time)
    except __HOLE__:
        return None | ValueError | dataset/ETHPy150Open rbarrois/mpdlcd/mpdlcd/mpdwrapper.py/MPDClient._parse_time |
7,046 | def google(q):
    query = quote(q)
    url = "https://encrypted.google.com/search?q={0}".format(query)
    soup = BeautifulSoup(requests.get(url).text, "html5lib")
    answer = soup.findAll("h3", attrs={"class": "r"})
    if not answer:
        return ":crying_cat_face: Sorry, google doesn't have an answer for you :crying_cat_face:"
    try:
        return unquote(re.findall(r"q=(.*?)&", str(answer[0]))[0])
    except __HOLE__:
        # in this case there is a first answer without a link, which is a
        # google response! Let's grab it and display it to the user.
        return ' '.join(answer[0].stripped_strings) | IndexError | dataset/ETHPy150Open llimllib/limbo/limbo/plugins/google.py/google |
7,047 | def run(req):
    # tell the client helper the current taskid
    stackhut.req_id = req['req_id']
    iface_name, func_name = req['method'].split('.')
    params = req['params']
    if iface_name in SERVICES:
        iface_impl = SERVICES[iface_name]
        try:
            func = getattr(iface_impl, func_name)
        except __HOLE__:
            return gen_error(-32601)
        # iface_impl.preRequest()
        # result = func(*params) if params else func()
        # iface_impl.postRequest()
        try:
            result = func(*params) if params else func()
        except stackhut.ServiceError as e:
            return gen_error(-32002, e.msg, e.data)
        return dict(result=result)
    else:
        return gen_error(-32601) | AttributeError | dataset/ETHPy150Open StackHut/stackhut/stackhut_toolkit/res/shims/python/runner.py/run |
7,048 | def to_python(self, value):
    if not value or value == 'None':
        return None
    if isinstance(value, Geoposition):
        return value
    if isinstance(value, list):
        return Geoposition(value[0], value[1])
    # default case is string
    value_parts = value.rsplit(',')
    try:
        latitude = value_parts[0]
    except IndexError:
        latitude = '0.0'
    try:
        longitude = value_parts[1]
    except __HOLE__:
        longitude = '0.0'
    return Geoposition(latitude, longitude) | IndexError | dataset/ETHPy150Open philippbosch/django-geoposition/geoposition/fields.py/GeopositionField.to_python |
7,049 | def get(self, key, *args):
    try:
        return self[key]
    except __HOLE__:
        if args:
            return args[0]
        raise | KeyError | dataset/ETHPy150Open pyjs/pyjs/pyjs/options.py/Mappings.Defaults.get |
7,050 | def __getitem__(self, key, exc=KeyError):
    try:
        return self._cache[key]
    except __HOLE__:
        raise exc(key) | KeyError | dataset/ETHPy150Open pyjs/pyjs/pyjs/options.py/Mappings.__getitem__ |
7,051 | def __setitem__(self, key, kwds):
    if not key:
        raise TypeError('Malformed name.')
    n = 'opt'
    if key.isupper():
        n = 'grp'
    new = '_%s' % n
    sig = '_%s_sig' % n
    sig_hash = '_%s_sig_hash' % n
    try:
        None in kwds
    except TypeError:
        raise TypeError('Must pass list or dict.')
    else:
        try:
            kwds[None]
        except __HOLE__:
            kwds= dict(zip(getattr(self, sig), kwds))
        except KeyError:
            pass
    if set(kwds.keys()) != getattr(self, sig_hash):
        raise TypeError('Malformed signature.')
    getattr(self, new)(key, **kwds) | TypeError | dataset/ETHPy150Open pyjs/pyjs/pyjs/options.py/Mappings.__setitem__ |
7,052 | def get(self, key, *args):
    try:
        return self[key]
    except __HOLE__:
        if args:
            return args[0]
        raise | KeyError | dataset/ETHPy150Open pyjs/pyjs/pyjs/options.py/Mappings.get |
7,053 | def runWorkflowQuery(config, vistrail=None, version=None, fromTime=None,
                     toTime=None, user=None, offset=0, limit=100, modules=[], thumbs=None):
    # returns list of workflows:
    # (vistrail name, vistrail id, id, name, date, user, thumb)
    result = []
    db = open_db_connection(config)
    select_part = \
        """SELECT DISTINCT v.name, v.id, w.parent_id, a1.value,
                  action.date, action.user"""
    from_part = \
        """FROM workflow w"""
    # "tag name" exist in workflow table but may have been changed
    # so we use value from the vistrail __tag__ annotation
    where_part = \
        """WHERE w.entity_type='workflow'"""
    limit_part = 'LIMIT %s, %s' % (int(offset), int(limit))
    if vistrail:
        try:
            where_part += " AND v.id=%s" % int(vistrail)
        except ValueError:
            where_part += " AND v.name=%s" % \
                db.escape(vistrail, get_db_lib().converters.conversions)
    if version:
        try:
            where_part += " AND w.parent_id=%s" % int(version)
        except __HOLE__:
            where_part += " AND a1.value=%s" % \
                db.escape(version, get_db_lib().converters.conversions)
    if fromTime:
        where_part += " AND w.last_modified>%s" % \
            db.escape(fromTime, get_db_lib().converters.conversions)
    if toTime:
        where_part += " AND w.last_modified<%s" % \
            db.escape(toTime, get_db_lib().converters.conversions)
    if user:
        where_part += " AND action.user=%s" % \
            db.escape(user, get_db_lib().converters.conversions)
    next_port = 1
    old_alias = None
    for i, module, connected in zip(range(1,len(modules)+1), *zip(*modules)):
        module = module.lower()
        alias = "m%s"%i
        from_part += \
            """ JOIN module {0} ON
                ({0}.parent_id=w.id AND {0}.entity_type=w.entity_type AND
                 {0}.name={1})
            """.format(alias,
                       db.escape(module, get_db_lib().converters.conversions))
        if connected:
            p1_alias, p2_alias=("port%s"%next_port), ("port%s"%(next_port+1))
            next_port += 2
            from_part += \
                """ JOIN port {0} ON
                    ({0}.entity_id=w.id AND {0}.entity_type=w.entity_type AND
                     {0}.moduleId={1}.id AND {0}.type='source')""".format(
                    p1_alias, old_alias)
            from_part += \
                """ JOIN port {0} ON
                    ({0}.entity_id=w.id AND {0}.entity_type=w.entity_type AND
                     {0}.moduleId={1}.id AND {0}.type='destination' AND
                     {0}.parent_id = {2}.parent_id)""".format(
                    p2_alias, alias, p1_alias)
        old_alias = alias
    from_part += \
        """ JOIN vistrail v ON w.vistrail_id = v.id JOIN
            action ON action.entity_id=w.vistrail_id AND
                      action.id=w.parent_id LEFT JOIN
            action_annotation a1 ON
                      a1.entity_id=w.vistrail_id AND
                      a1.action_id=w.parent_id AND
                      (a1.akey='__tag__' OR a1.akey IS NULL)"""
    if thumbs:
        select_part += ', t.image_bytes'
        from_part += """ LEFT JOIN action_annotation a2 ON
                             (a2.entity_id=w.vistrail_id AND
                              a2.action_id=w.parent_id AND
                              (a2.akey='__thumb__' OR
                               a2.akey IS NULL)) LEFT JOIN
                         thumbnail t ON a2.value=t.file_name"""
    else:
        select_part += ', NULL'
    command = ' '.join([select_part, from_part, where_part, limit_part]) + ';'
    #print command
    try:
        c = db.cursor()
        c.execute(command)
        rows = c.fetchall()
        result = rows
        c.close()
    except get_db_lib().Error, e:
        msg = "Couldn't perform query on db (%d : %s)" % \
            (e.args[0], e.args[1])
        raise VistrailsDBException(msg)
    # count all rows when offset = 0
    if 0 == offset:
        select_part = 'SELECT count(0)'
        command = ' '.join([select_part,from_part,where_part]) +';'
        #print command
        try:
            c = db.cursor()
            c.execute(command)
            res = c.fetchall()
            result= (result, res[0][0])
            c.close()
        except get_db_lib().Error, e:
            msg = "Couldn't perform query on db (%d : %s)" % \
                (e.args[0], e.args[1])
            raise VistrailsDBException(msg)
    close_db_connection(db)
    return result | ValueError | dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/services/query.py/runWorkflowQuery |
7,054 | def runLogQuery(config, vistrail=None, version=None, fromTime=None, toTime=None,
                user=None, completed=None, offset=0, limit=100, modules=[],
                thumbs=None):
    # returns list of workflow executions:
    # (vistrail name, vistrail id, log id, workflow id, workflow name,
    # execution id, start time, end time, user, completed, thumb)
    result = []
    db = open_db_connection(config)
    select_part = \
        """SELECT DISTINCT v.name, v.id, w.entity_id,
                  w.parent_version, a1.value, w.id,
                  w.ts_start, w.ts_end, w.user, w.completed"""
    from_part = \
        """FROM workflow_exec w JOIN
            log_tbl l ON (l.id = w.entity_id) JOIN
            vistrail v ON (l.vistrail_id = v.id) LEFT JOIN
            action_annotation a1 ON (a1.entity_id=v.id AND
                                     a1.action_id=w.parent_version)"""
    where_part = \
        """WHERE w.parent_type='vistrail' AND
                 w.entity_type='log' AND
                 (a1.akey='__tag__' OR a1.akey IS NULL)"""
    limit_part = 'LIMIT %s, %s' % (int(offset), int(limit))
    if vistrail:
        try:
            where_part += " AND v.id=%s" % int(vistrail)
        except __HOLE__:
            where_part += " AND v.name=%s" % \
                db.escape(vistrail, get_db_lib().converters.conversions)
    if version:
        try:
            where_part += " AND w.parent_version=%s" % int(version)
        except ValueError:
            where_part += " AND a1.value=%s" % \
                db.escape(version, get_db_lib().converters.conversions)
    if fromTime:
        where_part += " AND w.ts_end>%s" % \
            db.escape(fromTime, get_db_lib().converters.conversions)
    if toTime:
        where_part += " AND w.ts_start<%s" % \
            db.escape(toTime, get_db_lib().converters.conversions)
    if user:
        where_part += " AND w.user=%s" % \
            db.escape(user, get_db_lib().converters.conversions)
    completed_dict = {'no':0, 'yes':1, 'ok':1}
    if completed is not None:
        try:
            int(completed)
        except ValueError:
            completed = completed_dict.get(str(completed).lower(), -1)
        where_part += " AND w.completed=%s" % completed
    if thumbs:
        select_part += ', t.image_bytes'
        from_part += """ LEFT JOIN action_annotation a2 ON
                             (a2.entity_id=v.id AND
                              a2.action_id=w.parent_version) LEFT JOIN
                         thumbnail t ON a2.value=t.file_name"""
        where_part += " AND (a2.akey='__thumb__' OR a2.akey IS NULL)"
    else:
        select_part += ', NULL'
    # TODO nested module executions are not detected
    for i, module, mCompleted in zip(range(1,len(modules)+1), *zip(*modules)):
        alias = "m%s"%i
        from_part += \
            """ JOIN module_exec %s ON
                (%s.parent_id=w.id AND
                 %s.entity_id=w.entity_id AND
                 %s.entity_type=w.entity_type)
            """.replace('%s', alias)
        where_part += \
            """ AND %s.parent_type='workflow_exec'
                AND %s.module_name=%s """ % (alias, alias,
            db.escape(module.lower(), get_db_lib().converters.conversions) )
        if mCompleted is not None:
            mCompleted = completed_dict.get(str(mCompleted).lower(), -1)
            where_part += """ AND %s.completed=%s""" % (alias, mCompleted)
    command = ' '.join([select_part, from_part, where_part, limit_part]) + ';'
    #print command
    try:
        c = db.cursor()
        c.execute(command)
        rows = c.fetchall()
        result = rows
        c.close()
    except get_db_lib().Error, e:
        msg = "Couldn't perform query on db (%d : %s)" % \
            (e.args[0], e.args[1])
        raise VistrailsDBException(msg)
    # count all rows when offset = 0
    if 0 == offset:
        select_part = 'SELECT count(0)'
        command = ' '.join([select_part,from_part,where_part]) +';'
        #print command
        try:
            c = db.cursor()
            c.execute(command)
            res = c.fetchall()
            result= (result, res[0][0])
            c.close()
        except get_db_lib().Error, e:
            msg = "Couldn't perform query on db (%d : %s)" % \
                (e.args[0], e.args[1])
            raise VistrailsDBException(msg)
    close_db_connection(db)
    return result | ValueError | dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/services/query.py/runLogQuery |
7,055 | def collapse_addresses(addresses):
    """Collapse a list of IP objects.
    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
            [IPv4Network('192.0.2.0/24')]
    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.
    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.
    Raises:
        TypeError: If passed a list of mixed version objects.
    """
    i = 0
    addrs = []
    ips = []
    nets = []
    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            try:
                ips.append(ip.ip)
            except __HOLE__:
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, nets[-1]))
            nets.append(ip)
    # sort and dedup
    ips = sorted(set(ips))
    nets = sorted(set(nets))
    while i < len(ips):
        (first, last) = _find_address_range(ips[i:])
        i = ips.index(last) + 1
        addrs.extend(summarize_address_range(first, last))
    return iter(_collapse_addresses_recursive(sorted(
        addrs + nets, key=_BaseNetwork._get_networks_key))) | AttributeError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/collapse_addresses |
7,056 | def __eq__(self, other):
    try:
        return (self._ip == other._ip
                and self._version == other._version)
    except __HOLE__:
        return NotImplemented | AttributeError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/_BaseAddress.__eq__ |
7,057 | def __eq__(self, other):
    try:
        return (self._version == other._version and
                self.network_address == other.network_address and
                int(self.netmask) == int(other.netmask))
    except __HOLE__:
        return NotImplemented | AttributeError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/_BaseNetwork.__eq__ |
7,058 | def _ip_int_from_string(self, ip_str):
"""Turn the given IP string into an integer for comparison.
Args:
ip_str: A string, the IP ip_str.
Returns:
The IP ip_str as an integer.
Raises:
AddressValueError: if ip_str isn't a valid IPv4 Address.
"""
if not ip_str:
raise AddressValueError('Address cannot be empty')
octets = ip_str.split('.')
if len(octets) != 4:
raise AddressValueError("Expected 4 octets in %r" % ip_str)
try:
return int.from_bytes(map(self._parse_octet, octets), 'big')
except __HOLE__ as exc:
raise AddressValueError("%s in %r" % (exc, ip_str)) | ValueError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/_BaseV4._ip_int_from_string |
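A quick illustration of the contract documented above, using the stdlib ipaddress module as a stand-in: a malformed octet makes int() raise ValueError inside the parser, which surfaces as AddressValueError:

import ipaddress

# A well-formed dotted quad converts to its 32-bit integer value.
print(int(ipaddress.IPv4Address('192.0.2.1')))    # 3221225985

# A non-numeric octet triggers ValueError, re-raised as AddressValueError.
try:
    ipaddress.IPv4Address('192.0.2.x')
except ipaddress.AddressValueError as exc:
    print('rejected:', exc)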
7,059 | def _is_valid_netmask(self, netmask):
"""Verify that the netmask is valid.
Args:
netmask: A string, either a prefix or dotted decimal
netmask.
Returns:
A boolean, True if the prefix represents a valid IPv4
netmask.
"""
mask = netmask.split('.')
if len(mask) == 4:
try:
for x in mask:
if int(x) not in self._valid_mask_octets:
return False
except ValueError:
# Found something that isn't an integer or isn't valid
return False
for idx, y in enumerate(mask):
if idx > 0 and y > mask[idx - 1]:
return False
return True
try:
netmask = int(netmask)
except __HOLE__:
return False
return 0 <= netmask <= self._max_prefixlen | ValueError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/_BaseV4._is_valid_netmask |
7,060 | def _is_hostmask(self, ip_str):
"""Test if the IP string is a hostmask (rather than a netmask).
Args:
ip_str: A string, the potential hostmask.
Returns:
A boolean, True if the IP string is a hostmask.
"""
bits = ip_str.split('.')
try:
parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
except __HOLE__:
return False
if len(parts) != len(bits):
return False
if parts[0] < parts[-1]:
return True
return False | ValueError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/_BaseV4._is_hostmask |
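A small check, assuming Python 3's stdlib ipaddress exposes the same hostmask handling as this backport: a hostmask has its low bits set, a netmask its high bits, and both spell the same prefix:

import ipaddress

# 255.255.255.0 is a netmask, 0.0.0.255 the corresponding hostmask; both yield /24.
print(ipaddress.ip_network('192.0.2.0/255.255.255.0'))   # 192.0.2.0/24
print(ipaddress.ip_network('192.0.2.0/0.0.0.255'))       # 192.0.2.0/24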
7,061 | def __eq__(self, other):
address_equal = IPv4Address.__eq__(self, other)
if not address_equal or address_equal is NotImplemented:
return address_equal
try:
return self.network == other.network
except __HOLE__:
# An interface with an associated network is NOT the
# same as an unassociated address. That's why the hash
# takes the extra info into account.
return False | AttributeError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/IPv4Interface.__eq__ |
7,062 | def __lt__(self, other):
address_less = IPv4Address.__lt__(self, other)
if address_less is NotImplemented:
return NotImplemented
try:
return self.network < other.network
except __HOLE__:
# We *do* allow addresses and interfaces to be sorted. The
# unassociated address is considered less than all interfaces.
return False | AttributeError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/IPv4Interface.__lt__ |
7,063 | def _ip_int_from_string(self, ip_str):
"""Turn an IPv6 ip_str into an integer.
Args:
ip_str: A string, the IPv6 ip_str.
Returns:
An int, the IPv6 address
Raises:
AddressValueError: if ip_str isn't a valid IPv6 Address.
"""
if not ip_str:
raise AddressValueError('Address cannot be empty')
parts = ip_str.split(':')
# An IPv6 address needs at least 2 colons (3 parts).
_min_parts = 3
if len(parts) < _min_parts:
msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
raise AddressValueError(msg)
# If the address has an IPv4-style suffix, convert it to hexadecimal.
if '.' in parts[-1]:
try:
ipv4_int = IPv4Address(parts.pop())._ip
except AddressValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
parts.append('%x' % (ipv4_int & 0xFFFF))
# An IPv6 address can't have more than 8 colons (9 parts).
# The extra colon comes from using the "::" notation for a single
# leading or trailing zero part.
_max_parts = self._HEXTET_COUNT + 1
if len(parts) > _max_parts:
msg = "At most %d colons permitted in %r" % (_max_parts-1, ip_str)
raise AddressValueError(msg)
# Disregarding the endpoints, find '::' with nothing in between.
# This indicates that a run of zeroes has been skipped.
skip_index = None
for i in range(1, len(parts) - 1):
if not parts[i]:
if skip_index is not None:
# Can't have more than one '::'
msg = "At most one '::' permitted in %r" % ip_str
raise AddressValueError(msg)
skip_index = i
# parts_hi is the number of parts to copy from above/before the '::'
# parts_lo is the number of parts to copy from below/after the '::'
if skip_index is not None:
# If we found a '::', then check if it also covers the endpoints.
parts_hi = skip_index
parts_lo = len(parts) - skip_index - 1
if not parts[0]:
parts_hi -= 1
if parts_hi:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
parts_lo -= 1
if parts_lo:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo)
if parts_skipped < 1:
msg = "Expected at most %d other parts with '::' in %r"
raise AddressValueError(msg % (self._HEXTET_COUNT-1, ip_str))
else:
# Otherwise, allocate the entire address to parts_hi. The
# endpoints could still be empty, but _parse_hextet() will check
# for that.
if len(parts) != self._HEXTET_COUNT:
msg = "Exactly %d parts expected without '::' in %r"
raise AddressValueError(msg % (self._HEXTET_COUNT, ip_str))
if not parts[0]:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_hi = len(parts)
parts_lo = 0
parts_skipped = 0
try:
# Now, parse the hextets into a 128-bit integer.
ip_int = 0
for i in range(parts_hi):
ip_int <<= 16
ip_int |= self._parse_hextet(parts[i])
ip_int <<= 16 * parts_skipped
for i in range(-parts_lo, 0):
ip_int <<= 16
ip_int |= self._parse_hextet(parts[i])
return ip_int
except __HOLE__ as exc:
raise AddressValueError("%s in %r" % (exc, ip_str)) | ValueError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/_BaseV6._ip_int_from_string |
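To make the '::' bookkeeping above concrete, a few probes against the stdlib ipaddress module (assumed equivalent to this backport):

import ipaddress

# '::' stands in for exactly one run of zero hextets.
print(int(ipaddress.IPv6Address('::1')))          # 1

# An IPv4-style suffix is folded into the low 32 bits before hextet parsing.
print(int(ipaddress.IPv6Address('::ffff:192.0.2.1')) & 0xFFFFFFFF == 0xC0000201)  # True

# Two '::' runs are ambiguous and rejected.
try:
    ipaddress.IPv6Address('2001::db8::1')
except ipaddress.AddressValueError as exc:
    print('rejected:', exc)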
7,064 | def __eq__(self, other):
address_equal = IPv6Address.__eq__(self, other)
if not address_equal or address_equal is NotImplemented:
return address_equal
try:
return self.network == other.network
except __HOLE__:
# An interface with an associated network is NOT the
# same as an unassociated address. That's why the hash
# takes the extra info into account.
return False | AttributeError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/IPv6Interface.__eq__ |
7,065 | def __lt__(self, other):
address_less = IPv6Address.__lt__(self, other)
if address_less is NotImplemented:
return NotImplemented
try:
return self.network < other.network
except __HOLE__:
# We *do* allow addresses and interfaces to be sorted. The
# unassociated address is considered less than all interfaces.
return False | AttributeError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/IPv6Interface.__lt__ |
7,066 | def _is_valid_netmask(self, prefixlen):
"""Verify that the netmask/prefixlen is valid.
Args:
prefixlen: A string, the netmask in prefix length format.
Returns:
A boolean, True if the prefix represents a valid IPv6
netmask.
"""
try:
prefixlen = int(prefixlen)
except __HOLE__:
return False
return 0 <= prefixlen <= self._max_prefixlen | ValueError | dataset/ETHPy150Open Exa-Networks/exabgp/lib/exabgp/dep/ipaddress.py/IPv6Network._is_valid_netmask |
7,067 | def parse_GECKO(cls, agentString):
"""
Attempt to parse the given User-Agent string as a Gecko-based browser's
user-agent.
"""
identifier = 'Gecko/'
start = agentString.find(identifier)
if start != -1:
end = agentString.find(' ', start)
if end == -1:
end = None
version = agentString[start + len(identifier):end]
try:
version = int(version)
except __HOLE__:
pass
else:
return cls(browsers.GECKO, (version,)) | ValueError | dataset/ETHPy150Open twisted/nevow/nevow/useragent.py/UserAgent.parse_GECKO |
7,068 | def parse_WEBKIT(cls, agentString):
"""
Attempt to parse the given User-Agent string as a WebKit-based
browser's user-agent.
"""
identifier = 'WebKit/'
start = agentString.find(identifier)
if start != -1:
end = start + len(identifier)
while (
end < len(agentString) and
agentString[end].isdigit() or
agentString[end] == '.'):
end += 1
version = agentString[start + len(identifier):end]
try:
version = map(int, version.split('.'))
except __HOLE__:
pass
else:
return cls(browsers.WEBKIT, tuple(version)) | ValueError | dataset/ETHPy150Open twisted/nevow/nevow/useragent.py/UserAgent.parse_WEBKIT |
7,069 | def parse_OPERA(cls, agentString):
"""
Attempt to parse an Opera user-agent.
"""
prefix = 'Opera/'
if agentString.startswith(prefix):
version = agentString[len(prefix):].split(None, 1)[0]
try:
version = map(int, version.split('.'))
except __HOLE__:
pass
else:
return cls(browsers.OPERA, tuple(version)) | ValueError | dataset/ETHPy150Open twisted/nevow/nevow/useragent.py/UserAgent.parse_OPERA |
7,070 | def parse_MSIE(cls, agentString):
"""
Attempt to parse an Internet Explorer user-agent.
"""
oldPrefix = 'Mozilla/4.0 (compatible; MSIE '
newPrefix = 'Mozilla/5.0 (compatible; MSIE '
for prefix in oldPrefix, newPrefix:
if agentString.startswith(prefix):
end = agentString.find(';', len(prefix))
if end == -1:
end = None
version = agentString[len(prefix):end]
try:
version = map(int, version.split('.'))
except __HOLE__:
pass
else:
return cls(browsers.INTERNET_EXPLORER, tuple(version)) | ValueError | dataset/ETHPy150Open twisted/nevow/nevow/useragent.py/UserAgent.parse_MSIE |
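The four user-agent parsers above share one pattern: split the version text on '.', convert each piece with int(), and give up on ValueError. A minimal, self-contained sketch of that pattern (the function name here is illustrative, not from nevow):

def parse_version(text):
    # Returns a tuple of ints, or None when any component is not numeric.
    try:
        return tuple(int(part) for part in text.split('.'))
    except ValueError:
        return None

print(parse_version('9.0'))     # (9, 0)
print(parse_version('9.0b1'))   # None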
7,071 | def _Exists(self):
"""Returns true if the service exists."""
show_cmd = [AZURE_PATH,
'service',
'show',
'--json',
self.name]
stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True)
try:
json.loads(stdout)
except __HOLE__:
return False
return True | ValueError | dataset/ETHPy150Open GoogleCloudPlatform/PerfKitBenchmarker/perfkitbenchmarker/providers/azure/azure_virtual_machine.py/AzureService._Exists |
7,072 | def _Exists(self):
"""Returns true if the VM exists and attempts to get some data."""
show_cmd = [AZURE_PATH,
'vm',
'show',
'--json',
self.name]
stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True)
try:
json.loads(stdout)
except __HOLE__:
return False
return True | ValueError | dataset/ETHPy150Open GoogleCloudPlatform/PerfKitBenchmarker/perfkitbenchmarker/providers/azure/azure_virtual_machine.py/AzureVirtualMachine._Exists |
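Both _Exists probes above treat "the CLI's stdout parses as JSON" as the existence test; json.loads raises ValueError (JSONDecodeError) on anything else. A minimal sketch of that check, with hypothetical output strings:

import json

def looks_like_json(stdout):
    try:
        json.loads(stdout)
    except ValueError:          # json.JSONDecodeError is a ValueError subclass
        return False
    return True

print(looks_like_json('{"name": "vm-1"}'))              # True
print(looks_like_json('ERROR: resource not found'))     # False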
7,073 | def test_logistic_cg():
try:
import scipy
logistic_cg.cg_optimization_mnist(n_epochs=10)
except __HOLE__:
from nose.plugins.skip import SkipTest
raise SkipTest(
'SciPy not available. Needed for the logistic_cg example.') | ImportError | dataset/ETHPy150Open lisa-lab/DeepLearningTutorials/code/test.py/test_logistic_cg |
7,074 | def lower_global(self, name, value):
"""
1) Check global scope dictionary.
2) Check __builtins__.
2a) is it a dictionary (for non __main__ module)
2b) is it a module (for __main__ module)
"""
moddict = self.get_module_dict()
obj = self.pyapi.dict_getitem(moddict, self._freeze_string(name))
self.incref(obj) # obj is borrowed
try:
if value in _unsupported_builtins:
raise ForbiddenConstruct("builtins %s() is not supported"
% name, loc=self.loc)
except __HOLE__:
# `value` is unhashable, ignore
pass
if hasattr(builtins, name):
obj_is_null = self.is_null(obj)
bbelse = self.builder.basic_block
with self.builder.if_then(obj_is_null):
mod = self.pyapi.dict_getitem(moddict,
self._freeze_string("__builtins__"))
builtin = self.builtin_lookup(mod, name)
bbif = self.builder.basic_block
retval = self.builder.phi(self.pyapi.pyobj)
retval.add_incoming(obj, bbelse)
retval.add_incoming(builtin, bbif)
else:
retval = obj
with cgutils.if_unlikely(self.builder, self.is_null(retval)):
self.pyapi.raise_missing_global_error(name)
self.return_exception_raised()
return retval
# ------------------------------------------------------------------------- | TypeError | dataset/ETHPy150Open numba/numba/numba/objmode.py/PyLower.lower_global |
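The TypeError branch above guards a membership test against unhashable constants; a two-line illustration of why that guard is needed (the set contents here merely stand in for numba's _unsupported_builtins table):

unsupported = {print, open}     # membership tests hash the operand

try:
    [] in unsupported           # lists are unhashable, so this raises TypeError
except TypeError as exc:
    print('unhashable constant, skip the lookup:', exc)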
7,075 | def make_plot(benchmarks, title, adjustment):
class Style(DefaultStyle):
colors = ["#000000" if row["path"] else DefaultStyle.colors[1]
for row in benchmarks]
font_family = 'Consolas, "Deja Vu Sans Mono", "Bitstream Vera Sans Mono", "Courier New", monospace'
minimum = int(min(row["min"] * adjustment for row in benchmarks))
maximum = int(max(
min(row["max"], row["hd15iqr"]) * adjustment
for row in benchmarks
) + 1)
try:
import pygaljs
except __HOLE__:
opts = {}
else:
opts = {
"js": [
pygaljs.uri("2.0.x", "pygal-tooltips.js")
]
}
plot = CustomBox(
box_mode='tukey',
x_label_rotation=-90,
x_labels=["{0[name]}".format(row) for row in benchmarks],
show_legend=False,
title=title,
x_title="Trial",
y_title="Duration",
style=Style,
min_scale=20,
max_scale=20,
truncate_label=50,
range=(minimum, maximum),
zero=minimum,
css=[
"file://style.css",
"file://graph.css",
"""inline:
.tooltip .value {
font-size: 1em !important;
}
.axis text {
font-size: 9px !important;
}
"""
],
**opts
)
for row in benchmarks:
serie = [row[field] * adjustment for field in ["min", "ld15iqr", "q1", "median", "q3", "hd15iqr", "max"]]
serie.append(row["path"])
plot.add("{0[fullname]} - {0[rounds]} rounds".format(row), serie)
return plot | ImportError | dataset/ETHPy150Open ionelmc/pytest-benchmark/src/pytest_benchmark/histogram.py/make_plot |
7,076 | def copy_keys_except(dic, *keys):
"""Return a copy of the dict without the specified items.
"""
ret = dic.copy()
for key in keys:
try:
del ret[key]
except __HOLE__:
pass
return ret | KeyError | dataset/ETHPy150Open mikeorr/WebHelpers2/webhelpers2/containers.py/copy_keys_except |
7,077 | def del_keys(dic, *keys):
"""Delete several keys from a dict, ignoring those that don't exist.
This modifies the dict in place.
::
>>> d ={"A": 1, "B": 2, "C": 3}
>>> del_keys(d, "A", "C")
>>> d
{'B': 2}
"""
for key in keys:
try:
del dic[key]
except __HOLE__:
pass | KeyError | dataset/ETHPy150Open mikeorr/WebHelpers2/webhelpers2/containers.py/del_keys |
7,078 | def correlate_dicts(dicts, key):
"""Correlate several dicts under one superdict.
If you have several dicts each with a 'name' key, this
puts them in a container dict keyed by name.
Example::
>>> d1 = {"name": "Fred", "age": 41}
>>> d2 = {"name": "Barney", "age": 31}
>>> flintstones = correlate_dicts([d1, d2], "name")
>>> sorted(flintstones.keys())
['Barney', 'Fred']
>>> flintstones["Fred"]["age"]
41
If you're having trouble spelling this method correctly, remember:
"relate" has one 'l'. The 'r' is doubled because it occurs after a prefix.
Thus "correlate".
"""
ret = {}
i = 0
for d in dicts:
try:
my_key = d[key]
except __HOLE__:
msg = "'dicts' element %d contains no key '%s'"
tup = i, key
raise KeyError(msg % tup)
ret[my_key] = d
i += 1
return ret | KeyError | dataset/ETHPy150Open mikeorr/WebHelpers2/webhelpers2/containers.py/correlate_dicts |
7,079 | def correlate_objects(objects, attr):
"""Correlate several objects under one dict.
If you have several objects each with a 'name' attribute, this
puts them in a dict keyed by name.
Example::
>>> class Flintstone(DumbObject):
... pass
...
>>> fred = Flintstone(name="Fred", age=41)
>>> barney = Flintstone(name="Barney", age=31)
>>> flintstones = correlate_objects([fred, barney], "name")
>>> sorted(flintstones.keys())
['Barney', 'Fred']
>>> flintstones["Barney"].age
31
If you're having trouble spelling this method correctly, remember:
"relate" has one 'l'. The 'r' is doubled because it occurs after a prefix.
Thus "correlate".
"""
ret = {}
i = 0
for obj in objects:
try:
my_key = getattr(obj, attr)
except __HOLE__:
msg = "'%s' object at 'objects[%d]' contains no attribute '%s'"
tup = type(obj).__name__, i, attr
raise AttributeError(msg % tup)
ret[my_key] = obj
i += 1
return ret | AttributeError | dataset/ETHPy150Open mikeorr/WebHelpers2/webhelpers2/containers.py/correlate_objects |
7,080 | def run(self, context):
try:
self.output = self.device.execute(self.command, timeout=self.timeout, check_exit_code=False)
except __HOLE__:
self.device.killall('dhrystone')
raise | KeyboardInterrupt | dataset/ETHPy150Open ARM-software/workload-automation/wlauto/workloads/dhrystone/__init__.py/Dhrystone.run |
7,081 | def __init__(self, param_info, parent=None):
ColorChooserButton.__init__(self, parent)
try:
r,g,b = [int(float(i) * 255) for i in param_info.value.split(',')]
except __HOLE__:
r,g,b = (0.0, 0.0, 0.0)
self.setColor(QtGui.QColor(r,g,b))
self.setFixedHeight(22)
self.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Fixed) | ValueError | dataset/ETHPy150Open VisTrails/VisTrails/vistrails/gui/modules/paramexplore.py/PEColorChooserButton.__init__ |
7,082 | def __init__(self, file, align=True, bigendian=True, inclheader=False):
import struct
self.closed = False
self.align = align # whether to align to word (2-byte) boundaries
if bigendian:
strflag = '>'
else:
strflag = '<'
self.file = file
self.chunkname = file.read(4)
if len(self.chunkname) < 4:
raise EOFError
try:
self.chunksize = struct.unpack(strflag+'L', file.read(4))[0]
except struct.error:
raise EOFError
if inclheader:
self.chunksize = self.chunksize - 8 # subtract header
self.size_read = 0
try:
self.offset = self.file.tell()
except (AttributeError, __HOLE__):
self.seekable = False
else:
self.seekable = True | IOError | dataset/ETHPy150Open Southpaw-TACTIC/TACTIC/src/context/client/tactic-api-python-4.0.api04/Lib/chunk.py/Chunk.__init__ |
7,083 | def skip(self):
"""Skip the rest of the chunk.
If you are not interested in the contents of the chunk,
this method should be called so that the file points to
the start of the next chunk.
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
if self.seekable:
try:
n = self.chunksize - self.size_read
# maybe fix alignment
if self.align and (self.chunksize & 1):
n = n + 1
self.file.seek(n, 1)
self.size_read = self.size_read + n
return
except __HOLE__:
pass
while self.size_read < self.chunksize:
n = min(8192, self.chunksize - self.size_read)
dummy = self.read(n)
if not dummy:
raise EOFError | IOError | dataset/ETHPy150Open Southpaw-TACTIC/TACTIC/src/context/client/tactic-api-python-4.0.api04/Lib/chunk.py/Chunk.skip |
7,084 | def check_uid(val):
"""Return an uid, given a user value.
If the value is an integer, make sure it's an existing uid.
If the user value is unknown, raises a ValueError.
"""
if isinstance(val, six.integer_types):
try:
pwd.getpwuid(val)
return val
except (KeyError, OverflowError):
raise ValueError("%r isn't a valid user id" % val)
if not isinstance(val, str):
raise TypeError(val)
try:
return pwd.getpwnam(val).pw_uid
except __HOLE__:
raise ValueError("%r isn't a valid user val" % val) | KeyError | dataset/ETHPy150Open benoitc/gaffer/gaffer/util.py/check_uid |
7,085 | def check_gid(val):
"""Return a gid, given a group value
If the group value is unknown, raises a ValueError.
"""
if isinstance(val, int):
try:
grp.getgrgid(val)
return val
except (__HOLE__, OverflowError):
raise ValueError("No such group: %r" % val)
if not isinstance(val, str):
raise TypeError(val)
try:
return grp.getgrnam(val).gr_gid
except KeyError:
raise ValueError("No such group: %r" % val) | KeyError | dataset/ETHPy150Open benoitc/gaffer/gaffer/util.py/check_gid |
7,086 | def closerange(fd_low, fd_high): # NOQA
# Iterate through and close all file descriptors.
for fd in range(fd_low, fd_high):
try:
os.close(fd)
except __HOLE__: # ERROR, fd wasn't open to begin with (ignored)
pass
# http://www.svbug.com/documentation/comp.unix.programmer-FAQ/faq_2.html#SEC16 | OSError | dataset/ETHPy150Open benoitc/gaffer/gaffer/util.py/closerange |
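The helper above backfills os.closerange for old interpreters; the modern stdlib call behaves the same way, silently skipping descriptors in the range that are not open:

import os

r, w = os.pipe()
os.closerange(min(r, w), max(r, w) + 1)   # closes both ends; gaps are ignored
try:
    os.close(r)
except OSError:
    print('already closed')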
7,087 | def parse_signal_value(sig):
if sig is None:
raise ValueError("invalid signal")
# value passed is a string
if isinstance(sig, six.string_types):
if sig.isdigit():
# if number in the string, try to parse it
try:
return int(sig)
except ValueError:
raise ValueError("invalid signal")
# else try to get the signal number from its name
signame = sig.upper()
if not signame.startswith('SIG'):
signame = "SIG%s" % signame
try:
signum = getattr(signal, signame)
except __HOLE__:
raise ValueError("invalid signal name")
return signum
# signal is a number, just return it
return sig | AttributeError | dataset/ETHPy150Open benoitc/gaffer/gaffer/util.py/parse_signal_value |
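A condensed sketch of the name-to-number branch above: getattr on the signal module does the lookup, and a missing attribute becomes ValueError:

import signal

def signal_by_name(name):
    name = name.upper()
    if not name.startswith('SIG'):
        name = 'SIG' + name
    try:
        return getattr(signal, name)
    except AttributeError:
        raise ValueError('invalid signal name')

print(signal_by_name('term') == signal.SIGTERM)    # True
print(signal_by_name('SIGINT') == signal.SIGINT)   # True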
7,088 | def upgrade_websocket(self, environ, start_response):
"""
Attempt to upgrade the socket environ['wsgi.input'] into a websocket enabled connection.
"""
websocket_version = environ.get('HTTP_SEC_WEBSOCKET_VERSION', '')
if not websocket_version:
raise UpgradeRequiredError
elif websocket_version not in self.WS_VERSIONS:
raise HandshakeError('Unsupported WebSocket Version: {0}'.format(websocket_version))
key = environ.get('HTTP_SEC_WEBSOCKET_KEY', '').strip()
if not key:
raise HandshakeError('Sec-WebSocket-Key header is missing/empty')
try:
key_len = len(base64.b64decode(key))
except __HOLE__:
raise HandshakeError('Invalid key: {0}'.format(key))
if key_len != 16:
# 5.2.1 (3)
raise HandshakeError('Invalid key: {0}'.format(key))
sec_ws_accept = base64.b64encode(sha1(six.b(key) + self.WS_GUID).digest())
if six.PY3:
sec_ws_accept = sec_ws_accept.decode('ascii')
headers = [
('Upgrade', 'websocket'),
('Connection', 'Upgrade'),
('Sec-WebSocket-Accept', sec_ws_accept),
('Sec-WebSocket-Version', str(websocket_version))
]
if environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL') is not None:
headers.append(('Sec-WebSocket-Protocol', environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL')))
logger.debug('WebSocket request accepted, switching protocols')
start_response(force_str('101 Switching Protocols'), headers)
six.get_method_self(start_response).finish_content()
return WebSocket(environ['wsgi.input']) | TypeError | dataset/ETHPy150Open jrief/django-websocket-redis/ws4redis/django_runserver.py/WebsocketRunServer.upgrade_websocket |
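The Sec-WebSocket-Accept computation above follows RFC 6455: base64-encode the SHA-1 of the client key concatenated with the fixed GUID. Reproducing it with the RFC's own sample key:

import base64
from hashlib import sha1

WS_GUID = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
key = 'dGhlIHNhbXBsZSBub25jZQ=='                  # sample key from RFC 6455
assert len(base64.b64decode(key)) == 16           # the 5.2.1 (3) length check above

accept = base64.b64encode(sha1(key.encode('ascii') + WS_GUID).digest()).decode('ascii')
print(accept)    # s3pPLMBiTxaQ9kYGzzhZRbK+xOo=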
7,089 | def testInvalidVisibleColumnIds(self):
t = TableGenerator.createTableWithDefaultContainer(3, 10)
try:
t.setVisibleColumns(['a', 'Property 2', 'Property 3'])
self.fail('IllegalArgumentException expected')
except __HOLE__:
pass # OK, expected
self.assertEquals(self._defaultColumns3, t.getVisibleColumns()) | ValueError | dataset/ETHPy150Open rwl/muntjac/muntjac/test/server/component/table/table_visible_columns.py/TableVisibleColumns.testInvalidVisibleColumnIds |
7,090 | def testDuplicateVisibleColumnIds(self):
t = TableGenerator.createTableWithDefaultContainer(3, 10)
try:
t.setVisibleColumns(['Property 0', 'Property 1', 'Property 2',
'Property 1'])
# FIXME: Multiple properties in the Object array should be detected
# (#6476)
#self.fail("IllegalArgumentException expected")
except __HOLE__:
pass # OK, expected
# FIXME: Multiple properties in the Object array should be detected
# (#6476)
# assertArrayEquals(defaultColumns3, t.getVisibleColumns()); | ValueError | dataset/ETHPy150Open rwl/muntjac/muntjac/test/server/component/table/table_visible_columns.py/TableVisibleColumns.testDuplicateVisibleColumnIds |
7,091 | def write_result(self, buf):
indent = 0
frame = self.frame
_classes = ['dataframe'] # Default class.
if self.classes is not None:
if isinstance(self.classes, str):
self.classes = self.classes.split()
if not isinstance(self.classes, (list, tuple)):
raise AssertionError('classes must be list or tuple, '
'not %s' % type(self.classes))
_classes.extend(self.classes)
if self.notebook:
div_style = ''
try:
import IPython
if IPython.__version__ < LooseVersion('3.0.0'):
div_style = ' style="max-width:1500px;overflow:auto;"'
except __HOLE__:
pass
self.write('<div{0}>'.format(div_style))
self.write('<table border="1" class="%s">' % ' '.join(_classes),
indent)
indent += self.indent_delta
indent = self._write_header(indent)
indent = self._write_body(indent)
self.write('</table>', indent)
if self.should_show_dimensions:
by = chr(215) if compat.PY3 else unichr(215) # ×
self.write(u('<p>%d rows %s %d columns</p>') %
(len(frame), by, len(frame.columns)))
if self.notebook:
self.write('</div>')
_put_lines(buf, self.elements) | ImportError | dataset/ETHPy150Open pydata/pandas/pandas/formats/format.py/HTMLFormatter.write_result |
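The public entry point for the writer above is DataFrame.to_html; a quick check (exact markup may vary across pandas versions) that extra CSS classes are appended after the default 'dataframe' class:

import pandas as pd

df = pd.DataFrame({'a': [1, 2]})
html = df.to_html(classes='mystyle')
print('class="dataframe mystyle"' in html)   # expected True for the versions this code targets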
7,092 | def detect_console_encoding():
"""
Try to find the most capable encoding supported by the console.
    slightly modified from the way IPython handles the same issue.
"""
import locale
global _initial_defencoding
encoding = None
try:
encoding = sys.stdout.encoding or sys.stdin.encoding
except __HOLE__:
pass
# try again for something better
if not encoding or 'ascii' in encoding.lower():
try:
encoding = locale.getpreferredencoding()
except Exception:
pass
# when all else fails. this will usually be "ascii"
if not encoding or 'ascii' in encoding.lower():
encoding = sys.getdefaultencoding()
# GH3360, save the reported defencoding at import time
# MPL backends may change it. Make available for debugging.
if not _initial_defencoding:
_initial_defencoding = sys.getdefaultencoding()
return encoding | AttributeError | dataset/ETHPy150Open pydata/pandas/pandas/formats/format.py/detect_console_encoding |
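The fallback order documented above, probed directly:

import locale
import sys

print(getattr(sys.stdout, 'encoding', None))   # 1) the stream's own encoding
print(locale.getpreferredencoding())           # 2) the locale's preferred encoding
print(sys.getdefaultencoding())                # 3) interpreter default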
7,093 | def ask_around(self, service_name, payload, operator=None):
"""Ask all handlers of a given service name, return list of answers.
Handlers connected through the optionally given operator are skipped,
so that partyline applications do not call themselves.
"""
answers = []
try:
service_handlers = self.handlers[service_name]
except __HOLE__:
if not self.ignore_missing_services:
raise NoSuchServiceName('No handler is registered for %r.' %
repr(service_name))
service_handlers = []
for handler in service_handlers:
if operator is not None and handler in operator.handlers:
# Skip handlers on the same operator, ask *others* for answer.
continue
try:
answers.append(handler(payload))
except HighAndDry:
continue
return answers | KeyError | dataset/ETHPy150Open rduplain/wsgi_party/wsgi_party.py/WSGIParty.ask_around |
7,094 | def _get_login_token(self, email, password):
"""
Passes email and password to base api and returns login token.
"""
auth_url = 'authentication.json'
params = urllib.urlencode({
'email': email,
'password': password,
})
try:
data = urllib2.urlopen(self.base_url + auth_url, params).read()
except urllib2.HTTPError, e:
return ("ERROR", "HTTP: %s" % str(e))
except urllib2.URLError, e:
return ("ERROR", "Error URL: %s" % str(e.reason.args[1]))
try:
dict_data = json.loads(data)
token = dict_data["authentication"]["token"]
except __HOLE__:
return ("ERROR", "Error: No Token Returned")
return ("SUCCESS", token)
##########################
# Accounts Functions
########################## | KeyError | dataset/ETHPy150Open npinger/base-crm-api-client/base_client.py/BaseAPIService._get_login_token |
7,095 | def update_deal_tags(self, deal_id, tags, action='add'):
"""
        Adds, removes, or replaces tags for a deal. Returns a json or xml response.
Arguments:
deal_id: The base id of the deal that we want to work with
tags: comma separated string of tags. Eg. 'platinum,trial_period'
action: one of the following: 'add', 'remove', 'replace'
"""
deal_data = self._get_deal(deal_id=deal_id, force_json=True)
deal_data_dict = json.loads(deal_data)
old_tags = deal_data_dict['deal']['deal_tags'].split(', ')
new_tags_list = tags.split(',')
if action == 'add':
new_tags = _list_to_tags(list(set(new_tags_list + old_tags)))
elif action == 'remove':
for elem in new_tags_list:
try:
old_tags.remove(elem)
except __HOLE__:
pass
new_tags = _list_to_tags(old_tags)
elif action == 'replace':
new_tags = _list_to_tags(new_tags_list)
return self.update_deal(deal_info={'deal_tags': new_tags}, deal_id=deal_id) | ValueError | dataset/ETHPy150Open npinger/base-crm-api-client/base_client.py/BaseAPIService.update_deal_tags |
7,096 | def update_contact_tags(self, contact_id, tags, action='add'):
"""
        Adds, removes, or replaces tags for a contact. Returns a json or xml response.
Arguments:
contact_id: The base id of the contact that we want to work with
tags: comma separated string of tags. Eg. 'platinum,trial_period'
action: one of the following: 'add', 'remove', 'replace'
"""
contact_data = self._get_contact(contact_id=contact_id, force_json=True)
contact_data_dict = json.loads(contact_data)
old_tags = contact_data_dict['contact']['tags_joined_by_comma'].split(', ')
new_tags_list = tags.split(',')
if action == 'add':
new_tags = _list_to_tags(list(set(new_tags_list + old_tags)))
elif action == 'remove':
for elem in new_tags_list:
try:
old_tags.remove(elem)
except __HOLE__:
pass
new_tags = _list_to_tags(old_tags)
elif action == 'replace':
new_tags = _list_to_tags(new_tags_list)
person = not contact_data_dict['contact']['is_organisation']
return self.update_contact(contact_info={'tag_list': new_tags}, contact_id=contact_id,
person=person) | ValueError | dataset/ETHPy150Open npinger/base-crm-api-client/base_client.py/BaseAPIService.update_contact_tags |
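Both tag helpers above reduce the add/remove/replace actions to list operations on the comma-joined tag string; the core of that manipulation, isolated (sample tag values are illustrative):

old_tags = 'platinum, trial_period'.split(', ')
requested = 'gold,trial_period'.split(',')

added    = list(set(old_tags + requested))                # action == 'add'
removed  = [t for t in old_tags if t not in requested]    # action == 'remove'
replaced = requested                                      # action == 'replace'

print(sorted(added))    # ['gold', 'platinum', 'trial_period']
print(removed)          # ['platinum']
print(replaced)         # ['gold', 'trial_period']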
7,097 | def dispatch(self, parameterName, value):
"""
When called in dispatch, do the coerce for C{value} and save the
returned value.
"""
if value is None:
raise UsageError("Parameter '%s' requires an argument."
% (parameterName,))
try:
value = self.coerce(value)
except __HOLE__, e:
raise UsageError("Parameter type enforcement failed: %s" % (e,))
self.options.opts[parameterName] = value | ValueError | dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/python/usage.py/CoerceParameter.dispatch |
7,098 | def parseOptions(self, options=None):
"""
The guts of the command-line parser.
"""
if options is None:
options = sys.argv[1:]
# we really do need to place the shell completion check here, because
# if we used an opt_shell_completion method then it would be possible
# for other opt_* methods to be run first, and they could possibly
# raise validation errors which would result in error output on the
# terminal of the user performing shell completion. Validation errors
# would occur quite frequently, in fact, because users often initiate
# tab-completion while they are editing an unfinished command-line.
if len(options) > 1 and options[-2] == "--_shell-completion":
from twisted.python import _shellcomp
cmdName = path.basename(sys.argv[0])
_shellcomp.shellComplete(self, cmdName, options,
self._shellCompFile)
sys.exit(0)
try:
opts, args = getopt.getopt(options,
self.shortOpt, self.longOpt)
except getopt.error, e:
raise UsageError(str(e))
for opt, arg in opts:
if opt[1] == '-':
opt = opt[2:]
else:
opt = opt[1:]
optMangled = opt
if optMangled not in self.synonyms:
optMangled = opt.replace("-", "_")
if optMangled not in self.synonyms:
raise UsageError("No such option '%s'" % (opt,))
optMangled = self.synonyms[optMangled]
if isinstance(self._dispatch[optMangled], CoerceParameter):
self._dispatch[optMangled].dispatch(optMangled, arg)
else:
self._dispatch[optMangled](optMangled, arg)
if (getattr(self, 'subCommands', None)
and (args or self.defaultSubCommand is not None)):
if not args:
args = [self.defaultSubCommand]
sub, rest = args[0], args[1:]
for (cmd, short, parser, doc) in self.subCommands:
if sub == cmd or sub == short:
self.subCommand = cmd
self.subOptions = parser()
self.subOptions.parent = self
self.subOptions.parseOptions(rest)
break
else:
raise UsageError("Unknown command: %s" % sub)
else:
try:
self.parseArgs(*args)
except __HOLE__:
raise UsageError("Wrong number of arguments.")
self.postOptions() | TypeError | dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/twisted/python/usage.py/Options.parseOptions |
7,099 | def main():
parser = ArgumentParser(description='Download videos from some video platforms via http requests using youtube-dl', formatter_class=RawTextHelpFormatter)
parser.add_argument('-v', '--version', action='version', version=__version__)
parser.add_argument('-u', '--update', action='store_true', help='update or install youtube-dl and exit\n\n')
parser.add_argument('-s', '--server', type=str, default='localhost', help='select server (localhost by default)')
parser.add_argument('-p', '--port', type=int, default=49149, help='select server listening port (49149 by default)')
parser.add_argument('--verbose', action='store_true', help='show what the program is doing')
args = parser.parse_args()
try:
paths = Paths()
if (args.update):
update(paths, args.verbose)
else:
checkYoutubedl(paths, args.verbose)
runServer(args.server, args.port, args.verbose)
except __HOLE__:
sys.exit(0) | KeyboardInterrupt | dataset/ETHPy150Open r4mos/youtube-dl-simple-server/youtube_dl_simple_server/__init__.py/main |