Unnamed: 0 (int64, 0 to 10k) | function (stringlengths 79 to 138k) | label (stringclasses, 20 values) | info (stringlengths 42 to 261)
---|---|---|---|
4,700 |
def is_zipfile(filename):
    """Quickly see if file is a ZIP file by checking the magic number."""
    try:
        fpin = open(filename, "rb")
        endrec = _EndRecData(fpin)
        fpin.close()
        if endrec:
            return True                 # file has correct magic number
    except __HOLE__:
        pass
    return False
|
IOError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/zipfile.py/is_zipfile
|
4,701 |
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
    """Open the ZIP file with mode read "r", write "w" or append "a"."""
    if mode not in ("r", "w", "a"):
        raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')

    if compression == ZIP_STORED:
        pass
    elif compression == ZIP_DEFLATED:
        if not zlib:
            raise RuntimeError,\
                  "Compression requires the (missing) zlib module"
    else:
        raise RuntimeError, "That compression method is not supported"

    self._allowZip64 = allowZip64
    self._didModify = False
    self.debug = 0  # Level of printing: 0 through 3
    self.NameToInfo = {}    # Find file info given name
    self.filelist = []      # List of ZipInfo instances for archive
    self.compression = compression  # Method of compression
    self.mode = key = mode.replace('b', '')[0]
    self.pwd = None
    self.comment = ''

    # Check if we were passed a file-like object
    if isinstance(file, basestring):
        self._filePassed = 0
        self.filename = file
        modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
        try:
            self.fp = open(file, modeDict[mode])
        except __HOLE__:
            if mode == 'a':
                mode = key = 'w'
                self.fp = open(file, modeDict[mode])
            else:
                raise
    else:
        self._filePassed = 1
        self.fp = file
        self.filename = getattr(file, 'name', None)

    if key == 'r':
        self._GetContents()
    elif key == 'w':
        pass
    elif key == 'a':
        try:                        # See if file is a zip file
            self._RealGetContents()
            # seek to start of directory and overwrite
            self.fp.seek(self.start_dir, 0)
        except BadZipfile:          # file is not a zip file, just append
            self.fp.seek(0, 2)
    else:
        if not self._filePassed:
            self.fp.close()
            self.fp = None
        raise RuntimeError, 'Mode must be "r", "w" or "a"'
|
IOError
|
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/zipfile.py/ZipFile.__init__
|
4,702 |
def _LookupDiskIndex(self, idx):
    """Looks up uuid or name of disk if necessary."""
    try:
        return int(idx)
    except __HOLE__:
        pass
    for i, d in enumerate(self.cfg.GetInstanceDisks(self.instance.uuid)):
        if d.name == idx or d.uuid == idx:
            return i
    raise errors.OpPrereqError("Lookup of disk %r failed" % idx)
|
ValueError
|
dataset/ETHPy150Open ganeti/ganeti/lib/cmdlib/instance_set_params.py/LUInstanceSetParams._LookupDiskIndex
|
4,703 |
def _ModifyDisk(self, idx, disk, params, _):
    """Modifies a disk.
    """
    changes = []
    if constants.IDISK_MODE in params:
        disk.mode = params.get(constants.IDISK_MODE)
        changes.append(("disk.mode/%d" % idx, disk.mode))
    if constants.IDISK_NAME in params:
        disk.name = params.get(constants.IDISK_NAME)
        changes.append(("disk.name/%d" % idx, disk.name))

    # Modify arbitrary params in case instance template is ext
    for key, value in params.iteritems():
        if (key not in constants.MODIFIABLE_IDISK_PARAMS and
                disk.dev_type == constants.DT_EXT):
            # stolen from GetUpdatedParams: default means reset/delete
            if value.lower() == constants.VALUE_DEFAULT:
                try:
                    del disk.params[key]
                except __HOLE__:
                    pass
            else:
                disk.params[key] = value
            changes.append(("disk.params:%s/%d" % (key, idx), value))

    # Update disk object
    self.cfg.Update(disk, self.feedback_fn)

    return changes
|
KeyError
|
dataset/ETHPy150Open ganeti/ganeti/lib/cmdlib/instance_set_params.py/LUInstanceSetParams._ModifyDisk
|
4,704 |
def clean_acls(self, req):
    if 'swift.clean_acl' in req.environ:
        for header in ('x-container-read', 'x-container-write'):
            if header in req.headers:
                try:
                    req.headers[header] = \
                        req.environ['swift.clean_acl'](header,
                                                       req.headers[header])
                except __HOLE__ as err:
                    return HTTPBadRequest(request=req, body=str(err))
    return None
|
ValueError
|
dataset/ETHPy150Open openstack/swift/swift/proxy/controllers/container.py/ContainerController.clean_acls
|
4,705 |
def test_creation(self):
    rd = self.create_domain(name='130', ip_type='4')
    rd.save()

    try:
        ip = Ip(ip_str="130.193.1.2")  # Forget the ip_type
        ip.clean_ip()
    except __HOLE__, e:
        pass
    self.assertEqual(ValidationError, type(e))

    ip = Ip(ip_str="130.193.1.2", ip_type='4')
    self.assertFalse(ip.ip_upper and ip.ip_lower and ip.reverse_domain)
    ip.clean_ip()
    self.assertTrue(ip.ip_upper == 0 and ip.ip_lower)
|
ValidationError
|
dataset/ETHPy150Open mozilla/inventory/mozdns/ip/tests.py/SimpleTest.test_creation
|
4,706 |
def subhelper():
    # 8 calls
    # 10 ticks total: 8 ticks local, 2 ticks in subfunctions
    global ticks
    ticks += 2
    for i in range(2):              # 0
        try:
            C().foo                 # 1 x 2
        except __HOLE__:
            ticks += 3              # 3 x 2
|
AttributeError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/test/test_profile.py/subhelper
|
4,707 |
@fbuild.db.caches
def guess_platform(ctx, arch=None):
    """L{guess_platform} returns a platform set that describes the various
    features of the specified I{platform}. If I{platform} is I{None}, try to
    determine which platform the system is and return that value. If the
    platform cannot be determined, return I{None}."""
    ctx.logger.check('determining platform')
    if arch is None:
        # If we're on Windows, then don't even try uname
        if os.name == 'nt':
            res = archmap[platform.system().lower()]
            ctx.logger.passed(res)
            return frozenset(res)

        # Let's see if uname exists
        try:
            uname = fbuild.builders.find_program(ctx, ['uname'], quieter=1)
        except fbuild.builders.MissingProgram:
            # Maybe we're on windows. Let's just use what python thinks is the
            # platform.
            #arch = os.name
            arch = platform.system().lower()
        else:
            # We've got uname, so let's see what platform it thinks we're on.
            try:
                stdout, stderr = ctx.execute((uname, '-s'), quieter=1)
            except fbuild.ExecutionError:
                # Ack, that failed too. Just fall back to python.
                #arch = os.name
                arch = platform.system().lower()
            else:
                arch = stdout.decode('utf-8').strip().lower()
                if arch.startswith('mingw32'):
                    arch = 'mingw'
                elif arch.startswith('cygwin'):
                    arch = 'cygwin'

    try:
        architecture = archmap[arch]
    except __HOLE__:
        ctx.logger.failed()
        raise UnknownPlatform(arch)
    else:
        ctx.logger.passed(architecture)
        return frozenset(architecture)

# ------------------------------------------------------------------------------
|
KeyError
|
dataset/ETHPy150Open felix-lang/fbuild/lib/fbuild/builders/platform.py/guess_platform
|
4,708 |
def to_python(self, value):
    """Convert a MongoDB-compatible type to a Python type.
    """
    if isinstance(value, basestring):
        return value

    if hasattr(value, 'to_python'):
        return value.to_python()

    is_list = False
    if not hasattr(value, 'items'):
        try:
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])
        except __HOLE__:  # Not iterable return the value
            return value

    if self.field:
        self.field._auto_dereference = self._auto_dereference
        value_dict = dict([(key, self.field.to_python(item))
                           for key, item in value.items()])
    else:
        Document = _import_class('Document')
        value_dict = {}
        for k, v in value.items():
            if isinstance(v, Document):
                # We need the id from the saved object to create the DBRef
                if v.pk is None:
                    self.error('You can only reference documents once they'
                               ' have been saved to the database')
                collection = v._get_collection_name()
                value_dict[k] = DBRef(collection, v.pk)
            elif hasattr(v, 'to_python'):
                value_dict[k] = v.to_python()
            else:
                value_dict[k] = self.to_python(v)

    if is_list:  # Convert back to a list
        return [v for _, v in sorted(value_dict.items(),
                                     key=operator.itemgetter(0))]
    return value_dict
|
TypeError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/ComplexBaseField.to_python
|
4,709 |
def to_mongo(self, value, **kwargs):
    """Convert a Python type to a MongoDB-compatible type.
    """
    Document = _import_class("Document")
    EmbeddedDocument = _import_class("EmbeddedDocument")
    GenericReferenceField = _import_class("GenericReferenceField")

    if isinstance(value, basestring):
        return value

    if hasattr(value, 'to_mongo'):
        if isinstance(value, Document):
            return GenericReferenceField().to_mongo(
                value, **kwargs)
        cls = value.__class__
        val = value.to_mongo(**kwargs)
        # If it's a document that is not inherited add _cls
        if isinstance(value, EmbeddedDocument):
            val['_cls'] = cls.__name__
        return val

    is_list = False
    if not hasattr(value, 'items'):
        try:
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])
        except __HOLE__:  # Not iterable return the value
            return value

    if self.field:
        value_dict = dict([(key, self.field.to_mongo(item, **kwargs))
                           for key, item in value.iteritems()])
    else:
        value_dict = {}
        for k, v in value.iteritems():
            if isinstance(v, Document):
                # We need the id from the saved object to create the DBRef
                if v.pk is None:
                    self.error('You can only reference documents once they'
                               ' have been saved to the database')

                # If its a document that is not inheritable it won't have
                # any _cls data so make it a generic reference allows
                # us to dereference
                meta = getattr(v, '_meta', {})
                allow_inheritance = (
                    meta.get('allow_inheritance', ALLOW_INHERITANCE)
                    is True)
                if not allow_inheritance and not self.field:
                    value_dict[k] = GenericReferenceField().to_mongo(
                        v, **kwargs)
                else:
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
            elif hasattr(v, 'to_mongo'):
                cls = v.__class__
                val = v.to_mongo(**kwargs)
                # If it's a document that is not inherited add _cls
                if isinstance(v, (Document, EmbeddedDocument)):
                    val['_cls'] = cls.__name__
                value_dict[k] = val
            else:
                value_dict[k] = self.to_mongo(v, **kwargs)

    if is_list:  # Convert back to a list
        return [v for _, v in sorted(value_dict.items(),
                                     key=operator.itemgetter(0))]
    return value_dict
|
TypeError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/ComplexBaseField.to_mongo
|
4,710 |
def validate(self, value):
    """If field is provided ensure the value is valid.
    """
    errors = {}
    if self.field:
        if hasattr(value, 'iteritems') or hasattr(value, 'items'):
            sequence = value.iteritems()
        else:
            sequence = enumerate(value)
        for k, v in sequence:
            try:
                self.field._validate(v)
            except ValidationError, error:
                errors[k] = error.errors or error
            except (__HOLE__, AssertionError), error:
                errors[k] = error

        if errors:
            field_class = self.field.__class__.__name__
            self.error('Invalid %s item (%s)' % (field_class, value),
                       errors=errors)
    # Don't allow empty values if required
    if self.required and not value:
        self.error('Field is required and cannot be empty')
|
ValueError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/ComplexBaseField.validate
|
4,711 |
def _validate_polygon(self, value, top_level=True):
    if not isinstance(value, (list, tuple)):
        return 'Polygons must contain list of linestrings'

    # Quick and dirty validator
    try:
        value[0][0][0]
    except (__HOLE__, IndexError):
        return "Invalid Polygon must contain at least one valid linestring"

    errors = []
    for val in value:
        error = self._validate_linestring(val, False)
        if not error and val[0] != val[-1]:
            error = 'LineStrings must start and end at the same point'
        if error and error not in errors:
            errors.append(error)
    if errors:
        if top_level:
            return "Invalid Polygon:\n%s" % ", ".join(errors)
        else:
            return "%s" % ", ".join(errors)
|
TypeError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/GeoJsonBaseField._validate_polygon
|
4,712 |
def _validate_linestring(self, value, top_level=True):
    """Validates a linestring"""
    if not isinstance(value, (list, tuple)):
        return 'LineStrings must contain list of coordinate pairs'

    # Quick and dirty validator
    try:
        value[0][0]
    except (TypeError, __HOLE__):
        return "Invalid LineString must contain at least one valid point"

    errors = []
    for val in value:
        error = self._validate_point(val)
        if error and error not in errors:
            errors.append(error)
    if errors:
        if top_level:
            return "Invalid LineString:\n%s" % ", ".join(errors)
        else:
            return "%s" % ", ".join(errors)
|
IndexError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/GeoJsonBaseField._validate_linestring
|
4,713 |
def _validate_multipoint(self, value):
    if not isinstance(value, (list, tuple)):
        return 'MultiPoint must be a list of Point'

    # Quick and dirty validator
    try:
        value[0][0]
    except (__HOLE__, IndexError):
        return "Invalid MultiPoint must contain at least one valid point"

    errors = []
    for point in value:
        error = self._validate_point(point)
        if error and error not in errors:
            errors.append(error)
    if errors:
        return "%s" % ", ".join(errors)
|
TypeError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/GeoJsonBaseField._validate_multipoint
|
4,714 |
def _validate_multilinestring(self, value, top_level=True):
    if not isinstance(value, (list, tuple)):
        return 'MultiLineString must be a list of LineString'

    # Quick and dirty validator
    try:
        value[0][0][0]
    except (TypeError, __HOLE__):
        return "Invalid MultiLineString must contain at least one valid linestring"

    errors = []
    for linestring in value:
        error = self._validate_linestring(linestring, False)
        if error and error not in errors:
            errors.append(error)
    if errors:
        if top_level:
            return "Invalid MultiLineString:\n%s" % ", ".join(errors)
        else:
            return "%s" % ", ".join(errors)
|
IndexError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/GeoJsonBaseField._validate_multilinestring
|
4,715 |
def _validate_multipolygon(self, value):
    if not isinstance(value, (list, tuple)):
        return 'MultiPolygon must be a list of Polygon'

    # Quick and dirty validator
    try:
        value[0][0][0][0]
    except (__HOLE__, IndexError):
        return "Invalid MultiPolygon must contain at least one valid Polygon"

    errors = []
    for polygon in value:
        error = self._validate_polygon(polygon, False)
        if error and error not in errors:
            errors.append(error)
    if errors:
        return "Invalid MultiPolygon:\n%s" % ", ".join(errors)
|
TypeError
|
dataset/ETHPy150Open MongoEngine/mongoengine/mongoengine/base/fields.py/GeoJsonBaseField._validate_multipolygon
|
4,716 |
def delete(self, name):
    try:
        super(BuiltFileStorage, self).delete(name)
    except __HOLE__:
        name = self.path(name)
        if os.path.isdir(name):
            os.rmdir(name)
        else:
            raise
|
OSError
|
dataset/ETHPy150Open hzdg/django-staticbuilder/staticbuilder/storage.py/BuiltFileStorage.delete
|
4,717 |
def detectPthImportedPackages():
    if not hasattr(sys.modules["site"], "getsitepackages"):
        return ()

    pth_imports = set()

    for prefix in sys.modules["site"].getsitepackages():
        if not Utils.isDir(prefix):
            continue

        for path, filename in Utils.listDir(prefix):
            if filename.endswith(".pth"):
                try:
                    for line in open(path, "rU"):
                        if line.startswith("import "):
                            if ';' in line:
                                line = line[:line.find(';')]

                            for part in line[7:].split(','):
                                pth_imports.add(part.strip())
                except __HOLE__:
                    warning("Python installation problem, cannot read file '%s'.")

    return tuple(sorted(pth_imports))
|
OSError
|
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/importing/PreloadedPackages.py/detectPthImportedPackages
|
4,718 |
def __getitem__(self, key):
    try:
        value_record = super(NearCache, self).__getitem__(key)
        if value_record.is_expired(self.max_idle_seconds):
            super(NearCache, self).__delitem__(key)
            raise KeyError
    except __HOLE__ as ke:
        self._cache_miss += 1
        raise ke

    if self.eviction_policy == EVICTION_POLICY.LRU:
        value_record.last_access_time = current_time()
    elif self.eviction_policy == EVICTION_POLICY.LFU:
        value_record.access_hit += 1

    self._cache_hit += 1
    return self.serialization_service.to_object(value_record.value) \
        if self.in_memory_format == IN_MEMORY_FORMAT.BINARY else value_record.value
|
KeyError
|
dataset/ETHPy150Open hazelcast/hazelcast-python-client/hazelcast/near_cache.py/NearCache.__getitem__
|
4,719 |
def get_template_sources(self, template_name, template_dirs=None):
    template_name = self.prepare_template_name(template_name)
    for loader in self.template_source_loaders:
        if hasattr(loader, 'get_template_sources'):
            try:
                for result in loader.get_template_sources(template_name, template_dirs):
                    yield result
            except __HOLE__:
                # The template dir name was a bytestring that wasn't valid UTF-8.
                raise
            except ValueError:
                # The joined path was located outside of this particular
                # template_dir (it might be inside another one, so this isn't
                # fatal).
                pass
|
UnicodeDecodeError
|
dataset/ETHPy150Open gregmuellegger/django-mobile/django_mobile/loader.py/Loader.get_template_sources
|
4,720 |
def _convert_value(value):
    """Parse string as python literal if possible and fallback to string."""
    try:
        return ast.literal_eval(value)
    except (__HOLE__, SyntaxError):
        # use as string if nothing else worked
        return value
|
ValueError
|
dataset/ETHPy150Open IDSIA/sacred/sacred/arg_parser.py/_convert_value
|
4,721 |
def skip_without(*names):
    """skip a test if some names are not importable"""
    @decorator
    def skip_without_names(f, *args, **kwargs):
        """decorator to skip tests in the absence of numpy."""
        for name in names:
            try:
                __import__(name)
            except __HOLE__:
                raise SkipTest
        return f(*args, **kwargs)
    return skip_without_names
|
ImportError
|
dataset/ETHPy150Open ipython/ipython-py3k/IPython/parallel/tests/clienttest.py/skip_without
|
4,722 |
@classmethod
def convert(cls, value):
    if isinstance(value, cls):
        return value
    try:
        return cls(value.upper())
    except (__HOLE__, AttributeError):
        valids = [cls.VERY_HIGH, cls.HIGH, cls.NORMAL,
                  cls.LOW, cls.VERY_LOW]
        valids = [p.value for p in valids]
        raise ValueError("'%s' is not a valid priority, valid"
                         " priorities are %s" % (value, valids))
|
ValueError
|
dataset/ETHPy150Open openstack/taskflow/taskflow/jobs/base.py/JobPriority.convert
|
4,723 |
def get_logs(self, decode_logs=True):
    """
    :param decode_logs: bool, docker by default output logs in simple json structure:
        { "stream": "line" }
        if this arg is set to True, it decodes logs to human readable form
    :return: str
    """
    logs = graceful_chain_get(self.get_annotations_or_labels(), "logs")
    if not logs:
        logger.error("no logs")
        return ""
    if decode_logs:
        output = []
        for line in logs.split("\n"):
            try:
                decoded_line = json.loads(line)
            except __HOLE__:
                continue
            output += [decoded_line.get("stream", "").strip()]
            error = decoded_line.get("error", "").strip()
            if error:
                output += [error]
            error_detail = decoded_line.get("errorDetail", {})
            error_msg = error_detail.get("message", "").strip()
            if error_msg:
                output += [error_msg]
            output += "\n"
        return "\n".join(output)
    else:
        return logs
|
ValueError
|
dataset/ETHPy150Open projectatomic/osbs-client/osbs/build/build_response.py/BuildResponse.get_logs
|
4,724 |
def search_entries(module_list, ops = None, constructors = None, seen = None):
    if ops is None: ops = []
    if constructors is None: constructors = []
    if seen is None: seen = set()
    modules = []
    for module in module_list:
        symbol_name_list = [s for s in dir(module) if not s[0] == '_']
        for symbol_name in symbol_name_list:
            symbol = getattr(module, symbol_name)
            try:
                if symbol in seen:
                    continue
                seen.add(symbol)
            except __HOLE__:
                pass
            if type(symbol) == type(module): # module
                modules.append(symbol)
            try:
                ops.append(EntryOp(symbol, symbol_name, module))
            except TypeError:
                try:
                    constructors.append(EntryConstructor(symbol, symbol_name, module))
                except TypeError:
                    pass
    for symbol in modules:
        search_entries([symbol], ops, constructors, seen)
    return ops, constructors
|
TypeError
|
dataset/ETHPy150Open RDFLib/rdfextras/docs/scripts/gen_oplist.py/search_entries
|
4,725 |
def payday():

    # Wire things up.
    # ===============
    env = wireup.env()
    wireup.db(env)
    wireup.billing(env)

    # Lazily import the billing module.
    # =================================
    # This dodges a problem where db in billing is None if we import it from
    # gratipay before calling wireup.billing.
    from gratipay.billing.payday import Payday

    try:
        Payday.start().run()
    except __HOLE__:
        pass
    except:
        import aspen
        import traceback
        aspen.log(traceback.format_exc())
|
KeyboardInterrupt
|
dataset/ETHPy150Open gratipay/gratipay.com/gratipay/cli.py/payday
|
4,726 |
def __call__(self):
    import zmq
    context = zmq.Context()
    sock = context.socket(zmq.SUB)
    sock.setsockopt(zmq.SUBSCRIBE, '')
    sock.connect('tcp://' + self.hostname + ':' + str(self.port))
    # Get progress via socket
    percent = None
    while True:
        try:
            message = sock.recv()
            [percent_str, label] = message.split('|')
            percent = float(percent_str)
            self.label = label
            super(RemoteProgress, self).__call__(percent)
        except __HOLE__:
            if percent is not None:
                self.message("Exited at %.3f%% completion" % percent)
            break
        except:
            self.message("Could not process socket message: %r"
                         % message)
|
KeyboardInterrupt
|
dataset/ETHPy150Open ioam/holoviews/holoviews/ipython/widgets.py/RemoteProgress.__call__
|
4,727 |
def tryToCloseStream(self, out):
    try:
        # try to close output stream (e.g. file handle)
        if out is not None:
            out.close()
    except __HOLE__:
        pass  # NOP
|
IOError
|
dataset/ETHPy150Open rwl/muntjac/muntjac/terminal/gwt/server/abstract_communication_manager.py/AbstractCommunicationManager.tryToCloseStream
|
4,728 |
def writeUidlResponce(self, callback, repaintAll, outWriter, window,
            analyzeLayouts):
    outWriter.write('\"changes\":[')

    paintables = None
    invalidComponentRelativeSizes = None

    paintTarget = JsonPaintTarget(self, outWriter, not repaintAll)

    windowCache = self._currentlyOpenWindowsInClient.get(window.getName())
    if windowCache is None:
        windowCache = OpenWindowCache()
        self._currentlyOpenWindowsInClient[window.getName()] = windowCache

    # Paints components
    if repaintAll:
        paintables = list()
        paintables.append(window)

        # Reset sent locales
        self._locales = None
        self.requireLocale( self._application.getLocale() )
    else:
        # remove detached components from paintableIdMap so they
        # can be GC'ed
        # TODO figure out if we could move this beyond the painting phase,
        # "respond as fast as possible, then do the cleanup". Beware of
        # painting the dirty detatched components.
        for p in self._paintableIdMap.keys():
            if p.getApplication() is None:
                self.unregisterPaintable(p)
                if self._paintableIdMap[p] in self._idPaintableMap:
                    del self._idPaintableMap[self._paintableIdMap[p]]
                if p in self._paintableIdMap:
                    del self._paintableIdMap[p]
                if p in self._dirtyPaintables:
                    self._dirtyPaintables.remove(p)

        paintables = self.getDirtyVisibleComponents(window)

    if paintables is not None:
        # We need to avoid painting children before parent.
        # This is ensured by ordering list by depth in component
        # tree
        def compare(c1, c2):
            d1 = 0
            while c1.getParent() is not None:
                d1 += 1
                c1 = c1.getParent()
            d2 = 0
            while c2.getParent() is not None:
                d2 += 1
                c2 = c2.getParent()
            if d1 < d2:
                return -1
            if d1 > d2:
                return 1
            return 0

        paintables.sort(cmp=compare)

        for p in paintables:
            # TODO CLEAN
            if isinstance(p, Window):
                w = p
                if w.getTerminal() is None:
                    w.setTerminal(
                            self._application.getMainWindow().getTerminal())

            # This does not seem to happen in tk5, but remember this case:
            # else if (p instanceof IComponent) { if (((IComponent)
            # p).getParent() == null || ((IComponent) p).getApplication() ==
            # null) { // IComponent requested repaint, but is no // longer
            # attached: skip paintablePainted(p); continue; } }

            # TODO we may still get changes that have been
            # rendered already (changes with only cached flag)
            if paintTarget.needsToBePainted(p):
                paintTarget.startTag('change')
                paintTarget.addAttribute('format', 'uidl')
                pid = self.getPaintableId(p)
                paintTarget.addAttribute('pid', pid)
                p.paint(paintTarget)
                paintTarget.endTag('change')

            self.paintablePainted(p)

            if analyzeLayouts:
                # FIXME: circular import
                from muntjac.terminal.gwt.server.component_size_validator \
                    import ComponentSizeValidator

                w = p
                invalidComponentRelativeSizes = ComponentSizeValidator.\
                        validateComponentRelativeSizes(w.getContent(),
                                None, None)

                # Also check any existing subwindows
                if w.getChildWindows() is not None:
                    for subWindow in w.getChildWindows():
                        invalidComponentRelativeSizes = \
                                ComponentSizeValidator.\
                                    validateComponentRelativeSizes(
                                            subWindow.getContent(),
                                            invalidComponentRelativeSizes,
                                            None)

    paintTarget.close()
    outWriter.write(']')  # close changes

    outWriter.write(', \"meta\" : {')
    metaOpen = False

    if repaintAll:
        metaOpen = True
        outWriter.write('\"repaintAll\":true')

        if analyzeLayouts:
            outWriter.write(', \"invalidLayouts\":')
            outWriter.write('[')
            if invalidComponentRelativeSizes is not None:
                first = True
                for invalidLayout in invalidComponentRelativeSizes:
                    if not first:
                        outWriter.write(',')
                    else:
                        first = False
                    invalidLayout.reportErrors(outWriter, self, stderr)
            outWriter.write(']')

    if self._highLightedPaintable is not None:
        outWriter.write(", \"hl\":\"")
        idd = self._paintableIdMap.get(self._highLightedPaintable)
        outWriter.write(idd if idd is not None else 'null')
        outWriter.write("\"")
        self._highLightedPaintable = None

    ci = None
    try:
        ci = self._application.getSystemMessages()
    except __HOLE__, e:
        logger.warning('getSystemMessages() failed - continuing')

    # meta instruction for client to enable auto-forward to
    # sessionExpiredURL after timer expires.
    if ((ci is not None) and (ci.getSessionExpiredMessage() is None)
            and (ci.getSessionExpiredCaption() is None)
            and ci.isSessionExpiredNotificationEnabled()):
        newTimeoutInterval = self.getTimeoutInterval()
        if repaintAll or (self._timeoutInterval != newTimeoutInterval):
            if ci.getSessionExpiredURL() is None:
                escapedURL = ''
            else:
                escapedURL = ci.getSessionExpiredURL().replace('/', '\\/')
            if metaOpen:
                outWriter.write(',')
            outWriter.write('\"timedRedirect\":{\"interval\":'
                    + newTimeoutInterval + 15 + ',\"url\":\"'
                    + escapedURL + '\"}')
            metaOpen = True

        self._timeoutInterval = newTimeoutInterval

    outWriter.write('}, \"resources\" : {')

    # Precache custom layouts
    # TODO We should only precache the layouts that are not
    # cached already (plagiate from usedPaintableTypes)
    resourceIndex = 0
    for resource in paintTarget.getUsedResources():
        is_ = None
        try:
            is_ = callback.getThemeResourceAsStream(self.getTheme(window),
                    resource)
        except IOError, e:
            # FIXME: Handle exception
            logger.info('Failed to get theme resource stream.')

        if is_ is not None:
            outWriter.write((', ' if resourceIndex > 0 else '')
                    + '\"' + resource + '\" : ')
            resourceIndex += 1  # post increment

            layout = str()
            try:
                layout = is_.read()
            except IOError, e:
                # FIXME: Handle exception
                logger.info('Resource transfer failed: ' + str(e))

            outWriter.write('\"%s\"' % JsonPaintTarget.escapeJSON(layout))
        else:
            # FIXME: Handle exception
            logger.critical('CustomLayout not found: ' + resource)

    outWriter.write('}')

    usedPaintableTypes = paintTarget.getUsedPaintableTypes()
    typeMappingsOpen = False
    for class1 in usedPaintableTypes:
        if windowCache.cache(class1):
            # client does not know the mapping key for this type,
            # send mapping to client
            if not typeMappingsOpen:
                typeMappingsOpen = True
                outWriter.write(', \"typeMappings\" : { ')
            else:
                outWriter.write(' , ')
            canonicalName = clsname(class1)
            if canonicalName.startswith('muntjac.ui'):
                # use Muntjac package names FIXME: Python client side
                canonicalName = 'com.vaadin.ui.' + class1.__name__
            elif canonicalName.startswith('muntjac.demo.sampler'):
                canonicalName = 'com.vaadin.demo.sampler.' + class1.__name__
            elif hasattr(class1, 'TYPE_MAPPING'):
                canonicalName = getattr(class1, 'TYPE_MAPPING')
            else:
                raise ValueError('type mapping name [%s]' % canonicalName)
            outWriter.write('\"')
            outWriter.write(canonicalName)
            outWriter.write('\" : ')
            outWriter.write(self.getTagForType(class1))

    if typeMappingsOpen:
        outWriter.write(' }')

    # add any pending locale definitions requested by the client
    self.printLocaleDeclarations(outWriter)

    if self._dragAndDropService is not None:
        self._dragAndDropService.printJSONResponse(outWriter)
|
AttributeError
|
dataset/ETHPy150Open rwl/muntjac/muntjac/terminal/gwt/server/abstract_communication_manager.py/AbstractCommunicationManager.writeUidlResponce
|
4,729 |
def decodeVariableValue(self, encodedValue):
    """Decode encoded burst, record, field and array item separator
    characters in a variable value String received from the client.
    This protects from separator injection attacks.

    @param encodedValue: value to decode
    @return: decoded value
    """
    iterator = iter(encodedValue)
    try:
        character = iterator.next()
    except StopIteration:
        return ''

    result = StringIO()
    while True:
        try:
            if self.VAR_ESCAPE_CHARACTER == character:
                character = iterator.next()
                if character == chr(ord(self.VAR_ESCAPE_CHARACTER) + 0x30):
                    # escaped escape character
                    result.write(self.VAR_ESCAPE_CHARACTER)
                elif character == chr(ord(self.VAR_BURST_SEPARATOR) + 0x30):
                    pass
                elif character == chr(ord(self._VAR_RECORD_SEPARATOR)+0x30):
                    pass
                elif character == chr(ord(self._VAR_FIELD_SEPARATOR) +0x30):
                    pass
                elif (character ==
                        chr(ord(self.VAR_ARRAYITEM_SEPARATOR) + 0x30)):
                    # +0x30 makes these letters for easier reading
                    result.write( chr(ord(character) - 0x30) )
                else:
                    # other escaped character - probably a client-server
                    # version mismatch
                    raise ValueError("Invalid escaped character from the "
                            "client - check that the widgetset and server "
                            "versions match")
            else:
                # not a special character - add it to the result as is
                result.write(character)
            character = iterator.next()
        except __HOLE__:
            break
    r = result.getvalue()
    result.close()
    return r
|
StopIteration
|
dataset/ETHPy150Open rwl/muntjac/muntjac/terminal/gwt/server/abstract_communication_manager.py/AbstractCommunicationManager.decodeVariableValue
|
4,730 |
def printLocaleDeclarations(self, outWriter):
    """Prints the queued (pending) locale definitions to a PrintWriter
    in a (UIDL) format that can be sent to the client and used there in
    formatting dates, times etc.
    """
    # Send locale informations to client
    outWriter.write(', \"locales\":[')
    while self._pendingLocalesIndex < len(self._locales):
        l = self.generateLocale(self._locales[self._pendingLocalesIndex])

        # Locale name
        outWriter.write('{\"name\":\"' + str(l) + '\",')

        # Month names (both short and full)
        months = l.months['format']['wide'].values()
        short_months = l.months['format']['abbreviated'].values()
        outWriter.write(('\"smn\":[\"'
                + short_months[0] + '\",\"' + short_months[1] + '\",\"'
                + short_months[2] + '\",\"' + short_months[3] + '\",\"'
                + short_months[4] + '\",\"' + short_months[5] + '\",\"'
                + short_months[6] + '\",\"' + short_months[7] + '\",\"'
                + short_months[8] + '\",\"' + short_months[9] + '\",\"'
                + short_months[10] + '\",\"' + short_months[11] + '\"'
                + '],').encode('utf-8'))
        outWriter.write(('\"mn\":[\"'
                + months[0] + '\",\"' + months[1] + '\",\"'
                + months[2] + '\",\"' + months[3] + '\",\"'
                + months[4] + '\",\"' + months[5] + '\",\"'
                + months[6] + '\",\"' + months[7] + '\",\"'
                + months[8] + '\",\"' + months[9] + '\",\"'
                + months[10] + '\",\"' + months[11] + '\"'
                + '],').encode('utf-8'))

        # Weekday names (both short and full)
        days = l.days['format']['wide'].values()
        short_days = l.days['format']['abbreviated'].values()
        outWriter.write(('\"sdn\":[\"'
                + short_days[6] + '\",\"'
                + short_days[0] + '\",\"' + short_days[1] + '\",\"'
                + short_days[2] + '\",\"' + short_days[3] + '\",\"'
                + short_days[4] + '\",\"' + short_days[5] + '\"'
                + '],').encode('utf-8'))
        outWriter.write(('\"dn\":[\"'
                + days[6] + '\",\"'
                + days[0] + '\",\"' + days[1] + '\",\"'
                + days[2] + '\",\"' + days[3] + '\",\"'
                + days[4] + '\",\"' + days[5] + '\"'
                + '],').encode('utf-8'))

        # First day of week
        # (Babel: 6 = sunday, 0 = monday, Vaadin: 0 = sunday, 1 = monday)
        fdow = l.first_week_day
        if fdow == 0:
            fdow = 1
        else:
            fdow = 0
        outWriter.write('\"fdow\":' + str(fdow) + ',')

        # Date formatting (MM/DD/YYYY etc.)
        try:
            df = l.date_formats['short'].pattern
            df += ' '
            df += l.time_formats['short'].pattern
            df = df.encode('utf-8')  # convert unicode to string
        except __HOLE__:
            logger.warning('Unable to get default date '
                    'pattern for locale ' + str(l))
            #df = locale.nl_langinfo(locale.D_T_FMT)
            df = 'dd/MM/yy HH:mm'

        timeStart = df.find('H')
        if timeStart < 0:
            timeStart = df.find('h')
        ampm_first = df.find('a')
        # E.g. in Korean locale AM/PM is before h:mm
        # TODO should take that into consideration on client-side as well,
        # now always h:mm a
        if ampm_first > 0 and ampm_first < timeStart:
            timeStart = ampm_first
        # Hebrew locale has time before the date
        timeFirst = timeStart == 0
        if timeFirst:
            dateStart = df.find(' ')
            if ampm_first > dateStart:
                dateStart = df.find(' ', ampm_first)
            dateformat = df[dateStart + 1:]
        else:
            dateformat = df[:timeStart - 1]

        outWriter.write('\"df\":\"' + dateformat.strip() + '\",')

        # Time formatting (24 or 12 hour clock and AM/PM suffixes)
        timeformat = df[timeStart:len(df)]
        # Doesn't return second or milliseconds.
        #
        # We use timeformat to determine 12/24-hour clock
        twelve_hour_clock = timeformat.find('a') > -1
        # TODO there are other possibilities as well, like 'h' in french
        # (ignore them, too complicated)
        hour_min_delimiter = '.' if timeformat.find('.') > -1 else ':'
        # outWriter.write("\"tf\":\"" + timeformat + "\",");
        outWriter.write('\"thc\":' + str(twelve_hour_clock).lower() + ',')
        outWriter.write('\"hmd\":\"' + hour_min_delimiter + '\"')
        if twelve_hour_clock:
            ampm = [( l.periods['am'] ).encode('utf-8'),
                    ( l.periods['pm'] ).encode('utf-8')]
            outWriter.write(',\"ampm\":[\"' + ampm[0] + '\",\"'
                    + ampm[1] + '\"]')
        outWriter.write('}')
        if self._pendingLocalesIndex < len(self._locales) - 1:
            outWriter.write(',')
        self._pendingLocalesIndex += 1
    outWriter.write(']')  # close locales
|
KeyError
|
dataset/ETHPy150Open rwl/muntjac/muntjac/terminal/gwt/server/abstract_communication_manager.py/AbstractCommunicationManager.printLocaleDeclarations
|
4,731 |
def CheckAccess(self, token):
    """Enforce a dual approver policy for access."""
    namespace, _ = self.urn.Split(2)
    if namespace != "ACL":
        raise access_control.UnauthorizedAccess(
            "Approval object has invalid urn %s." % self.urn,
            subject=self.urn, requested_access=token.requested_access)

    user, subject_urn = self.InferUserAndSubjectFromUrn()
    if user != token.username:
        raise access_control.UnauthorizedAccess(
            "Approval object is not for user %s." % token.username,
            subject=self.urn, requested_access=token.requested_access)

    now = rdfvalue.RDFDatetime().Now()

    # Is this an emergency access?
    break_glass = self.Get(self.Schema.BREAK_GLASS)
    if break_glass and now < break_glass:
        # This tags the token as an emergency token.
        token.is_emergency = True
        return True

    # Check that there are enough approvers.
    approvers = self.GetNonExpiredApprovers()
    if len(approvers) < config_lib.CONFIG["ACL.approvers_required"]:
        msg = ("Requires %s approvers for access." %
               config_lib.CONFIG["ACL.approvers_required"])
        raise access_control.UnauthorizedAccess(
            msg, subject=subject_urn, requested_access=token.requested_access)

    # Check User labels
    if self.checked_approvers_label:
        approvers_with_label = []

        # We need to check labels with high privilege since normal users can
        # inspect other user's labels.
        for approver in approvers:
            try:
                user = aff4.FACTORY.Open("aff4:/users/%s" % approver,
                                         aff4_type="GRRUser",
                                         token=token.SetUID())
                if self.checked_approvers_label in user.GetLabelsNames():
                    approvers_with_label.append(approver)
            except __HOLE__:
                pass

        if len(approvers_with_label) < self.min_approvers_with_label:
            raise access_control.UnauthorizedAccess(
                "At least %d approver(s) should have '%s' label." % (
                    self.min_approvers_with_label,
                    self.checked_approvers_label),
                subject=subject_urn,
                requested_access=token.requested_access)

    return True
|
IOError
|
dataset/ETHPy150Open google/grr/grr/lib/aff4_objects/security.py/ApprovalWithApproversAndReason.CheckAccess
|
4,732 |
@flow.StateHandler()
def Start(self):
    """Create the Approval object and notify the Approval Granter."""
    approval_urn = self.BuildApprovalUrn()
    subject_title = self.BuildSubjectTitle()
    access_urn = self.BuildAccessUrl()

    # This object must already exist.
    try:
        approval_request = aff4.FACTORY.Open(approval_urn, mode="rw",
                                             aff4_type=self.approval_type,
                                             token=self.token)
    except __HOLE__:
        raise access_control.UnauthorizedAccess("Approval object does not exist.",
                                                requested_access="rw")

    # We are now an approver for this request.
    approval_request.AddAttribute(
        approval_request.Schema.APPROVER(self.token.username))
    email_msg_id = utils.SmartStr(approval_request.Get(
        approval_request.Schema.EMAIL_MSG_ID))
    email_cc = utils.SmartStr(approval_request.Get(
        approval_request.Schema.EMAIL_CC))
    approval_request.Close(sync=True)

    # Notify to the user.
    fd = aff4.FACTORY.Create(
        aff4.ROOT_URN.Add("users").Add(self.args.delegate),
        "GRRUser", mode="rw", token=self.token)
    fd.Notify("ViewObject", self.args.subject_urn,
              "%s has granted you access to %s."
              % (self.token.username, subject_title), self.session_id)
    fd.Close()

    if not config_lib.CONFIG.Get("Email.send_approval_emails"):
        return

    reason = self.CreateReasonHTML(self.args.reason)

    template = u"""
<html><body><h1>Access to
<a href='%(admin_ui)s#%(subject_urn)s'>%(subject_title)s</a> granted.</h1>

The user %(username)s has granted access to
<a href='%(admin_ui)s#%(subject_urn)s'>%(subject_title)s</a> for the
purpose of "%(reason)s".

Please click <a href='%(admin_ui)s#%(subject_urn)s'>here</a> to access it.

<p>Thanks,</p>
<p>%(signature)s</p>
</body></html>"""

    body = template % dict(
        subject_title=subject_title,
        username=self.token.username,
        reason=reason,
        admin_ui=config_lib.CONFIG["AdminUI.url"],
        subject_urn=access_urn,
        signature=config_lib.CONFIG["Email.signature"]
    )

    # Email subject should match approval request, and we add message id
    # references so they are grouped together in a thread by gmail.
    subject = u"Approval for %s to access %s." % (
        utils.SmartStr(self.args.delegate), subject_title)
    headers = {"In-Reply-To": email_msg_id, "References": email_msg_id}
    email_alerts.EMAIL_ALERTER.SendEmail(
        utils.SmartStr(self.args.delegate),
        utils.SmartStr(self.token.username), subject,
        utils.SmartStr(body), is_html=True,
        cc_addresses=email_cc,
        headers=headers)
|
IOError
|
dataset/ETHPy150Open google/grr/grr/lib/aff4_objects/security.py/GrantApprovalWithReasonFlow.Start
|
4,733 |
@systemcall
def process_input(self):
    """Attempt to read a single Record from the socket and process it."""
    # Currently, any children Request process notify this Connection
    # that it is no longer needed by closing the Connection's socket.
    # We need to put a timeout on select, otherwise we might get
    # stuck in it indefinitely... (I don't like this solution.)
    while self._keepGoing:
        try:
            r, w, e = select.select([self._sock], [], [], 1.0)
        except __HOLE__:
            # Sigh. ValueError gets thrown sometimes when passing select
            # a closed socket.
            raise EOFError
        if r:
            break
    if not self._keepGoing:
        return
    rec = Record()
    rec.read(self._sock)

    if rec.type == FCGI_GET_VALUES:
        self._do_get_values(rec)
    elif rec.type == FCGI_BEGIN_REQUEST:
        self._do_begin_request(rec)
    elif rec.type == FCGI_ABORT_REQUEST:
        self._do_abort_request(rec)
    elif rec.type == FCGI_PARAMS:
        self._do_params(rec)
    elif rec.type == FCGI_STDIN:
        self._do_stdin(rec)
    elif rec.type == FCGI_DATA:
        self._do_data(rec)
    elif rec.requestId == FCGI_NULL_REQUEST_ID:
        self._do_unknown_type(rec)
    else:
        # Need to complain about this.
        pass
|
ValueError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/inet/fcgi.py/Connection.process_input
|
4,734 |
def _setupSocket(self):
    if self._bindAddress is None:  # Run as a normal FastCGI?
        sock = socket.fromfd(FCGI_LISTENSOCK_FILENO, socket.AF_INET,
                             socket.SOCK_STREAM)
        try:
            sock.getpeername()
        except socket.error, e:
            if e[0] != errno.ENOTCONN:
                raise
    else:
        # Run as a server
        oldUmask = None
        if isinstance(self._bindAddress, str):
            # Unix socket
            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            try:
                os.unlink(self._bindAddress)
            except __HOLE__:
                pass
            if self._umask is not None:
                oldUmask = os.umask(self._umask)
        else:
            # INET socket
            assert type(self._bindAddress) is tuple
            assert len(self._bindAddress) == 2
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        close_on_exec(sock.fileno())
        sock.bind(self._bindAddress)
        sock.listen(socket.SOMAXCONN)

        if oldUmask is not None:
            os.umask(oldUmask)

    return sock
|
OSError
|
dataset/ETHPy150Open kdart/pycopia/core/pycopia/inet/fcgi.py/FCGIServer._setupSocket
|
4,735 |
def pandas_dtype(dtype):
    """
    Converts input into a pandas only dtype object or a numpy dtype object.

    Parameters
    ----------
    dtype : object to be converted

    Returns
    -------
    np.dtype or a pandas dtype
    """
    if isinstance(dtype, string_types):
        try:
            return DatetimeTZDtype.construct_from_string(dtype)
        except __HOLE__:
            pass

        try:
            return CategoricalDtype.construct_from_string(dtype)
        except TypeError:
            pass

    return np.dtype(dtype)
|
TypeError
|
dataset/ETHPy150Open pydata/pandas/pandas/types/api.py/pandas_dtype
|
4,736 |
def testCannotEraseFile(self):
    def failedUnlink(path):
        if '/contents0' in path:
            raise OSError(errno.EACCES, 'Permission denied', path)
        origUnlink(path)

    origUnlink = os.unlink
    os.unlink = failedUnlink
    try:
        foo = self.addComponent('foo:runtime', '1', '', [('/dir/contents0')])
        self.updatePkg('foo:runtime')
        # just so the erase doesn't try to erase the directory, add a file
        self.writeFile('%s/dir/foo' % self.rootDir, 'blah\n')
        self.logFilter.add()
        try:
            self.erasePkg(self.rootDir, 'foo:runtime')
        except __HOLE__:
            pass
        else:
            assert 0
        self.logFilter.compare([
            'error: /dir/contents0 could not be removed: '
            'Permission denied',
            'error: a critical error occured -- reverting '
            'filesystem changes'])
    finally:
        os.unlink = origUnlink
|
OSError
|
dataset/ETHPy150Open sassoftware/conary/conary_test/updatetest.py/UpdateTest.testCannotEraseFile
|
4,737 |
def _getOpenFiles(self):
    fddir = os.path.join('/proc', str(os.getpid()), 'fd')
    ret = []
    for fdnum in os.listdir(fddir):
        try:
            rf = os.readlink(os.path.join(fddir, fdnum))
        except __HOLE__:
            continue
        if rf.startswith(self.tmpDir):
            ret.append(rf)
    return ret
|
OSError
|
dataset/ETHPy150Open sassoftware/conary/conary_test/updatetest.py/UpdateTest._getOpenFiles
|
4,738 |
def testGroupByDefault(self):
    # CNY-1476
    def fooVer():
        return [x.asString() for x in client.db.getTroveVersionList('group-foo')]

    for v in [1, 2]:
        self.addComponent('foo:lib', str(v), filePrimer=4*v)
        groupFooContents = ['foo:lib']
        if v == 2:
            self.addComponent('bar:lib', str(v), filePrimer=4*v+1)
            self.addCollection('bar', str(v), ['bar:lib'])
            groupFooContents.append('bar')
        self.addCollection('group-foo', str(v), groupFooContents)
        self.addCollection('group-dist', str(v), [('group-foo', False)])

    client = conaryclient.ConaryClient(self.cfg)

    # Install group-dist
    self.updatePkg('group-dist=1')
    # group-foo should not be installed since it's byDefault=False
    self.assertFalse(fooVer())

    # Install group-foo
    self.updatePkg('group-foo=1')
    self.assertEquals(fooVer(), ['/localhost@rpl:linux/1-1-1'])

    # run updateall
    self.captureOutput(self.updateAll)
    self.assertEquals(fooVer(), ['/localhost@rpl:linux/2-1-1'])

    # bar should be installed
    try:
        self.assertTrue(client.db.getTroveVersionList('bar'))
    except __HOLE__:
        raise testhelp.SkipTestException('CNY-1476 has to be fixed')
    else:
        raise Exception("Remove the SkipTestException")
|
AssertionError
|
dataset/ETHPy150Open sassoftware/conary/conary_test/updatetest.py/UpdateTest.testGroupByDefault
|
4,739 |
def _get_dates_loc(self, dates, date):
    if hasattr(dates, 'indexMap'):  # 0.7.x
        date = dates.indexMap[date]
    else:
        date = dates.get_loc(date)

        try:  # pandas 0.8.0 returns a boolean array
            len(date)
            from numpy import where
            date = where(date)[0].item()
        except __HOLE__:  # this is expected behavior
            pass
    return date
|
TypeError
|
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/tsa/base/tsa_model.py/TimeSeriesModel._get_dates_loc
|
4,740 |
def _get_predict_start(self, start):
    """
    Returns the index of the given start date. Subclasses should define
    default behavior for start = None. That isn't handled here.

    Start can be a string or an integer if self.data.dates is None.
    """
    dates = self.data.dates
    if isinstance(start, str):
        if dates is None:
            raise ValueError("Got a string for start and dates is None")
        dtstart = self._str_to_date(start)
        self.data.predict_start = dtstart
        try:
            start = self._get_dates_loc(dates, dtstart)
        except __HOLE__:
            raise ValueError("Start must be in dates. Got %s | %s" %
                             (str(start), str(dtstart)))

    self._set_predict_start_date(start)
    return start
|
KeyError
|
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/tsa/base/tsa_model.py/TimeSeriesModel._get_predict_start
|
4,741 |
def _get_predict_end(self, end):
    """
    See _get_predict_start for more information. Subclasses do not
    need to define anything for this.
    """
    out_of_sample = 0  # will be overwritten if needed
    if end is None:  # use data for ARIMA - endog changes
        end = len(self.data.endog) - 1

    dates = self.data.dates
    freq = self.data.freq

    if isinstance(end, str) or (dates is not None
                                and isinstance(end, type(dates[0]))):
        if dates is None:
            raise ValueError("Got a string or date for `end` and `dates` is None")

        if isinstance(end, str):
            dtend = self._str_to_date(end)
        else:
            dtend = end  # end could be a pandas TimeStamp not a datetime

        self.data.predict_end = dtend
        try:
            end = self._get_dates_loc(dates, dtend)
        except __HOLE__ as err:  # end is greater than dates[-1]...probably
            if dtend > self.data.dates[-1]:
                end = len(self.data.endog) - 1
                freq = self.data.freq
                out_of_sample = datetools._idx_from_dates(dates[-1], dtend,
                                                          freq)
            else:
                if freq is None:
                    raise ValueError("There is no frequency for these "
                                     "dates and date %s is not in dates "
                                     "index. Try giving a date that is in "
                                     "the dates index or use an integer."
                                     % dtend)
                else:  # pragma: no cover
                    raise err  # should never get here

        self._make_predict_dates()  # attaches self.data.predict_dates

    elif isinstance(end, int) and dates is not None:
        try:
            self.data.predict_end = dates[end]
        except IndexError as err:
            nobs = len(self.data.endog) - 1  # as an index
            out_of_sample = end - nobs
            end = nobs
            if freq is not None:
                self.data.predict_end = datetools._date_from_idx(dates[-1],
                        out_of_sample, freq)
            elif out_of_sample <= 0:  # have no frequency but are in sample
                # TODO: what error to catch here to make sure dates is
                # on the index?
                try:
                    self.data.predict_end = self._get_dates_loc(dates,
                            end)
                except KeyError:
                    raise
            else:
                self.data.predict_end = end + out_of_sample

        self.data.predict_start = self._get_dates_loc(dates,
                self.data.predict_start)

        self._make_predict_dates()

    elif isinstance(end, int):
        nobs = len(self.data.endog) - 1  # is an index
        if end > nobs:
            out_of_sample = end - nobs
            end = nobs

    elif freq is None:  # should have a date with freq = None
        raise ValueError("When freq is None, you must give an integer "
                         "index for end.")

    else:
        raise ValueError("no rule for interpreting end")

    return end, out_of_sample
|
KeyError
|
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/tsa/base/tsa_model.py/TimeSeriesModel._get_predict_end
|
4,742 |
def _make_predict_dates(self):
    data = self.data
    dtstart = data.predict_start
    dtend = data.predict_end
    freq = data.freq

    if freq is not None:
        pandas_freq = _freq_to_pandas[freq]
        try:
            from pandas import DatetimeIndex
            dates = DatetimeIndex(start=dtstart, end=dtend,
                                  freq=pandas_freq)
        except __HOLE__ as err:
            from pandas import DateRange
            dates = DateRange(dtstart, dtend, offset = pandas_freq).values
    # handle
    elif freq is None and (isinstance(dtstart, int) and
                           isinstance(dtend, int)):
        from pandas import Index
        dates = Index(lrange(dtstart, dtend+1))
    # if freq is None and dtstart and dtend aren't integers, we're
    # in sample
    else:
        dates = self.data.dates
        start = self._get_dates_loc(dates, dtstart)
        end = self._get_dates_loc(dates, dtend)
        dates = dates[start:end+1]  # is this index inclusive?

    self.data.predict_dates = dates
|
ImportError
|
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/tsa/base/tsa_model.py/TimeSeriesModel._make_predict_dates
|
4,743 |
def get_by_project_and_user(self, context, project_id, user_id, resource):
    self.called.append(('get_by_project_and_user',
                        context, project_id, user_id, resource))
    try:
        return self.by_user[user_id][resource]
    except __HOLE__:
        raise exception.ProjectUserQuotaNotFound(project_id=project_id,
                                                 user_id=user_id)
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/tests/unit/test_quota.py/FakeDriver.get_by_project_and_user
|
4,744 |
def get_by_project(self, context, project_id, resource):
    self.called.append(('get_by_project', context, project_id, resource))
    try:
        return self.by_project[project_id][resource]
    except __HOLE__:
        raise exception.ProjectQuotaNotFound(project_id=project_id)
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/tests/unit/test_quota.py/FakeDriver.get_by_project
|
4,745 |
def get_by_class(self, context, quota_class, resource):
    self.called.append(('get_by_class', context, quota_class, resource))
    try:
        return self.by_class[quota_class][resource]
    except __HOLE__:
        raise exception.QuotaClassNotFound(class_name=quota_class)
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/tests/unit/test_quota.py/FakeDriver.get_by_class
|
4,746 |
def run(self, exe='monolithic'):
    """
    Make system call to run one or more of the stand-alone executables,
    writing a log file to the folder containing the input file.
    """
    if self.feffinp == None:
        raise Exception("no feff.inp file was specified")
    if not isfile(self.feffinp):
        raise Exception("feff.inp file '%s' could not be found" % self.feffinp)

    savefile = '.save_save_save.inp'
    here = os.getcwd()
    os.chdir(dirname(self.feffinp))
    log = 'f85e.log'
    if exe == None:
        exe = ''
    else:
        if not ((exe in ('monolithic', 'rdinp', 'pot', 'opconsat', 'xsph', 'pathfinder', 'genfmt', 'ff2x')) or exe.startswith('feff')):
            os.chdir(here)
            raise Exception("'%s' is not a valid executable name" % exe)

    ## default behavior is to step through the feff85exafs modules (but not opconsat, monolithic presumes that opconsat will not be used)
    if exe.startswith('mono'):  # run modules recursively
        if isfile(log): os.unlink(log)
        for m in ('rdinp', 'pot', 'xsph', 'pathfinder', 'genfmt', 'ff2x'):
            os.chdir(here)
            self.run(m)
            if m == 'pot' and self.mpse:
                self.run('opconsat')
        return
    elif exe.startswith('feff'):
        if isfile(log): os.unlink(log)

    ## if exe is unset or not set to something already recognized,
    ## try to figure out what executable to run
    ##
    ## the logic is:
    ##  1. if exe seems to be a feff version, try to find that Feff executable
    ##  2. if repo is None, try to find the installed feff85exafs executable
    ##  3. if repo is set, try to find the newly compiled feff85exafs executable
    ##  4. if nothing has yet been found, try to use _xafs._feff_executable
    ##  5. if nothing is found, raise an Exception
    program = None
    if exe.startswith('feff'):  # step 1, exe seems to be numbered feff (e.g. feff6, feff7, ...)
        self.resolved = find_executable(exe)
        if self.resolved:
            program = self.resolved

    if exe in ('rdinp', 'pot', 'opconsat', 'xsph', 'pathfinder', 'genfmt', 'ff2x'):
        if self.repo == None:  # step 2, try to find the installed feff85exafs module
            self.resolved = find_executable(exe)
            if not os.access(self.resolved, os.X_OK):
                os.chdir(here)
                raise Exception("'%s' is not an executable" % self.resolved)
            if self.resolved:
                program = self.resolved
        else:  # step 3, try to find the newly compiled feff85exafs module
            folder = exe.upper()
            if exe == 'pathfinder':
                folder = 'PATH'
            program = join(self.repo, 'src', folder, exe)
            self.resolved = program
            if not isfile(program):
                os.chdir(here)
                raise Exception("'%s' cannot be found (has it been compiled?)" % program)
            if not os.access(program, os.X_OK):
                os.chdir(here)
                raise Exception("'%s' is not an executable" % program)

    if program == None:  # step 4, try _xafs.feff_executable
        program = self._larch.symtable.get_symbol('_xafs._feff_executable')
        try:
            program = self._larch.symtable.get_symbol('_xafs._feff_executable')
        except __HOLE__:
            os.chdir(here)
            raise Exception("_xafs._feff_executable is not set (1)")
        except AttributeError:
            os.chdir(here)
            raise Exception("_xafs._feff_executable is not set (2)")
        if program != None:
            if not os.access(program, os.X_OK):
                program = None

    if program == None:  # step 5, give up
        os.chdir(here)
        raise Exception("'%s' executable cannot be found" % exe)

    ## preserve an existing feff.inp file if this is not called feff.inp
    if basename(self.feffinp) != 'feff.inp':
        if isfile('feff.inp'):
            copy('feff.inp', savefile)
        copy(basename(self.feffinp), 'feff.inp')

    f = open(log, 'a')
    header = "\n======= running module %s ====================================================\n" % exe
    if self.verbose: print(header)
    f.write(header)
    process = subprocess.Popen(program, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    flag = False
    thislist = []
    while True:
        line = process.stdout.readline()
        if not line:
            break
        if self.verbose: print( ':'+line.rstrip())
        ## snarf threshold energy
        pattern = re.compile('mu_(new|old)=\s+(-?\d\.\d+)')
        match = pattern.search(line)
        if match != None:
            self.threshold.append(match.group(2))
        ## snarf charge transfer
        if line.strip().startswith('Charge transfer'):
            thislist = []
            flag = True
        elif line.strip().startswith('SCF ITERATION'):
            self.chargetransfer.append(list(thislist))
            flag = False
        elif line.strip().startswith('Done with module 1'):
            self.chargetransfer.append(list(thislist))
            flag = False
        elif flag:
            this = line.split()
            thislist.append(this[1])
        f.write(line)
    f.close

    if isfile(savefile):
        move(savefile, 'feff.inp')
    os.chdir(here)
    return None

######################################################################
|
NameError
|
dataset/ETHPy150Open xraypy/xraylarch/plugins/xafs/feffrunner.py/FeffRunner.run
|
4,747 |
@app.route('/add', methods=['POST'])
def add():
    storm_ = app.get_storm()
    try:
        name = request.json['name']
        connection_uri = request.json['connection_uri']

        if 'id_file' in request.json:
            id_file = request.json['id_file']
        else:
            id_file = ''

        if '@' in name:
            msg = 'invalid value: "@" cannot be used in name.'
            return jsonify(message=msg), 400

        user, host, port = parse(connection_uri)
        storm_.add_entry(name, host, user, port, id_file)

        return response(status=201)
    except ValueError as exc:
        return jsonify(message=exc.message)
    except (__HOLE__, TypeError):
        return response(status=400)
|
KeyError
|
dataset/ETHPy150Open emre/storm/storm/web.py/add
|
4,748 |
@app.route('/edit', methods=['PUT'])
def edit():
    storm_ = app.get_storm()
    try:
        name = request.json['name']
        connection_uri = request.json['connection_uri']

        if 'id_file' in request.json:
            id_file = request.json['id_file']
        else:
            id_file = ''

        user, host, port = parse(connection_uri)
        storm_.edit_entry(name, host, user, port, id_file)

        return response()
    except ValueError as exc:
        return jsonify(message=exc.message), 404
    except (__HOLE__, TypeError):
        return response(status=400)
|
KeyError
|
dataset/ETHPy150Open emre/storm/storm/web.py/edit
|
4,749 |
@app.route('/delete', methods=['POST'])
def delete():
    storm_ = app.get_storm()
    try:
        name = request.json['name']
        storm_.delete_entry(name)

        return response()
    except __HOLE__ as exc:
        return jsonify(message=exc.message), 404
    except (TypeError, ValueError):
        return response(status=400)
|
ValueError
|
dataset/ETHPy150Open emre/storm/storm/web.py/delete
|
4,750 |
def pid_from_str(s):
    id_ = -1
    try:
        id_ = int(s)
    except __HOLE__:
        pass
    return id_
|
ValueError
|
dataset/ETHPy150Open kashefy/nideep/nideep/eval/log_utils.py/pid_from_str
|
4,751 |
def process(self, d):
    if d is None:
        # Special case for when no doc could be found and that is OK
        return None

    try:
        value = _lookup_keys(self.keys, d)
    except __HOLE__:
        if self.if_missing is not None:
            Exc, args = self.if_missing[0], self.if_missing[1:]
            raise Exc(*args)
        else:
            return None

    return self.apply(value)
|
KeyError
|
dataset/ETHPy150Open wiki-ai/revscoring/revscoring/extractors/api/util.py/key.process
|
4,752 |
def _lookup_keys(keys, d):
    if isinstance(keys, str) or not hasattr(keys, "__iter__"):
        keys = [keys]
    try:
        for key in keys:
            d = d[key]
    except __HOLE__:
        raise KeyError(keys)
    return d
|
KeyError
|
dataset/ETHPy150Open wiki-ai/revscoring/revscoring/extractors/api/util.py/_lookup_keys
|
4,753 |
def run_async(self):
    log_output = self.git(
        "log",
        "-{}".format(self._limit) if self._limit else None,
        "--skip={}".format(self._pagination) if self._pagination else None,
        "--author={}".format(self._author) if self._author else None,
        '--format=%h%n%H%n%s%n%an%n%at%x00',
        '--cherry' if self.cherry_branch else None,
        '..{}'.format(self.cherry_branch) if self.cherry_branch else None,
        "--" if self._filename else None,
        self._filename
    ).strip("\x00")

    self._entries = []
    self._hashes = []
    for entry in log_output.split("\x00"):
        try:
            short_hash, long_hash, summary, author, datetime = entry.strip("\n").split("\n")
            self._entries.append([
                short_hash + " " + summary,
                author + ", " + util.dates.fuzzy(datetime)
            ])
            self._hashes.append(long_hash)
        except __HOLE__:
            # Empty line - less expensive to catch the exception once than
            # to check truthiness of entry.strip() each time.
            pass

    if not len(self._entries) < self._limit:
        self._entries.append([
            ">>> NEXT {} COMMITS >>>".format(self._limit),
            "Skip this set of commits and choose from the next-oldest batch."
        ])

    self.window.show_quick_panel(
        self._entries,
        self.on_hash_selection,
        flags=sublime.MONOSPACE_FONT
    )
|
ValueError
|
dataset/ETHPy150Open divmain/GitSavvy/core/commands/log.py/GsLogCommand.run_async
|
4,754 |
def get_pyserial_version(self, pyserial_version):
    """! Retrieve pyserial module version
    @return Returns float with pyserial module number
    """
    version = 3.0
    m = self.re_float.search(pyserial_version)
    if m:
        try:
            version = float(m.group(0))
        except __HOLE__:
            version = 3.0  # We will assume you've got latest (3.0+)
    return version
|
ValueError
|
dataset/ETHPy150Open ARMmbed/htrun/mbed_host_tests/host_tests_plugins/module_reset_mbed.py/HostTestPluginResetMethod_Mbed.get_pyserial_version
|
4,755 |
def set_content_length(self):
"""Compute Content-Length or switch to chunked encoding if possible"""
try:
blocks = len(self.result)
except (TypeError, __HOLE__, NotImplementedError):
pass
else:
if blocks==1:
self.headers['Content-Length'] = str(self.bytes_sent)
return
# XXX Try for chunked encoding if origin server and client is 1.1
|
AttributeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/core/servers/basehttp.py/ServerHandler.set_content_length
|
4,756 |
def __call__(self, environ, start_response):
import os.path
# Ignore requests that aren't under ADMIN_MEDIA_PREFIX. Also ignore
# all requests if ADMIN_MEDIA_PREFIX isn't a relative URL.
if self.media_url.startswith('http://') or self.media_url.startswith('https://') \
or not environ['PATH_INFO'].startswith(self.media_url):
return self.application(environ, start_response)
# Find the admin file and serve it up, if it exists and is readable.
try:
file_path = self.file_path(environ['PATH_INFO'])
except ValueError: # Resulting file path was not valid.
status = '404 NOT FOUND'
headers = {'Content-type': 'text/plain'}
output = ['Page not found: %s' % environ['PATH_INFO']]
start_response(status, headers.items())
return output
if not os.path.exists(file_path):
status = '404 NOT FOUND'
headers = {'Content-type': 'text/plain'}
output = ['Page not found: %s' % environ['PATH_INFO']]
else:
try:
fp = open(file_path, 'rb')
except __HOLE__:
status = '401 UNAUTHORIZED'
headers = {'Content-type': 'text/plain'}
output = ['Permission denied: %s' % environ['PATH_INFO']]
else:
# This is a very simple implementation of conditional GET with
# the Last-Modified header. It makes media files a bit speedier
# because the files are only read off disk for the first
# request (assuming the browser/client supports conditional
# GET).
mtime = http_date(os.stat(file_path)[stat.ST_MTIME])
headers = {'Last-Modified': mtime}
if environ.get('HTTP_IF_MODIFIED_SINCE', None) == mtime:
status = '304 NOT MODIFIED'
output = []
else:
status = '200 OK'
mime_type = mimetypes.guess_type(file_path)[0]
if mime_type:
headers['Content-Type'] = mime_type
output = [fp.read()]
fp.close()
start_response(status, headers.items())
return output
|
IOError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/django/core/servers/basehttp.py/AdminMediaHandler.__call__
|
4,757 |
def get_instance_port(self, instance_id):
"""Returns the port of the HTTP server for an instance."""
try:
instance_id = int(instance_id)
except __HOLE__:
raise request_info.InvalidInstanceIdError()
with self._condition:
if 0 <= instance_id < len(self._instances):
wsgi_servr = self._wsgi_servers[instance_id]
else:
raise request_info.InvalidInstanceIdError()
return wsgi_servr.port
|
ValueError
|
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/devappserver2/module.py/ManualScalingModule.get_instance_port
|
4,758 |
def get_instance(self, instance_id):
"""Returns the instance with the provided instance ID."""
try:
with self._condition:
return self._instances[int(instance_id)]
except (ValueError, __HOLE__):
raise request_info.InvalidInstanceIdError()
|
IndexError
|
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/devappserver2/module.py/ManualScalingModule.get_instance
|
4,759 |
def get_instance_port(self, instance_id):
"""Returns the port of the HTTP server for an instance."""
try:
instance_id = int(instance_id)
except __HOLE__:
raise request_info.InvalidInstanceIdError()
with self._condition:
if 0 <= instance_id < len(self._instances):
wsgi_servr = self._wsgi_servers[instance_id]
else:
raise request_info.InvalidInstanceIdError()
return wsgi_servr.port
|
ValueError
|
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/devappserver2/module.py/BasicScalingModule.get_instance_port
|
4,760 |
def get_instance(self, instance_id):
"""Returns the instance with the provided instance ID."""
try:
with self._condition:
return self._instances[int(instance_id)]
except (ValueError, __HOLE__):
raise request_info.InvalidInstanceIdError()
|
IndexError
|
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/devappserver2/module.py/BasicScalingModule.get_instance
|
4,761 |
def Browser(driver_name='firefox', *args, **kwargs):
"""
Returns a driver instance for the given name.
When working with ``firefox``, it's possible to provide a profile name
and a list of extensions.
If you don't provide any driver_name, then ``firefox`` will be used.
If there is no driver registered with the provided ``driver_name``, this
function will raise a :class:`splinter.exceptions.DriverNotFoundError`
exception.
"""
try:
driver = _DRIVERS[driver_name]
except __HOLE__:
raise DriverNotFoundError("No driver for %s" % driver_name)
return driver(*args, **kwargs)
|
KeyError
|
dataset/ETHPy150Open cobrateam/splinter/splinter/browser.py/Browser
|
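Browser() is a registry lookup that translates a missing dict entry (the labeled KeyError) into a domain-specific error. A sketch with a made-up registry; splinter's real _DRIVERS maps names to actual driver classes:

class DriverNotFoundError(Exception):
    pass

class FakeFirefox(object):
    def __init__(self, *args, **kwargs):
        self.options = kwargs

_DRIVERS = {"firefox": FakeFirefox}

def browser(driver_name="firefox", *args, **kwargs):
    try:
        driver = _DRIVERS[driver_name]
    except KeyError:
        raise DriverNotFoundError("No driver for %s" % driver_name)
    return driver(*args, **kwargs)

b = browser("firefox", headless=True)
assert b.options == {"headless": True}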
4,762 |
def FindFileByName(self, file_name):
"""Gets a FileDescriptor by file name.
Args:
file_name: The path to the file to get a descriptor for.
Returns:
A FileDescriptor for the named file.
Raises:
KeyError: if the file can not be found in the pool.
"""
try:
return self._file_descriptors[file_name]
except __HOLE__:
pass
try:
file_proto = self._internal_db.FindFileByName(file_name)
except KeyError as error:
if self._descriptor_db:
file_proto = self._descriptor_db.FindFileByName(file_name)
else:
raise error
if not file_proto:
raise KeyError('Cannot find a file named %s' % file_name)
return self._ConvertFileProtoToFileDescriptor(file_proto)
|
KeyError
|
dataset/ETHPy150Open sklearn-theano/sklearn-theano/sklearn_theano/externals/google/protobuf/descriptor_pool.py/DescriptorPool.FindFileByName
|
4,763 |
def FindFileContainingSymbol(self, symbol):
"""Gets the FileDescriptor for the file containing the specified symbol.
Args:
symbol: The name of the symbol to search for.
Returns:
A FileDescriptor that contains the specified symbol.
Raises:
KeyError: if the file can not be found in the pool.
"""
symbol = _NormalizeFullyQualifiedName(symbol)
try:
return self._descriptors[symbol].file
except KeyError:
pass
try:
return self._enum_descriptors[symbol].file
except KeyError:
pass
try:
file_proto = self._internal_db.FindFileContainingSymbol(symbol)
except __HOLE__ as error:
if self._descriptor_db:
file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
else:
raise error
if not file_proto:
raise KeyError('Cannot find a file containing %s' % symbol)
return self._ConvertFileProtoToFileDescriptor(file_proto)
|
KeyError
|
dataset/ETHPy150Open sklearn-theano/sklearn-theano/sklearn_theano/externals/google/protobuf/descriptor_pool.py/DescriptorPool.FindFileContainingSymbol
|
4,764 |
def get_item_types(self):
if hasattr(self, "_item_types"):
return self._item_types
url = self._build_url('itemtypes')
options = {
'apikey': self._api_key,
'tenantid': self._tenant
}
try:
response = self._fetch_response(url, params=options)
except:
return ['ITEM',]
try:
self._item_types = response['itemTypes']['itemType']
return self._item_types
except __HOLE__:
return ['ITEM',]
|
KeyError
|
dataset/ETHPy150Open snowball-one/django-oscar-easyrec/easyrec/gateway.py/EasyRec.get_item_types
|
4,765 |
def __init__(self, mapping=(), fvars={}, autoreload=None):
if autoreload is None:
autoreload = web.config.get('debug', False)
self.init_mapping(mapping)
self.fvars = fvars
self.processors = []
self.add_processor(loadhook(self._load))
self.add_processor(unloadhook(self._unload))
if autoreload:
def main_module_name():
mod = sys.modules['__main__']
file = getattr(mod, '__file__', None) # make sure this works even from python interpreter
return file and os.path.splitext(os.path.basename(file))[0]
def modname(fvars):
"""find name of the module name from fvars."""
file, name = fvars.get('__file__'), fvars.get('__name__')
if file is None or name is None:
return None
if name == '__main__':
# Since the __main__ module can't be reloaded, the module has
# to be imported using its file name.
name = main_module_name()
return name
mapping_name = utils.dictfind(fvars, mapping)
module_name = modname(fvars)
def reload_mapping():
"""loadhook to reload mapping and fvars."""
mod = __import__(module_name, None, None, [''])
mapping = getattr(mod, mapping_name, None)
if mapping:
self.fvars = mod.__dict__
self.init_mapping(mapping)
self.add_processor(loadhook(Reloader()))
if mapping_name and module_name:
self.add_processor(loadhook(reload_mapping))
            # load __main__ module using its filename, so that it can be reloaded.
if main_module_name() and '__main__' in sys.argv:
try:
__import__(main_module_name())
except __HOLE__:
pass
|
ImportError
|
dataset/ETHPy150Open joxeankoret/nightmare/runtime/web/application.py/application.__init__
|
4,766 |
def handle_with_processors(self):
def process(processors):
try:
if processors:
p, processors = processors[0], processors[1:]
return p(lambda: process(processors))
else:
return self.handle()
except web.HTTPError:
raise
except (KeyboardInterrupt, __HOLE__):
raise
except:
print >> web.debug, traceback.format_exc()
raise self.internalerror()
        # processors must be applied in the reverse order. (??)
return process(self.processors)
|
SystemExit
|
dataset/ETHPy150Open joxeankoret/nightmare/runtime/web/application.py/application.handle_with_processors
|
4,767 |
def wsgifunc(self, *middleware):
"""Returns a WSGI-compatible function for this application."""
def peep(iterator):
"""Peeps into an iterator by doing an iteration
and returns an equivalent iterator.
"""
# wsgi requires the headers first
# so we need to do an iteration
# and save the result for later
try:
firstchunk = iterator.next()
except __HOLE__:
firstchunk = ''
return itertools.chain([firstchunk], iterator)
def is_generator(x): return x and hasattr(x, 'next')
def wsgi(env, start_resp):
            # clear threadlocal to avoid interference of previous requests
self._cleanup()
self.load(env)
try:
# allow uppercase methods only
if web.ctx.method.upper() != web.ctx.method:
raise web.nomethod()
result = self.handle_with_processors()
if is_generator(result):
result = peep(result)
else:
result = [result]
except web.HTTPError, e:
result = [e.data]
result = web.safestr(iter(result))
status, headers = web.ctx.status, web.ctx.headers
start_resp(status, headers)
def cleanup():
self._cleanup()
yield '' # force this function to be a generator
return itertools.chain(result, cleanup())
for m in middleware:
wsgi = m(wsgi)
return wsgi
|
StopIteration
|
dataset/ETHPy150Open joxeankoret/nightmare/runtime/web/application.py/application.wsgifunc
|
4,768 |
def cgirun(self, *middleware):
"""
Return a CGI handler. This is mostly useful with Google App Engine.
There you can just do:
main = app.cgirun()
"""
wsgiapp = self.wsgifunc(*middleware)
try:
from google.appengine.ext.webapp.util import run_wsgi_app
return run_wsgi_app(wsgiapp)
except __HOLE__:
# we're not running from within Google App Engine
return wsgiref.handlers.CGIHandler().run(wsgiapp)
|
ImportError
|
dataset/ETHPy150Open joxeankoret/nightmare/runtime/web/application.py/application.cgirun
|
4,769 |
def autodelegate(prefix=''):
"""
Returns a method that takes one argument and calls the method named prefix+arg,
calling `notfound()` if there isn't one. Example:
urls = ('/prefs/(.*)', 'prefs')
class prefs:
GET = autodelegate('GET_')
def GET_password(self): pass
def GET_privacy(self): pass
    `GET_password` would get called for `/prefs/password`, while `GET_privacy`
    gets called for `/prefs/privacy`.
If a user visits `/prefs/password/change` then `GET_password(self, '/change')`
is called.
"""
def internal(self, arg):
if '/' in arg:
first, rest = arg.split('/', 1)
func = prefix + first
args = ['/' + rest]
else:
func = prefix + arg
args = []
if hasattr(self, func):
try:
return getattr(self, func)(*args)
except __HOLE__:
raise web.notfound()
else:
raise web.notfound()
return internal
|
TypeError
|
dataset/ETHPy150Open joxeankoret/nightmare/runtime/web/application.py/autodelegate
|
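autodelegate() maps URL path segments onto prefixed methods; the TypeError hole covers a handler that exists but rejects the extra path argument. A standalone sketch that substitutes a plain LookupError for web.notfound() so it runs without web.py:

def autodelegate(prefix=""):
    def internal(self, arg):
        if "/" in arg:
            first, rest = arg.split("/", 1)
            func, args = prefix + first, ["/" + rest]
        else:
            func, args = prefix + arg, []
        if not hasattr(self, func):
            raise LookupError(arg)
        try:
            return getattr(self, func)(*args)
        except TypeError:
            # handler signature doesn't accept the trailing path
            raise LookupError(arg)
    return internal

class Prefs(object):
    GET = autodelegate("GET_")

    def GET_password(self):
        return "password page"

assert Prefs().GET("password") == "password page"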
4,770 |
def check(self, mod):
# jython registers java packages as modules but they either
# don't have a __file__ attribute or its value is None
if not (mod and hasattr(mod, '__file__') and mod.__file__):
return
try:
mtime = os.stat(mod.__file__).st_mtime
except (__HOLE__, IOError):
return
if mod.__file__.endswith(self.__class__.SUFFIX) and os.path.exists(mod.__file__[:-1]):
mtime = max(os.stat(mod.__file__[:-1]).st_mtime, mtime)
if mod not in self.mtimes:
self.mtimes[mod] = mtime
elif self.mtimes[mod] < mtime:
try:
reload(mod)
self.mtimes[mod] = mtime
except ImportError:
pass
|
OSError
|
dataset/ETHPy150Open joxeankoret/nightmare/runtime/web/application.py/Reloader.check
|
4,771 |
def accept_ride_request(self,person):
print "#" * 80
print self.username + ": ACCEPTING A RIDE REQUEST..."
print "#" * 80
if isinstance(person,str):
username = person
else:
try:
username = person['username']
except __HOLE__:
username = person['author']['username']
response = self.client.put('/trips/' +str(self.trip['id'])+ '/participations/'+username+'/',
data={'status':'accept'})
print "Dycapo Response: \n" + str(response)
print "#" * 80
return utils.rest_to_response(response)
|
KeyError
|
dataset/ETHPy150Open dgraziotin/dycapo/tests/classes.py/Driver.accept_ride_request
|
4,772 |
def for_name(fq_name, recursive=False):
"""Find class/function/method specified by its fully qualified name.
Fully qualified can be specified as:
* <module_name>.<class_name>
* <module_name>.<function_name>
* <module_name>.<class_name>.<method_name> (an unbound method will be
returned in this case).
for_name works by doing __import__ for <module_name>, and looks for
<class_name>/<function_name> in module's __dict__/attrs. If fully qualified
name doesn't contain '.', the current module will be used.
Args:
fq_name: fully qualified name of something to find
Returns:
class object.
Raises:
ImportError: when specified module could not be loaded or the class
was not found in the module.
"""
# if "." not in fq_name:
# raise ImportError("'%s' is not a full-qualified name" % fq_name)
fq_name = str(fq_name)
module_name = __name__
short_name = fq_name
if fq_name.rfind(".") >= 0:
(module_name, short_name) = (fq_name[:fq_name.rfind(".")],
fq_name[fq_name.rfind(".") + 1:])
try:
result = __import__(module_name, None, None, [short_name])
return result.__dict__[short_name]
except KeyError:
# If we're recursively inside a for_name() chain, then we want to raise
# this error as a key error so we can report the actual source of the
# problem. If we're *not* recursively being called, that means the
# module was found and the specific item could not be loaded, and thus
# we want to raise an ImportError directly.
if recursive:
raise
else:
raise ImportError("Could not find '%s' on path '%s'" % (
short_name, module_name))
except ImportError, e:
# module_name is not actually a module. Try for_name for it to figure
# out what's this.
try:
module = for_name(module_name, recursive=True)
if hasattr(module, short_name):
return getattr(module, short_name)
else:
# The module was found, but the function component is missing.
raise KeyError()
except KeyError:
raise ImportError("Could not find '%s' on path '%s'" % (
short_name, module_name))
except __HOLE__:
# This means recursive import attempts failed, thus we will raise the
# first ImportError we encountered, since it's likely the most accurate.
pass
# Raise the original import error that caused all of this, since it is
# likely the real cause of the overall problem.
raise
|
ImportError
|
dataset/ETHPy150Open livid/v2ex/mapreduce/util.py/for_name
|
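for_name() resolves a dotted path by importing the module part and pulling the attribute, recursing when the "module" is itself a class. A much-reduced sketch using importlib (no recursion; assumes fq_name is of the form 'pkg.mod.attr'):

import importlib

def for_name(fq_name):
    module_name, _, short_name = fq_name.rpartition(".")
    module = importlib.import_module(module_name)
    try:
        return getattr(module, short_name)
    except AttributeError:
        raise ImportError("Could not find '%s' on path '%s'"
                          % (short_name, module_name))

import os.path
assert for_name("os.path.join") is os.path.join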
4,773 |
def ugly_tree_creation_test():
tree = LookupTree()
error_values = []
for i in range(10000):
tree.insert(hash(i), (i, i))
n = 0
try:
for k, v in tree:
n += 1
if n != i+1:
error_values.append(i)
except __HOLE__:
# this is failing the first time through the loop, because
# integers aren't iterable.
# I'm torn about what to think about this fact.
# Probably just means I don't understand the tree as well as I thought
print "Quit being able to iterate over tree on insert # %d" % i
raise
assert not error_values
|
TypeError
|
dataset/ETHPy150Open zhemao/funktown/unittest.py/ugly_tree_creation_test
|
4,774 |
def _select_binary_base_path(self, supportdir, version, name, uname_func=None):
"""Calculate the base path.
Exposed for associated unit tests.
:param supportdir: the path used to make a path under --pants_bootstrapdir.
:param version: the version number of the tool used to make a path under --pants-bootstrapdir.
    :param name: name of the binary to search for. (e.g. 'protoc')
:param uname_func: method to use to emulate os.uname() in testing
:returns: Base path used to select the binary file.
"""
uname_func = uname_func or os.uname
sysname, _, release, _, machine = uname_func()
try:
os_id = get_os_id(uname_func=uname_func)
except __HOLE__:
os_id = None
if os_id is None:
raise self.MissingMachineInfo("Pants has no binaries for {}".format(sysname))
try:
middle_path = self._path_by_id[os_id]
except KeyError:
raise self.MissingMachineInfo(
"Update --binaries-path-by-id to find binaries for {sysname} {machine} {release}.".format(
sysname=sysname, release=release, machine=machine))
return os.path.join(supportdir, *(middle_path + [version, name]))
|
KeyError
|
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/binaries/binary_util.py/BinaryUtil._select_binary_base_path
|
4,775 |
@contextmanager
def _select_binary_stream(self, name, binary_path, fetcher=None):
"""Select a binary matching the current os and architecture.
:param string binary_path: The path to the binary to fetch.
:param fetcher: Optional argument used only for testing, to 'pretend' to open urls.
:returns: a 'stream' to download it from a support directory. The returned 'stream' is actually
    a lambda function which returns the file's binary contents.
:raises: :class:`pants.binary_util.BinaryUtil.BinaryNotFound` if no binary of the given version
and name could be found for the current platform.
"""
if not self._baseurls:
raise self.NoBaseUrlsError(
'No urls are defined for the --pants-support-baseurls option.')
downloaded_successfully = False
accumulated_errors = []
for baseurl in OrderedSet(self._baseurls): # Wrap in OrderedSet because duplicates are wasteful.
url = posixpath.join(baseurl, binary_path)
logger.info('Attempting to fetch {name} binary from: {url} ...'.format(name=name, url=url))
try:
with temporary_file() as dest:
fetcher = fetcher or Fetcher()
fetcher.download(url, listener=Fetcher.ProgressListener(), path_or_fd=dest)
logger.info('Fetched {name} binary from: {url} .'.format(name=name, url=url))
downloaded_successfully = True
dest.seek(0)
yield lambda: dest.read()
break
except (__HOLE__, Fetcher.Error, ValueError) as e:
accumulated_errors.append('Failed to fetch binary from {url}: {error}'
.format(url=url, error=e))
if not downloaded_successfully:
raise self.BinaryNotFound(binary_path, accumulated_errors)
|
IOError
|
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/binaries/binary_util.py/BinaryUtil._select_binary_stream
|
4,776 |
def ranking(self, dimfun, groupby, ftarget=10**-8):
"""
Produce a set of function evaluation ranks over all algorithms
and strategies.
        Returns a set of rows where each row contains a budget as the first
        element and ranks for individual algorithms and strategies as the
        remaining elements (algorithms in the order produced by
        algds_dimfunc(), followed by strategies in the order produced by
        stratds_dimfunc()).
The ranks are always computed based on function values after
a particular budget. If multiple algorithms reach ftarget,
they are ranked by the order in which they did.
"""
nameds = list(itertools.chain(self.algds_dimfunc(dimfun), self.stratds_dimfunc(dimfun)))
count = len(nameds)
# Produce "fv" items, one per dataset, containing single function value
# for each budget
fvset = []
for (name, ds) in nameds:
budgets = ds.funvals[:,0]
f1vals = np.maximum(groupby(ds.funvals[:, 1:], axis=1), ftarget)
fv = np.transpose(np.vstack([budgets, f1vals]))
fvset.append(fv)
# Align the "fv" items by budget and merge them
fva = ra.alignArrayData(ra.VArrayMultiReader(fvset))
budgets = fva[:,0]
# Assign function values and rank them
        # However, we want to resolve potential ties by ranking the
        # first-converging function first, so we rewrite ftarget values
        # in increasing convergence order.
values = fva[:,1:].copy()
firstconv = np.ones(count) * (np.size(budgets)+1) # runlength+1 is default
for i in range(count): # XXX: drop the loop
try:
firstconv[i] = np.nonzero(values[:,i] == ftarget)[0][0]
except __HOLE__:
continue # no rewriting needed
firstconvranks = ss.mstats.rankdata(firstconv)
for i in range(count):
r = firstconvranks[i]
values[firstconv[i]:, i] = ftarget - (1-r/count)*ftarget
ranks = ss.mstats.rankdata(values, axis=1)
return np.transpose(np.vstack([budgets, ranks.T]))
|
IndexError
|
dataset/ETHPy150Open pasky/cocopf/pproc.py/PortfolioDataSets.ranking
|
4,777 |
def resolve_fid(fid):
"""
Convert a given "function id" string to a number of list of numbers,
with the ability to resolve symbolic names for a variety of function
classes.
XXX: So far, the classes are fixed as determined on the scipy+CMA
portfolio. Instead, determine them dynamically.
"""
# A list of numbers?
if fid.count(',') > 0:
return [int(i) for i in fid.split(',')]
# A number?
try:
return int(fid)
except __HOLE__:
pass
# A symbolic name!
symbols = dict(
all=set(range(1,25)),
# functions that converge earlier than 7^3
q=set([1,2,5]),
# functions that have a single sharply optimal oracle
single=set([2,3,4,6,7,10,11,12,13,15,16,17,18,19,20,21,22,23]),
# functions that have multiple feasible candidates (#evals@optimal slowdown <2)
many=set([1,5,8,9,14,24]),
# functions whose optimal oracle did not dominate throughout the computation (i.e. was not best two powers of |pf| ago)
volatile=set([6,8,9,10,11,12,13,14,15,19,20,21,22,23,24]),
# functions whose oracle converges steadily
steady=set([6,12,17,18,19,24]),
# functions whose oracle converges unexpectedly
sudden=set([1,2,3,4,5,7,8,9,10,11,13,14,15,16,20,21,22,23]),
# functions where CMA (the by far best-performing pf member) converges
CMAgood=set([6,7,10,11,12,13,16,17,18,21,22,23]),
# functions where CMA (the by far best-performing pf member) does not converge
CMAbad=set([1,2,3,4,5,8,9,14,15,19,20,24]),
# original BBOB families
separ=set(range(1,6)),
lcond=set(range(6,10)),
hcond=set(range(10,15)),
multi=set(range(15,20)),
mult2=set(range(20,25)),
)
fidset = set([])
for m in re.finditer(r'([+:-])?([^+:-]+)', fid):
if m.group(1) is None:
fidset = symbols[m.group(2)]
elif m.group(1) == '+':
fidset |= symbols[m.group(2)]
elif m.group(1) == ':':
fidset &= symbols[m.group(2)]
elif m.group(1) == '-':
fidset -= symbols[m.group(2)]
else:
raise ValueError('bad fid syntax ' + fid)
return list(fidset)
|
ValueError
|
dataset/ETHPy150Open pasky/cocopf/pproc.py/resolve_fid
|
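resolve_fid() mixes two parse strategies: try int() first, then fall back to set algebra over named groups, scanning +/:/- operators with a regex. A trimmed, runnable sketch; the symbol table here is invented:

import re

def resolve_ids(spec, symbols):
    try:
        return [int(spec)]          # plain number
    except ValueError:
        pass                        # symbolic expression
    result = set()
    for m in re.finditer(r"([+:-])?([^+:-]+)", spec):
        op, name = m.group(1), m.group(2)
        if op is None:
            result = set(symbols[name])
        elif op == "+":
            result |= symbols[name]  # union
        elif op == ":":
            result &= symbols[name]  # intersection
        elif op == "-":
            result -= symbols[name]  # difference
    return sorted(result)

syms = {"all": {1, 2, 3, 4}, "odd": {1, 3}}
assert resolve_ids("all-odd", syms) == [2, 4]
assert resolve_ids("7", syms) == [7]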
4,778 |
def maybe_asynchronous(f):
def wrapped(*args, **kwargs):
try:
callback = kwargs.pop('callback')
except __HOLE__:
callback = None
result = f(*args, **kwargs)
if callback is not None:
callback(result)
else:
return result
return wrapped
|
KeyError
|
dataset/ETHPy150Open Lothiraldan/ZeroServices/zeroservices/utils.py/maybe_asynchronous
|
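maybe_asynchronous pops an optional callback kwarg: with a callback the result is delivered to it, otherwise it is returned. Sketch with the labeled KeyError filled in:

def maybe_asynchronous(f):
    def wrapped(*args, **kwargs):
        try:
            callback = kwargs.pop("callback")   # KeyError if absent
        except KeyError:
            callback = None
        result = f(*args, **kwargs)
        if callback is not None:
            callback(result)                    # callback style: no return
        else:
            return result
    return wrapped

@maybe_asynchronous
def add(a, b):
    return a + b

assert add(1, 2) == 3
out = []
add(1, 2, callback=out.append)
assert out == [3]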
4,779 |
@property
def username(self):
try:
return self._thread_local.user
except __HOLE__:
return DEFAULT_USER.get()
|
AttributeError
|
dataset/ETHPy150Open cloudera/hue/desktop/libs/hadoop/src/hadoop/yarn/mapreduce_api.py/MapreduceApi.username
|
4,780 |
def test_convolutional_network():
"""Test smaller version of convolutional_network.ipynb"""
skip.skip_if_no_data()
yaml_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..'))
save_path = os.path.dirname(os.path.realpath(__file__))
    # Escape potential backslashes in Windows filenames, since they will be
    # processed when the YAML parser reads the path as a string. Note that
    # str.replace returns a new string, so the result must be assigned back.
    save_path = save_path.replace('\\', r'\\')
yaml = open("{0}/conv.yaml".format(yaml_file_path), 'r').read()
hyper_params = {'train_stop': 50,
'valid_stop': 50050,
'test_stop': 50,
'batch_size': 50,
'output_channels_h2': 4,
'output_channels_h3': 4,
'max_epochs': 1,
'save_path': save_path}
yaml = yaml % (hyper_params)
train = yaml_parse.load(yaml)
train.main_loop()
try:
os.remove("{}/convolutional_network_best.pkl".format(save_path))
except __HOLE__:
pass
|
OSError
|
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/scripts/tutorials/convolutional_network/tests/test_convnet.py/test_convolutional_network
|
4,781 |
def _call_agent(session, instance, vm_ref, method, addl_args=None,
timeout=None, success_codes=None):
"""Abstracts out the interaction with the agent xenapi plugin."""
if addl_args is None:
addl_args = {}
if timeout is None:
timeout = CONF.xenserver.agent_timeout
if success_codes is None:
success_codes = ['0']
# always fetch domid because VM may have rebooted
dom_id = session.VM.get_domid(vm_ref)
args = {
'id': str(uuid.uuid4()),
'dom_id': str(dom_id),
'timeout': str(timeout),
}
args.update(addl_args)
try:
ret = session.call_plugin('agent', method, args)
except session.XenAPI.Failure as e:
err_msg = e.details[-1].splitlines()[-1]
if 'TIMEOUT:' in err_msg:
LOG.error(_LE('TIMEOUT: The call to %(method)s timed out. '
'args=%(args)r'),
{'method': method, 'args': args}, instance=instance)
raise exception.AgentTimeout(method=method)
elif 'REBOOT:' in err_msg:
LOG.debug('REBOOT: The call to %(method)s detected a reboot. '
'args=%(args)r',
{'method': method, 'args': args}, instance=instance)
_wait_for_new_dom_id(session, vm_ref, dom_id, method)
return _call_agent(session, instance, vm_ref, method,
addl_args, timeout, success_codes)
elif 'NOT IMPLEMENTED:' in err_msg:
LOG.error(_LE('NOT IMPLEMENTED: The call to %(method)s is not '
'supported by the agent. args=%(args)r'),
{'method': method, 'args': args}, instance=instance)
raise exception.AgentNotImplemented(method=method)
else:
LOG.error(_LE('The call to %(method)s returned an error: %(e)s. '
'args=%(args)r'),
{'method': method, 'args': args, 'e': e},
instance=instance)
raise exception.AgentError(method=method)
if not isinstance(ret, dict):
try:
ret = jsonutils.loads(ret)
except __HOLE__:
LOG.error(_LE('The agent call to %(method)s returned an invalid '
'response: %(ret)r. args=%(args)r'),
{'method': method, 'ret': ret, 'args': args},
instance=instance)
raise exception.AgentError(method=method)
if ret['returncode'] not in success_codes:
LOG.error(_LE('The agent call to %(method)s returned '
'an error: %(ret)r. args=%(args)r'),
{'method': method, 'ret': ret, 'args': args},
instance=instance)
raise exception.AgentError(method=method)
LOG.debug('The agent call to %(method)s was successful: '
'%(ret)r. args=%(args)r',
{'method': method, 'ret': ret, 'args': args},
instance=instance)
# Some old versions of the Windows agent have a trailing \\r\\n
    # (i.e. CRLF-escaped) for some reason. Strip that off.
return ret['message'].replace('\\r\\n', '')
|
TypeError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/agent.py/_call_agent
|
4,782 |
def should_use_agent(instance):
sys_meta = utils.instance_sys_meta(instance)
if USE_AGENT_SM_KEY not in sys_meta:
return CONF.xenserver.use_agent_default
else:
use_agent_raw = sys_meta[USE_AGENT_SM_KEY]
try:
return strutils.bool_from_string(use_agent_raw, strict=True)
except __HOLE__:
LOG.warning(_LW("Invalid 'agent_present' value. "
"Falling back to the default."),
instance=instance)
return CONF.xenserver.use_agent_default
|
ValueError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/xenapi/agent.py/should_use_agent
|
4,783 |
def update_proxy_settings(self, *args, **kwargs):
"""
When the network configuration changes, this updates the SOCKS proxy
settings based the current IP address(es)
"""
# Open a SystemConfiguration preferences session:
sc_prefs = SCPreferences()
# We want to enable the server when our hostname is not on the corporate network:
# BUG: This does not handle multi-homed systems well:
current_address = socket.gethostbyname(socket.getfqdn())
new_state = not current_address.startswith('10.0.1.')
logging.info(
"Current address is now %s: SOCKS proxy will be %s" % (
current_address,
"Enabled" if new_state else "Disabled"
)
)
try:
sc_prefs.set_proxy(
enable=new_state,
protocol='SOCKS',
server=self.socks_server,
port=self.socks_port
)
sc_prefs.save()
logging.info("Successfully updated SOCKS proxy setting")
except __HOLE__, e:
logging.error("Unable to set SOCKS proxy setting: %s" % e.message)
|
RuntimeError
|
dataset/ETHPy150Open MacSysadmin/pymacadmin/examples/crankd/socks-proxy/ProxyManager.py/ProxyManager.update_proxy_settings
|
4,784 |
@db.listens_for(db.Session, "after_flush")
def store_purge_keys(config, session, flush_context):
cache_keys = config.registry["cache_keys"]
# We'll (ab)use the session.info dictionary to store a list of pending
# purges to the session.
purges = session.info.setdefault("warehouse.cache.origin.purges", set())
# Go through each new, changed, and deleted object and attempt to store
# a cache key that we'll want to purge when the session has been committed.
for obj in (session.new | session.dirty | session.deleted):
try:
key_maker = cache_keys[obj.__class__]
except __HOLE__:
continue
purges.update(key_maker(obj).purge)
|
KeyError
|
dataset/ETHPy150Open pypa/warehouse/warehouse/cache/origin/__init__.py/store_purge_keys
|
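store_purge_keys skips any flushed object whose class has no registered key maker by catching KeyError and continuing. The same dict-dispatch-with-skip pattern in isolation; the registry contents below are made up:

cache_keys = {int: lambda o: {"int:%d" % o}}
purges = set()
for obj in [1, "no key maker registered", 2]:
    try:
        key_maker = cache_keys[type(obj)]
    except KeyError:
        continue          # unregistered classes are simply ignored
    purges.update(key_maker(obj))
assert purges == {"int:1", "int:2"}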
4,785 |
@db.listens_for(db.Session, "after_commit")
def execute_purge(config, session):
purges = session.info.pop("warehouse.cache.origin.purges", set())
try:
cacher_factory = config.find_service_factory(IOriginCache)
except __HOLE__:
return
cacher = cacher_factory(None, config)
cacher.purge(purges)
|
ValueError
|
dataset/ETHPy150Open pypa/warehouse/warehouse/cache/origin/__init__.py/execute_purge
|
4,786 |
def origin_cache(seconds, keys=None, stale_while_revalidate=None,
stale_if_error=None):
if keys is None:
keys = []
def inner(view):
@functools.wraps(view)
def wrapped(context, request):
cache_keys = request.registry["cache_keys"]
context_keys = []
if context.__class__ in cache_keys:
context_keys = cache_keys[context.__class__](context).cache
try:
cacher = request.find_service(IOriginCache)
except __HOLE__:
pass
else:
request.add_response_callback(
functools.partial(
cacher.cache,
sorted(context_keys + keys),
seconds=seconds,
stale_while_revalidate=stale_while_revalidate,
stale_if_error=stale_if_error,
)
)
return view(context, request)
return wrapped
return inner
|
ValueError
|
dataset/ETHPy150Open pypa/warehouse/warehouse/cache/origin/__init__.py/origin_cache
|
4,787 |
def __iter__(self):
while True:
if self._current_iter:
for o in self._current_iter:
yield o
try:
k_range = self._key_ranges.next()
self._current_iter = self._key_range_iter_cls(k_range,
self._query_spec)
except __HOLE__:
self._current_iter = None
break
|
StopIteration
|
dataset/ETHPy150Open GoogleCloudPlatform/appengine-mapreduce/python/src/mapreduce/datastore_range_iterators.py/_KeyRangesIterator.__iter__
|
4,788 |
@defer.inlineCallbacks
def call_py(self, jsondata):
"""
Calls jsonrpc service's method and returns its return value in python
object format or None if there is none.
        This method is the same as call() except that the return value is a
        Python object instead of a JSON string. It is mainly useful for
        debugging purposes.
"""
try:
try:
rdata = json.loads(jsondata)
except __HOLE__:
raise ParseError
except ParseError, e:
defer.returnValue(self._get_err(e))
return
# set some default values for error handling
request = self._get_default_vals()
try:
if isinstance(rdata, dict) and rdata:
# It's a single request.
self._fill_request(request, rdata)
respond = yield self._handle_request(request)
# Don't respond to notifications
if respond is None:
defer.returnValue(None)
else:
defer.returnValue(respond)
return
elif isinstance(rdata, list) and rdata:
# It's a batch.
requests = []
responds = []
for rdata_ in rdata:
# set some default values for error handling
request_ = self._get_default_vals()
try:
self._fill_request(request_, rdata_)
except InvalidRequestError, e:
err = self._get_err(e, request_['id'])
if err:
responds.append(err)
continue
except JSONRPCError, e:
err = self._get_err(e, request_['id'])
if err:
responds.append(err)
continue
requests.append(request_)
for request_ in requests:
try:
# TODO: We should use a deferred list so requests
# are processed in parallel
respond = yield self._handle_request(request_)
except JSONRPCError, e:
respond = self._get_err(e,
request_['id'],
request_['jsonrpc'])
# Don't respond to notifications
if respond is not None:
responds.append(respond)
if responds:
defer.returnValue(responds)
return
# Nothing to respond.
defer.returnValue(None)
return
else:
# empty dict, list or wrong type
raise InvalidRequestError
except InvalidRequestError, e:
defer.returnValue(self._get_err(e, request['id']))
except JSONRPCError, e:
defer.returnValue(self._get_err(e,
request['id'],
request['jsonrpc']))
|
ValueError
|
dataset/ETHPy150Open flowroute/txjason/txjason/service.py/JSONRPCService.call_py
|
4,789 |
def __getattribute__(self, attrname):
# get attribute of the SoftLink itself
if (attrname in SoftLink._link_attrnames
or attrname[:3] in SoftLink._link_attrprefixes):
return object.__getattribute__(self, attrname)
# get attribute of the target node
elif not self._v_isopen:
raise tables.ClosedNodeError('the node object is closed')
elif self.is_dangling():
return None
else:
target_node = self.dereference()
try:
# __getattribute__() fails to get children of Groups
return target_node.__getattribute__(attrname)
except __HOLE__:
# some node classes (e.g. Array) don't implement __getattr__()
return target_node.__getattr__(attrname)
|
AttributeError
|
dataset/ETHPy150Open PyTables/PyTables/tables/link.py/SoftLink.__getattribute__
|
4,790 |
def _run_worker():
LOG.info('(PID=%s) Results tracker started.', os.getpid())
tracker = resultstracker.get_tracker()
try:
tracker.start(wait=True)
except (KeyboardInterrupt, __HOLE__):
LOG.info('(PID=%s) Results tracker stopped.', os.getpid())
tracker.shutdown()
except:
return 1
return 0
|
SystemExit
|
dataset/ETHPy150Open StackStorm/st2/st2actions/st2actions/cmd/st2resultstracker.py/_run_worker
|
4,791 |
def main():
try:
_setup()
return _run_worker()
except __HOLE__ as exit_code:
sys.exit(exit_code)
except:
LOG.exception('(PID=%s) Results tracker quit due to exception.', os.getpid())
return 1
finally:
_teardown()
|
SystemExit
|
dataset/ETHPy150Open StackStorm/st2/st2actions/st2actions/cmd/st2resultstracker.py/main
|
4,792 |
def backup_config_file(env, suffix):
try:
backup, f = create_unique_file(env.config.filename + suffix)
f.close()
shutil.copyfile(env.config.filename, backup)
env.log.info("Saved backup of configuration file in %s", backup)
except __HOLE__ as e:
env.log.warn("Couldn't save backup of configuration file (%s)",
exception_to_unicode(e))
|
IOError
|
dataset/ETHPy150Open edgewall/trac/trac/upgrades/__init__.py/backup_config_file
|
4,793 |
def __pysal_choro(values, scheme, k=5):
""" Wrapper for choropleth schemes from PySAL for use with plot_dataframe
Parameters
----------
values
Series to be plotted
scheme
        pysal.esda.mapclassify classification scheme
['Equal_interval'|'Quantiles'|'Fisher_Jenks']
k
number of classes (2 <= k <=9)
Returns
-------
binning
        Binning object that holds the Series with values replaced by
        class identifiers, and the bins.
"""
try:
from pysal.esda.mapclassify import Quantiles, Equal_Interval, Fisher_Jenks
schemes = {}
schemes['equal_interval'] = Equal_Interval
schemes['quantiles'] = Quantiles
schemes['fisher_jenks'] = Fisher_Jenks
s0 = scheme
scheme = scheme.lower()
if scheme not in schemes:
scheme = 'quantiles'
warnings.warn('Unrecognized scheme "{0}". Using "Quantiles" '
'instead'.format(s0), UserWarning, stacklevel=3)
if k < 2 or k > 9:
warnings.warn('Invalid k: {0} (2 <= k <= 9), setting k=5 '
'(default)'.format(k), UserWarning, stacklevel=3)
k = 5
binning = schemes[scheme](values, k)
return binning
except __HOLE__:
raise ImportError("PySAL is required to use the 'scheme' keyword")
|
ImportError
|
dataset/ETHPy150Open geopandas/geopandas/geopandas/plotting.py/__pysal_choro
|
4,794 |
def authenticateUserAPOP(self, user, digest):
# Override the default lookup scheme to allow virtual domains
user, domain = self.lookupDomain(user)
try:
portal = self.service.lookupPortal(domain)
except __HOLE__:
return defer.fail(cred.error.UnauthorizedLogin())
else:
return portal.login(
pop3.APOPCredentials(self.magic, user, digest),
None,
pop3.IMailbox
)
|
KeyError
|
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/mail/protocols.py/VirtualPOP3.authenticateUserAPOP
|
4,795 |
def authenticateUserPASS(self, user, password):
user, domain = self.lookupDomain(user)
try:
portal = self.service.lookupPortal(domain)
except __HOLE__:
return defer.fail(cred.error.UnauthorizedLogin())
else:
return portal.login(
cred.credentials.UsernamePassword(user, password),
None,
pop3.IMailbox
)
|
KeyError
|
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/mail/protocols.py/VirtualPOP3.authenticateUserPASS
|
4,796 |
def lookupDomain(self, user):
try:
user, domain = user.split(self.domainSpecifier, 1)
except __HOLE__:
domain = ''
if domain not in self.service.domains:
raise pop3.POP3Error("no such domain %s" % domain)
return user, domain
|
ValueError
|
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/mail/protocols.py/VirtualPOP3.lookupDomain
|
4,797 |
def main():
try:
#Read config file
settings=Settings()
#Set up logger
logger=Logger(settings)
#Create scanner
server=Server(settings,logger)
#Begin scanning
server.start()
except __HOLE__:
server.stop()
|
KeyboardInterrupt
|
dataset/ETHPy150Open SteveAbb/Vestigo/Vestigo Base/vestigo_base.py/main
|
4,798 |
def find_applications(self):
self.applications = {}
for application in all_apps():
if self.local_names and not application in self.local_names:
continue
try:
search_module_name = '%s.search' % application.application_name
_temp = __import__(search_module_name,
globals(), locals(),
['ApplicationSearch'], -1)
if not hasattr(_temp, 'ApplicationSearch'):
raise ImportError
except __HOLE__:
continue
else:
search_provider = _temp.ApplicationSearch(application)
self.applications[application.local_name] = search_provider
|
ImportError
|
dataset/ETHPy150Open mollyproject/mollyproject/molly/apps/search/providers/application_search.py/ApplicationSearchProvider.find_applications
|
4,799 |
def get_queryset(self):
request = self.request
# Allow pages to be filtered to a specific type
try:
models = page_models_from_string(request.GET.get('type', 'wagtailcore.Page'))
except (LookupError, __HOLE__):
raise BadRequestError("type doesn't exist")
if not models:
models = [Page]
if len(models) == 1:
queryset = models[0].objects.all()
else:
queryset = Page.objects.all()
# Filter pages by specified models
queryset = filter_page_type(queryset, models)
# Get live pages that are not in a private section
queryset = queryset.public().live()
# Filter by site
queryset = queryset.descendant_of(request.site.root_page, inclusive=True)
return queryset
|
ValueError
|
dataset/ETHPy150Open torchbox/wagtail/wagtail/api/v2/endpoints.py/PagesAPIEndpoint.get_queryset
|