Unnamed: 0 (int64, 0-10k) | function (string, length 79-138k) | label (string, 20 classes) | info (string, length 42-261)
---|---|---|---|
700 | def read_request_line(self):
# HTTP/1.1 connections are persistent by default. If a client
# requests a page, then idles (leaves the connection open),
# then rfile.readline() will raise socket.error("timed out").
# Note that it does this based on the value given to settimeout(),
# and doesn't need the client to request or acknowledge the close
# (although your TCP stack might suffer for it: cf Apache's history
# with FIN_WAIT_2).
request_line = self.rfile.readline()
# Set started_request to True so communicate() knows to send 408
# from here on out.
self.started_request = True
if not request_line:
return False
if request_line == CRLF:
# RFC 2616 sec 4.1: "...if the server is reading the protocol
# stream at the beginning of a message and receives a CRLF
# first, it should ignore the CRLF."
# But only ignore one leading line! else we enable a DoS.
request_line = self.rfile.readline()
if not request_line:
return False
if not request_line.endswith(CRLF):
self.simple_response("400 Bad Request", "HTTP requires CRLF terminators")
return False
try:
method, uri, req_protocol = request_line.strip().split(SPACE, 2)
# The [x:y] slicing is necessary for byte strings to avoid getting ord's
rp = int(req_protocol[5:6]), int(req_protocol[7:8])
except ValueError:
self.simple_response("400 Bad Request", "Malformed Request-Line")
return False
self.uri = uri
self.method = method
# uri may be an abs_path (including "http://host.domain.tld");
scheme, authority, path = self.parse_request_uri(uri)
if NUMBER_SIGN in path:
self.simple_response("400 Bad Request",
"Illegal #fragment in Request-URI.")
return False
if scheme:
self.scheme = scheme
qs = EMPTY
if QUESTION_MARK in path:
path, qs = path.split(QUESTION_MARK, 1)
# Unquote the path+params (e.g. "/this%20path" -> "/this path").
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
#
# But note that "...a URI must be separated into its components
# before the escaped characters within those components can be
# safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
# Therefore, "/this%2Fpath" becomes "/this%2Fpath", not "/this/path".
try:
atoms = [self.unquote_bytes(x) for x in quoted_slash.split(path)]
except __HOLE__:
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
path = b"%2F".join(atoms)
self.path = path
# Note that, like wsgiref and most other HTTP servers,
# we "% HEX HEX"-unquote the path but not the query string.
self.qs = qs
# Compare request and server HTTP protocol versions, in case our
# server does not support the requested protocol. Limit our output
# to min(req, server). We want the following output:
# request server actual written supported response
# protocol protocol response protocol feature set
# a 1.0 1.0 1.0 1.0
# b 1.0 1.1 1.1 1.0
# c 1.1 1.0 1.0 1.0
# d 1.1 1.1 1.1 1.1
# Notice that, in (b), the response will be "HTTP/1.1" even though
# the client only understands 1.0. RFC 2616 10.5.6 says we should
# only return 505 if the _major_ version is different.
# The [x:y] slicing is necessary for byte strings to avoid getting ord's
sp = int(self.server.protocol[5:6]), int(self.server.protocol[7:8])
if sp[0] != rp[0]:
self.simple_response("505 HTTP Version Not Supported")
return False
self.request_protocol = req_protocol
self.response_protocol = "HTTP/%s.%s" % min(rp, sp)
return True | ValueError | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/HTTPRequest.read_request_line |
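A note on the `[x:y]` slicing comment in the row above: in Python 3, indexing a `bytes` object yields an integer while slicing yields a one-byte `bytes`, and a malformed version string makes `int()` raise the `ValueError` this row is labelled with. A small standalone sketch (plain Python, not part of the original module):

```python
# Why the row above slices req_protocol[5:6] instead of indexing req_protocol[5].
req_protocol = b"HTTP/1.1"

print(req_protocol[5])            # 49 -- indexing bytes gives the byte's ordinal, not "1"
print(int(req_protocol[5:6]))     # 1  -- slicing keeps a bytes object that int() parses

rp = int(req_protocol[5:6]), int(req_protocol[7:8])
print(rp)                         # (1, 1)

# A garbled request line would make one of the int() calls fail, e.g.:
# int(b"HTTP/x.y"[5:6])           # would raise ValueError: invalid literal for int()
```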
701 | def read_request_headers(self):
"""Read self.rfile into self.inheaders. Return success."""
# then all the http headers
try:
read_headers(self.rfile, self.inheaders)
except __HOLE__:
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
mrbs = self.server.max_request_body_size
if mrbs and int(self.inheaders.get(b"Content-Length", 0)) > mrbs:
self.simple_response("413 Request Entity Too Large",
"The entity sent with the request exceeds the maximum "
"allowed bytes.")
return False
# Persistent connection support
if self.response_protocol == "HTTP/1.1":
# Both server and client are HTTP/1.1
if self.inheaders.get(b"Connection", b"") == b"close":
self.close_connection = True
else:
# Either the server or client (or both) are HTTP/1.0
if self.inheaders.get(b"Connection", b"") != b"Keep-Alive":
self.close_connection = True
# Transfer-Encoding support
te = None
if self.response_protocol == "HTTP/1.1":
te = self.inheaders.get(b"Transfer-Encoding")
if te:
te = [x.strip().lower() for x in te.split(b",") if x.strip()]
self.chunked_read = False
if te:
for enc in te:
if enc == b"chunked":
self.chunked_read = True
else:
# Note that, even if we see "chunked", we must reject
# if there is an extension we don't recognize.
self.simple_response("501 Unimplemented")
self.close_connection = True
return False
# From PEP 333:
# "Servers and gateways that implement HTTP 1.1 must provide
# transparent support for HTTP 1.1's "expect/continue" mechanism.
# This may be done in any of several ways:
# 1. Respond to requests containing an Expect: 100-continue request
# with an immediate "100 Continue" response, and proceed normally.
# 2. Proceed with the request normally, but provide the application
# with a wsgi.input stream that will send the "100 Continue"
# response if/when the application first attempts to read from
# the input stream. The read request must then remain blocked
# until the client responds.
# 3. Wait until the client decides that the server does not support
# expect/continue, and sends the request body on its own.
# (This is suboptimal, and is not recommended.)
#
# We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
# but it seems like it would be a big slowdown for such a rare case.
if self.inheaders.get(b"Expect", b"") == b"100-continue":
# Don't use simple_response here, because it emits headers
# we don't want. See https://bitbucket.org/cherrypy/cherrypy/issue/951
msg = self.server.protocol.encode('ascii') + b" 100 Continue\r\n\r\n"
try:
self.conn.wfile.write(msg)
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
return True | ValueError | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/HTTPRequest.read_request_headers |
702 | def unquote_bytes(self, path):
    """takes quoted string and unquotes % encoded values"""
    res = path.split(b'%')
    for i in range(1, len(res)):
        item = res[i]
        try:
            res[i] = bytes([int(item[:2], 16)]) + item[2:]
        except __HOLE__:
            raise
    return b''.join(res) | ValueError | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/HTTPRequest.unquote_bytes
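As a rough sketch of why the hole above is labelled `ValueError`: `int(item[:2], 16)` fails whenever the two characters after a `%` are not valid hex digits. The standalone function below mirrors that loop (assumption: a plain function instead of the original `HTTPRequest` method):

```python
def unquote_bytes(path):
    """Decode %XX escapes in a byte string, mirroring the dataset row above."""
    res = path.split(b'%')
    for i in range(1, len(res)):
        item = res[i]
        # int(..., 16) raises ValueError for malformed escapes such as b'%zz...'.
        res[i] = bytes([int(item[:2], 16)]) + item[2:]
    return b''.join(res)

print(unquote_bytes(b'/this%20path'))    # b'/this path'
# unquote_bytes(b'/bad%zzpath')          # would raise ValueError
```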
703 | def communicate(self):
"""Read each request and respond appropriately."""
request_seen = False
try:
while True:
# (re)set req to None so that if something goes wrong in
# the RequestHandlerClass constructor, the error doesn't
# get written to the previous request.
req = None
req = self.RequestHandlerClass(self.server, self)
# This order of operations should guarantee correct pipelining.
req.parse_request()
if self.server.stats['Enabled']:
self.requests_seen += 1
if not req.ready:
# Something went wrong in the parsing (and the server has
# probably already made a simple_response). Return and
# let the conn close.
return
request_seen = True
req.respond()
if req.close_connection:
return
except socket.error:
e = sys.exc_info()[1]
errnum = e.args[0]
# sadly SSL sockets return a different (longer) time out string
if errnum == 'timed out' or errnum == 'The read operation timed out':
# Don't error if we're between requests; only error
# if 1) no request has been started at all, or 2) we're
# in the middle of a request.
# See https://bitbucket.org/cherrypy/cherrypy/issue/853
if (not request_seen) or (req and req.started_request):
# Don't bother writing the 408 if the response
# has already started being written.
if req and not req.sent_headers:
try:
req.simple_response("408 Request Timeout")
except FatalSSLAlert:
# Close the connection.
return
elif errnum not in socket_errors_to_ignore:
self.server.error_log("socket.error %s" % repr(errnum),
level=logging.WARNING, traceback=True)
if req and not req.sent_headers:
try:
req.simple_response("500 Internal Server Error")
except FatalSSLAlert:
# Close the connection.
return
return
except (KeyboardInterrupt, __HOLE__):
raise
except FatalSSLAlert:
# Close the connection.
return
except NoSSLError:
if req and not req.sent_headers:
# Unwrap our wfile
self.wfile = CP_makefile(self.socket._sock, "wb", self.wbufsize)
req.simple_response("400 Bad Request",
"The client sent a plain HTTP request, but "
"this server only speaks HTTPS on this port.")
self.linger = True
except Exception:
e = sys.exc_info()[1]
self.server.error_log(repr(e), level=logging.ERROR, traceback=True)
if req and not req.sent_headers:
try:
req.simple_response("500 Internal Server Error")
except FatalSSLAlert:
# Close the connection.
return | SystemExit | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/HTTPConnection.communicate |
704 | def run(self):
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
self.ready = True
while True:
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
self.conn = conn
if self.server.stats['Enabled']:
self.start_time = time.time()
try:
conn.communicate()
finally:
conn.close()
if self.server.stats['Enabled']:
self.requests_seen += self.conn.requests_seen
self.bytes_read += self.conn.rfile.bytes_read
self.bytes_written += self.conn.wfile.bytes_written
self.work_time += time.time() - self.start_time
self.start_time = None
self.conn = None
except (__HOLE__, SystemExit):
exc = sys.exc_info()[1]
self.server.interrupt = exc | KeyboardInterrupt | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/WorkerThread.run |
705 | def stop(self, timeout=5):
# OmniMarkupPreviewer: Force shutdown without waiting too much
while self._get_qsize() > 0:
conn = self.get()
if conn is not _SHUTDOWNREQUEST:
conn.close()
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout and timeout >= 0:
endtime = time.time() + timeout
while self._threads:
worker = self._threads.pop()
if worker is not current and worker.isAlive():
try:
if timeout is None or timeout < 0:
worker.join()
else:
remaining_time = endtime - time.time()
if remaining_time > 0:
worker.join(remaining_time)
if worker.isAlive():
# We exhausted the timeout.
# Forcibly shut down the socket.
c = worker.conn
if c and not c.rfile.closed:
try:
c.socket.shutdown(socket.SHUT_RD)
except TypeError:
# pyOpenSSL sockets don't take an arg
c.socket.shutdown()
worker.join()
except (AssertionError,
# Ignore repeated Ctrl-C.
# See https://bitbucket.org/cherrypy/cherrypy/issue/691.
__HOLE__):
pass | KeyboardInterrupt | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/ThreadPool.stop |
706 | def start(self):
"""Run the server forever."""
# We don't have to trap KeyboardInterrupt or SystemExit here,
# because cherrpy.server already does so, calling self.stop() for us.
# If you're using this server with another framework, you should
# trap those exceptions in whatever code block calls start().
self._interrupt = None
if self.software is None:
self.software = "%s Server" % self.version
# Select the appropriate socket
if isinstance(self.bind_addr, basestring):
# AF_UNIX socket
# So we can reuse the socket...
try: os.unlink(self.bind_addr)
except: pass
# So everyone can access the socket...
try: os.chmod(self.bind_addr, 511) # 0777
except: pass
info = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
else:
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6 addresses)
host, port = self.bind_addr
try:
info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
except socket.gaierror:
if ':' in self.bind_addr[0]:
info = [(socket.AF_INET6, socket.SOCK_STREAM,
0, "", self.bind_addr + (0, 0))]
else:
info = [(socket.AF_INET, socket.SOCK_STREAM,
0, "", self.bind_addr)]
self.socket = None
errors = ["No socket could be created:"]
for res in info:
af, socktype, proto, canonname, sa = res
try:
self.bind(af, socktype, proto)
except socket.error as serr:
errors.append("(%s: %s)" % (sa, serr))
if self.socket:
self.socket.close()
self.socket = None
continue
break
if not self.socket:
raise socket.error("\n".join(errors))
# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)
# Create worker threads
self.requests.start()
self.ready = True
self._start_time = time.time()
while self.ready:
try:
self.tick()
except (KeyboardInterrupt, __HOLE__):
raise
except:
self.error_log("Error in HTTPServer.tick", level=logging.ERROR,
traceback=True)
if self.interrupt:
while self.interrupt is True:
# Wait for self.stop() to complete. See _set_interrupt.
time.sleep(0.1)
if self.interrupt:
raise self.interrupt | SystemExit | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/HTTPServer.start |
707 | def bind(self, family, type, proto=0):
"""Create (or recreate) the actual socket object."""
self.socket = socket.socket(family, type, proto)
prevent_socket_inheritance(self.socket)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.nodelay and not isinstance(self.bind_addr, str):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if self.ssl_adapter is not None:
self.socket = self.ssl_adapter.bind(self.socket)
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See https://bitbucket.org/cherrypy/cherrypy/issue/871.
if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6
and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')):
try:
self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
except (__HOLE__, socket.error):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
self.socket.bind(self.bind_addr) | AttributeError | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/HTTPServer.bind |
708 | def get_ssl_adapter_class(name='builtin'):
    """Return an SSL adapter class for the given name."""
    adapter = ssl_adapters[name.lower()]
    if isinstance(adapter, basestring):
        last_dot = adapter.rfind(".")
        attr_name = adapter[last_dot + 1:]
        mod_path = adapter[:last_dot]
        try:
            mod = sys.modules[mod_path]
            if mod is None:
                raise KeyError()
        except __HOLE__:
            # The last [''] is important.
            mod = __import__(mod_path, globals(), locals(), [''])
        # Let an AttributeError propagate outward.
        try:
            adapter = getattr(mod, attr_name)
        except AttributeError:
            raise AttributeError("'%s' object has no attribute '%s'"
                                 % (mod_path, attr_name))
    return adapter
# -------------------------------- WSGI Stuff -------------------------------- # | KeyError | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/get_ssl_adapter_class
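The lazy-import pattern in the row above (look in `sys.modules` first, fall back to `__import__`, then `getattr` the attribute) is generic. A minimal sketch of the same idea; the helper name and the dotted path used here are illustrative assumptions, not part of the original module:

```python
import sys

def load_by_dotted_path(dotted):
    """Resolve 'package.module.Attr' to the named attribute, importing if needed."""
    mod_path, _, attr_name = dotted.rpartition('.')
    mod = sys.modules.get(mod_path)
    if mod is None:
        # A non-empty fromlist makes __import__ return the leaf module
        # instead of the top-level package.
        mod = __import__(mod_path, globals(), locals(), [attr_name])
    return getattr(mod, attr_name)

# Purely illustrative stdlib example:
OrderedDict = load_by_dotted_path('collections.OrderedDict')
print(OrderedDict([('a', 1)]))
```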
709 | def get_environ(self):
    """Return a new environ dict targeting the given wsgi.version"""
    req = self.req
    env_10 = WSGIGateway_10.get_environ(self)
    env = env_10.copy()
    env['wsgi.version'] = ('u', 0)
    # Request-URI
    env.setdefault('wsgi.url_encoding', 'utf-8')
    try:
        # SCRIPT_NAME is the empty string, who cares what encoding it is?
        env["PATH_INFO"] = req.path.decode(env['wsgi.url_encoding'])
        env["QUERY_STRING"] = req.qs.decode(env['wsgi.url_encoding'])
    except __HOLE__:
        # Fall back to latin 1 so apps can transcode if needed.
        env['wsgi.url_encoding'] = 'ISO-8859-1'
        env["PATH_INFO"] = env_10["PATH_INFO"]
        env["QUERY_STRING"] = env_10["QUERY_STRING"]
    return env | UnicodeDecodeError | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/WSGIGateway_u0.get_environ
710 | def __init__(self, apps):
    try:
        apps = list(apps.items())
    except __HOLE__:
        pass
    # Sort the apps by len(path), descending
    apps.sort()
    apps.reverse()
    # The path_prefix strings must start, but not end, with a slash.
    # Use "" instead of "/".
    self.apps = [(p.rstrip("/"), a) for p, a in apps] | AttributeError | dataset/ETHPy150Open timonwong/OmniMarkupPreviewer/OmniMarkupLib/libs/cherrypy/wsgiserver/wsgiserver3.py/WSGIPathInfoDispatcher.__init__
711 | def initialize(self, import_func=__import__):
"""Attempt to import the config module, if not already imported.
This function always sets self._module to a value unequal
to None: either the imported module (if imported successfully), or
a dummy object() instance (if an ImportError was raised). Other
exceptions are *not* caught.
When a dummy instance is used, it is also put in sys.modules.
This allows us to detect when sys.modules was changed (as
dev_appserver.py does when it notices source code changes) and
re-try the __import__ in that case, while skipping it (for speed)
if nothing has changed.
Args:
import_func: Used for dependency injection.
"""
self._lock.acquire()
try:
if (self._module is not None and
self._module is sys.modules.get(self._modname)):
return
try:
import_func(self._modname)
except __HOLE__, err:
if str(err) != 'No module named %s' % self._modname:
raise
self._module = object()
sys.modules[self._modname] = self._module
else:
self._module = sys.modules[self._modname]
finally:
self._lock.release() | ImportError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/lib_config.py/LibConfigRegistry.initialize |
712 | def _clear_cache(self):
    """Clear the cached values."""
    self._lock.acquire()
    try:
        self._initialized = False
        for key in self._defaults:
            self._overrides.pop(key, None)
            try:
                delattr(self, key)
            except __HOLE__:
                pass
    finally:
        self._lock.release() | AttributeError | dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/lib_config.py/ConfigHandle._clear_cache
713 | def activate_stubs(self, connection):
    try:
        from google.appengine.tools import dev_appserver_main
        self.setup_local_stubs(connection)
    except __HOLE__:
        self.activate_test_stubs(connection) | ImportError | dataset/ETHPy150Open django-nonrel/djangoappengine/djangoappengine/db/stubs.py/StubManager.activate_stubs
714 | def setup_local_stubs(self, connection):
if self.active_stubs == 'local':
return
from .base import get_datastore_paths
from google.appengine.tools import dev_appserver_main
args = dev_appserver_main.DEFAULT_ARGS.copy()
args.update(get_datastore_paths(connection.settings_dict))
args.update(connection.settings_dict.get('DEV_APPSERVER_OPTIONS', {}))
log_level = logging.getLogger().getEffectiveLevel()
logging.getLogger().setLevel(logging.WARNING)
try:
from google.appengine.tools import dev_appserver
except __HOLE__:
from google.appengine.tools import old_dev_appserver as dev_appserver
dev_appserver.SetupStubs('dev~' + appid, **args)
logging.getLogger().setLevel(log_level)
self.active_stubs = 'local' | ImportError | dataset/ETHPy150Open django-nonrel/djangoappengine/djangoappengine/db/stubs.py/StubManager.setup_local_stubs |
715 | def setup_remote_stubs(self, connection):
if self.active_stubs == 'remote':
return
if not connection.remote_api_path:
from djangoappengine.utils import appconfig
from google.appengine.api import appinfo
default_module = next(m for m in appconfig.modules if m.module_name == appinfo.DEFAULT_MODULE)
for handler in default_module.handlers:
if handler.script in REMOTE_API_SCRIPTS:
connection.remote_api_path = handler.url.split('(', 1)[0]
break
server = '%s.%s' % (connection.remote_app_id, connection.domain)
remote_url = 'https://%s%s' % (server, connection.remote_api_path)
logging.info("Setting up remote_api for '%s' at %s." %
(connection.remote_app_id, remote_url))
if not have_appserver:
logging.info(
"Connecting to remote_api handler.\n\n"
"IMPORTANT: Check your login method settings in the "
"App Engine Dashboard if you have problems logging in. "
"Login is only supported for Google Accounts.")
from google.appengine.ext.remote_api import remote_api_stub
remote_api_stub.ConfigureRemoteApi(None,
connection.remote_api_path, auth_func, servername=server,
secure=connection.secure_remote_api,
rpc_server_factory=rpc_server_factory)
retry_delay = 1
while retry_delay <= 16:
try:
remote_api_stub.MaybeInvokeAuthentication()
except __HOLE__, e:
if not have_appserver:
logging.info("Retrying in %d seconds..." % retry_delay)
time.sleep(retry_delay)
retry_delay *= 2
else:
break
else:
try:
remote_api_stub.MaybeInvokeAuthentication()
except HTTPError, e:
raise URLError("%s\n"
"Couldn't reach remote_api handler at %s.\n"
"Make sure you've deployed your project and "
"installed a remote_api handler in app.yaml. "
"Note that login is only supported for "
"Google Accounts. Make sure you've configured "
"the correct authentication method in the "
"App Engine Dashboard." % (e, remote_url))
logging.info("Now using the remote datastore for '%s' at %s." %
(connection.remote_app_id, remote_url))
self.active_stubs = 'remote' | HTTPError | dataset/ETHPy150Open django-nonrel/djangoappengine/djangoappengine/db/stubs.py/StubManager.setup_remote_stubs |
716 | def runfastcgi(WSGIHandler, argset=[], **kwargs):
options = FASTCGI_OPTIONS.copy()
options.update(kwargs)
for x in argset:
if "=" in x:
k, v = x.split('=', 1)
else:
k, v = x, True
options[k.lower()] = v
if "help" in options:
return fastcgi_help()
try:
import flup
except __HOLE__, e:
print >> sys.stderr, "ERROR: %s" % e
print >> sys.stderr, " Unable to load the flup package. In order to run django"
print >> sys.stderr, " as a FastCGI application, you will need to get flup from"
print >> sys.stderr, " http://www.saddi.com/software/flup/ If you've already"
print >> sys.stderr, " installed flup, then make sure you have it in your PYTHONPATH."
return False
flup_module = 'server.' + options['protocol']
if options['method'] in ('prefork', 'fork'):
wsgi_opts = {
'maxSpare': int(options["maxspare"]),
'minSpare': int(options["minspare"]),
'maxChildren': int(options["maxchildren"]),
'maxRequests': int(options["maxrequests"]),
}
flup_module += '_fork'
elif options['method'] in ('thread', 'threaded'):
wsgi_opts = {
'maxSpare': int(options["maxspare"]),
'minSpare': int(options["minspare"]),
'maxThreads': int(options["maxchildren"]),
}
else:
return fastcgi_help("ERROR: Implementation must be one of prefork or thread.")
wsgi_opts['debug'] = options['debug'] is not None
try:
from flup.server.fcgi import WSGIServer
except:
print "Can't import flup." + flup_module
return False
if options["host"] and options["port"] and not options["socket"]:
wsgi_opts['bindAddress'] = (options["host"], int(options["port"]))
elif options["socket"] and not options["host"] and not options["port"]:
wsgi_opts['bindAddress'] = options["socket"]
elif not options["socket"] and not options["host"] and not options["port"]:
wsgi_opts['bindAddress'] = None
else:
return fastcgi_help("Invalid combination of host, port, socket.")
if options["daemonize"] is None:
# Default to daemonizing if we're running on a socket/named pipe.
daemonize = (wsgi_opts['bindAddress'] is not None)
else:
if options["daemonize"].lower() in ('true', 'yes', 't'):
daemonize = True
elif options["daemonize"].lower() in ('false', 'no', 'f'):
daemonize = False
else:
return fastcgi_help("ERROR: Invalid option for daemonize parameter.")
daemon_kwargs = {}
if options['outlog']:
daemon_kwargs['out_log'] = options['outlog']
if options['errlog']:
daemon_kwargs['err_log'] = options['errlog']
if options['umask']:
daemon_kwargs['umask'] = int(options['umask'], 8)
if daemonize:
from daemonize import become_daemon
become_daemon(our_home_dir=options["workdir"], **daemon_kwargs)
if options["pidfile"]:
fp = open(options["pidfile"], "w")
fp.write("%d\n" % os.getpid())
fp.close()
WSGIServer(WSGIHandler, **wsgi_opts).run() | ImportError | dataset/ETHPy150Open mimecuvalo/helloworld/tools/fastcgi.py/runfastcgi |
717 | def get_result(self, key, date_range=None, reduce=True, verbose_results=False):
"""
If your Calculator does not have a window set, you must pass a tuple of
date or datetime objects to date_range
"""
if verbose_results:
assert not reduce, "can't have reduce set for verbose results"
if date_range is not None:
start, end = date_range
elif self.window:
now = self.fluff.get_now()
start = now - self.window
end = now
result = {}
for emitter_name in self._fluff_emitters:
shared_key = [self.fluff._doc_type] + key + [self.slug, emitter_name]
emitter = getattr(self, emitter_name)
emitter_type = emitter._fluff_emitter
q_args = {
'reduce': reduce,
}
if emitter_type == 'date':
assert isinstance(date_range, tuple) or self.window, (
"You must either set a window on your Calculator "
"or pass in a date range")
if start > end:
q_args['descending'] = True
q = self.fluff.view(
'fluff/generic',
startkey=shared_key + [json_format_date(start)],
endkey=shared_key + [json_format_date(end)],
**q_args
).all()
elif emitter_type == 'null':
q = self.fluff.view(
'fluff/generic',
key=shared_key + [None],
**q_args
).all()
else:
raise EmitterTypeError(
'emitter type %s not recognized' % emitter_type
)
if reduce:
try:
result[emitter_name] = q[0]['value'][emitter._reduce_type]
except __HOLE__:
result[emitter_name] = 0
else:
# clean ids
def strip(id_string):
prefix = '%s-' % self.fluff.__name__
assert id_string.startswith(prefix)
return id_string[len(prefix):]
for row in q:
row['id'] = strip(row['id'])
if not verbose_results:
# strip down to ids
result[emitter_name] = [row['id'] for row in q]
else:
result[emitter_name] = q
return result | IndexError | dataset/ETHPy150Open dimagi/commcare-hq/corehq/ex-submodules/fluff/calculators.py/Calculator.get_result |
718 | def validate_number(self, number):
    """
    Validates the given 1-based page number.
    """
    try:
        number = int(number)
    except (TypeError, __HOLE__):
        raise PageNotAnInteger('That page number is not an integer')
    if number < 1:
        raise EmptyPage('That page number is less than 1')
    if number > self.num_pages:
        if number == 1 and self.allow_empty_first_page:
            pass
        else:
            raise EmptyPage('That page contains no results')
    return number | ValueError | dataset/ETHPy150Open django/django/django/core/paginator.py/Paginator.validate_number
719 | @cached_property
def count(self):
    """
    Returns the total number of objects, across all pages.
    """
    try:
        return self.object_list.count()
    except (__HOLE__, TypeError):
        # AttributeError if object_list has no count() method.
        # TypeError if object_list.count() requires arguments
        # (i.e. is of type list).
        return len(self.object_list) | AttributeError | dataset/ETHPy150Open django/django/django/core/paginator.py/Paginator.count
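The `count` property above relies on duck typing: prefer the object's own `count()` (a QuerySet answers with a SQL COUNT), and fall back to `len()` for plain sequences. A framework-free sketch of the same fallback, with an assumed helper name:

```python
def object_count(object_list):
    """Return the number of objects, preferring a no-argument count() when usable."""
    try:
        return object_list.count()
    except (AttributeError, TypeError):
        # AttributeError: the object has no count() method at all.
        # TypeError: count() exists but needs arguments, e.g. list.count(value).
        return len(object_list)

print(object_count([1, 2, 3]))   # list.count() wants an argument, so this falls back to len(): 3
```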
720 | def load_tool_info(tool_name):
    """
    Load the tool-info class.
    @param tool_name: The name of the tool-info module.
        Either a full Python package name or a name within the benchexec.tools package.
    @return: A tuple of the full name of the used tool-info module and an instance of the tool-info class.
    """
    tool_module = tool_name if '.' in tool_name else ("benchexec.tools." + tool_name)
    try:
        tool = __import__(tool_module, fromlist=['Tool']).Tool()
    except ImportError as ie:
        sys.exit('Unsupported tool "{0}" specified. ImportError: {1}'.format(tool_name, ie))
    except __HOLE__:
        sys.exit('The module "{0}" does not define the necessary class "Tool", '
                 'it cannot be used as tool info for BenchExec.'.format(tool_module))
    return (tool_module, tool) | AttributeError | dataset/ETHPy150Open sosy-lab/benchexec/benchexec/model.py/load_tool_info
721 | def __init__(self, benchmark_file, config, start_time):
"""
The constructor of Benchmark reads the source files, options, columns and the tool
from the XML in the benchmark_file..
"""
logging.debug("I'm loading the benchmark %s.", benchmark_file)
self.config = config
self.benchmark_file = benchmark_file
self.base_dir = os.path.dirname(self.benchmark_file)
# get benchmark-name
self.name = os.path.basename(benchmark_file)[:-4] # remove ending ".xml"
if config.name:
self.name += "."+config.name
self.start_time = start_time
self.instance = time.strftime("%Y-%m-%d_%H%M", self.start_time)
self.output_base_name = config.output_path + self.name + "." + self.instance
self.log_folder = self.output_base_name + ".logfiles" + os.path.sep
self.log_zip = self.output_base_name + ".logfiles.zip"
# parse XML
try:
rootTag = ElementTree.ElementTree().parse(benchmark_file)
except ElementTree.ParseError as e:
sys.exit('Benchmark file {} is invalid: {}'.format(benchmark_file, e))
if 'benchmark' != rootTag.tag:
sys.exit("Benchmark file {} is invalid: "
"It's root element is not named 'benchmark'.".format(benchmark_file))
# get tool
tool_name = rootTag.get('tool')
if not tool_name:
sys.exit('A tool needs to be specified in the benchmark definition file.')
(self.tool_module, self.tool) = load_tool_info(tool_name)
self.tool_name = self.tool.name()
# will be set from the outside if necessary (may not be the case in SaaS environments)
self.tool_version = None
self.executable = None
logging.debug("The tool to be benchmarked is %s.", self.tool_name)
def parse_memory_limit(value):
try:
value = int(value)
logging.warning(
'Value "%s" for memory limit interpreted as MB for backwards compatibility, '
'specify a unit to make this unambiguous.',
value)
return value * _BYTE_FACTOR * _BYTE_FACTOR
except ValueError:
return util.parse_memory_value(value)
def handle_limit_value(name, key, cmdline_value, parse_fn):
value = rootTag.get(key, None)
# override limit from XML with values from command line
if cmdline_value is not None:
if cmdline_value.strip() == "-1": # infinity
value = None
else:
value = cmdline_value
if value is not None:
try:
self.rlimits[key] = parse_fn(value)
except __HOLE__ as e:
sys.exit('Invalid value for {} limit: {}'.format(name.lower(), e))
if self.rlimits[key] <= 0:
sys.exit('{} limit "{}" is invalid, it needs to be a positive number '
'(or -1 on the command line for disabling it).'.format(name, value))
self.rlimits = {}
keys = list(rootTag.keys())
handle_limit_value("Time", TIMELIMIT, config.timelimit, util.parse_timespan_value)
handle_limit_value("Hard time", HARDTIMELIMIT, config.timelimit, util.parse_timespan_value)
handle_limit_value("Memory", MEMLIMIT, config.memorylimit, parse_memory_limit)
handle_limit_value("Core", CORELIMIT, config.corelimit, int)
if HARDTIMELIMIT in self.rlimits:
hardtimelimit = self.rlimits.pop(HARDTIMELIMIT)
if TIMELIMIT in self.rlimits:
if hardtimelimit < self.rlimits[TIMELIMIT]:
logging.warning(
'Hard timelimit %d is smaller than timelimit %d, ignoring the former.',
hardtimelimit, self.rlimits[TIMELIMIT])
elif hardtimelimit > self.rlimits[TIMELIMIT]:
self.rlimits[SOFTTIMELIMIT] = self.rlimits[TIMELIMIT]
self.rlimits[TIMELIMIT] = hardtimelimit
else:
self.rlimits[TIMELIMIT] = hardtimelimit
# get number of threads, default value is 1
self.num_of_threads = int(rootTag.get("threads")) if ("threads" in keys) else 1
if config.num_of_threads != None:
self.num_of_threads = config.num_of_threads
if self.num_of_threads < 1:
logging.error("At least ONE thread must be given!")
sys.exit()
# get global options and property file
self.options = util.get_list_from_xml(rootTag)
self.propertyfile = util.text_or_none(util.get_single_child_from_xml(rootTag, PROPERTY_TAG))
# get columns
self.columns = Benchmark.load_columns(rootTag.find("columns"))
# get global source files, they are used in all run sets
globalSourcefilesTags = rootTag.findall("tasks") + rootTag.findall("sourcefiles")
# get required files
self._required_files = set()
for required_files_tag in rootTag.findall('requiredfiles'):
required_files = util.expand_filename_pattern(required_files_tag.text, self.base_dir)
if not required_files:
logging.warning('Pattern %s in requiredfiles tag did not match any file.',
required_files_tag.text)
self._required_files = self._required_files.union(required_files)
# get requirements
self.requirements = Requirements(rootTag.findall("require"), self.rlimits, config)
self.result_files_pattern = None
resultFilesTags = rootTag.findall("resultfiles")
if resultFilesTags:
if len(resultFilesTags) > 1:
logging.warning("Benchmark file has multiple <resultfiles> tags, "
"ignoring all but the first.")
self.result_files_pattern = resultFilesTags[0].text
# get benchmarks
self.run_sets = []
for (i, rundefinitionTag) in enumerate(rootTag.findall("rundefinition")):
self.run_sets.append(RunSet(rundefinitionTag, self, i+1, globalSourcefilesTags))
if not self.run_sets:
for (i, rundefinitionTag) in enumerate(rootTag.findall("test")):
self.run_sets.append(RunSet(rundefinitionTag, self, i+1, globalSourcefilesTags))
if self.run_sets:
logging.warning("Benchmark file %s uses deprecated <test> tags. "
"Please rename them to <rundefinition>.",
benchmark_file)
else:
logging.warning("Benchmark file %s specifies no runs to execute "
"(no <rundefinition> tags found).",
benchmark_file)
if not any(runSet.should_be_executed() for runSet in self.run_sets):
logging.warning("No <rundefinition> tag selected, nothing will be executed.")
if config.selected_run_definitions:
logging.warning("The selection %s does not match any run definitions of %s.",
config.selected_run_definitions,
[runSet.real_name for runSet in self.run_sets])
elif config.selected_run_definitions:
for selected in config.selected_run_definitions:
if not any(util.wildcard_match(run_set.real_name, selected) for run_set in self.run_sets):
logging.warning(
'The selected run definition "%s" is not present in the input file, '
'skipping it.',
selected) | ValueError | dataset/ETHPy150Open sosy-lab/benchexec/benchexec/model.py/Benchmark.__init__ |
722 | def after_execution(self, exitcode, forceTimeout=False, termination_reason=None):
"""
@deprecated: use set_result() instead
"""
# termination reason is not fully precise for timeouts, so we guess "timeouts"
# if time is too high
isTimeout = forceTimeout \
or termination_reason in ['cputime', 'cputime-soft', 'walltime'] \
or self._is_timeout()
if isinstance(exitcode, int):
exitcode = util.ProcessExitCode.from_raw(exitcode)
# read output
try:
with open(self.log_file, 'rt', errors='ignore') as outputFile:
output = outputFile.readlines()
# first 6 lines are for logging, rest is output of subprocess, see runexecutor.py for details
output = output[6:]
except __HOLE__ as e:
logging.warning("Cannot read log file: %s", e.strerror)
output = []
self.status = self._analyse_result(exitcode, output, isTimeout, termination_reason)
self.category = result.get_result_category(self.identifier, self.status, self.properties)
for column in self.columns:
substitutedColumnText = substitute_vars([column.text], self.runSet, self.sourcefiles[0])[0]
column.value = self.runSet.benchmark.tool.get_value_from_output(output, substitutedColumnText) | IOError | dataset/ETHPy150Open sosy-lab/benchexec/benchexec/model.py/Run.after_execution |
723 | def __init__(self, tags, rlimits, config):
self.cpu_model = None
self.memory = None
self.cpu_cores = None
for requireTag in tags:
cpu_model = requireTag.get('cpuModel', None)
if cpu_model:
if self.cpu_model is None:
self.cpu_model = cpu_model
else:
raise Exception('Double specification of required CPU model.')
cpu_cores = requireTag.get('cpuCores', None)
if cpu_cores:
if self.cpu_cores is None:
if cpu_cores is not None:
self.cpu_cores = int(cpu_cores)
else:
raise Exception('Double specification of required CPU cores.')
memory = requireTag.get('memory', None)
if memory:
if self.memory is None:
if memory is not None:
try:
self.memory = int(memory) * _BYTE_FACTOR * _BYTE_FACTOR
logging.warning(
'Value "%s" for memory requirement interpreted as MB for backwards compatibility, '
'specify a unit to make this unambiguous.',
memory)
except __HOLE__:
self.memory = util.parse_memory_value(memory)
else:
raise Exception('Double specification of required memory.')
# TODO check, if we have enough requirements to reach the limits
# TODO is this really enough? we need some overhead!
if self.cpu_cores is None:
self.cpu_cores = rlimits.get(CORELIMIT, None)
if self.memory is None:
self.memory = rlimits.get(MEMLIMIT, None)
if hasattr(config, 'cpu_model') and config.cpu_model is not None:
# user-given model -> override value
self.cpu_model = config.cpu_model
if self.cpu_cores is not None and self.cpu_cores <= 0:
raise Exception('Invalid value {} for required CPU cores.'.format(self.cpu_cores))
if self.memory is not None and self.memory <= 0:
raise Exception('Invalid value {} for required memory.'.format(self.memory)) | ValueError | dataset/ETHPy150Open sosy-lab/benchexec/benchexec/model.py/Requirements.__init__ |
724 | def is_num_list(item):
    try:
        for thing in item:
            if not isinstance(thing, Num):
                raise TypeError
    except __HOLE__:
        return False
    return True | TypeError | dataset/ETHPy150Open plotly/plotly.py/plotly/tests/utils.py/is_num_list
725 | def _add_handler(self):
    try:
        handler = RotatingFileHandler(
            '/var/log/%s.log' % self.log_name,
            maxBytes=10485760,
            backupCount=3
        )
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
    except __HOLE__:
        self.logger.addHandler(StreamHandler(sys.stderr)) | IOError | dataset/ETHPy150Open bcoe/smtproutes/smtproutes/config/log.py/Log._add_handler
726 | def get_history(self):
    try:
        n = readline.get_current_history_length()
    except __HOLE__:
        return []
    return [readline.get_history_item(i) for i in range(1, n + 1)] | NameError | dataset/ETHPy150Open njr0/fish/fish/fish.py/REPL.get_history
727 | def _supports_stddev(self):
    "Confirm support for STDDEV and related stats functions"
    class StdDevPop(object):
        sql_function = 'STDDEV_POP'
    try:
        self.connection.ops.check_aggregate_support(StdDevPop())
    except __HOLE__:
        self.supports_stddev = False | NotImplementedError | dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/db/backends/__init__.py/BaseDatabaseFeatures._supports_stddev
728 | def lazy_imports(*args):
    query = ' '.join([x for x in args if x])
    regex = re.compile("([a-zA-Z_][a-zA-Z0-9_]*)\.?")
    matches = regex.findall(query)
    for raw_module_name in matches:
        if re.match('np(\..*)?$', raw_module_name):
            module_name = re.sub('^np', 'numpy', raw_module_name)
        elif re.match('pd(\..*)?$', raw_module_name):
            module_name = re.sub('^pd', 'pandas', raw_module_name)
        else:
            module_name = raw_module_name
        try:
            module = __import__(module_name)
            globals()[raw_module_name] = module
        except __HOLE__ as e:
            pass | ImportError | dataset/ETHPy150Open Russell91/pythonpy/pythonpy/pycompleter.py/lazy_imports
729 | def get_completerlib():
"""Implementations for various useful completers.
These are all loaded by default by IPython.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The IPython Development Team.
#
# Distributed under the terms of the BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
#from __future__ import print_function
import inspect
import os
#import re
#import sys
try:
# Python >= 3.3
from importlib.machinery import all_suffixes
_suffixes = all_suffixes()
except __HOLE__:
from imp import get_suffixes
_suffixes = [ s[0] for s in get_suffixes() ]
# Third-party imports
from time import time
from zipimport import zipimporter
TIMEOUT_STORAGE = 2
TIMEOUT_GIVEUP = 20
# Regular expression for the python import statement
import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
r'(?P<package>[/\\]__init__)?'
r'(?P<suffix>%s)$' %
r'|'.join(re.escape(s) for s in _suffixes))
# RE for the ipython %run command (python + ipython scripts)
magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$')
def module_list(path):
"""
Return the list containing the names of the modules available in the given
folder.
"""
# sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
if path == '':
path = '.'
# A few local constants to be used in loops below
pjoin = os.path.join
if os.path.isdir(path):
# Build a list of all files in the directory and all files
# in its subdirectories. For performance reasons, do not
# recurse more than one level into subdirectories.
files = []
for root, dirs, nondirs in os.walk(path):
subdir = root[len(path)+1:]
if subdir:
files.extend(pjoin(subdir, f) for f in nondirs)
dirs[:] = [] # Do not recurse into additional subdirectories.
else:
files.extend(nondirs)
else:
try:
files = list(zipimporter(path)._files.keys())
except:
files = []
# Build a list of modules which match the import_re regex.
modules = []
for f in files:
m = import_re.match(f)
if m:
modules.append(m.group('name'))
return list(set(modules))
def get_root_modules():
"""
Returns a list containing the names of all the modules available in the
folders of the pythonpath.
ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
"""
#ip = get_ipython()
#rootmodules_cache = ip.db.get('rootmodules_cache', {})
rootmodules_cache = {}
rootmodules = list(sys.builtin_module_names)
start_time = time()
#store = False
for path in sys.path:
try:
modules = rootmodules_cache[path]
except KeyError:
modules = module_list(path)
try:
modules.remove('__init__')
except ValueError:
pass
if path not in ('', '.'): # cwd modules should not be cached
rootmodules_cache[path] = modules
if time() - start_time > TIMEOUT_STORAGE and not store:
#store = True
#print("\nCaching the list of root modules, please wait!")
#print("(This will only be done once - type '%rehashx' to "
#"reset cache!)\n")
sys.stdout.flush()
if time() - start_time > TIMEOUT_GIVEUP:
print("This is taking too long, we give up.\n")
return []
rootmodules.extend(modules)
#if store:
#ip.db['rootmodules_cache'] = rootmodules_cache
rootmodules = list(set(rootmodules))
return rootmodules
def is_importable(module, attr, only_modules):
if only_modules:
return inspect.ismodule(getattr(module, attr))
else:
return not(attr[:2] == '__' and attr[-2:] == '__')
def try_import(mod, only_modules=False):
try:
m = __import__(mod)
except:
return []
mods = mod.split('.')
for module in mods[1:]:
m = getattr(m, module)
m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__
completions = []
if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
completions.extend( [attr for attr in dir(m) if
is_importable(m, attr, only_modules)])
completions.extend(getattr(m, '__all__', []))
if m_is_init:
completions.extend(module_list(os.path.dirname(m.__file__)))
completions = set(completions)
if '__init__' in completions:
completions.remove('__init__')
return list(completions)
def module_completion(line):
"""
Returns a list containing the completion possibilities for an import line.
The line looks like this :
'import xml.d'
'from xml.dom import'
"""
words = line.split(' ')
nwords = len(words)
# from whatever <tab> -> 'import '
if nwords == 3 and words[0] == 'from':
return ['import ']
# 'from xy<tab>' or 'import xy<tab>'
if nwords < 3 and (words[0] in ['import','from']) :
if nwords == 1:
return get_root_modules()
mod = words[1].split('.')
if len(mod) < 2:
return get_root_modules()
completion_list = try_import('.'.join(mod[:-1]), True)
return ['.'.join(mod[:-1] + [el]) for el in completion_list]
# 'from xyz import abc<tab>'
if nwords >= 3 and words[0] == 'from':
mod = words[1]
return try_import(mod)
return module_completion, module_list | ImportError | dataset/ETHPy150Open Russell91/pythonpy/pythonpy/pycompleter.py/get_completerlib |
730 | def render_option(self, name, selected_choices,
                      option_value, option_label):
    option_value = force_text(option_value)
    if option_label == BLANK_CHOICE_DASH[0][1]:
        option_label = _("All")
    data = self.data.copy()
    data[name] = option_value
    selected = data == self.data or option_value in selected_choices
    try:
        url = data.urlencode()
    except __HOLE__:
        url = urlencode(data)
    return self.option_string() % {
        'attrs': selected and ' class="selected"' or '',
        'query_string': url,
        'label': force_text(option_label)
    } | AttributeError | dataset/ETHPy150Open carltongibson/django-filter/django_filters/widgets.py/LinkWidget.render_option
731 | def render(self, name, value, attrs=None):
    try:
        value = {
            True: 'true',
            False: 'false',
            '1': 'true',
            '0': 'false'
        }[value]
    except __HOLE__:
        value = ''
    return super(BooleanWidget, self).render(name, value, attrs) | KeyError | dataset/ETHPy150Open carltongibson/django-filter/django_filters/widgets.py/BooleanWidget.render
732 | def json_decode_hook(data):
    for key, value in list(data.items()):
        if not isinstance(value, six.string_types):
            continue
        for regex, fns in _PATTERNS:
            if regex.match(value):
                for fn in fns:
                    try:
                        data[key] = fn(value)
                        break
                    except __HOLE__:
                        pass
                break
    return data | ValueError | dataset/ETHPy150Open matthiask/plata/plata/fields.py/json_decode_hook
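For context on the row above: `object_hook` is the standard `json.loads` extension point it plugs into; the hook receives every decoded dict and may rewrite values in place. A small self-contained sketch with a date-only pattern (the real `_PATTERNS` table is not shown in this row, so the regex and conversion below are assumptions for illustration):

```python
import json
import re
from datetime import datetime

DATE_RE = re.compile(r'^\d{4}-\d{2}-\d{2}$')   # illustrative pattern only

def decode_hook(data):
    """Convert string values that look like ISO dates into datetime objects."""
    for key, value in list(data.items()):
        if isinstance(value, str) and DATE_RE.match(value):
            try:
                data[key] = datetime.strptime(value, '%Y-%m-%d')
            except ValueError:
                pass   # leave the original string if parsing fails
    return data

doc = json.loads('{"created": "2016-04-01", "note": "hi"}', object_hook=decode_hook)
print(doc['created'].year)   # 2016
```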
733 | def clean(self, value, *args, **kwargs):
    if value:
        try:
            # Run the value through JSON so we can normalize formatting
            # and at least learn about malformed data:
            value = json.dumps(
                json.loads(
                    value,
                    use_decimal=True,
                    object_hook=json_decode_hook,
                ),
                use_decimal=True,
                default=json_encode_default,
            )
        except __HOLE__:
            raise forms.ValidationError("Invalid JSON data!")
    return super(JSONFormField, self).clean(value, *args, **kwargs) | ValueError | dataset/ETHPy150Open matthiask/plata/plata/fields.py/JSONFormField.clean
734 | def to_python(self, value):
    """Convert our string value to JSON after we load it from the DB"""
    if isinstance(value, dict):
        return value
    elif isinstance(value, six.string_types):
        # Avoid asking the JSON decoder to handle empty values:
        if not value:
            return {}
        try:
            return json.loads(
                value, use_decimal=True,
                object_hook=json_decode_hook)
        except __HOLE__:
            logging.getLogger("plata.fields").exception(
                "Unable to deserialize stored JSONField data: %s", value)
            return {}
    else:
        assert value is None
        return {} | ValueError | dataset/ETHPy150Open matthiask/plata/plata/fields.py/JSONField.to_python
735 | def unpack(self, buf):
    dpkt.Packet.unpack(self, buf)
    n = self.len - 4
    if n > len(self.data):
        raise dpkt.NeedData('not enough data')
    self.msg, self.data = self.data[:n], self.data[n:]
    try:
        p = self._msgsw[self.msgid](self.msg)
        setattr(self, p.__class__.__name__.lower(), p)
    except (__HOLE__, dpkt.UnpackError):
        pass | KeyError | dataset/ETHPy150Open dragondjf/QMarkdowner/dpkt/sccp.py/SCCP.unpack
736 | def parse(self, *args, **kwargs):
    text = get_text(self.view)
    try:
        text = strip_js_comments(text)
        data = json.loads(text)
    except __HOLE__ as e:
        self.output.write_line(self.debug_base % (self.file_path, str(e)))
    else:
        return data | ValueError | dataset/ETHPy150Open SublimeText/PackageDev/fileconv/loaders.py/JSONLoader.parse
737 | def parse(self, *args, **kwargs):
    text = get_text(self.view)
    try:
        data = yaml.safe_load(text)
    except yaml.YAMLError as e:
        out = self.debug_base % str(e).replace("<unicode string>", self.file_path)
        self.output.write_line(out)
    except __HOLE__ as e:
        self.output.write_line('Error opening "%s": %s' % (self.file_path, str(e)))
    else:
        return data
###############################################################################
# Collect all the loaders and assign them to `get` | IOError | dataset/ETHPy150Open SublimeText/PackageDev/fileconv/loaders.py/YAMLLoader.parse
738 | def _setup_dir(self, base_dir):
    """ Creates stats directory for storing stat files.
        `base_dir`
            Base directory.
        """
    stats_dir = self._sdir(base_dir)
    if not os.path.isdir(stats_dir):
        try:
            os.mkdir(stats_dir)
        except __HOLE__:
            raise errors.DirectorySetupFail() | OSError | dataset/ETHPy150Open xtrementl/focus/focus/plugin/modules/stats.py/Stats._setup_dir
739 | def _log_task(self, task):
""" Logs task record to file.
`task`
``Task`` instance.
"""
if not task.duration:
return
self._setup_dir(task.base_dir)
stats_dir = self._sdir(task.base_dir)
duration = task.duration
while duration > 0:
# build filename
date = (datetime.datetime.now() -
datetime.timedelta(minutes=duration))
date_str = date.strftime('%Y%m%d')
filename = os.path.join(stats_dir, '{0}.json'.format(date_str))
with open(filename, 'a+') as file_:
# fetch any existing data
try:
file_.seek(0)
data = json.loads(file_.read())
except (ValueError, OSError):
data = {}
if not task.name in data:
data[task.name] = 0
# how much total time for day
try:
total_time = sum(int(x) for x in data.values())
if total_time > MINS_IN_DAY:
total_time = MINS_IN_DAY
except __HOLE__:
total_time = 0
# constrain to single day
amount = duration
if amount + total_time > MINS_IN_DAY:
amount = MINS_IN_DAY - total_time
# invalid or broken state, bail
if amount <= 0:
break
data[task.name] += amount
duration -= amount
# write file
try:
file_.seek(0)
file_.truncate(0)
file_.write(json.dumps(data))
except (ValueError, OSError):
pass | ValueError | dataset/ETHPy150Open xtrementl/focus/focus/plugin/modules/stats.py/Stats._log_task |
740 | def _get_stats(self, task, start_date):
""" Fetches statistic information for given task and start range.
"""
stats = []
stats_dir = self._sdir(task.base_dir)
date = start_date
end_date = datetime.date.today()
delta = datetime.timedelta(days=1)
while date <= end_date:
date_str = date.strftime('%Y%m%d')
filename = os.path.join(stats_dir, '{0}.json'.format(date_str))
if os.path.exists(filename):
try:
# fetch stats content
with open(filename, 'r') as file_:
data = json.loads(file_.read())
# sort descending by time
stats.append((date, sorted(data.iteritems(),
key=lambda x: x[1],
reverse=True)))
except (json.JSONDecodeError, __HOLE__):
pass
date += delta # next day
return stats | OSError | dataset/ETHPy150Open xtrementl/focus/focus/plugin/modules/stats.py/Stats._get_stats |
741 | def parse_script_name(self, name):
    if name is None:
        return (None, None)
    try:
        lang, script_id = name.split("/")
    except __HOLE__:
        return (None, None)
    return (lang, script_id) | ValueError | dataset/ETHPy150Open KunihikoKido/sublime-elasticsearch-client/commands/put_script.py/PutScriptCommand.parse_script_name
742 | def update(self, row, keys, ensure=None, types={}):
"""
Update a row in the table.
The update is managed via the set of column names stated in ``keys``:
they will be used as filters for the data to be updated, using the values
in ``row``.
::
# update all entries with id matching 10, setting their title columns
data = dict(id=10, title='I am a banana!')
table.update(data, ['id'])
If keys in ``row`` update columns not present in the table,
they will be created based on the settings of ``ensure`` and
``types``, matching the behavior of :py:meth:`insert() <dataset.Table.insert>`.
"""
# check whether keys arg is a string and format as a list
if not isinstance(keys, (list, tuple)):
keys = [keys]
self._check_dropped()
if not keys or len(keys) == len(row):
return False
clause = [(u, row.get(u)) for u in keys]
ensure = self.database.ensure_schema if ensure is None else ensure
if ensure:
self._ensure_columns(row, types=types)
# Don't update the key itself, so remove any keys from the row dict
clean_row = row.copy()
for key in keys:
if key in clean_row.keys():
del clean_row[key]
try:
filters = self._args_to_clause(dict(clause))
stmt = self.table.update(filters, clean_row)
rp = self.database.executable.execute(stmt)
return rp.rowcount
except __HOLE__:
return 0 | KeyError | dataset/ETHPy150Open pudo/dataset/dataset/persistence/table.py/Table.update |
743 | def upsert(self, row, keys, ensure=None, types={}):
"""
An UPSERT is a smart combination of insert and update.
If rows with matching ``keys`` exist they will be updated, otherwise a
new row is inserted in the table.
::
data = dict(id=10, title='I am a banana!')
table.upsert(data, ['id'])
"""
# check whether keys arg is a string and format as a list
if not isinstance(keys, (list, tuple)):
keys = [keys]
self._check_dropped()
ensure = self.database.ensure_schema if ensure is None else ensure
if ensure:
self.create_index(keys)
filters = {}
for key in keys:
filters[key] = row.get(key)
res = self.find_one(**filters)
if res is not None:
row_count = self.update(row, keys, ensure=ensure, types=types)
if row_count == 0:
return False
elif row_count == 1:
try:
return res['id']
except __HOLE__:
return True
else:
return True
else:
return self.insert(row, ensure=ensure, types=types) | KeyError | dataset/ETHPy150Open pudo/dataset/dataset/persistence/table.py/Table.upsert |
744 | def find_one(self, *args, **kwargs):
    """
    Get a single result from the table.
    Works just like :py:meth:`find() <dataset.Table.find>` but returns one result, or None.
    ::
        row = table.find_one(country='United States')
    """
    kwargs['_limit'] = 1
    iterator = self.find(*args, **kwargs)
    try:
        return next(iterator)
    except __HOLE__:
        return None | StopIteration | dataset/ETHPy150Open pudo/dataset/dataset/persistence/table.py/Table.find_one
745 | def distinct(self, *args, **_filter):
"""
Return all rows of a table, but remove rows in with duplicate values in ``columns``.
Interally this creates a `DISTINCT statement <http://www.w3schools.com/sql/sql_distinct.asp>`_.
::
# returns only one row per year, ignoring the rest
table.distinct('year')
# works with multiple columns, too
table.distinct('year', 'country')
# you can also combine this with a filter
table.distinct('year', country='China')
"""
self._check_dropped()
qargs = []
columns = []
try:
for c in args:
if isinstance(c, ClauseElement):
qargs.append(c)
else:
columns.append(self.table.c[c])
for col, val in _filter.items():
qargs.append(self.table.c[col] == val)
except __HOLE__:
return []
q = expression.select(columns, distinct=True,
whereclause=and_(*qargs),
order_by=[c.asc() for c in columns])
return self.database.query(q) | KeyError | dataset/ETHPy150Open pudo/dataset/dataset/persistence/table.py/Table.distinct |
746 | def ensure_tree(path):
    """Create a directory (and any ancestor directories required)
    :param path: Directory to create
    """
    try:
        os.makedirs(path)
    except __HOLE__ as exc:
        if exc.errno == errno.EEXIST:
            if not os.path.isdir(path):
                raise
        else:
            raise | OSError | dataset/ETHPy150Open openstack/solum/solum/openstack/common/fileutils.py/ensure_tree
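The EEXIST check in the row above predates Python 3.2; on current interpreters the same behaviour is usually spelled with `exist_ok=True`. A minimal equivalent, offered as a sketch rather than a drop-in replacement for the project's helper:

```python
import os

def ensure_tree(path):
    """Create a directory and any missing ancestors; a no-op if it already exists."""
    # Still raises if `path` exists but is not a directory, matching the row above.
    os.makedirs(path, exist_ok=True)

ensure_tree('/tmp/example/nested/dir')   # illustrative path only
```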
747 | def delete_if_exists(path, remove=os.unlink):
    """Delete a file, but ignore file not found error.
    :param path: File to delete
    :param remove: Optional function to remove passed path
    """
    try:
        remove(path)
    except __HOLE__ as e:
        if e.errno != errno.ENOENT:
            raise | OSError | dataset/ETHPy150Open openstack/solum/solum/openstack/common/fileutils.py/delete_if_exists
748 | def _scoreLoadRecipeChoice(labelPath, version):
    # FIXME I'm quite sure this heuristic will get replaced with
    # something smarter/more sane as time progresses
    if not labelPath:
        return 0
    score = 0
    labelPath = [ x for x in reversed(labelPath)]
    branch = version.branch()
    while True:
        label = branch.label()
        try:
            index = labelPath.index(label)
        except __HOLE__:
            index = -1
        score += index
        if not branch.hasParentBranch():
            break
        branch = branch.parentBranch()
    return score | ValueError | dataset/ETHPy150Open sassoftware/conary/conary/build/loadrecipe.py/_scoreLoadRecipeChoice
749 | @classmethod
def get_identifier(cls, obj):
    identifier = super(BootstrapPicturePlugin, cls).get_identifier(obj)
    try:
        content = force_text(obj.image)
    except __HOLE__:
        content = _("No Picture")
    return format_html('{0}{1}', identifier, content) | AttributeError | dataset/ETHPy150Open jrief/djangocms-cascade/cmsplugin_cascade/bootstrap3/picture.py/BootstrapPicturePlugin.get_identifier
750 | def unregister_image_format(format_name):
    global FORMATS
    # handle being passed a format object rather than a format name string
    try:
        format_name = format_name.name
    except __HOLE__:
        pass
    try:
        del FORMATS_BY_NAME[format_name]
        FORMATS = [fmt for fmt in FORMATS if fmt.name != format_name]
    except KeyError:
        raise KeyError("Image format '%s' is not registered" % format_name) | AttributeError | dataset/ETHPy150Open torchbox/wagtail/wagtail/wagtailimages/formats.py/unregister_image_format
751 | def capture_payment(self, testing=False, order=None, amount=None):
"""
Creates and sends XML representation of transaction to Cybersource
"""
if not order:
order = self.order
if order.paid_in_full:
self.log_extra('%s is paid in full, no capture attempted.', order)
self.record_payment()
return ProcessorResult(self.key, True, _("No charge needed, paid in full."))
self.log_extra('Capturing payment for %s', order)
if amount is None:
amount = order.balance
self.prepare_content(order, amount)
invoice = "%s" % order.id
failct = order.paymentfailures.count()
if failct > 0:
invoice = "%s_%i" % (invoice, failct)
# XML format is very simple, using ElementTree for generation would be overkill
t = loader.get_template('shop/checkout/cybersource/request.xml')
c = Context({
'config' : self.configuration,
'merchantReferenceCode' : invoice,
'billTo' : self.bill_to,
'purchaseTotals' : self.purchase_totals,
'card' : self.card,
})
request = t.render(c)
conn = urllib2.Request(url=self.connection, data=request)
try:
f = urllib2.urlopen(conn)
except urllib2.HTTPError, e:
# we probably didn't authenticate properly
# make sure the 'v' in your account number is lowercase
return ProcessorResult(self.key, False, 'Problem parsing results')
f = urllib2.urlopen(conn)
all_results = f.read()
tree = fromstring(all_results)
parsed_results = tree.getiterator('{urn:schemas-cybersource-com:transaction-data-1.26}reasonCode')
try:
reason_code = parsed_results[0].text
except __HOLE__:
return ProcessorResult(self.key, False, 'Problem parsing results')
response_text = CYBERSOURCE_RESPONSES.get(reason_code, 'Unknown Failure')
if reason_code == '100':
payment = self.record_payment(order=order, amount=amount,
transaction_id="", reason_code=reason_code)
return ProcessorResult(self.key, True, response_text, payment=payment)
else:
payment = self.record_failure(order=order, amount=amount,
transaction_id="", reason_code=reason_code,
details=response_text)
return ProcessorResult(self.key, False, response_text) | KeyError | dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/payment/modules/cybersource/processor.py/PaymentProcessor.capture_payment |
752 | def is_fits(filename):
from astropy.io import fits
try:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
with fits.open(filename, ignore_missing_end=True):
return True
except __HOLE__:
return False | IOError | dataset/ETHPy150Open glue-viz/glue/glue/core/data_factories/fits.py/is_fits |
753 | @data_factory(
label='FITS file',
identifier=is_fits,
priority=100,
)
def fits_reader(source, auto_merge=False, exclude_exts=None, label=None):
"""
Read in all extensions from a FITS file.
Parameters
----------
source: str or HDUList
The pathname to the FITS file.
If an HDUList is passed in, simply use that.
auto_merge: bool
Merge extensions that have the same shape
and only one has a defined WCS.
exclude_exts: [hdu, ] or [index, ]
List of HDU's to exclude from reading.
This can be a list of HDU's or a list
of HDU indexes.
"""
from astropy.io import fits
from astropy.table import Table
exclude_exts = exclude_exts or []
if not isinstance(source, fits.hdu.hdulist.HDUList):
hdulist = fits.open(source, ignore_missing_end=True)
hdulist.verify('fix')
else:
hdulist = source
groups = OrderedDict()
extension_by_shape = OrderedDict()
if label is not None:
label_base = label
else:
hdulist_name = hdulist.filename()
if hdulist_name is None:
hdulist_name = "HDUList"
label_base = basename(hdulist_name).rpartition('.')[0]
if not label_base:
label_base = basename(hdulist_name)
# Create a new image Data.
def new_data():
label = '{0}[{1}]'.format(
label_base,
hdu_name
)
data = Data(label=label)
data.coords = coords
groups[hdu_name] = data
extension_by_shape[shape] = hdu_name
return data
for extnum, hdu in enumerate(hdulist):
hdu_name = hdu.name if hdu.name else "HDU{0}".format(extnum)
if (hdu.data is not None and
hdu.data.size > 0 and
hdu_name not in exclude_exts and
extnum not in exclude_exts):
if is_image_hdu(hdu):
shape = hdu.data.shape
coords = coordinates_from_header(hdu.header)
if not auto_merge or has_wcs(coords):
data = new_data()
else:
try:
data = groups[extension_by_shape[shape]]
except __HOLE__:
data = new_data()
data.add_component(component=hdu.data,
label=hdu_name)
elif is_table_hdu(hdu):
# Loop through columns and make component list
table = Table(hdu.data)
label = '{0}[{1}]'.format(
label_base,
hdu_name
)
data = Data(label=label)
groups[hdu_name] = data
for column_name in table.columns:
column = table[column_name]
if column.ndim != 1:
warnings.warn("Dropping column '{0}' since it is not 1-dimensional".format(column_name))
continue
component = Component.autotyped(column, units=column.unit)
data.add_component(component=component,
label=column_name)
return [groups[idx] for idx in groups]
# Utilities | KeyError | dataset/ETHPy150Open glue-viz/glue/glue/core/data_factories/fits.py/fits_reader |
754 | def prep_jid(nocache=False, passed_jid=None, recurse_count=0):
'''
Return a job id and prepare the job id directory.
This is the function responsible for making sure jids don't collide (unless
it is passed a jid).
So do what you have to do to make sure that stays the case
'''
if recurse_count >= 5:
err = 'prep_jid could not store a jid after {0} tries.'.format(recurse_count)
log.error(err)
raise salt.exceptions.SaltCacheError(err)
if passed_jid is None: # this can be a None or an empty string.
jid = salt.utils.jid.gen_jid()
else:
jid = passed_jid
jid_dir_ = _jid_dir(jid)
# Make sure we create the jid dir, otherwise someone else is using it,
# meaning we need a new jid.
try:
os.makedirs(jid_dir_)
except __HOLE__:
time.sleep(0.1)
if passed_jid is None:
return prep_jid(nocache=nocache, recurse_count=recurse_count+1)
try:
with salt.utils.fopen(os.path.join(jid_dir_, 'jid'), 'wb+') as fn_:
if six.PY2:
fn_.write(jid)
else:
fn_.write(bytes(jid, 'utf-8'))
if nocache:
with salt.utils.fopen(os.path.join(jid_dir_, 'nocache'), 'wb+') as fn_:
fn_.write(b'')
except IOError:
log.warning('Could not write out jid file for job {0}. Retrying.'.format(jid))
time.sleep(0.1)
return prep_jid(passed_jid=jid, nocache=nocache,
recurse_count=recurse_count+1)
return jid | OSError | dataset/ETHPy150Open saltstack/salt/salt/returners/local_cache.py/prep_jid |
755 | def returner(load):
'''
Return data to the local job cache
'''
serial = salt.payload.Serial(__opts__)
# if a minion is returning a standalone job, get a jobid
if load['jid'] == 'req':
load['jid'] = prep_jid(nocache=load.get('nocache', False))
jid_dir = _jid_dir(load['jid'])
if os.path.exists(os.path.join(jid_dir, 'nocache')):
return
hn_dir = os.path.join(jid_dir, load['id'])
try:
os.makedirs(hn_dir)
except __HOLE__ as err:
if err.errno == errno.EEXIST:
# Minion has already returned this jid and it should be dropped
log.error(
'An extra return was detected from minion {0}, please verify '
'the minion, this could be a replay attack'.format(
load['id']
)
)
return False
elif err.errno == errno.ENOENT:
log.error(
'An inconsistency occurred, a job was received with a job id '
'that is not present in the local cache: {jid}'.format(**load)
)
return False
raise
serial.dump(
load['return'],
# Use atomic open here to avoid the file being read before it's
# completely written to. Refs #1935
salt.utils.atomicfile.atomic_open(
os.path.join(hn_dir, RETURN_P), 'w+b'
)
)
if 'out' in load:
serial.dump(
load['out'],
# Use atomic open here to avoid the file being read before
# it's completely written to. Refs #1935
salt.utils.atomicfile.atomic_open(
os.path.join(hn_dir, OUT_P), 'w+b'
)
) | OSError | dataset/ETHPy150Open saltstack/salt/salt/returners/local_cache.py/returner |
756 | def save_load(jid, clear_load, minions=None, recurse_count=0):
'''
Save the load to the specified jid
minions argument is to provide a pre-computed list of matched minions for
the job, for cases when this function can't compute that list itself (such
as for salt-ssh)
'''
if recurse_count >= 5:
err = ('save_load could not write job cache file after {0} retries.'
.format(recurse_count))
log.error(err)
raise salt.exceptions.SaltCacheError(err)
jid_dir = _jid_dir(jid)
serial = salt.payload.Serial(__opts__)
# Save the invocation information
try:
if not os.path.exists(jid_dir):
os.makedirs(jid_dir)
except __HOLE__ as exc:
if exc.errno == errno.EEXIST:
# rarely, the directory can be already concurrently created between
# the os.path.exists and the os.makedirs lines above
pass
else:
raise
try:
serial.dump(
clear_load,
salt.utils.fopen(os.path.join(jid_dir, LOAD_P), 'w+b')
)
except IOError as exc:
log.warning(
'Could not write job invocation cache file: %s', exc
)
time.sleep(0.1)
return save_load(jid=jid, clear_load=clear_load,
recurse_count=recurse_count+1)
# if you have a tgt, save that for the UI etc
if 'tgt' in clear_load:
if minions is None:
ckminions = salt.utils.minions.CkMinions(__opts__)
# Retrieve the minions list
minions = ckminions.check_minions(
clear_load['tgt'],
clear_load.get('tgt_type', 'glob')
)
# save the minions to a cache so we can see in the UI
save_minions(jid, minions) | OSError | dataset/ETHPy150Open saltstack/salt/salt/returners/local_cache.py/save_load |
757 | def save_minions(jid, minions, syndic_id=None):
'''
Save/update the serialized list of minions for a given job
'''
log.debug(
'Adding minions for job %s%s: %s',
jid,
' from syndic master \'{0}\''.format(syndic_id) if syndic_id else '',
minions
)
serial = salt.payload.Serial(__opts__)
jid_dir = _jid_dir(jid)
if syndic_id is not None:
minions_path = os.path.join(
jid_dir,
SYNDIC_MINIONS_P.format(syndic_id)
)
else:
minions_path = os.path.join(jid_dir, MINIONS_P)
try:
serial.dump(minions, salt.utils.fopen(minions_path, 'w+b'))
except __HOLE__ as exc:
log.error(
'Failed to write minion list {0} to job cache file {1}: {2}'
.format(minions, minions_path, exc)
) | IOError | dataset/ETHPy150Open saltstack/salt/salt/returners/local_cache.py/save_minions |
758 | def get_load(jid):
'''
Return the load data that marks a specified jid
'''
jid_dir = _jid_dir(jid)
load_fn = os.path.join(jid_dir, LOAD_P)
if not os.path.exists(jid_dir) or not os.path.exists(load_fn):
return {}
serial = salt.payload.Serial(__opts__)
ret = serial.load(salt.utils.fopen(os.path.join(jid_dir, LOAD_P), 'rb'))
minions_cache = [os.path.join(jid_dir, MINIONS_P)]
minions_cache.extend(
glob.glob(os.path.join(jid_dir, SYNDIC_MINIONS_P.format('*')))
)
all_minions = set()
for minions_path in minions_cache:
log.debug('Reading minion list from %s', minions_path)
try:
all_minions.update(
serial.load(salt.utils.fopen(minions_path, 'rb'))
)
except __HOLE__ as exc:
salt.utils.files.process_read_exception(exc, minions_path)
if all_minions:
ret['Minions'] = sorted(all_minions)
return ret | IOError | dataset/ETHPy150Open saltstack/salt/salt/returners/local_cache.py/get_load |
759 | def update_endtime(jid, time):
'''
Update (or store) the end time for a given job
Endtime is stored as a plain text string
'''
jid_dir = _jid_dir(jid)
try:
if not os.path.exists(jid_dir):
os.makedirs(jid_dir)
with salt.utils.fopen(os.path.join(jid_dir, ENDTIME), 'w') as etfile:
etfile.write(time)
except __HOLE__ as exc:
log.warning('Could not write job invocation cache file: {0}'.format(exc)) | IOError | dataset/ETHPy150Open saltstack/salt/salt/returners/local_cache.py/update_endtime |
760 | def split_fd(self, fd):
"""Returns an (fd, obj) pair from an ``fd`` parameter.
We accept both raw file descriptors and file-like objects as
input to `add_handler` and related methods. When a file-like
object is passed, we must retain the object itself so we can
close it correctly when the `IOLoop` shuts down, but the
poller interfaces favor file descriptors (they will accept
file-like objects and call ``fileno()`` for you, but they
always return the descriptor itself).
This method is provided for use by `IOLoop` subclasses and should
not generally be used by application code.
.. versionadded:: 4.0
"""
try:
return fd.fileno(), fd
except __HOLE__:
return fd, fd | AttributeError | dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/ioloop.py/IOLoop.split_fd |
761 | def close_fd(self, fd):
"""Utility method to close an ``fd``.
If ``fd`` is a file-like object, we close it directly; otherwise
we use `os.close`.
This method is provided for use by `IOLoop` subclasses (in
implementations of ``IOLoop.close(all_fds=True)`` and should
not generally be used by application code.
.. versionadded:: 4.0
"""
try:
try:
fd.close()
except __HOLE__:
os.close(fd)
except OSError:
pass | AttributeError | dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/ioloop.py/IOLoop.close_fd |
762 | def start(self):
if self._running:
raise RuntimeError("IOLoop is already running")
self._setup_logging()
if self._stopped:
self._stopped = False
return
old_current = getattr(IOLoop._current, "instance", None)
IOLoop._current.instance = self
self._thread_ident = thread.get_ident()
self._running = True
# signal.set_wakeup_fd closes a race condition in event loops:
# a signal may arrive at the beginning of select/poll/etc
# before it goes into its interruptible sleep, so the signal
# will be consumed without waking the select. The solution is
# for the (C, synchronous) signal handler to write to a pipe,
# which will then be seen by select.
#
# In python's signal handling semantics, this only matters on the
# main thread (fortunately, set_wakeup_fd only works on the main
# thread and will raise a ValueError otherwise).
#
# If someone has already set a wakeup fd, we don't want to
# disturb it. This is an issue for twisted, which does its
# SIGCHILD processing in response to its own wakeup fd being
# written to. As long as the wakeup fd is registered on the IOLoop,
# the loop will still wake up and everything should work.
old_wakeup_fd = None
if hasattr(signal, 'set_wakeup_fd') and os.name == 'posix':
# requires python 2.6+, unix. set_wakeup_fd exists but crashes
# the python process on windows.
try:
old_wakeup_fd = signal.set_wakeup_fd(self._waker.write_fileno())
if old_wakeup_fd != -1:
# Already set, restore previous value. This is a little racy,
# but there's no clean get_wakeup_fd and in real use the
# IOLoop is just started once at the beginning.
signal.set_wakeup_fd(old_wakeup_fd)
old_wakeup_fd = None
except ValueError: # non-main thread
pass
try:
while True:
# Prevent IO event starvation by delaying new callbacks
# to the next iteration of the event loop.
with self._callback_lock:
callbacks = self._callbacks
self._callbacks = []
# Add any timeouts that have come due to the callback list.
# Do not run anything until we have determined which ones
# are ready, so timeouts that call add_timeout cannot
# schedule anything in this iteration.
due_timeouts = []
if self._timeouts:
now = self.time()
while self._timeouts:
if self._timeouts[0].callback is None:
# The timeout was cancelled. Note that the
# cancellation check is repeated below for timeouts
# that are cancelled by another timeout or callback.
heapq.heappop(self._timeouts)
self._cancellations -= 1
elif self._timeouts[0].deadline <= now:
due_timeouts.append(heapq.heappop(self._timeouts))
else:
break
if (self._cancellations > 512
and self._cancellations > (len(self._timeouts) >> 1)):
# Clean up the timeout queue when it gets large and it's
# more than half cancellations.
self._cancellations = 0
self._timeouts = [x for x in self._timeouts
if x.callback is not None]
heapq.heapify(self._timeouts)
for callback in callbacks:
self._run_callback(callback)
for timeout in due_timeouts:
if timeout.callback is not None:
self._run_callback(timeout.callback)
# Closures may be holding on to a lot of memory, so allow
# them to be freed before we go into our poll wait.
callbacks = callback = due_timeouts = timeout = None
if self._callbacks:
# If any callbacks or timeouts called add_callback,
# we don't want to wait in poll() before we run them.
poll_timeout = 0.0
elif self._timeouts:
# If there are any timeouts, schedule the first one.
# Use self.time() instead of 'now' to account for time
# spent running callbacks.
poll_timeout = self._timeouts[0].deadline - self.time()
poll_timeout = max(0, min(poll_timeout, _POLL_TIMEOUT))
else:
# No timeouts and no callbacks, so use the default.
poll_timeout = _POLL_TIMEOUT
if not self._running:
break
if self._blocking_signal_threshold is not None:
# clear alarm so it doesn't fire while poll is waiting for
# events.
signal.setitimer(signal.ITIMER_REAL, 0, 0)
try:
event_pairs = self._impl.poll(poll_timeout)
except Exception as e:
# Depending on python version and IOLoop implementation,
# different exception types may be thrown and there are
# two ways EINTR might be signaled:
# * e.errno == errno.EINTR
# * e.args is like (errno.EINTR, 'Interrupted system call')
if errno_from_exception(e) == errno.EINTR:
continue
else:
raise
if self._blocking_signal_threshold is not None:
signal.setitimer(signal.ITIMER_REAL,
self._blocking_signal_threshold, 0)
# Pop one fd at a time from the set of pending fds and run
# its handler. Since that handler may perform actions on
# other file descriptors, there may be reentrant calls to
# this IOLoop that update self._events
self._events.update(event_pairs)
while self._events:
fd, events = self._events.popitem()
try:
fd_obj, handler_func = self._handlers[fd]
handler_func(fd_obj, events)
except (OSError, __HOLE__) as e:
if errno_from_exception(e) == errno.EPIPE:
# Happens when the client closes the connection
pass
else:
self.handle_callback_exception(self._handlers.get(fd))
except Exception:
self.handle_callback_exception(self._handlers.get(fd))
fd_obj = handler_func = None
finally:
# reset the stopped flag so another start/stop pair can be issued
self._stopped = False
if self._blocking_signal_threshold is not None:
signal.setitimer(signal.ITIMER_REAL, 0, 0)
IOLoop._current.instance = old_current
if old_wakeup_fd is not None:
signal.set_wakeup_fd(old_wakeup_fd) | IOError | dataset/ETHPy150Open RobotWebTools/rosbridge_suite/rosbridge_server/src/tornado/ioloop.py/PollIOLoop.start |
763 | def show_state(*arguments):
"""Shows the contents of the state loaded by the configuration or from
the file specified as an argument.
"""
if len(arguments) == 0:
state_file = conf['pyexperiment.state_filename']
else:
state_file = arguments[0]
print_bold("Load state from file '%s'",
state_file)
try:
state.load(state_file, lazy=False, raise_error=True)
except __HOLE__ as err:
print(err)
else:
if len(state) > 0:
state.show()
else:
print("State empty") | IOError | dataset/ETHPy150Open duerrp/pyexperiment/pyexperiment/experiment.py/show_state |
764 | @classmethod
def RemoveInstance(cls, name):
"""Removes load information entry of the instance.
Args:
name: Name of the instance to remove from load information list.
"""
# Use cas operation to remove from instance name list.
memcache_client = memcache.Client()
while True:
instances = memcache_client.gets(cls.ALL_INSTANCES)
if not instances:
break
try:
instances.remove(name)
except __HOLE__:
# The instance name was not in the list.
break
if memcache_client.cas(cls.ALL_INSTANCES, instances):
break
# Delete the entry for the instance in Memcache and Datastore.
memcache.delete(name)
datastore_single_instance = SingleInstance.GetByName(name)
if datastore_single_instance:
datastore_single_instance.delete() | ValueError | dataset/ETHPy150Open GoogleCloudPlatform/solutions-load-balanced-gaming-server-on-google-compute-engine/load_info.py/LoadInfo.RemoveInstance |
765 | def previous_current_next(items):
"""
From http://www.wordaligned.org/articles/zippy-triples-served-with-python
Creates an iterator which returns (previous, current, next) triples,
with ``None`` filling in when there is no previous or next
available.
"""
extend = itertools.chain([None], items, [None])
previous, current, next = itertools.tee(extend, 3)
try:
current.next()
next.next()
next.next()
except __HOLE__:
pass
return itertools.izip(previous, current, next) | StopIteration | dataset/ETHPy150Open fabiocorneti/django-easytree/easytree/templatetags/easytree_tags.py/previous_current_next |
766 | def matches_requirement(req, wheels):
"""List of wheels matching a requirement.
:param req: The requirement to satisfy
:param wheels: List of wheels to search.
"""
try:
from pkg_resources import Distribution, Requirement
except __HOLE__:
raise RuntimeError("Cannot use requirements without pkg_resources")
req = Requirement.parse(req)
selected = []
for wf in wheels:
f = wf.parsed_filename
dist = Distribution(project_name=f.group("name"), version=f.group("ver"))
if dist in req:
selected.append(wf)
return selected | ImportError | dataset/ETHPy150Open chalasr/Flask-P2P/venv/lib/python2.7/site-packages/wheel/util.py/matches_requirement |
767 | def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000, ipython_notebook=True, clear_after_finish=False, **kwargs):
"""
Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.
kwargs are passed to the optimizer. They can be:
:param max_iters: maximum number of function evaluations
:type max_iters: int
:messages: whether to display during optimisation
:type messages: bool
:param optimizer: which optimizer to use (defaults to self.preferred optimizer), a range of optimisers can be found in :module:`~GPy.inference.optimization`, they include 'scg', 'lbfgs', 'tnc'.
:type optimizer: string
:param bool ipython_notebook: whether to use ipython notebook widgets or not.
:param bool clear_after_finish: if in ipython notebook, we can clear the widgets after optimization.
"""
self.inference_method.on_optimization_start()
try:
super(GP, self).optimize(optimizer, start, messages, max_iters, ipython_notebook, clear_after_finish, **kwargs)
except __HOLE__:
print("KeyboardInterrupt caught, calling on_optimization_end() to round things up")
self.inference_method.on_optimization_end()
raise | KeyboardInterrupt | dataset/ETHPy150Open SheffieldML/GPy/GPy/core/gp.py/GP.optimize |
768 | def lazy_property(undecorated):
name = '_' + undecorated.__name__
@property
@wraps(undecorated)
def decorated(self):
try:
return getattr(self, name)
except __HOLE__:
v = undecorated(self)
setattr(self, name, v)
return v
return decorated | AttributeError | dataset/ETHPy150Open samuel/kokki/kokki/system.py/lazy_property |
769 | @classmethod
def get_instance(cls):
try:
return cls._instance
except __HOLE__:
cls._instance = cls()
return cls._instance | AttributeError | dataset/ETHPy150Open samuel/kokki/kokki/system.py/System.get_instance |
770 | def get_cinspect_object(obj):
""" Returns the object wrapped in the appropriate CInspectObject class. """
try:
with inspect_restored():
inspect.getsource(obj)
except (TypeError, __HOLE__):
wrapped = _get_cinspect_object(obj)
else:
wrapped = PythonObject(obj)
return wrapped | IOError | dataset/ETHPy150Open punchagan/cinspect/cinspect/_types.py/get_cinspect_object |
771 | @register.filter(name="markdown")
def markdown(value, arg=''):
try:
from niwi.contrib.markdown2 import markdown
except __HOLE__:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in {% markdown %} filter: The Python markdown library isn't installed.")
return force_unicode(value)
else:
extensions = {'code-color':{'style':'trac'}}
return mark_safe(markdown(force_unicode(value), extras=extensions)) | ImportError | dataset/ETHPy150Open niwinz/niwi-web/src/niwi/web/templatetags/utils.py/markdown |
772 | @register.tag(name="show_page")
def show_page(parser, token):
"""
    Render a little block, obtaining its source dynamically
    from content written on the Page model.
"""
try:
tag_name, page_name = token.split_contents()
except __HOLE__:
raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0])
return ShowPageNode(parser.compile_filter(page_name)) | ValueError | dataset/ETHPy150Open niwinz/niwi-web/src/niwi/web/templatetags/utils.py/show_page |
773 | @register.tag(name="render_page_as_template")
def render_tmpl(parser, token):
"""
    Renders a model object's content as a template.
"""
try:
tag_name, obj = token.split_contents()
except __HOLE__:
raise template.TemplateSyntaxError("%r tag requires a single argument" % \
token.contents.split()[0])
obj = parser.compile_filter(obj)
return TemplateNode(obj) | ValueError | dataset/ETHPy150Open niwinz/niwi-web/src/niwi/web/templatetags/utils.py/render_tmpl |
774 | @register.tag(name='post_file_link')
def post_file_link(parser, token):
try:
tag_name, slug = token.split_contents()
except __HOLE__:
raise template.TemplateSyntaxError("%r tag requires a single argument" % \
token.contents.split()[0])
slug = parser.compile_filter(slug)
return PostFileLinkTagNode(slug) | ValueError | dataset/ETHPy150Open niwinz/niwi-web/src/niwi/web/templatetags/utils.py/post_file_link |
775 | @register.tag(name='post_file_url')
def post_file_url(parser, token):
try:
tag_name, slug = token.split_contents()
except __HOLE__:
raise template.TemplateSyntaxError("%r tag requires a single argument" % \
token.contents.split()[0])
slug = parser.compile_filter(slug)
return PostFileLinkNode(slug) | ValueError | dataset/ETHPy150Open niwinz/niwi-web/src/niwi/web/templatetags/utils.py/post_file_url |
776 | def elem_to_internal(elem, strip_ns=1, strip=1):
"""Convert an Element into an internal dictionary (not JSON!)."""
d = {}
elem_tag = elem.tag
if strip_ns:
elem_tag = strip_tag(elem.tag)
else:
for key, value in list(elem.attrib.items()):
d['@' + key] = value
# loop over subelements to merge them
for subelem in elem:
v = elem_to_internal(subelem, strip_ns=strip_ns, strip=strip)
tag = subelem.tag
if strip_ns:
tag = strip_tag(subelem.tag)
value = v[tag]
try:
# add to existing list for this tag
d[tag].append(value)
except __HOLE__:
# turn existing entry into a list
d[tag] = [d[tag], value]
except KeyError:
# add a new non-list entry
d[tag] = value
text = elem.text
tail = elem.tail
if strip:
# ignore leading and trailing whitespace
if text:
text = text.strip()
if tail:
tail = tail.strip()
if tail:
d['#tail'] = tail
if d:
# use #text element if other attributes exist
if text:
d["#text"] = text
else:
# text is the value if no attributes
d = text or None
return {elem_tag: d} | AttributeError | dataset/ETHPy150Open hay/xml2json/xml2json.py/elem_to_internal |
777 | def panset(self, fitsimage, channel, paninfo):
image = fitsimage.get_image()
if image is None:
return
x, y = fitsimage.get_pan()
points = fitsimage.get_pan_rect()
# calculate pan position point radius
p_image = paninfo.panimage.get_image()
try:
obj = paninfo.panimage.canvas.getObjectByTag('__image')
except __HOLE__:
obj = None
#print(('panset', image, p_image, obj, obj.image, paninfo.panimage._imgobj))
width, height = image.get_size()
edgew = math.sqrt(width**2 + height**2)
radius = int(0.015 * edgew)
# Mark pan rectangle and pan position
#p_canvas = paninfo.panimage.get_canvas()
p_canvas = paninfo.panimage.private_canvas
try:
obj = p_canvas.getObjectByTag(paninfo.panrect)
if obj.kind != 'compound':
return False
point, bbox = obj.objects
self.logger.debug("starting panset")
point.x, point.y = x, y
point.radius = radius
bbox.points = points
p_canvas.update_canvas(whence=0)
except KeyError:
paninfo.panrect = p_canvas.add(self.dc.CompoundObject(
self.dc.Point(x, y, radius=radius, style='plus',
color=self.settings.get('pan_position_color', 'yellow')),
self.dc.Polygon(points,
color=self.settings.get('pan_rectangle_color', 'red'))))
p_canvas.update_canvas(whence=0)
return True | KeyError | dataset/ETHPy150Open ejeschke/ginga/ginga/misc/plugins/Pan.py/Pan.panset |
778 | def test_class_object_qualname(self):
# Test preservation of instance method __qualname__ attribute.
try:
__qualname__ = Original.original.__qualname__
except __HOLE__:
pass
else:
self.assertEqual(Class.function.__qualname__, __qualname__) | AttributeError | dataset/ETHPy150Open GrahamDumpleton/wrapt/tests/test_outer_staticmethod.py/TestNamingOuterStaticMethod.test_class_object_qualname |
779 | def test_instance_object_qualname(self):
# Test preservation of instance method __qualname__ attribute.
try:
__qualname__ = Original().original.__qualname__
except __HOLE__:
pass
else:
self.assertEqual(Class().function.__qualname__, __qualname__) | AttributeError | dataset/ETHPy150Open GrahamDumpleton/wrapt/tests/test_outer_staticmethod.py/TestNamingOuterStaticMethod.test_instance_object_qualname |
780 | def enable_tty_echo(tty=None):
"""
Re-enables proper console behavior, primarily for when a reload is
triggered at a PDB prompt.
TODO: context manager for ignoring signals
"""
if tty is None:
tty = sys.stdin
if not tty.isatty():
return
try:
import termios
except __HOLE__:
return
attr_list = termios.tcgetattr(tty)
attr_list[3] |= termios.ECHO
try:
orig_handler = signal.getsignal(signal.SIGTTOU)
except AttributeError:
termios.tcsetattr(tty, termios.TCSANOW, attr_list)
else:
try:
signal.signal(signal.SIGTTOU, signal.SIG_IGN)
termios.tcsetattr(tty, termios.TCSANOW, attr_list)
finally:
signal.signal(signal.SIGTTOU, orig_handler)
return | ImportError | dataset/ETHPy150Open mahmoud/clastic/clastic/server.py/enable_tty_echo |
781 | def restart_with_reloader(error_func=None):
to_mon = []
while 1:
_log('info', ' * Clastic restarting with reloader')
args = [sys.executable] + sys.argv
new_environ = os.environ.copy()
new_environ['WERKZEUG_RUN_MAIN'] = 'true'
if os.name == 'nt':
for key, value in new_environ.iteritems():
if isinstance(value, unicode):
new_environ[key] = value.encode('iso-8859-1')
child_proc = subprocess.Popen(args,
env=new_environ,
stderr=subprocess.PIPE)
stderr_buff = deque(maxlen=_STDERR_BUFF_SIZE)
def consume_lines():
for line in iter(child_proc.stderr.readline, ''):
if line.startswith(_MON_PREFIX):
to_mon[:] = literal_eval(line[len(_MON_PREFIX):])
else:
sys.stderr.write(line)
stderr_buff.append(line)
while child_proc.poll() is None:
consume_lines()
consume_lines()
exit_code = child_proc.returncode
if exit_code == 3:
continue
elif error_func and exit_code == 1 and stderr_buff:
enable_tty_echo()
tb_str = ''.join(stderr_buff)
err_server = error_func(tb_str, to_mon)
try:
reloader_loop(to_mon, 1)
except __HOLE__:
return 0
except SystemExit as se:
if se.code == 3:
continue
return se.code
finally:
err_server.shutdown()
err_server.server_close()
return 0
else:
return exit_code | KeyboardInterrupt | dataset/ETHPy150Open mahmoud/clastic/clastic/server.py/restart_with_reloader |
782 | def run_with_reloader(main_func, extra_files=None, interval=1,
error_func=None):
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
enable_tty_echo()
if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
thread.start_new_thread(main_func, ())
try:
reloader_loop(extra_files, interval)
except KeyboardInterrupt:
return
except SystemExit:
mon_list = list(chain(iter_monitor_files(), extra_files or ()))
sys.stderr.write('%s%r\n' % (_MON_PREFIX, mon_list))
raise
try:
sys.exit(restart_with_reloader(error_func=error_func))
except __HOLE__:
pass | KeyboardInterrupt | dataset/ETHPy150Open mahmoud/clastic/clastic/server.py/run_with_reloader |
783 | def process_queue(self):
curr_time = time.time()
elapsed = curr_time - self.last_time
if elapsed < self.seconds:
wait_ms = int(1000 * (self.seconds - elapsed))
else:
wait_ms = 0
try:
(fn, params) = self.queue.pop(0)
self.processing_queue = True
sublime.set_timeout(lambda: self.call_now(fn, *params), wait_ms)
except __HOLE__:
self.processing_queue = False | IndexError | dataset/ETHPy150Open accerqueira/sublime-test-runner/test_runner/decorators.py/LazyDecorator.process_queue |
784 | def nn_setsockopt(socket, level, option, value):
"""set a socket option
socket - socket number
level - option level
option - option
value - a readable byte buffer (not a Unicode string) containing the value
returns - 0 on success or < 0 on error
"""
try:
return _nn_setsockopt(socket, level, option, ctypes.addressof(value),
len(value))
except (__HOLE__, AttributeError):
buf_value = ctypes.create_string_buffer(value)
return _nn_setsockopt(socket, level, option,
ctypes.addressof(buf_value), len(value)) | TypeError | dataset/ETHPy150Open tonysimpson/nanomsg-python/_nanomsg_ctypes/__init__.py/nn_setsockopt |
785 | def nn_send(socket, msg, flags):
"send a message"
try:
return _nn_send(socket, ctypes.addressof(msg), len(buffer(msg)), flags)
except (__HOLE__, AttributeError):
buf_msg = ctypes.create_string_buffer(msg)
return _nn_send(socket, ctypes.addressof(buf_msg), len(msg), flags) | TypeError | dataset/ETHPy150Open tonysimpson/nanomsg-python/_nanomsg_ctypes/__init__.py/nn_send |
786 | def __get__(self, instance, owner):
if instance is not None:
try:
return instance._data[self]
except __HOLE__:
return instance._data.setdefault(self, self._default(instance))
return self | KeyError | dataset/ETHPy150Open jaimegildesagredo/booby/booby/fields.py/Field.__get__ |
787 | def __call_default(self, *args):
try:
return self.default()
except TypeError as error:
try:
return self.default(*args)
except __HOLE__:
raise error | TypeError | dataset/ETHPy150Open jaimegildesagredo/booby/booby/fields.py/Field.__call_default |
788 | def test_credentials(self):
logging.debug('')
logging.debug('test_credentials')
# Basic form.
owner = Credentials()
if sys.platform == 'win32' and not HAVE_PYWIN32:
self.assertEqual('%s' % owner, owner.user+' (transient)')
else:
self.assertEqual('%s' % owner, owner.user)
# Comparison.
user = Credentials()
self.assertEqual(user, owner)
user.user = 'anyone@hostname'
self.assertNotEqual(user, owner)
self.assertNotEqual(user, 'xyzzy')
# Thread storage.
try:
del threading.current_thread().credentials # Ensure empty.
except __HOLE__:
pass
self.assertEqual(get_credentials(), owner)
# Sign/verify.
encoded = owner.encode()
Credentials.verify(encoded, allowed_users=None) # 'First sighting'.
Credentials.verify(encoded, allowed_users=None) # Cached verification.
data, signature, client_creds = encoded
encoded = (data[:1], signature, client_creds)
assert_raises(self, 'Credentials.verify(encoded, None)',
globals(), locals(), CredentialsError, 'Invalid data')
encoded = (data[:-1], signature, client_creds)
assert_raises(self, 'Credentials.verify(encoded, None)',
globals(), locals(), CredentialsError, 'Invalid signature')
encoded = (data, signature[:-1], client_creds)
assert_raises(self, 'Credentials.verify(encoded, None)',
globals(), locals(), CredentialsError, 'Invalid signature')
newline = data.find('\n') # .user
newline = data.find('\n', newline+1) # .transient
# Expecting '-'
mangled = data[:newline+1] + '*' + data[newline+2:]
encoded = (mangled, signature, client_creds)
assert_raises(self, 'Credentials.verify(encoded, None)',
globals(), locals(), CredentialsError, 'Invalid key')
# Detect mismatched key.
get_key_pair(owner.user, overwrite_cache=True)
spook = Credentials()
encoded = spook.encode()
assert_raises(self, 'Credentials.verify(encoded, None)',
globals(), locals(), CredentialsError,
'Public key mismatch')
# Check if remote access.
self.assertFalse(remote_access()) | AttributeError | dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.main/src/openmdao/main/test/test_rbac.py/TestCase.test_credentials |
789 | def fromPath(cls, path):
"""
@param path: A path object to use for both reading contents from and
later saving to.
@type path: L{FilePath}
"""
self = cls(path)
try:
fp = path.open()
except __HOLE__:
return self
for line in fp:
if line.startswith(HashedEntry.MAGIC):
entry = HashedEntry.fromString(line)
else:
try:
entry = PlainEntry.fromString(line)
except (DecodeError, InvalidEntry, BadKeyError):
entry = UnparsedEntry(line)
self._entries.append(entry)
return self | IOError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/conch/client/knownhosts.py/KnownHostsFile.fromPath |
790 | def _pushWhitespaceBehavior(self, attr):
"""Push a new string onto the whitespaceBehaviorStack.
The string's value is taken from the "xml:space" attribute, if it exists
and has a legal value ("default" or "preserve"). Otherwise, the previous
stack element is duplicated.
"""
assert len(self._whitespaceBehaviorStack) > 0, "Whitespace behavior stack should never be empty!"
try:
if attr["xml:space"] == "default" or attr["xml:space"] == "preserve":
self._whitespaceBehaviorStack.append(attr["xml:space"])
else:
raise AimlParserError, "Invalid value for xml:space attribute "+self._location()
except __HOLE__:
self._whitespaceBehaviorStack.append(self._whitespaceBehaviorStack[-1]) | KeyError | dataset/ETHPy150Open encorehu/django-buddy/core/aiml/AimlParser.py/AimlHandler._pushWhitespaceBehavior |
791 | def _startElement(self, name, attr):
if name == "aiml":
# <aiml> tags are only legal in the OutsideAiml state
if self._state != self._STATE_OutsideAiml:
raise AimlParserError, "Unexpected <aiml> tag "+self._location()
self._state = self._STATE_InsideAiml
self._insideTopic = False
self._currentTopic = u""
try: self._version = attr["version"]
except __HOLE__:
# This SHOULD be a syntax error, but so many AIML sets out there are missing
# "version" attributes that it just seems nicer to let it slide.
#raise AimlParserError, "Missing 'version' attribute in <aiml> tag "+self._location()
#print "WARNING: Missing 'version' attribute in <aiml> tag "+self._location()
#print " Defaulting to version 1.0"
self._version = "1.0"
self._forwardCompatibleMode = (self._version != "1.0.1")
self._pushWhitespaceBehavior(attr)
# Not sure about this namespace business yet...
#try:
# self._namespace = attr["xmlns"]
# if self._version == "1.0.1" and self._namespace != "http://alicebot.org/2001/AIML-1.0.1":
# raise AimlParserError, "Incorrect namespace for AIML v1.0.1 "+self._location()
#except KeyError:
# if self._version != "1.0":
# raise AimlParserError, "Missing 'version' attribute(s) in <aiml> tag "+self._location()
elif self._state == self._STATE_OutsideAiml:
# If we're outside of an AIML element, we ignore all tags.
return
elif name == "topic":
# <topic> tags are only legal in the InsideAiml state, and only
# if we're not already inside a topic.
if (self._state != self._STATE_InsideAiml) or self._insideTopic:
raise AimlParserError, "Unexpected <topic> tag", self._location()
try: self._currentTopic = unicode(attr['name'])
except KeyError:
raise AimlParserError, "Required \"name\" attribute missing in <topic> element "+self._location()
self._insideTopic = True
elif name == "category":
# <category> tags are only legal in the InsideAiml state
if self._state != self._STATE_InsideAiml:
raise AimlParserError, "Unexpected <category> tag "+self._location()
self._state = self._STATE_InsideCategory
self._currentPattern = u""
self._currentThat = u""
# If we're not inside a topic, the topic is implicitly set to *
if not self._insideTopic: self._currentTopic = u"*"
self._elemStack = []
self._pushWhitespaceBehavior(attr)
elif name == "pattern":
# <pattern> tags are only legal in the InsideCategory state
if self._state != self._STATE_InsideCategory:
raise AimlParserError, "Unexpected <pattern> tag "+self._location()
self._state = self._STATE_InsidePattern
elif name == "that" and self._state == self._STATE_AfterPattern:
# <that> are legal either inside a <template> element, or
# inside a <category> element, between the <pattern> and the
# <template> elements. This clause handles the latter case.
self._state = self._STATE_InsideThat
elif name == "template":
# <template> tags are only legal in the AfterPattern and AfterThat
# states
if self._state not in [self._STATE_AfterPattern, self._STATE_AfterThat]:
raise AimlParserError, "Unexpected <template> tag "+self._location()
# if no <that> element was specified, it is implicitly set to *
if self._state == self._STATE_AfterPattern:
self._currentThat = u"*"
self._state = self._STATE_InsideTemplate
self._elemStack.append(['template',{}])
self._pushWhitespaceBehavior(attr)
elif self._state == self._STATE_InsidePattern:
# Certain tags are allowed inside <pattern> elements.
if name == "bot" and attr.has_key("name") and attr["name"] == u"name":
# Insert a special character string that the PatternMgr will
# replace with the bot's name.
self._currentPattern += u" BOT_NAME "
else:
raise AimlParserError, ("Unexpected <%s> tag " % name)+self._location()
elif self._state == self._STATE_InsideThat:
# Certain tags are allowed inside <that> elements.
if name == "bot" and attr.has_key("name") and attr["name"] == u"name":
# Insert a special character string that the PatternMgr will
# replace with the bot's name.
self._currentThat += u" BOT_NAME "
else:
raise AimlParserError, ("Unexpected <%s> tag " % name)+self._location()
elif self._state == self._STATE_InsideTemplate and self._validInfo.has_key(name):
# Starting a new element inside the current pattern. First
# we need to convert 'attr' into a native Python dictionary,
# so it can later be marshaled.
attrDict = {}
for k,v in attr.items():
#attrDict[k[1].encode(self._encoding)] = v.encode(self._encoding)
attrDict[k.encode(self._encoding)] = unicode(v)
self._validateElemStart(name, attrDict, self._version)
# Push the current element onto the element stack.
self._elemStack.append([name.encode(self._encoding),attrDict])
self._pushWhitespaceBehavior(attr)
# If this is a condition element, push a new entry onto the
# foundDefaultLiStack
if name == "condition":
self._foundDefaultLiStack.append(False)
else:
# we're now inside an unknown element.
if self._forwardCompatibleMode:
# In Forward Compatibility Mode, we ignore the element and its
# contents.
self._currentUnknown = name
else:
# Otherwise, unknown elements are grounds for error!
raise AimlParserError, ("Unexpected <%s> tag " % name)+self._location() | KeyError | dataset/ETHPy150Open encorehu/django-buddy/core/aiml/AimlParser.py/AimlHandler._startElement |
792 | def _characters(self, ch):
text = unicode(ch)
if self._state == self._STATE_InsidePattern:
# TODO: text inside patterns must be upper-case!
self._currentPattern += text
elif self._state == self._STATE_InsideThat:
self._currentThat += text
elif self._state == self._STATE_InsideTemplate:
# First, see whether the element at the top of the element stack
# is permitted to contain text.
try:
parent = self._elemStack[-1][0]
parentAttr = self._elemStack[-1][1]
required, optional, canBeParent = self._validInfo[parent]
nonBlockStyleCondition = (parent == "condition" and not (parentAttr.has_key("name") and parentAttr.has_key("value")))
if not canBeParent:
raise AimlParserError, ("Unexpected text inside <%s> element "%parent)+self._location()
elif parent == "random" or nonBlockStyleCondition:
# <random> elements can only contain <li> subelements. However,
# there's invariably some whitespace around the <li> that we need
# to ignore. Same for non-block-style <condition> elements (i.e.
# those which don't have both a "name" and a "value" attribute).
if len(text.strip()) == 0:
# ignore whitespace inside these elements.
return
else:
# non-whitespace text inside these elements is a syntax error.
raise AimlParserError, ("Unexpected text inside <%s> element "%parent)+self._location()
except __HOLE__:
# the element stack is empty. This should never happen.
raise AimlParserError, "Element stack is empty while validating text "+self._location()
# Add a new text element to the element at the top of the element
# stack. If there's already a text element there, simply append the
# new characters to its contents.
try: textElemOnStack = (self._elemStack[-1][-1][0] == "text")
except IndexError: textElemOnStack = False
except KeyError: textElemOnStack = False
if textElemOnStack:
self._elemStack[-1][-1][2] += text
else:
self._elemStack[-1].append(["text", {"xml:space": self._whitespaceBehaviorStack[-1]}, text])
else:
# all other text is ignored
pass | IndexError | dataset/ETHPy150Open encorehu/django-buddy/core/aiml/AimlParser.py/AimlHandler._characters |
793 | def _validateElemStart(self, name, attr, version):
"""Test the validity of an element starting inside a <template>
element.
    This function raises an AimlParserError exception if the tag is
invalid. Otherwise, no news is good news.
"""
# Check the element's attributes. Make sure that all required
# attributes are present, and that any remaining attributes are
# valid options.
required, optional, canBeParent = self._validInfo[name]
for a in required:
if a not in attr and not self._forwardCompatibleMode:
raise AimlParserError, ("Required \"%s\" attribute missing in <%s> element " % (a,name))+self._location()
for a in attr:
if a in required: continue
if a[0:4] == "xml:": continue # attributes in the "xml" namespace can appear anywhere
if a not in optional and not self._forwardCompatibleMode:
raise AimlParserError, ("Unexpected \"%s\" attribute in <%s> element " % (a,name))+self._location()
# special-case: several tags contain an optional "index" attribute.
# This attribute's value must be a positive integer.
if name in ["star", "thatstar", "topicstar"]:
for k,v in attr.items():
if k == "index":
temp = 0
try: temp = int(v)
except:
raise AimlParserError, ("Bad type for \"%s\" attribute (expected integer, found \"%s\") " % (k,v))+self._location()
if temp < 1:
raise AimlParserError, ("\"%s\" attribute must have non-negative value " % (k))+self._location()
# See whether the containing element is permitted to contain
# subelements. If not, this element is invalid no matter what it is.
try:
parent = self._elemStack[-1][0]
parentAttr = self._elemStack[-1][1]
except __HOLE__:
# If the stack is empty, no parent is present. This should never
# happen.
raise AimlParserError, ("Element stack is empty while validating <%s> " % name)+self._location()
required, optional, canBeParent = self._validInfo[parent]
nonBlockStyleCondition = (parent == "condition" and not (parentAttr.has_key("name") and parentAttr.has_key("value")))
if not canBeParent:
raise AimlParserError, ("<%s> elements cannot have any contents "%parent)+self._location()
# Special-case test if the parent element is <condition> (the
# non-block-style variant) or <random>: these elements can only
# contain <li> subelements.
elif (parent == "random" or nonBlockStyleCondition) and name!="li":
raise AimlParserError, ("<%s> elements can only contain <li> subelements "%parent)+self._location()
# Special-case test for <li> elements, which can only be contained
# by non-block-style <condition> and <random> elements, and whose
# required attributes are dependent upon which attributes are
# present in the <condition> parent.
elif name=="li":
if not (parent=="random" or nonBlockStyleCondition):
raise AimlParserError, ("Unexpected <li> element contained by <%s> element "%parent)+self._location()
if nonBlockStyleCondition:
if parentAttr.has_key("name"):
# Single-predicate condition. Each <li> element except the
# last must have a "value" attribute.
if len(attr) == 0:
# This could be the default <li> element for this <condition>,
# unless we've already found one.
if self._foundDefaultLiStack[-1]:
raise AimlParserError, "Unexpected default <li> element inside <condition> "+self._location()
else:
self._foundDefaultLiStack[-1] = True
elif len(attr) == 1 and attr.has_key("value"):
pass # this is the valid case
else:
raise AimlParserError, "Invalid <li> inside single-predicate <condition> "+self._location()
elif len(parentAttr) == 0:
# Multi-predicate condition. Each <li> element except the
# last must have a "name" and a "value" attribute.
if len(attr) == 0:
# This could be the default <li> element for this <condition>,
# unless we've already found one.
if self._foundDefaultLiStack[-1]:
raise AimlParserError, "Unexpected default <li> element inside <condition> "+self._location()
else:
self._foundDefaultLiStack[-1] = True
elif len(attr) == 2 and attr.has_key("value") and attr.has_key("name"):
pass # this is the valid case
else:
raise AimlParserError, "Invalid <li> inside multi-predicate <condition> "+self._location()
# All is well!
return True | IndexError | dataset/ETHPy150Open encorehu/django-buddy/core/aiml/AimlParser.py/AimlHandler._validateElemStart |
794 | def cast(self, d):
"""
Cast a single value to a :class:`datetime.date`.
:param date_format:
An optional :func:`datetime.strptime` format string for parsing
datetimes in this column.
:returns:
:class:`datetime.date` or :code:`None`.
"""
if type(d) is date or d is None:
return d
elif isinstance(d, six.string_types):
d = d.strip()
if d.lower() in self.null_values:
return None
else:
raise CastError('Can not parse value "%s" as date.' % d)
if self.date_format:
try:
dt = datetime.strptime(d, self.date_format)
except:
raise CastError('Value "%s" does not match date format.' % d)
return dt.date()
value, ctx = self.parser.parseDT(d, sourceTime=ZERO_DT)
if ctx.hasDate and not ctx.hasTime:
return value.date()
try:
dt = isodate.parse_date(d)
if ctx.hasTime:
raise ValueError('isodate.parse_date discarded a time component')
return dt
except (isodate.ISO8601Error, __HOLE__):
pass
raise CastError('Can not parse value "%s" as date.' % d) | ValueError | dataset/ETHPy150Open wireservice/agate/agate/data_types/date.py/Date.cast |
795 | def load(self, filename, groups = None, moderators = ()):
if PickleStorage.sharedDBs.has_key(filename):
self.db = PickleStorage.sharedDBs[filename]
else:
try:
self.db = pickle.load(open(filename))
PickleStorage.sharedDBs[filename] = self.db
except __HOLE__:
self.db = PickleStorage.sharedDBs[filename] = {}
self.db['groups'] = groups
if groups is not None:
for i in groups:
self.db[i] = {}
self.db['moderators'] = dict(moderators)
self.flush() | IOError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/news/database.py/PickleStorage.load |
796 | def getModerator(self, groups):
# first see if any groups are moderated. if so, nothing gets posted,
# but the whole messages gets forwarded to the moderator address
for group in groups:
try:
return self.dbm['moderators'][group]
except __HOLE__:
pass
return None | KeyError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/news/database.py/NewsShelf.getModerator |
797 | def postRequest(self, message):
cleave = message.find('\r\n\r\n')
headers, article = message[:cleave], message[cleave + 4:]
article = Article(headers, article)
groups = article.getHeader('Newsgroups').split()
xref = []
# Check for moderated status
moderator = self.getModerator(groups)
if moderator and not article.getHeader('Approved'):
return self.notifyModerators([moderator], article)
for group in groups:
try:
g = self.dbm['groups'][group]
except __HOLE__:
pass
else:
index = g.maxArticle + 1
g.maxArticle += 1
g.articles[index] = article
xref.append((group, str(index)))
self.dbm['groups'][group] = g
if not xref:
return defer.fail(NewsServerError("No groups carried: " + ' '.join(groups)))
article.putHeader('Xref', '%s %s' % (socket.gethostname().split()[0], ' '.join(map(lambda x: ':'.join(x), xref))))
self.dbm['Message-IDs'][article.getHeader('Message-ID')] = xref
return defer.succeed(None) | KeyError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/news/database.py/NewsShelf.postRequest |
798 | def groupRequest(self, group):
try:
g = self.dbm['groups'][group]
except __HOLE__:
return defer.fail(NewsServerError("No such group: " + group))
else:
flags = g.flags
low = g.minArticle
high = g.maxArticle
num = high - low + 1
return defer.succeed((group, num, high, low, flags)) | KeyError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/news/database.py/NewsShelf.groupRequest |
799 | def articleRequest(self, group, index, id = None):
if id is not None:
try:
xref = self.dbm['Message-IDs'][id]
except KeyError:
return defer.fail(NewsServerError("No such article: " + id))
else:
group, index = xref[0]
index = int(index)
try:
a = self.dbm['groups'][group].articles[index]
except __HOLE__:
return defer.fail(NewsServerError("No such group: " + group))
else:
return defer.succeed((
index,
a.getHeader('Message-ID'),
StringIO.StringIO(a.textHeaders() + '\r\n' + a.body)
)) | KeyError | dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/news/database.py/NewsShelf.articleRequest |