Unnamed: 0 (int64, 0–10k) | function (stringlengths 79–138k) | label (stringclasses, 20 values) | info (stringlengths 42–261)
---|---|---|---|
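Each row below pairs a Python function in which one exception type has been masked with the placeholder __HOLE__, a label column naming the masked exception class, and an info column giving the source path within the ETHPy150 corpus. As a minimal sketch (assuming each row is exposed as a dict keyed by the column names above; the field names are assumptions, not part of the dataset spec), the original source can be recovered by substituting the label back into the function text:

def reconstruct(row):
    # Hypothetical helper: put the masked exception name back in place.
    # "function" and "label" are assumed field names taken from the headers.
    return row["function"].replace("__HOLE__", row["label"])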
4,000 |
def Port(port):
    """Sanitize a port value.

    Args:
        port: a port value

    Returns:
        port: a port value

    Raises:
        BadPortValue: port is not valid integer or string
        BadPortRange: port is outside valid range
    """
    pval = -1
    try:
        pval = int(port)
    except __HOLE__:
        raise BadPortValue('port %s is not valid.' % port)
    if pval < 0 or pval > 65535:
        raise BadPortRange('port %s is out of range 0-65535.' % port)
    return pval
|
ValueError
|
dataset/ETHPy150Open google/capirca/lib/port.py/Port
|
4,001 |
def _handle_message(self, opcode, data):
    if self.client_terminated:
        return

    if opcode == 0x1:
        # UTF-8 data
        try:
            decoded = data.decode("utf-8")
        except __HOLE__:
            self._abort()
            return
        self._async_callback(self.on_message)(decoded)
    elif opcode == 0x2:
        # Binary data
        self._async_callback(self.on_message)(data)
    elif opcode == 0x8:
        # Close
        self.client_terminated = True
        self.close()
    elif opcode == 0x9:
        # Ping
        self._write_frame(True, 0xA, data)
        self._async_callback(self.on_ping)()
    elif opcode == 0xA:
        # Pong
        self._async_callback(self.on_pong)()
    else:
        self._abort()
|
UnicodeDecodeError
|
dataset/ETHPy150Open jbalogh/tornado-websocket-client/websocket.py/WebSocket._handle_message
|
4,002 |
def main(url, message='hello, world'):
    class HelloSocket(WebSocket):
        def on_open(self):
            self.ping()
            print '>>', message
            self.write_message(message)

        def on_message(self, data):
            print 'on_message:', data
            msg = raw_input('>> ')
            if msg == 'ping':
                self.ping()
            elif msg == 'die':
                self.close()
            else:
                self.write_message(msg)

        def on_close(self):
            print 'on_close'

        def on_pong(self):
            print 'on_pong'

    ws = HelloSocket(url)
    try:
        ioloop.IOLoop.instance().start()
    except __HOLE__:
        pass
    finally:
        ws.close()
|
KeyboardInterrupt
|
dataset/ETHPy150Open jbalogh/tornado-websocket-client/websocket.py/main
|
4,003 |
def run(self, address, credopts=None, sambaopts=None, versionopts=None):
    lp = sambaopts.get_loadparm()
    try:
        res = netcmd_get_domain_infos_via_cldap(lp, None, address)
    except __HOLE__:
        raise CommandError("Invalid IP address '" + address + "'!")
    self.outf.write("Forest           : %s\n" % res.forest)
    self.outf.write("Domain           : %s\n" % res.dns_domain)
    self.outf.write("Netbios domain   : %s\n" % res.domain_name)
    self.outf.write("DC name          : %s\n" % res.pdc_dns_name)
    self.outf.write("DC netbios name  : %s\n" % res.pdc_name)
    self.outf.write("Server site      : %s\n" % res.server_site)
    self.outf.write("Client site      : %s\n" % res.client_site)
|
RuntimeError
|
dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/netcmd/domain.py/cmd_domain_info.run
|
4,004 |
def run(self, sambaopts=None, credopts=None, versionopts=None,
        interactive=None,
        domain=None,
        domain_guid=None,
        domain_sid=None,
        ntds_guid=None,
        invocationid=None,
        host_name=None,
        host_ip=None,
        host_ip6=None,
        adminpass=None,
        krbtgtpass=None,
        machinepass=None,
        dns_backend=None,
        dns_forwarder=None,
        dnspass=None,
        ldapadminpass=None,
        root=None,
        nobody=None,
        users=None,
        quiet=None,
        blank=None,
        ldap_backend_type=None,
        server_role=None,
        function_level=None,
        next_rid=None,
        partitions_only=None,
        targetdir=None,
        ol_mmr_urls=None,
        use_xattrs=None,
        use_ntvfs=None,
        use_rfc2307=None):
    self.logger = self.get_logger("provision")
    if quiet:
        self.logger.setLevel(logging.WARNING)
    else:
        self.logger.setLevel(logging.INFO)

    lp = sambaopts.get_loadparm()
    smbconf = lp.configfile

    creds = credopts.get_credentials(lp)
    creds.set_kerberos_state(DONT_USE_KERBEROS)

    if dns_forwarder is not None:
        suggested_forwarder = dns_forwarder
    else:
        suggested_forwarder = self._get_nameserver_ip()
        if suggested_forwarder is None:
            suggested_forwarder = "none"

    if len(self.raw_argv) == 1:
        interactive = True

    if interactive:
        from getpass import getpass
        import socket

        def ask(prompt, default=None):
            if default is not None:
                print "%s [%s]: " % (prompt, default),
            else:
                print "%s: " % (prompt,),
            return sys.stdin.readline().rstrip("\n") or default

        try:
            default = socket.getfqdn().split(".", 1)[1].upper()
        except IndexError:
            default = None
        realm = ask("Realm", default)
        if realm in (None, ""):
            raise CommandError("No realm set!")

        try:
            default = realm.split(".")[0]
        except __HOLE__:
            default = None
        domain = ask("Domain", default)
        if domain is None:
            raise CommandError("No domain set!")

        server_role = ask("Server Role (dc, member, standalone)", "dc")

        dns_backend = ask("DNS backend (SAMBA_INTERNAL, BIND9_FLATFILE, BIND9_DLZ, NONE)", "SAMBA_INTERNAL")
        if dns_backend in (None, ''):
            raise CommandError("No DNS backend set!")

        if dns_backend == "SAMBA_INTERNAL":
            dns_forwarder = ask("DNS forwarder IP address (write 'none' to disable forwarding)", suggested_forwarder)
            if dns_forwarder.lower() in (None, 'none'):
                suggested_forwarder = None
                dns_forwarder = None

        while True:
            adminpassplain = getpass("Administrator password: ")
            if not adminpassplain:
                self.errf.write("Invalid administrator password.\n")
            else:
                adminpassverify = getpass("Retype password: ")
                if not adminpassplain == adminpassverify:
                    self.errf.write("Sorry, passwords do not match.\n")
                else:
                    adminpass = adminpassplain
                    break
    else:
        realm = sambaopts._lp.get('realm')
        if realm is None:
            raise CommandError("No realm set!")
        if domain is None:
            raise CommandError("No domain set!")

        if not adminpass:
            self.logger.info("Administrator password will be set randomly!")

    if function_level == "2000":
        dom_for_fun_level = DS_DOMAIN_FUNCTION_2000
    elif function_level == "2003":
        dom_for_fun_level = DS_DOMAIN_FUNCTION_2003
    elif function_level == "2008":
        dom_for_fun_level = DS_DOMAIN_FUNCTION_2008
    elif function_level == "2008_R2":
        dom_for_fun_level = DS_DOMAIN_FUNCTION_2008_R2

    if dns_backend == "SAMBA_INTERNAL" and dns_forwarder is None:
        dns_forwarder = suggested_forwarder

    samdb_fill = FILL_FULL
    if blank:
        samdb_fill = FILL_NT4SYNC
    elif partitions_only:
        samdb_fill = FILL_DRS

    if targetdir is not None:
        if not os.path.isdir(targetdir):
            os.mkdir(targetdir)

    eadb = True

    if use_xattrs == "yes":
        eadb = False
    elif use_xattrs == "auto" and not lp.get("posix:eadb"):
        if targetdir:
            file = tempfile.NamedTemporaryFile(dir=os.path.abspath(targetdir))
        else:
            file = tempfile.NamedTemporaryFile(dir=os.path.abspath(os.path.dirname(lp.get("private dir"))))
        try:
            try:
                samba.ntacls.setntacl(lp, file.name,
                                      "O:S-1-5-32G:S-1-5-32", "S-1-5-32", "native")
                eadb = False
            except Exception:
                self.logger.info("You are not root or your system do not support xattr, using tdb backend for attributes. ")
        finally:
            file.close()

    if eadb:
        self.logger.info("not using extended attributes to store ACLs and other metadata. If you intend to use this provision in production, rerun the script as root on a system supporting xattrs.")

    session = system_session()
    try:
        result = provision(self.logger,
                           session, creds, smbconf=smbconf, targetdir=targetdir,
                           samdb_fill=samdb_fill, realm=realm, domain=domain,
                           domainguid=domain_guid, domainsid=domain_sid,
                           hostname=host_name,
                           hostip=host_ip, hostip6=host_ip6,
                           ntdsguid=ntds_guid,
                           invocationid=invocationid, adminpass=adminpass,
                           krbtgtpass=krbtgtpass, machinepass=machinepass,
                           dns_backend=dns_backend, dns_forwarder=dns_forwarder,
                           dnspass=dnspass, root=root, nobody=nobody,
                           users=users,
                           serverrole=server_role, dom_for_fun_level=dom_for_fun_level,
                           backend_type=ldap_backend_type,
                           ldapadminpass=ldapadminpass, ol_mmr_urls=ol_mmr_urls,
                           useeadb=eadb, next_rid=next_rid, lp=lp, use_ntvfs=use_ntvfs,
                           use_rfc2307=use_rfc2307, skip_sysvolacl=False)
    except ProvisioningError, e:
        raise CommandError("Provision failed", e)

    result.report_logger(self.logger)
|
IndexError
|
dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/netcmd/domain.py/cmd_domain_provision.run
|
4,005 |
def run(self, subcommand, H=None, forest_level=None, domain_level=None,
        quiet=False, credopts=None, sambaopts=None, versionopts=None):
    lp = sambaopts.get_loadparm()
    creds = credopts.get_credentials(lp, fallback_machine=True)

    samdb = SamDB(url=H, session_info=system_session(),
                  credentials=creds, lp=lp)

    domain_dn = samdb.domain_dn()

    res_forest = samdb.search("CN=Partitions,%s" % samdb.get_config_basedn(),
                              scope=ldb.SCOPE_BASE, attrs=["msDS-Behavior-Version"])
    assert len(res_forest) == 1

    res_domain = samdb.search(domain_dn, scope=ldb.SCOPE_BASE,
                              attrs=["msDS-Behavior-Version", "nTMixedDomain"])
    assert len(res_domain) == 1

    res_dc_s = samdb.search("CN=Sites,%s" % samdb.get_config_basedn(),
                            scope=ldb.SCOPE_SUBTREE, expression="(objectClass=nTDSDSA)",
                            attrs=["msDS-Behavior-Version"])
    assert len(res_dc_s) >= 1

    try:
        level_forest = int(res_forest[0]["msDS-Behavior-Version"][0])
        level_domain = int(res_domain[0]["msDS-Behavior-Version"][0])
        level_domain_mixed = int(res_domain[0]["nTMixedDomain"][0])

        min_level_dc = int(res_dc_s[0]["msDS-Behavior-Version"][0])  # Init value
        for msg in res_dc_s:
            if int(msg["msDS-Behavior-Version"][0]) < min_level_dc:
                min_level_dc = int(msg["msDS-Behavior-Version"][0])

        if level_forest < 0 or level_domain < 0:
            raise CommandError("Domain and/or forest function level(s) is/are invalid. Correct them or reprovision!")
        if min_level_dc < 0:
            raise CommandError("Lowest function level of a DC is invalid. Correct this or reprovision!")
        if level_forest > level_domain:
            raise CommandError("Forest function level is higher than the domain level(s). Correct this or reprovision!")
        if level_domain > min_level_dc:
            raise CommandError("Domain function level is higher than the lowest function level of a DC. Correct this or reprovision!")
    except __HOLE__:
        raise CommandError("Could not retrieve the actual domain, forest level and/or lowest DC function level!")

    if subcommand == "show":
        self.message("Domain and forest function level for domain '%s'" % domain_dn)
        if level_forest == DS_DOMAIN_FUNCTION_2000 and level_domain_mixed != 0:
            self.message("\nATTENTION: You run SAMBA 4 on a forest function level lower than Windows 2000 (Native). This isn't supported! Please raise!")
        if level_domain == DS_DOMAIN_FUNCTION_2000 and level_domain_mixed != 0:
            self.message("\nATTENTION: You run SAMBA 4 on a domain function level lower than Windows 2000 (Native). This isn't supported! Please raise!")
        if min_level_dc == DS_DOMAIN_FUNCTION_2000 and level_domain_mixed != 0:
            self.message("\nATTENTION: You run SAMBA 4 on a lowest function level of a DC lower than Windows 2003. This isn't supported! Please step-up or upgrade the concerning DC(s)!")

        self.message("")

        if level_forest == DS_DOMAIN_FUNCTION_2000:
            outstr = "2000"
        elif level_forest == DS_DOMAIN_FUNCTION_2003_MIXED:
            outstr = "2003 with mixed domains/interim (NT4 DC support)"
        elif level_forest == DS_DOMAIN_FUNCTION_2003:
            outstr = "2003"
        elif level_forest == DS_DOMAIN_FUNCTION_2008:
            outstr = "2008"
        elif level_forest == DS_DOMAIN_FUNCTION_2008_R2:
            outstr = "2008 R2"
        else:
            outstr = "higher than 2008 R2"
        self.message("Forest function level: (Windows) " + outstr)

        if level_domain == DS_DOMAIN_FUNCTION_2000 and level_domain_mixed != 0:
            outstr = "2000 mixed (NT4 DC support)"
        elif level_domain == DS_DOMAIN_FUNCTION_2000 and level_domain_mixed == 0:
            outstr = "2000"
        elif level_domain == DS_DOMAIN_FUNCTION_2003_MIXED:
            outstr = "2003 with mixed domains/interim (NT4 DC support)"
        elif level_domain == DS_DOMAIN_FUNCTION_2003:
            outstr = "2003"
        elif level_domain == DS_DOMAIN_FUNCTION_2008:
            outstr = "2008"
        elif level_domain == DS_DOMAIN_FUNCTION_2008_R2:
            outstr = "2008 R2"
        else:
            outstr = "higher than 2008 R2"
        self.message("Domain function level: (Windows) " + outstr)

        if min_level_dc == DS_DOMAIN_FUNCTION_2000:
            outstr = "2000"
        elif min_level_dc == DS_DOMAIN_FUNCTION_2003:
            outstr = "2003"
        elif min_level_dc == DS_DOMAIN_FUNCTION_2008:
            outstr = "2008"
        elif min_level_dc == DS_DOMAIN_FUNCTION_2008_R2:
            outstr = "2008 R2"
        else:
            outstr = "higher than 2008 R2"
        self.message("Lowest function level of a DC: (Windows) " + outstr)

    elif subcommand == "raise":
        msgs = []

        if domain_level is not None:
            if domain_level == "2003":
                new_level_domain = DS_DOMAIN_FUNCTION_2003
            elif domain_level == "2008":
                new_level_domain = DS_DOMAIN_FUNCTION_2008
            elif domain_level == "2008_R2":
                new_level_domain = DS_DOMAIN_FUNCTION_2008_R2

            if new_level_domain <= level_domain and level_domain_mixed == 0:
                raise CommandError("Domain function level can't be smaller than or equal to the actual one!")
            if new_level_domain > min_level_dc:
                raise CommandError("Domain function level can't be higher than the lowest function level of a DC!")

            # Deactivate mixed/interim domain support
            if level_domain_mixed != 0:
                # Directly on the base DN
                m = ldb.Message()
                m.dn = ldb.Dn(samdb, domain_dn)
                m["nTMixedDomain"] = ldb.MessageElement("0",
                    ldb.FLAG_MOD_REPLACE, "nTMixedDomain")
                samdb.modify(m)
                # Under partitions
                m = ldb.Message()
                m.dn = ldb.Dn(samdb, "CN=" + lp.get("workgroup") + ",CN=Partitions,%s" % samdb.get_config_basedn())
                m["nTMixedDomain"] = ldb.MessageElement("0",
                    ldb.FLAG_MOD_REPLACE, "nTMixedDomain")
                try:
                    samdb.modify(m)
                except ldb.LdbError, (enum, emsg):
                    if enum != ldb.ERR_UNWILLING_TO_PERFORM:
                        raise

            # Directly on the base DN
            m = ldb.Message()
            m.dn = ldb.Dn(samdb, domain_dn)
            m["msDS-Behavior-Version"] = ldb.MessageElement(
                str(new_level_domain), ldb.FLAG_MOD_REPLACE,
                "msDS-Behavior-Version")
            samdb.modify(m)
            # Under partitions
            m = ldb.Message()
            m.dn = ldb.Dn(samdb, "CN=" + lp.get("workgroup")
                          + ",CN=Partitions,%s" % samdb.get_config_basedn())
            m["msDS-Behavior-Version"] = ldb.MessageElement(
                str(new_level_domain), ldb.FLAG_MOD_REPLACE,
                "msDS-Behavior-Version")
            try:
                samdb.modify(m)
            except ldb.LdbError, (enum, emsg):
                if enum != ldb.ERR_UNWILLING_TO_PERFORM:
                    raise

            level_domain = new_level_domain
            msgs.append("Domain function level changed!")

        if forest_level is not None:
            if forest_level == "2003":
                new_level_forest = DS_DOMAIN_FUNCTION_2003
            elif forest_level == "2008":
                new_level_forest = DS_DOMAIN_FUNCTION_2008
            elif forest_level == "2008_R2":
                new_level_forest = DS_DOMAIN_FUNCTION_2008_R2

            if new_level_forest <= level_forest:
                raise CommandError("Forest function level can't be smaller than or equal to the actual one!")
            if new_level_forest > level_domain:
                raise CommandError("Forest function level can't be higher than the domain function level(s). Please raise it/them first!")

            m = ldb.Message()
            m.dn = ldb.Dn(samdb, "CN=Partitions,%s" % samdb.get_config_basedn())
            m["msDS-Behavior-Version"] = ldb.MessageElement(
                str(new_level_forest), ldb.FLAG_MOD_REPLACE,
                "msDS-Behavior-Version")
            samdb.modify(m)
            msgs.append("Forest function level changed!")

        msgs.append("All changes applied successfully!")
        self.message("\n".join(msgs))
    else:
        raise CommandError("invalid argument: '%s' (choose from 'show', 'raise')" % subcommand)
|
KeyError
|
dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/netcmd/domain.py/cmd_domain_level.run
|
4,006 |
def decode_body(content_type, response):
    """Headers: A dict of key value
    Content: str

    returns files dict filename: body
    """
    if not content_type.startswith("multipart/mixed"):
        raise ValueError("Invalid content type")
    boundary = content_type.split("boundary=")[1]
    # charset = _parse_value_in_header(content_type, "charset")
    body_content = []
    for content in response.iter_content(2 * (1024 ** 2)):
        body_content.append(content)
    body_content = "".join(body_content)
    if not body_content.startswith("--%s" % boundary):
        raise ValueError("Invalid body")
    files = {}
    body_file_chuncks = body_content.split("--%s\n" % boundary)
    for body_file_chunk in body_file_chuncks[1:-2]:  # First is a \n, last 2 too
        headers, file_body = _parse_file_chunk(body_file_chunk)
        try:
            content_disposition = headers["content-disposition"]
        except __HOLE__:
            raise ValueError("Invalid file headers, Content-disposition not found")
        filename = content_disposition.split('filename="')[1][:-1]
        files[filename] = file_body
    return files
|
KeyError
|
dataset/ETHPy150Open conan-io/conan/conans/client/rest/multipart_decode.py/decode_body
|
4,007 |
def __init__(self, message, status_code, *args, **kwargs):
    super(OsbsResponseException, self).__init__(message, *args, **kwargs)
    self.status_code = status_code
    # try decoding openshift Status object
    # https://docs.openshift.org/latest/rest_api/openshift_v1.html#v1-status
    try:
        self.json = json.loads(message)
    except __HOLE__:
        self.json = None
|
ValueError
|
dataset/ETHPy150Open projectatomic/osbs-client/osbs/exceptions.py/OsbsResponseException.__init__
|
4,008 |
def term_width():
    """
    Return the column width of the terminal, or None if it can't be determined.
    """
    if fcntl and termios:
        try:
            winsize = fcntl.ioctl(0, termios.TIOCGWINSZ, '    ')
            _, width = struct.unpack('hh', winsize)
            return width
        except __HOLE__:
            pass
    elif windll and create_string_buffer:
        stderr_handle, struct_size = -12, 22
        handle = windll.kernel32.GetStdHandle(stderr_handle)
        csbi = create_string_buffer(struct_size)
        res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
        if res:
            (_, _, _, _, _, left, _, right, _,
             _, _) = struct.unpack('hhhhHhhhhhh', csbi.raw)
            return right - left + 1
|
IOError
|
dataset/ETHPy150Open araile/see/see.py/term_width
|
4,009 |
def see(obj=_LOCALS, pattern=None, r=None):
    """
    Inspect an object. Like the dir() builtin, but easier on the eyes.

    Keyword arguments (all optional):
    obj -- object to be inspected
    pattern -- shell-style search pattern (e.g. '*len*')
    r -- regular expression

    If obj is omitted, objects in the current scope are listed instead.

    Some unique symbols are used:

        .*    implements obj.anything
        []    implements obj[key]
        in    implements membership tests (e.g. x in obj)
        +obj  unary positive operator (e.g. +2)
        -obj  unary negative operator (e.g. -2)
        ?     raised an exception
    """
    use_locals = obj is _LOCALS
    actions = []
    dot = not use_locals and '.' or ''
    name = lambda a, f: ''.join((dot, a, suffix(f)))

    def suffix(f):
        if isinstance(f, SeeError):
            return '?'
        elif hasattr(f, '__call__'):
            return '()'
        else:
            return ''

    if use_locals:
        obj.__dict__ = inspect.currentframe().f_back.f_locals
    attrs = dir(obj)
    if not use_locals:
        for var, symbol in SYMBOLS:
            if var not in attrs or symbol in actions:
                continue
            elif var == '__doc__':
                if not obj.__doc__ or not obj.__doc__.strip():
                    continue
            actions.append(symbol)
    for attr in filter(lambda a: not a.startswith('_'), attrs):
        try:
            prop = getattr(obj, attr)
        except (__HOLE__, Exception):
            prop = SeeError()
        actions.append(name(attr, prop))
    if pattern is not None:
        actions = fn_filter(actions, pattern)
    if r is not None:
        actions = regex_filter(actions, r)
    return _SeeOutput(actions)
|
AttributeError
|
dataset/ETHPy150Open araile/see/see.py/see
|
4,010 |
def test_suite():
    suite = unittest.TestSuite()
    try:
        import cssselect
    except __HOLE__:
        # no 'cssselect' installed
        print("Skipping tests in lxml.cssselect - external cssselect package is not installed")
        return suite

    import lxml.cssselect
    suite.addTests(doctest.DocTestSuite(lxml.cssselect))
    suite.addTests([unittest.makeSuite(CSSTestCase)])
    return suite
|
ImportError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/lxml-3.3.6/src/lxml/tests/test_css.py/test_suite
|
4,011 |
def _GenerateSection(
        self, template_filename, template_mappings, output_writer,
        output_filename, access_mode='wb'):
    """Generates a section from template filename.

    Args:
        template_filename: a string containing the name of the template file.
        template_mappings: a dictionary containing the template mappings, where
                           the key maps to the name of a template variable.
        output_writer: an output writer object (instance of OutputWriter).
        output_filename: string containing the name of the output file.
        access_mode: optional string containing the output file access mode.
    """
    template_string = self._ReadTemplateFile(template_filename)
    try:
        output_data = template_string.substitute(template_mappings)
    except __HOLE__ as exception:
        logging.error(u'Unable to format template: {0:s}'.format(
            template_filename))
        return

    output_writer.WriteFile(
        output_filename, output_data, access_mode=access_mode)
|
ValueError
|
dataset/ETHPy150Open libyal/libyal/scripts/source-generate.py/SourceFileGenerator._GenerateSection
|
4,012 |
def read(self):
    try:
        end = self.items.pop()
    except __HOLE__:
        cmd_title('All items have been read.')
        return None
    if DEBUG:
        print('Reading new item from stream... {}\n'.format(end))
        print('[CURRENT STREAM] {}\n'.format(' -- '.join(self.items)))
    self.total_read += 1
    return end
|
IndexError
|
dataset/ETHPy150Open christabor/MoAL/MOAL/data_structures/abstract/stream.py/Stream.read
|
4,013 |
def _initialize():
    global _initialized

    def _init_simplejson():
        global _decode, _encode
        import simplejson
        _decode = lambda string, loads=simplejson.loads: loads(string)
        _encode = lambda obj, dumps=simplejson.dumps: \
            dumps(obj, allow_nan=False, ensure_ascii=False)

    def _init_cjson():
        global _decode, _encode
        import cjson
        _decode = lambda string, decode=cjson.decode: decode(string)
        _encode = lambda obj, encode=cjson.encode: encode(obj)

    def _init_stdlib():
        global _decode, _encode
        json = __import__('json', {}, {})
        def _decode(string_, loads=json.loads):
            if isinstance(string_, util.btype):
                string_ = string_.decode("utf-8")
            return loads(string_)
        _encode = lambda obj, dumps=json.dumps: \
            dumps(obj, allow_nan=False, ensure_ascii=False)

    if _using == 'simplejson':
        _init_simplejson()
    elif _using == 'cjson':
        warnings.warn("Builtin cjson support is deprecated. Please use the "
                      "default or provide custom decode/encode functions "
                      "[2011-11-09].",
                      DeprecationWarning, stacklevel=1)
        _init_cjson()
    elif _using == 'json':
        _init_stdlib()
    elif _using != 'custom':
        try:
            _init_simplejson()
        except __HOLE__:
            _init_stdlib()
    _initialized = True
|
ImportError
|
dataset/ETHPy150Open djc/couchdb-python/couchdb/json.py/_initialize
|
4,014 |
@capabilities.check
def add(self, image_id, image_file, image_size, context=None,
        verifier=None):
    """
    Stores an image file with supplied identifier to the backend
    storage system and returns a tuple containing information
    about the stored image.

    :param image_id: The opaque image identifier
    :param image_file: The image data to write, as a file-like object
    :param image_size: The size of the image data to write, in bytes
    :param context: The request context
    :param verifier: An object used to verify signatures for images

    :retval tuple of URL in backing store, bytes written, checksum
            and a dictionary with storage system specific information
    :raises `glance_store.exceptions.Duplicate` if the image already
            existed
    """
    self._check_context(context, require_tenant=True)
    client = get_cinderclient(self.conf, context)
    checksum = hashlib.md5()
    bytes_written = 0
    size_gb = int((image_size + units.Gi - 1) / units.Gi)
    if size_gb == 0:
        size_gb = 1
    name = "image-%s" % image_id
    owner = context.tenant
    metadata = {'glance_image_id': image_id,
                'image_size': str(image_size),
                'image_owner': owner}
    LOG.debug('Creating a new volume: image_size=%d size_gb=%d',
              image_size, size_gb)
    if image_size == 0:
        LOG.info(_LI("Since image size is zero, we will be doing "
                     "resize-before-write for each GB which "
                     "will be considerably slower than normal."))
    volume = client.volumes.create(size_gb, name=name, metadata=metadata)
    volume = self._wait_volume_status(volume, 'creating', 'available')

    failed = True
    need_extend = True
    buf = None
    try:
        while need_extend:
            with self._open_cinder_volume(client, volume, 'wb') as f:
                f.seek(bytes_written)
                if buf:
                    f.write(buf)
                    bytes_written += len(buf)
                while True:
                    buf = image_file.read(self.WRITE_CHUNKSIZE)
                    if not buf:
                        need_extend = False
                        break
                    checksum.update(buf)
                    if verifier:
                        verifier.update(buf)
                    if (bytes_written + len(buf) > size_gb * units.Gi and
                            image_size == 0):
                        break
                    f.write(buf)
                    bytes_written += len(buf)

            if need_extend:
                size_gb += 1
                LOG.debug("Extending volume %(volume_id)s to %(size)s GB.",
                          {'volume_id': volume.id, 'size': size_gb})
                volume.extend(volume, size_gb)
                try:
                    volume = self._wait_volume_status(volume,
                                                      'extending',
                                                      'available')
                except exceptions.BackendException:
                    raise exceptions.StorageFull()

        failed = False
    except __HOLE__ as e:
        # Convert IOError reasons to Glance Store exceptions
        errors = {errno.EFBIG: exceptions.StorageFull(),
                  errno.ENOSPC: exceptions.StorageFull(),
                  errno.EACCES: exceptions.StorageWriteDenied()}
        raise errors.get(e.errno, e)
    finally:
        if failed:
            LOG.error(_LE("Failed to write to volume %(volume_id)s."),
                      {'volume_id': volume.id})
            try:
                volume.delete()
            except Exception:
                LOG.exception(_LE('Failed to delete of volume '
                                  '%(volume_id)s.'),
                              {'volume_id': volume.id})

    if image_size == 0:
        metadata.update({'image_size': str(bytes_written)})
        volume.update_all_metadata(metadata)
    volume.update_readonly_flag(volume, True)

    checksum_hex = checksum.hexdigest()
    LOG.debug("Wrote %(bytes_written)d bytes to volume %(volume_id)s "
              "with checksum %(checksum_hex)s.",
              {'bytes_written': bytes_written,
               'volume_id': volume.id,
               'checksum_hex': checksum_hex})
    return ('cinder://%s' % volume.id, bytes_written, checksum_hex, {})
|
IOError
|
dataset/ETHPy150Open openstack/glance_store/glance_store/_drivers/cinder.py/Store.add
|
4,015 |
def _run(self, scanObject, result, depth, args):
    '''
    Arguments:
    unixsocket -- Path to the clamd unix socket (str)
    maxbytes -- Maximum number of bytes to scan (0 is unlimited) (int)

    Returns:
    Flags -- Virus name hits
    Metadata -- Unix socket or daemon errors
    '''
    moduleResult = []
    unix_socket = str(get_option(args, 'unixsocket', 'scanclamavunixsocket', '/var/run/clamav/clamd.ctl'))
    max_bytes = int(get_option(args, 'maxbytes', 'scanclamavmaxbytes', 20000000))

    # Connect to daemon
    if not self.clam:
        try:
            self.clam = pyclamd.ClamdUnixSocket(filename=unix_socket)
        except __HOLE__:
            logging.debug('IOError: Cannot connect to clamd unix socket file')
            scanObject.addMetadata(self.module_name, 'Error', 'IOError: clamd socket')
            raise
    try:
        # Scan the buffer with clamav
        if max_bytes <= 0:
            clam_result = self.clam.scan_stream(scanObject.buffer)
        else:
            clam_result = self.clam.scan_stream(str(buffer(scanObject.buffer, 0, max_bytes)))
        # Process a result
        if clam_result:
            status, virusname = clam_result['stream']
            scanObject.addFlag("%s:%s" % (self.flag_prefix, str(virusname)))
    except ValueError as e:
        scanObject.addMetadata(self.module_name, 'Error', 'ValueError (BufferTooLong): %s' % str(e))
    except IOError as e:
        scanObject.addMetadata(self.module_name, 'Error', 'IOError (ScanError): %s' % str(e))
    return moduleResult
|
IOError
|
dataset/ETHPy150Open lmco/laikaboss/laikaboss/modules/scan_clamav.py/SCAN_CLAMAV._run
|
4,016 |
def get_src_parts(self, bundle):
    try:
        return bundle.src_parts
    except __HOLE__:
        parts = ['', '']
        bill = bundle.obj
        try:
            ps = bill.proposals.order_by('-date')[0]
            if ps.content_html:
                parts = ps.content_html.split(p_explanation)
        except IndexError:
            pass
        bundle.src_parts = parts
    return parts
|
AttributeError
|
dataset/ETHPy150Open ofri/Open-Knesset/laws/api.py/BillResource.get_src_parts
|
4,017 |
def get_int_arg(request, field, default=None):
    """Try to get an integer value from a query arg."""
    try:
        val = int(request.arguments.get(field, [default])[0])
    except (ValueError, __HOLE__):
        val = default
    return val
|
TypeError
|
dataset/ETHPy150Open YelpArchive/pushmanager/pushmanager/core/util.py/get_int_arg
|
4,018 |
def get_servlet_urlspec(servlet):
    try:
        return (servlet.regexp, servlet)
    except __HOLE__:
        name = servlet.__name__
        regexp = r"/%s" % name[:-len("Servlet")].lower()
        return (regexp, servlet)
|
AttributeError
|
dataset/ETHPy150Open YelpArchive/pushmanager/pushmanager/core/util.py/get_servlet_urlspec
|
4,019 |
def get_data_directory():
    FONTBAKERY_DATA_DIRNAME = 'fontbakery'
    try:
        # Windows code:
        d = op.join(os.environ["FONTBAKERY_DATA_DIRNAME"],
                    FONTBAKERY_DATA_DIRNAME)
    except __HOLE__:
        # GNU/Linux and Mac code:
        d = op.join(op.expanduser("~"),
                    ".local", "share", FONTBAKERY_DATA_DIRNAME)
    if not op.exists(d):
        os.makedirs(d)
    return d
|
KeyError
|
dataset/ETHPy150Open googlefonts/fontbakery/bakery_cli/utils.py/get_data_directory
|
4,020 |
def runtests(*test_args):
    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)
    try:
        from django.test.runner import DiscoverRunner

        def run_tests(test_args, verbosity, interactive):
            runner = DiscoverRunner(
                verbosity=verbosity, interactive=interactive, failfast=False)
            return runner.run_tests(test_args)
    except ImportError:
        if not test_args:
            test_args = ['tests']
        try:
            from django.test.simple import DjangoTestSuiteRunner

            def run_tests(test_args, verbosity, interactive):
                runner = DjangoTestSuiteRunner(
                    verbosity=verbosity, interactive=interactive, failfast=False)
                return runner.run_tests(test_args)
        except __HOLE__:
            from django.test.simple import run_tests
    failures = run_tests(
        test_args, verbosity=1, interactive=True)
    sys.exit(failures)
|
ImportError
|
dataset/ETHPy150Open carljm/django-localeurl/runtests.py/runtests
|
4,021 |
def request_spectra(self):
    cmd = struct.pack('<B', 0x09)
    self.log_debug('Requesting spectra')
    self.usb_send_ep.write(cmd)
    data = np.zeros(shape=(3840,), dtype='<u2')
    try:
        data_lo = self._spec_lo.read(512*4, timeout=100)
        data_hi = self._spec_hi.read(512*11, timeout=100)
        data_sync = self._spec_hi.read(1, timeout=100)
        assert struct.unpack('<B', data_sync)[0] == 0x69
        data[:1024], data[1024:] = np.frombuffer(data_lo, dtype='<u2'), \
                                   np.frombuffer(data_hi, dtype='<u2')
    except __HOLE__:
        self.log_error('Not synchronized')
    except VisaIOError:
        self.log_error('Timeout on usb')
    finally:
        self.log_debug('Obtained spectra')
        return data
|
AssertionError
|
dataset/ETHPy150Open LabPy/lantz/lantz/drivers/oceanoptics/usb4000.py/USB4000.request_spectra
|
4,022 |
def main():
    try:
        _setup()
        collection_interval = cfg.CONF.garbagecollector.collection_interval
        garbage_collector = GarbageCollectorService(collection_interval=collection_interval)
        exit_code = garbage_collector.run()
    except __HOLE__ as exit_code:
        return exit_code
    except:
        LOG.exception('(PID:%s) GarbageCollector quit due to exception.', os.getpid())
        return FAILURE_EXIT_CODE
    finally:
        _teardown()
    return exit_code
|
SystemExit
|
dataset/ETHPy150Open StackStorm/st2/st2reactor/st2reactor/cmd/garbagecollector.py/main
|
4,023 |
def _GetQueryImplementation(name):
    """Returns the implementation for a query type.

    @param name: Query type, must be one of L{constants.QR_VIA_OP}
    """
    try:
        return _QUERY_IMPL[name]
    except __HOLE__:
        raise errors.OpPrereqError("Unknown query resource '%s'" % name,
                                   errors.ECODE_INVAL)
|
KeyError
|
dataset/ETHPy150Open ganeti/ganeti/lib/cmdlib/query.py/_GetQueryImplementation
|
4,024 |
def is_ipv4(ip):
    try:
        socket.inet_aton(ip)
    except (__HOLE__, socket.error):
        return False
    return True
|
ValueError
|
dataset/ETHPy150Open abusesa/abusehelper/abusehelper/bots/experts/geoipexpert.py/is_ipv4
|
4,025 |
def load_geodb(path, log=None):
    def geoip(reader, ip):
        try:
            record = reader.city(ip)
        except (AddressNotFoundError, __HOLE__):
            return {}

        if record is None:
            return {}

        result = {}

        geoip_cc = record.country.iso_code
        if geoip_cc:
            result["geoip cc"] = [geoip_cc]

        latitude = record.location.latitude
        longitude = record.location.longitude
        if latitude and longitude:
            result["latitude"] = [unicode(latitude)]
            result["longitude"] = [unicode(longitude)]

        return result

    def legacy_geoip(reader, ip):
        if not is_ipv4(ip):
            return {}

        try:
            record = reader.record_by_addr(ip)
        except GeoIPError:
            return {}

        if record is None:
            return {}

        result = {}

        geoip_cc = record.get("country_code", None)
        if geoip_cc:
            result["geoip cc"] = [geoip_cc]

        latitude = record.get("latitude", None)
        longitude = record.get("longitude", None)
        if latitude and longitude:
            result["latitude"] = [unicode(latitude)]
            result["longitude"] = [unicode(longitude)]

        return result

    try:
        from geoip2.database import Reader
        from maxminddb.errors import InvalidDatabaseError
        from geoip2.errors import AddressNotFoundError

        try:
            reader = Reader(path)
            fun = geoip
        except InvalidDatabaseError:
            raise ImportError

        if log:
            log.info("GeoIP2 initiated")
    except ImportError:
        from pygeoip import GeoIP, GeoIPError

        reader = GeoIP(path)
        fun = legacy_geoip

        if log:
            log.info("Legacy GeoIP initiated")

    def geoip_reader(ip):
        return fun(reader, ip)

    return geoip_reader
|
ValueError
|
dataset/ETHPy150Open abusesa/abusehelper/abusehelper/bots/experts/geoipexpert.py/load_geodb
|
4,026 |
def tearDown(self):
    for recorder in self.top.recorders:
        recorder.close()
    os.chdir(self.startdir)
    if not os.environ.get('OPENMDAO_KEEPDIRS', False):
        try:
            shutil.rmtree(self.tempdir)
        except __HOLE__:
            pass
|
OSError
|
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/test/test_csv_post_processor.py/CSVPostProcessorTestCase.tearDown
|
4,027 |
def generate_and_compare(self, name):
    directory = os.path.abspath(os.path.dirname(__file__))
    name = os.path.join(directory, name)

    cds = CaseDataset(name + '.json', 'json')
    data = cds.data.fetch()
    caseset_query_to_csv(data, self.filename_csv)

    with open(name + '.csv', 'r') as inp1:
        expected = inp1.readlines()
    with open(self.filename_csv, 'r') as inp2:
        actual = inp2.readlines()

    # Strip off trailing whitespace (newlines and carriage returns)
    # Don't check time-stamp because some OS round it.
    for exp, act in zip(expected, actual):
        # skip timestamps, and uuids
        items2 = act.rstrip().split(",")[1:-3]
        for i, item1 in enumerate(exp.rstrip().split(",")[1:-3]):
            item2 = items2[i]
            try:  # (str).isnumeric() only works on unicode
                item1, item2 = float(item1), float(item2)
                # nan equality check fails by definition
                if isnan(item1) and isnan(item2):
                    continue
                self.assertAlmostEqual(item1, item2)
            except (ValueError, __HOLE__):
                self.assertEqual(item1, item2)
|
TypeError
|
dataset/ETHPy150Open OpenMDAO/OpenMDAO-Framework/openmdao.lib/src/openmdao/lib/casehandlers/test/test_csv_post_processor.py/CSVPostProcessorTestCase.generate_and_compare
|
4,028 |
@permission_required("core.manage_shop")
def manage_accessories_inline(request, product_id, as_string=False, template_name="manage/product/accessories_inline.html"):
    """View which shows all accessories for the product with the passed id.
    """
    product = Product.objects.get(pk=product_id)
    product_accessories = ProductAccessories.objects.filter(product=product_id)
    accessory_ids = [p.accessory.id for p in product_accessories]

    r = request.POST if request.method == 'POST' else request.GET
    s = request.session

    # If we get the parameter ``keep-filters`` or ``page`` we take the
    # filters out of the request resp. session. The request takes precedence.
    # The page parameter is given if the user clicks on the next/previous page
    # links. The ``keep-filters`` parameters is given is the users adds/removes
    # products. In this way we keeps the current filters when we needed to. If
    # the whole page is reloaded there is no ``keep-filters`` or ``page`` and
    # all filters are reset as they should.
    if r.get("keep-filters") or r.get("page"):
        page = r.get("page", s.get("accessories_page", 1))
        filter_ = r.get("filter", s.get("filter"))
        category_filter = r.get("accessories_category_filter",
                                s.get("accessories_category_filter"))
    else:
        page = r.get("page", 1)
        filter_ = r.get("filter")
        category_filter = r.get("accessories_category_filter")

    # The current filters are saved in any case for later use.
    s["accessories_page"] = page
    s["filter"] = filter_
    s["accessories_category_filter"] = category_filter

    try:
        s["accessories-amount"] = int(r.get("accessories-amount",
                                      s.get("accessories-amount")))
    except __HOLE__:
        s["accessories-amount"] = 25

    filters = Q()
    if filter_:
        filters &= Q(name__icontains=filter_)
        filters |= Q(sku__icontains=filter_)
        filters |= (Q(sub_type=VARIANT) & Q(active_sku=False) & Q(parent__sku__icontains=filter_))
        filters |= (Q(sub_type=VARIANT) & Q(active_name=False) & Q(parent__name__icontains=filter_))

    if category_filter:
        if category_filter == "None":
            filters &= Q(categories=None)
        elif category_filter == "All":
            pass
        else:
            # First we collect all sub categories and using the `in` operator
            category = lfs_get_object_or_404(Category, pk=category_filter)
            categories = [category]
            categories.extend(category.get_all_children())
            filters &= Q(categories__in=categories)

    products = Product.objects.filter(filters).exclude(pk=product_id)
    paginator = Paginator(products.exclude(pk__in=accessory_ids), s["accessories-amount"])

    try:
        page = paginator.page(page)
    except EmptyPage:
        page = 0

    result = render_to_string(template_name, RequestContext(request, {
        "product": product,
        "product_accessories": product_accessories,
        "page": page,
        "paginator": paginator,
        "filter": filter_,
    }))

    if as_string:
        return result
    else:
        return HttpResponse(
            json.dumps({
                "html": [["#accessories-inline", result]],
            }), content_type='application/json')

# Actions
|
TypeError
|
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/product/accessories.py/manage_accessories_inline
|
4,029 |
def decode(self, encoded_packet):
    """Decode a transmitted package.

    The return value indicates how many binary attachment packets are
    necessary to fully decode the packet.
    """
    ep = encoded_packet
    try:
        self.packet_type = int(ep[0:1])
    except __HOLE__:
        self.packet_type = ep
        ep = ''
    self.namespace = None
    self.data = None
    ep = ep[1:]
    dash = (ep + '-').find('-')
    comma = (ep + ',').find(',')
    attachment_count = 0
    if dash < comma:
        attachment_count = int(ep[0:dash])
        ep = ep[dash + 1:]
    if ep and ep[0:1] == '/':
        sep = ep.find(',')
        if sep == -1:
            self.namespace = ep
            ep = ''
        else:
            self.namespace = ep[0:sep]
            ep = ep[sep + 1:]
    if ep and ep[0].isdigit():
        self.id = 0
        while ep[0].isdigit():
            self.id = self.id * 10 + int(ep[0])
            ep = ep[1:]
    if ep:
        self.data = self.json.loads(ep)
    return attachment_count
|
TypeError
|
dataset/ETHPy150Open miguelgrinberg/python-socketio/socketio/packet.py/Packet.decode
|
4,030 |
def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argment, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects. Each call to the function
    should return one line of input as a string. Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).next    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found. The line passed is the
    logical line; continuation lines are included.
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    contstr, needcont = '', 0
    contline = None
    indents = [0]

    while 1:                                   # loop over lines in stream
        try:
            line = readline()
        except __HOLE__:
            line = ''
        lnum = lnum + 1
        pos, max = 0, len(line)

        if contstr:                            # continued string
            if not line:
                raise TokenError, ("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < max:                   # measure leading whitespace
                if line[pos] == ' ': column = column + 1
                elif line[pos] == '\t': column = (column/tabsize + 1)*tabsize
                elif line[pos] == '\f': column = 0
                else: break
                pos = pos + 1
            if pos == max: break

            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else:                                  # continued statement
            if not line:
                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < max:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                                # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                token, initial = line[start:end], line[start]

                if initial in numchars or \
                   (initial == '.' and token != '.'):      # ordinary number
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    yield (NL if parenlev > 0 else NEWLINE,
                           token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:                           # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)           # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                        token[:2] in single_quoted or \
                        token[:3] in single_quoted:
                    if token[-1] == '\n':                  # continued string
                        strstart = (lnum, start)
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                                  # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars:                 # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\':                      # continued stmt
                    continued = 1
                else:
                    if initial in '([{': parenlev = parenlev + 1
                    elif initial in ')]}': parenlev = parenlev - 1
                    yield (OP, token, spos, epos, line)
            else:
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos+1), line)
                pos = pos + 1

    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
|
StopIteration
|
dataset/ETHPy150Open Southpaw-TACTIC/TACTIC/src/context/client/tactic-api-python-4.0.api04/Lib/tokenize.py/generate_tokens
|
4,031 |
def check_exists(fips_dir) :
    try :
        subprocess.check_output(['python2', '--version'], stderr=subprocess.STDOUT)
        return True
    except (__HOLE__, subprocess.CalledProcessError) :
        return False
|
OSError
|
dataset/ETHPy150Open floooh/fips/mod/tools/python2.py/check_exists
|
4,032 |
def start(self):
    self.log.info("Refreshing existing files.")
    for root, dirs, files in os.walk(self.data_dir):
        for filename in files:
            self._maybe_queue_file(os.path.join(root, filename),
                                   enqueue=not(self.ignore_existing))

    # watch if configured
    if self.ignore_changes:
        return

    observer = observers.Observer()
    observer.schedule(self, path=self.data_dir, recursive=True)
    self.log.info("Watching for new file under %s", self.data_dir)

    observer.start()
    try:
        while True:
            time.sleep(1)
    except __HOLE__:
        observer.stop()
    observer.join(timeout=30)
    if observer.isAlive():
        self.log.error("Watchdog Observer failed to stop. Aborting.")
        os.kill(os.getpid(), signal.SIGKILL)
    return
|
KeyboardInterrupt
|
dataset/ETHPy150Open amorton/cassback/cassback/subcommands/backup_subcommand.py/WatchdogWatcher.start
|
4,033 |
def get_fields(obj):
    try:
        return obj._meta.fields
    except __HOLE__:
        return []
|
AttributeError
|
dataset/ETHPy150Open davedash/django-fixture-magic/fixture_magic/utils.py/get_fields
|
4,034 |
def create(self, req, body):
    """Creates an aggregate, given its name and
    optional availability zone.
    """
    context = _get_context(req)
    authorize(context)

    if len(body) != 1:
        raise exc.HTTPBadRequest()
    try:
        host_aggregate = body["aggregate"]
        name = host_aggregate["name"]
    except __HOLE__:
        raise exc.HTTPBadRequest()
    avail_zone = host_aggregate.get("availability_zone")
    try:
        utils.check_string_length(name, "Aggregate name", 1, 255)
        if avail_zone is not None:
            utils.check_string_length(avail_zone, "Availability_zone", 1,
                                      255)
    except exception.InvalidInput as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())

    try:
        aggregate = self.api.create_aggregate(context, name, avail_zone)
    except exception.AggregateNameExists as e:
        raise exc.HTTPConflict(explanation=e.format_message())
    except exception.InvalidAggregateAction as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())

    agg = self._marshall_aggregate(aggregate)

    # To maintain the same API result as before the changes for returning
    # nova objects were made.
    del agg['aggregate']['hosts']
    del agg['aggregate']['metadata']

    return agg
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/aggregates.py/AggregateController.create
|
4,035 |
def update(self, req, id, body):
    """Updates the name and/or availability_zone of given aggregate."""
    context = _get_context(req)
    authorize(context)

    if len(body) != 1:
        raise exc.HTTPBadRequest()
    try:
        updates = body["aggregate"]
    except __HOLE__:
        raise exc.HTTPBadRequest()

    if len(updates) < 1:
        raise exc.HTTPBadRequest()

    for key in updates.keys():
        if key not in ["name", "availability_zone"]:
            raise exc.HTTPBadRequest()

    try:
        if 'name' in updates:
            utils.check_string_length(updates['name'], "Aggregate name", 1,
                                      255)
        if updates.get("availability_zone") is not None:
            utils.check_string_length(updates['availability_zone'],
                                      "Availability_zone", 1, 255)
    except exception.InvalidInput as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())

    try:
        aggregate = self.api.update_aggregate(context, id, updates)
    except exception.AggregateNameExists as e:
        raise exc.HTTPConflict(explanation=e.format_message())
    except exception.AggregateNotFound as e:
        raise exc.HTTPNotFound(explanation=e.format_message())
    except exception.InvalidAggregateAction as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())

    return self._marshall_aggregate(aggregate)
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/aggregates.py/AggregateController.update
|
4,036 |
def _set_metadata(self, req, id, body):
    """Replaces the aggregate's existing metadata with new metadata."""
    context = _get_context(req)
    authorize(context)

    if len(body) != 1:
        raise exc.HTTPBadRequest()
    try:
        metadata = body["metadata"]
    except __HOLE__:
        raise exc.HTTPBadRequest()

    # The metadata should be a dict
    if not isinstance(metadata, dict):
        msg = _('The value of metadata must be a dict')
        raise exc.HTTPBadRequest(explanation=msg)
    try:
        for key, value in metadata.items():
            utils.check_string_length(key, "metadata.key", 1, 255)
            if value is not None:
                utils.check_string_length(value, "metadata.value", 0, 255)
    except exception.InvalidInput as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())
    try:
        aggregate = self.api.update_aggregate_metadata(context,
                                                       id, metadata)
    except exception.AggregateNotFound:
        msg = _('Cannot set metadata %(metadata)s in aggregate'
                ' %(id)s') % {'metadata': metadata, 'id': id}
        raise exc.HTTPNotFound(explanation=msg)
    except exception.InvalidAggregateAction as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())

    return self._marshall_aggregate(aggregate)
|
KeyError
|
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/legacy_v2/contrib/aggregates.py/AggregateController._set_metadata
|
4,037 |
def _doClone(self, source, cloneSource, branch, repo, dbLog):
    args = ('git', 'ls-remote', cloneSource, branch)
    output = check_output(args, cwd=repo, stderr=DEVNULL)
    if len(output) == 0:
        syslog(LOG_WARNING, "No branch '%s' in '%s'" % (branch, source))
        return []

    # For Git versions <1.9 clone of a shallow repository will fail, so
    # check the git version and clone the whole repository if it is <1.9
    gitVersOut = check_output(('git', '--version'), stderr=DEVNULL).decode('utf-8')
    if gitVersOut[:12] == "git version ":
        gitVers = LooseVersion(gitVersOut[12:])
    else:
        gitVers = LooseVersion("0")

    if len(dbLog) == 0 or gitVers < LooseVersion("1.9"):
        depth = None
        args = ('git', 'clone', '-q', '-b', branch, cloneSource, repo)
    else:
        output = output.decode('utf-8')
        if output[:40] == dbLog[-1].hash:
            return []
        depth = 5
        args = ('git', 'clone', '-q', '--depth', str(depth), '-b', branch, cloneSource, repo)

    lastGitLogLen = 0
    while True:
        check_call(args, cwd=repo, stdout=DEVNULL, stderr=DEVNULL)
        try:
            return self._catchUp(repo, branch, dbLog, source, lastGitLogLen)
        except NewCommitsNotReachedError as e:
            try:
                lastGitLogLen = e.gitLogLen
                depth += 10
                args = ('git', 'fetch', '--depth', str(depth))
            except __HOLE__:
                return self._catchUp(repo, branch, [], source, lastGitLogLen)
|
TypeError
|
dataset/ETHPy150Open seoester/Git-Deployment-Handler/gitdh/modules/external.py/External._doClone
|
4,038 |
def __enter__(self):
    matchObj = SSHEnvironment.gitSshUrlPattern.match(self.source)
    if matchObj is None:
        self.isSshUrl = False
        return None
    self.isSshUrl = True

    sshUser = matchObj.group('user')
    sshHost = matchObj.group('host')
    sshPort = matchObj.group('port')
    sshRepositoryPath = matchObj.group('repositoryPath')
    if sshPort is None:
        sshPort = '22'

    tmpHostName = generateRandomString(20)
    sshConf = "\n\nHost {0}\n\tHostName {1}\n\tUser {2}\n\tPort {3}\n\tStrictHostKeyChecking no\n".format(tmpHostName, sshHost, sshUser, sshPort)
    try:
        sshConf += "\tIdentityFile {0}\n".format(self.config['Git']['IdentityFile'])
    except KeyError:
        pass
    cloneSource = 'ssh://{0}/{1}'.format(tmpHostName, sshRepositoryPath)

    sshConfigDirPath = os.path.join(os.path.expanduser('~'), '.ssh')
    sshConfigFilePath = os.path.join(sshConfigDirPath, 'config')
    if not os.path.exists(sshConfigDirPath):
        try:
            os.mkdir(sshConfigDirPath, mode=0o700)
        except __HOLE__:
            # < Python 3.3 compatibility
            os.mkdir(sshConfigDirPath, 0o700)
    if not os.path.exists(sshConfigFilePath):
        with open(sshConfigFilePath, 'w'):
            pass
        os.chmod(sshConfigFilePath, 0o600)

    self.sshOrigConfigFile = TmpOrigFile(sshConfigFilePath, postfix="gitdh")
    self.sshOrigConfigFile.create()
    with open(sshConfigFilePath, 'a') as sshConfigFileObj:
        sshConfigFileObj.write(sshConf)

    return cloneSource
|
TypeError
|
dataset/ETHPy150Open seoester/Git-Deployment-Handler/gitdh/modules/external.py/SSHEnvironment.__enter__
|
4,039 |
def _getFileNums(self):
    base = self.base
    confFileNums = []
    for f in os.listdir(self.dir):
        if f[:len(base)] == base:
            try:
                confFileNums.append(int(f[len(base):]))
            except __HOLE__:
                pass
    confFileNums.sort()
    return confFileNums
|
ValueError
|
dataset/ETHPy150Open seoester/Git-Deployment-Handler/gitdh/modules/external.py/TmpOrigFile._getFileNums
|
4,040 |
def run(self):
    s = self.source
    t = self.target
    cs = self.chunk_size
    ccm = self.chunk_count_max
    kr = self.keep_reading
    da = self.data_added
    go = self.go
    try:
        b = s.read(cs)
    except __HOLE__:
        b = ''
    while b and go.is_set():
        if len(t) > ccm:
            kr.clear()
            kr.wait(2)
            # # this only works on 2.7.x and up
            # if not kr.wait(10):
            #     raise Exception("Timed out while waiting for input to be read.")
            # instead we'll use this
            if len(t) > ccm + 3:
                raise IOError("Timed out while waiting for input from subprocess.")
        t.append(b)
        da.set()
        b = s.read(cs)
    self.EOF.set()
    da.set()  # for cases when done but there was no input.
|
ValueError
|
dataset/ETHPy150Open codeinn/vcs/vcs/subprocessio.py/InputStreamChunker.run
|
4,041 |
def close(self):
    try:
        self.worker.stop()
        self.throw(GeneratorExit)
    except (GeneratorExit, __HOLE__):
        pass
|
StopIteration
|
dataset/ETHPy150Open codeinn/vcs/vcs/subprocessio.py/BufferedGenerator.close
|
4,042 |
def __exists(self, path):
    """Return True if the remote path exists
    """
    try:
        self.__sftp.stat(path)
    except __HOLE__, e:
        if e.errno == errno.ENOENT:
            return False
        raise
    else:
        return True
|
IOError
|
dataset/ETHPy150Open saga-project/BigJob/pilot/filemanagement/globusonline_adaptor.py/GlobusOnlineFileAdaptor.__exists
|
4,043 |
def path(self, name):
    try:
        path = safe_join(getattr(settings, 'MEDIA_ROOT'), name)
    except __HOLE__:
        raise SuspiciousOperation(
            "Attempted access to '%s' denied." % name)
    return os.path.normpath(path)
|
ValueError
|
dataset/ETHPy150Open treeio/treeio/treeio/documents/files.py/FileStorage.path
|
4,044 |
def _edit_task_config(env, task_config, confirm):
    """ Launches text editor to edit provided task configuration file.

    `env`
        Runtime ``Environment`` instance.
    `task_config`
        Path to task configuration file.
    `confirm`
        If task config is invalid after edit, prompt to re-edit.

    Return boolean.

    * Raises ``InvalidTaskConfig`` if edited task config fails to parse
      and `confirm` is ``False``.
    """
    # get editor program
    if common.IS_MACOSX:
        def_editor = 'open'
    else:
        def_editor = 'vi'
    editor = os.environ.get('EDITOR', def_editor)

    def _edit_file(filename):
        """ Launches editor for given filename.
        """
        proc = subprocess.Popen('{0} {1}'.format(editor, filename),
                                shell=True)
        proc.communicate()

        if proc.returncode == 0:
            try:
                # parse temp configuration file
                parser_ = parser.parse_config(filename, 'task')
                registration.run_option_hooks(parser_,
                                              disable_missing=False)
            except (parser.ParseError, errors.InvalidTaskConfig) as exc:
                reason = unicode(getattr(exc, 'reason', exc))
                raise errors.InvalidTaskConfig(task_config, reason=reason)
            return True
        else:
            return False

    try:
        # create temp copy of task config
        fd, tmpname = tempfile.mkstemp(suffix='.cfg', prefix='focus_')
        with open(task_config, 'r') as file_:
            os.write(fd, file_.read())
        os.close(fd)

        while True:
            try:
                # launch editor
                if not _edit_file(tmpname):
                    return False

                # overwrite original with temp
                with open(tmpname, 'r') as temp:
                    with open(task_config, 'w', 0) as config:
                        config.write(temp.read())
                return True

            except errors.InvalidTaskConfig as exc:
                if not confirm:
                    raise  # reraise

                # prompt to re-edit
                env.io.error(unicode(exc))
                while True:
                    try:
                        resp = env.io.prompt('Would you like to retry? (y/n) ')
                        resp = resp.strip().lower()
                    except __HOLE__:
                        return True

                    if resp == 'y':
                        break
                    elif resp == 'n':
                        return True

    except OSError:
        return False
    finally:
        common.safe_remove_file(tmpname)  # cleanup temp
|
KeyboardInterrupt
|
dataset/ETHPy150Open xtrementl/focus/focus/plugin/modules/tasks.py/_edit_task_config
|
4,045 |
def execute(self, env, args):
    """ Removes a task.

    `env`
        Runtime ``Environment`` instance.
    `args`
        Arguments object from arg parser.
    """
    # extract args
    task_name = args.task_name
    force = args.force

    if env.task.active and env.task.name == task_name:
        raise errors.ActiveTask

    if not env.task.exists(task_name):
        raise errors.TaskNotFound(task_name)

    if force:
        env.task.remove(task_name)
    else:
        try:
            while True:
                prompt = ('Are you sure you want to delete "{0}" (y/n)? '
                          .format(task_name))
                resp = env.io.prompt(prompt, newline=False).lower()
                if resp in ('y', 'n'):
                    if resp == 'y':
                        env.task.remove(task_name)
                    break
        except __HOLE__:
            pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open xtrementl/focus/focus/plugin/modules/tasks.py/TaskRemove.execute
|
4,046 |
def get(self, key, default=None):
    """Returns an item from the template context, if it doesn't exist
    `default` is returned.
    """
    try:
        return self[key]
    except __HOLE__:
        return default
|
KeyError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/jinja2-2.6/jinja2/runtime.py/Context.get
|
4,047 |
@internalcode
def call(__self, __obj, *args, **kwargs):
    """Call the callable with the arguments and keyword arguments
    provided but inject the active context or environment as first
    argument if the callable is a :func:`contextfunction` or
    :func:`environmentfunction`.
    """
    if __debug__:
        __traceback_hide__ = True
    if isinstance(__obj, _context_function_types):
        if getattr(__obj, 'contextfunction', 0):
            args = (__self,) + args
        elif getattr(__obj, 'evalcontextfunction', 0):
            args = (__self.eval_ctx,) + args
        elif getattr(__obj, 'environmentfunction', 0):
            args = (__self.environment,) + args
    try:
        return __obj(*args, **kwargs)
    except __HOLE__:
        return __self.environment.undefined('value was undefined because '
                                            'a callable raised a '
                                            'StopIteration exception')
|
StopIteration
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/jinja2-2.6/jinja2/runtime.py/Context.call
|
4,048 |
def __init__(self, iterable, recurse=None):
    self._iterator = iter(iterable)
    self._recurse = recurse
    self.index0 = -1

    # try to get the length of the iterable early. This must be done
    # here because there are some broken iterators around where there
    # __len__ is the number of iterations left (i'm looking at your
    # listreverseiterator!).
    try:
        self._length = len(iterable)
    except (__HOLE__, AttributeError):
        self._length = None
|
TypeError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/jinja2-2.6/jinja2/runtime.py/LoopContext.__init__
|
4,049 |
@internalcode
def __call__(self, *args, **kwargs):
    # try to consume the positional arguments
    arguments = list(args[:self._argument_count])
    off = len(arguments)

    # if the number of arguments consumed is not the number of
    # arguments expected we start filling in keyword arguments
    # and defaults.
    if off != self._argument_count:
        for idx, name in enumerate(self.arguments[len(arguments):]):
            try:
                value = kwargs.pop(name)
            except KeyError:
                try:
                    value = self.defaults[idx - self._argument_count + off]
                except __HOLE__:
                    value = self._environment.undefined(
                        'parameter %r was not provided' % name, name=name)
            arguments.append(value)

    # it's important that the order of these arguments does not change
    # if not also changed in the compiler's `function_scoping` method.
    # the order is caller, keyword arguments, positional arguments!
    if self.caller:
        caller = kwargs.pop('caller', None)
        if caller is None:
            caller = self._environment.undefined('No caller defined',
                                                 name='caller')
        arguments.append(caller)
    if self.catch_kwargs:
        arguments.append(kwargs)
    elif kwargs:
        raise TypeError('macro %r takes no keyword argument %r' %
                        (self.name, next(iter(kwargs))))
    if self.catch_varargs:
        arguments.append(args[self._argument_count:])
    elif len(args) > self._argument_count:
        raise TypeError('macro %r takes not more than %d argument(s)' %
                        (self.name, len(self.arguments)))
    return self._func(*arguments)
|
IndexError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/jinja2-2.6/jinja2/runtime.py/Macro.__call__
|
4,050 |
def restore(self, fileContents, root, target, journal=None, sha1 = None,
nameLookup=True, **kwargs):
keepTempfile = kwargs.get('keepTempfile', False)
destTarget = target
if fileContents is not None:
# this is first to let us copy the contents of a file
# onto itself; the unlink helps that to work
src = fileContents.get()
inFd = None
if fileContents.isCompressed() and hasattr(src, '_fdInfo'):
# inFd is None if we can't figure this information out
# (for _LazyFile for instance)
(inFd, inStart, inSize) = src._fdInfo()
path, name = os.path.split(target)
if not os.path.isdir(path):
util.mkdirChain(path)
# Uncompress to a temporary file, using the accelerated
# implementation if possible.
if inFd is not None and util.sha1Uncompress is not None:
actualSha1, tmpname = util.sha1Uncompress(
inFd, inStart, inSize, path, name)
else:
if fileContents.isCompressed():
src = gzip.GzipFile(mode='r', fileobj=src)
tmpfd, tmpname = tempfile.mkstemp(name, '.ct', path)
try:
d = digestlib.sha1()
f = os.fdopen(tmpfd, 'w')
util.copyfileobj(src, f, digest = d)
f.close()
actualSha1 = d.digest()
except:
os.unlink(tmpname)
raise
if keepTempfile:
# Make a hardlink "copy" for the caller to use
destTarget = tmpname + '.ptr'
os.link(tmpname, destTarget)
try:
os.rename(tmpname, target)
except __HOLE__, err:
if err.args[0] != errno.EISDIR:
raise
os.rmdir(target)
os.rename(tmpname, target)
if (sha1 is not None and sha1 != actualSha1):
raise Sha1Exception(target)
File.restore(self, root, target, journal=journal,
nameLookup=nameLookup, **kwargs)
return destTarget
|
OSError
|
dataset/ETHPy150Open sassoftware/conary/conary/files.py/RegularFile.restore
|
4,051 |
def FileFromFilesystem(path, pathId, possibleMatch = None, inodeInfo = False,
assumeRoot=False, statBuf=None, sha1FailOk=False):
if statBuf:
s = statBuf
else:
s = os.lstat(path)
global userCache, groupCache, _havePrelink
if assumeRoot:
owner = 'root'
group = 'root'
elif isinstance(s.st_uid, basestring):
# Already stringified -- some capsule code will fabricate a stat result
# from e.g. a RPM header
owner = s.st_uid
group = s.st_gid
else:
# + is not a valid char in user/group names; if the uid is not mapped
# to a user, prepend it with + and store it as a string
try:
owner = userCache.lookupId('/', s.st_uid)
except KeyError:
owner = '+%d' % s.st_uid
try:
group = groupCache.lookupId('/', s.st_gid)
except __HOLE__:
group = '+%d' % s.st_gid
needsSha1 = 0
inode = InodeStream(s.st_mode & 07777, s.st_mtime, owner, group)
if (stat.S_ISREG(s.st_mode)):
f = RegularFile(pathId)
needsSha1 = 1
elif (stat.S_ISLNK(s.st_mode)):
f = SymbolicLink(pathId)
if hasattr(s, 'linkto'):
f.target.set(s.linkto)
else:
f.target.set(os.readlink(path))
elif (stat.S_ISDIR(s.st_mode)):
f = Directory(pathId)
elif (stat.S_ISSOCK(s.st_mode)):
f = Socket(pathId)
elif (stat.S_ISFIFO(s.st_mode)):
f = NamedPipe(pathId)
elif (stat.S_ISBLK(s.st_mode)):
f = BlockDevice(pathId)
f.devt.major.set(s.st_rdev >> 8)
f.devt.minor.set(s.st_rdev & 0xff)
elif (stat.S_ISCHR(s.st_mode)):
f = CharacterDevice(pathId)
f.devt.major.set(s.st_rdev >> 8)
f.devt.minor.set(s.st_rdev & 0xff)
else:
raise FilesError("unsupported file type for %s" % path)
f.inode = inode
f.flags = FlagsStream(0)
# assume we have a match if the FileMode and object type match
if possibleMatch and (possibleMatch.__class__ == f.__class__) \
and f.inode == possibleMatch.inode \
and f.inode.mtime() == possibleMatch.inode.mtime() \
and (not s.st_size or
(possibleMatch.hasContents and
s.st_size == possibleMatch.contents.size())):
f.flags.set(possibleMatch.flags())
return possibleMatch
elif (possibleMatch and (isinstance(f, RegularFile) and
isinstance(possibleMatch, RegularFile))
and (f.inode.isExecutable())
and f.inode.mtime() == possibleMatch.inode.mtime()
and f.inode.owner == possibleMatch.inode.owner
and f.inode.group == possibleMatch.inode.group
and f.inode.perms == possibleMatch.inode.perms):
# executable RegularFiles match even if their sizes are different
# as long as everything else is the same; this is to stop size
# changes from prelink from changing fileids
return possibleMatch
if needsSha1:
f.contents = RegularFileStream()
undoPrelink = False
if _havePrelink != False and f.inode.isExecutable():
try:
from conary.lib import elf
if elf.prelinked(path):
undoPrelink = True
except:
pass
if undoPrelink and _havePrelink is None:
_havePrelink = bool(os.access(PRELINK_CMD[0], os.X_OK))
if undoPrelink and _havePrelink:
prelink = subprocess.Popen(
PRELINK_CMD + ('-uo', '-', path),
stdout = subprocess.PIPE,
close_fds = True,
shell = False)
d = digestlib.sha1()
content = prelink.stdout.read()
size = 0
while content:
d.update(content)
size += len(content)
content = prelink.stdout.read()
prelink.wait()
f.contents.size.set(size)
sha1 = d.digest()
else:
try:
sha1 = sha1helper.sha1FileBin(path)
except OSError:
if sha1FailOk:
sha1 = sha1helper.sha1Empty
else:
raise
f.contents.size.set(s.st_size)
f.contents.sha1.set(sha1)
if inodeInfo:
return (f, s.st_nlink, (s.st_rdev, s.st_ino))
return f
|
KeyError
|
dataset/ETHPy150Open sassoftware/conary/conary/files.py/FileFromFilesystem
|
4,052 |
def lookupName(self, root, name):
theId = self.nameCache.get(name, None)
if theId is not None:
return theId
# if not root, cannot chroot and so fall back to system ids
getChrootIds = root and root != '/' and not os.getuid()
if getChrootIds:
if root[0] != '/':
root = os.sep.join((os.getcwd(), root))
curDir = os.open(".", os.O_RDONLY)
# chdir to the current root to allow us to chroot
# back out again
os.chdir('/')
os.chroot(root)
if name and name[0] == '+':
# An id mapped as a string
try:
theId = int(name)
except __HOLE__:
log.warning('%s %s does not exist - using root', self.name,
name)
else:
try:
theId = self.nameLookupFn(name)[2]
except KeyError:
log.warning('%s %s does not exist - using root', self.name, name)
theId = 0
if getChrootIds:
os.chroot(".")
os.fchdir(curDir)
os.close(curDir)
self.nameCache[name] = theId
self.idCache[theId] = name
return theId
|
ValueError
|
dataset/ETHPy150Open sassoftware/conary/conary/files.py/UserGroupIdCache.lookupName
|
4,053 |
@classmethod
def upload_app(cls, options):
"""Uploads the given App Engine application into AppScale.
Args:
options: A Namespace that has fields for each parameter that can be
passed in via the command-line interface.
Returns:
A tuple containing the host and port where the application is serving
traffic from.
"""
if cls.TAR_GZ_REGEX.search(options.file):
file_location = LocalState.extract_tgz_app_to_dir(options.file,
options.verbose)
created_dir = True
elif cls.ZIP_REGEX.search(options.file):
file_location = LocalState.extract_zip_app_to_dir(options.file,
options.verbose)
created_dir = True
elif os.path.isdir(options.file):
file_location = options.file
created_dir = False
else:
raise AppEngineConfigException('{0} is not a tar.gz file, a zip file, ' \
'or a directory. Please try uploading either a tar.gz file, a zip ' \
'file, or a directory.'.format(options.file))
try:
app_id = AppEngineHelper.get_app_id_from_app_config(file_location)
except AppEngineConfigException as config_error:
AppScaleLogger.log(config_error)
if 'yaml' in str(config_error):
raise config_error
# Java App Engine users may have specified their war directory. In that
# case, just move up one level, back to the app's directory.
file_location = file_location + os.sep + ".."
app_id = AppEngineHelper.get_app_id_from_app_config(file_location)
app_language = AppEngineHelper.get_app_runtime_from_app_config(
file_location)
AppEngineHelper.validate_app_id(app_id)
if app_language == 'java':
if AppEngineHelper.is_sdk_mismatch(file_location):
AppScaleLogger.warn('AppScale did not find the correct SDK jar ' +
'versions in your app. The current supported ' +
'SDK version is ' + AppEngineHelper.SUPPORTED_SDK_VERSION + '.')
login_host = LocalState.get_login_host(options.keyname)
secret_key = LocalState.get_secret_key(options.keyname)
acc = AppControllerClient(login_host, secret_key)
if options.test:
username = LocalState.DEFAULT_USER
elif options.email:
username = options.email
else:
username = LocalState.get_username_from_stdin(is_admin=False)
if not acc.does_user_exist(username):
password = LocalState.get_password_from_stdin()
RemoteHelper.create_user_accounts(username, password,
login_host, options.keyname, clear_datastore=False)
app_exists = acc.does_app_exist(app_id)
app_admin = acc.get_app_admin(app_id)
if app_admin is not None and username != app_admin:
raise AppScaleException("The given user doesn't own this application" + \
", so they can't upload an app with that application ID. Please " + \
"change the application ID and try again.")
if app_exists:
AppScaleLogger.log("Uploading new version of app {0}".format(app_id))
else:
AppScaleLogger.log("Uploading initial version of app {0}".format(app_id))
acc.reserve_app_id(username, app_id, app_language)
# Ignore all .pyc files while tarring.
if app_language == 'python27':
AppScaleLogger.log("Ignoring .pyc files")
remote_file_path = RemoteHelper.copy_app_to_host(file_location,
options.keyname, options.verbose)
acc.done_uploading(app_id, remote_file_path)
acc.update([app_id])
# now that we've told the AppController to start our app, find out what port
# the app is running on and wait for it to start serving
AppScaleLogger.log("Please wait for your app to start serving.")
if app_exists:
time.sleep(20) # give the AppController time to restart the app
# Makes a call to the AppController to get all the stats and looks
# through them for the http port the app can be reached on.
sleep_time = 2 * cls.SLEEP_TIME
current_app = None
for i in range(cls.MAX_RETRIES):
try:
result = acc.get_all_stats()
json_result = json.loads(result)
apps_result = json_result['apps']
current_app = apps_result[app_id]
http_port = current_app['http']
break
except ValueError:
pass
except __HOLE__:
pass
AppScaleLogger.verbose("Waiting {0} second(s) for a port to be assigned to {1}".\
format(sleep_time, app_id), options.verbose)
time.sleep(sleep_time)
if not current_app:
raise AppScaleException("Unable to get the serving port for the application.")
RemoteHelper.sleep_until_port_is_open(login_host, http_port, options.verbose)
AppScaleLogger.success("Your app can be reached at the following URL: " +
"http://{0}:{1}".format(login_host, http_port))
if created_dir:
shutil.rmtree(file_location)
return (login_host, http_port)
|
KeyError
|
dataset/ETHPy150Open AppScale/appscale-tools/lib/appscale_tools.py/AppScaleTools.upload_app
|
4,054 |
def determine_clipboard():
# Determine the OS/platform and set the copy() and paste() functions accordingly.
if 'cygwin' in platform.system().lower():
return init_windows_clipboard(cygwin=True)
if os.name == 'nt' or platform.system() == 'Windows':
return init_windows_clipboard()
if os.name == 'mac' or platform.system() == 'Darwin':
return init_osx_clipboard()
if HAS_DISPLAY:
# Determine which command/module is installed, if any.
try:
import gtk # check if gtk is installed
except __HOLE__:
pass
else:
return init_gtk_clipboard()
try:
import PyQt4 # check if PyQt4 is installed
except ImportError:
pass
else:
return init_qt_clipboard()
if _executable_exists("xclip"):
return init_xclip_clipboard()
if _executable_exists("xsel"):
return init_xsel_clipboard()
if _executable_exists("klipper") and _executable_exists("qdbus"):
return init_klipper_clipboard()
return init_no_clipboard()
|
ImportError
|
dataset/ETHPy150Open BergWerkGIS/QGIS-CKAN-Browser/CKAN-Browser/pyperclip/__init__.py/determine_clipboard
|
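Pattern note: the row above is try-import feature detection, where ImportError signals that an optional backend is absent. A minimal sketch of the same idiom under an assumed optional dependency (simplejson here is just an example of a package that may or may not be installed):
def load_json_backend():
    # Prefer a faster third-party parser when present, fall back to stdlib.
    try:
        import simplejson as json  # optional dependency
    except ImportError:
        import json
    return json
json = load_json_backend()
assert json.loads('{"ok": true}') == {'ok': True}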
4,055 |
def getCurrentEntry(self):
try:
return self.ordering[self.currentIndex]
except __HOLE__:
# This happens when the diff is empty
raise NoCurrentEntryError()
|
IndexError
|
dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/__init__.py/Review.getCurrentEntry
|
4,056 |
def expandCommitName(self, name):
# Split apart the commit name from any suffix
commit_name, suffix = git.commit.split_rev_name(name)
try:
real_commit = self.commitAliases[commit_name]
except __HOLE__:
real_commit = commit_name
return real_commit + suffix
|
KeyError
|
dataset/ETHPy150Open facebookarchive/git-review/src/gitreview/review/__init__.py/Review.expandCommitName
|
4,057 |
def readRoot(self):
result = None
self.reset()
# Get the header, make sure it's a valid file.
if not is_stream_binary_plist(self.file):
raise NotBinaryPlistException()
self.file.seek(0)
self.contents = self.file.read()
if len(self.contents) < 32:
raise InvalidPlistException("File is too short.")
trailerContents = self.contents[-32:]
try:
self.trailer = PlistTrailer._make(unpack("!xxxxxxBBQQQ", trailerContents))
offset_size = self.trailer.offsetSize * self.trailer.offsetCount
offset = self.trailer.offsetTableOffset
offset_contents = self.contents[offset:offset+offset_size]
offset_i = 0
while offset_i < self.trailer.offsetCount:
begin = self.trailer.offsetSize*offset_i
tmp_contents = offset_contents[begin:begin+self.trailer.offsetSize]
tmp_sized = self.getSizedInteger(tmp_contents, self.trailer.offsetSize)
self.offsets.append(tmp_sized)
offset_i += 1
self.setCurrentOffsetToObjectNumber(self.trailer.topLevelObjectNumber)
result = self.readObject()
except __HOLE__, e:
raise InvalidPlistException(e)
return result
|
TypeError
|
dataset/ETHPy150Open pascalw/Airplayer/airplayer/lib/biplist/__init__.py/PlistReader.readRoot
|
4,058 |
def main():
"""
main method
"""
# initialize parser
usage = "usage: %prog [-u USER] [-p PASSWORD] [-t TITLE] [-s selection] url"
parser = OptionParser(usage, version="%prog "+instapaperlib.__version__)
parser.add_option("-u", "--user", action="store", dest="user",
metavar="USER", help="instapaper username")
parser.add_option("-p", "--password", action="store", dest="password",
metavar="USER", help="instapaper password")
parser.add_option("-t", "--title", action="store", dest="title",
metavar="TITLE", help="title of the link to add")
parser.add_option("-s", "--selection", action="store", dest="selection",
metavar="SELECTION", help="short text for description")
(options, args) = parser.parse_args()
if not len(args) > 0:
parser.error("What do you want to read later?")
if not options.user:
# auth regex
login = re.compile("(.+?):(.+)")
try:
config = open(os.path.expanduser("~") + "/.instapaperrc")
for line in config:
matches = login.match(line)
if matches:
user = matches.group(1).strip()
password = matches.group(2).strip()
except __HOLE__:
parser.error("No login information present.")
sys.exit(-1)
else:
user = options.user
# make sure all parameters are present
if not options.password:
password = getpass()
else:
password = options.password
(status, text) = instapaperlib.add_item(user, password, args[0],
options.title, options.selection)
print text
|
IOError
|
dataset/ETHPy150Open mrtazz/InstapaperLibrary/bin/instapaper.py/main
|
4,059 |
def getJob(conn):
"""
Get the next available sandbox request and set its status to pending.
"""
cur = conn.execute("SELECT cocreate_sandboxrequest.id, cocreate_sandboxrequest.sandbox_id, cocreate_sandboxrequest.requested_by_id, cocreate_sandboxrequest.sandbox_name, cocreate_sandboxtemplate.recipe FROM cocreate_sandboxrequest, cocreate_sandboxtemplate WHERE cocreate_sandboxrequest.request_status = 'app' AND cocreate_sandboxrequest.template_id = cocreate_sandboxtemplate.id LIMIT 1")
try:
request_id, sandbox_id, requested_by_id, sandbox_name, sandbox_recipe = cur.fetchone()
conn.execute("UPDATE cocreate_sandboxrequest SET request_status = 'pen' WHERE id = ?", (request_id,))
conn.commit()
except __HOLE__:
return None
return {"request_id": request_id, "sandbox_id": sandbox_id, "requested_by_id": requested_by_id, "sandbox_name": sandbox_name, "sandbox_recipe": sandbox_recipe}
|
TypeError
|
dataset/ETHPy150Open ngageoint/cocreate/ccl-cookbook/files/default/cocreatelite/cocreate/worker.py/getJob
|
4,060 |
def updateProgress(request_id, percent_complete, message, url=None):
"""
Update the progress of the sandbox request in the database.
"""
cur = conn.execute("SELECT sandbox_id, sandbox_name, requested_by_id FROM cocreate_sandboxrequest WHERE id = ?", (request_id,))
try:
sandbox_id, sandbox_name, requested_by_id = cur.fetchone()
except __HOLE__:
raise LookupError('specified request not found')
if percent_complete < 0 or percent_complete > 100:
raise ValueError('percent_complete out of range')
if len(message) > 100:
raise ValueError('message too long')
if percent_complete == 100:
conn.execute("UPDATE cocreate_sandboxrequest SET request_status = ?, request_progress = ?, request_progress_msg = ? WHERE id = ?", ('avl', percent_complete, message, request_id))
else:
conn.execute("UPDATE cocreate_sandboxrequest SET request_progress = ?, request_progress_msg = ? WHERE id = ?", (percent_complete, message, request_id))
conn.execute(
"INSERT INTO cocreate_notification (added_at, read, msg, related_model_type, related_model_id, owner_id, progress, object_name, error) values (datetime('now'), 0, ?, 'srq', ?, ?, ?, ?, 0)",
(message, request_id, requested_by_id, percent_complete, sandbox_name)
)
if url:
conn.execute("UPDATE cocreate_sandbox SET url = ? WHERE id = ?", (url,sandbox_id ))
conn.commit()
return
|
TypeError
|
dataset/ETHPy150Open ngageoint/cocreate/ccl-cookbook/files/default/cocreatelite/cocreate/worker.py/updateProgress
|
4,061 |
def celery_check():
try:
from celery import Celery
from django.conf import settings
app = Celery()
app.config_from_object(settings)
i = app.control.inspect()
ping = i.ping()
if not ping:
chk = (False, 'No running Celery workers were found.')
else:
chk = (True, None)
except __HOLE__ as e:
chk = (False, "Error connecting to the backend: " + str(e))
except ImportError as e:
chk = (False, str(e))
return chk
|
IOError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/hqwebapp/views.py/celery_check
|
4,062 |
def redis_check():
try:
redis = cache.caches['redis']
result = redis.set('serverup_check_key', 'test')
except (InvalidCacheBackendError, __HOLE__):
result = True # redis not in use, ignore
except:
result = False
return (result, None)
|
ValueError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/hqwebapp/views.py/redis_check
|
4,063 |
@require_GET
def redirect_to_default(req, domain=None):
if not req.user.is_authenticated():
if domain != None:
url = reverse('domain_login', args=[domain])
else:
if settings.ENABLE_PRELOGIN_SITE:
try:
from corehq.apps.prelogin.views import HomePublicView
url = reverse(HomePublicView.urlname)
except __HOLE__:
# this happens when the prelogin app is not included.
url = reverse('landing_page')
else:
url = reverse('landing_page')
elif domain and _two_factor_needed(domain, req):
return TemplateResponse(
request=req,
template='two_factor/core/otp_required.html',
status=403,
)
else:
if domain:
domain = normalize_domain_name(domain)
domains = [Domain.get_by_name(domain)]
else:
domains = Domain.active_for_user(req.user)
if 0 == len(domains) and not req.user.is_superuser:
return redirect('registration_domain')
elif 1 == len(domains):
if domains[0]:
domain = domains[0].name
if (req.couch_user.is_commcare_user()
and not is_mobile_worker_with_report_access(
req.couch_user, domain)):
url = reverse("cloudcare_main", args=[domain, ""])
else:
from corehq.apps.dashboard.views import dashboard_default
return dashboard_default(req, domain)
else:
raise Http404
else:
url = settings.DOMAIN_SELECT_URL
return HttpResponseRedirect(url)
|
ImportError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/hqwebapp/views.py/redirect_to_default
|
4,064 |
def _safe_escape(self, expression, default):
try:
return expression()
except __HOLE__:
return default
|
ValueError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/hqwebapp/views.py/CRUDPaginatedViewMixin._safe_escape
|
4,065 |
@property
def item_id(self):
try:
return self.parameters['itemId']
except __HOLE__:
raise PaginatedItemException(_("The item's ID was not passed to the server."))
|
KeyError
|
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/hqwebapp/views.py/CRUDPaginatedViewMixin.item_id
|
4,066 |
def main(args):
usage = "usage: %prog [options] <html directory>"
parser = op.OptionParser(usage=usage)
parser.add_option("-v", "--verbose", action="store_true",
dest="verbose", default=False, help="Provides verbose output")
opts, args = parser.parse_args(args)
try:
path = args[0]
except __HOLE__:
sys.stderr.write(
"Error - Expecting path to html directory:"
"sphinx-to-github <path>\n"
)
return
dir_helper = DirHelper(
os.path.isdir,
os.listdir,
os.walk,
shutil.rmtree
)
file_helper = FileSystemHelper(
open,
os.path.join,
shutil.move,
os.path.exists
)
operations_factory = OperationsFactory()
handler_factory = HandlerFactory()
layout_factory = LayoutFactory(
operations_factory,
handler_factory,
file_helper,
dir_helper,
opts.verbose,
sys.stdout,
force=False
)
try:
layout = layout_factory.create_layout(path)
except NoDirectoriesError:
sys.stderr.write(
"Error - No top level directories starting with an underscore "
"were found in '%s'\n" % path
)
return
layout.process()
|
IndexError
|
dataset/ETHPy150Open benoitc/dj-webmachine/doc/sphinxtogithub.py/main
|
4,067 |
def _on_connection_socket_select(self, fd):
try:
conn = self.connections[fd]
except __HOLE__:
# fd could have already been removed if the other end of the socket closed
# and was handled before fd. fd has already been handled so
# move along
return
try:
cont = conn.bridge(fd)
if not cont:
self.remove(conn)
except Exception as e:
self.logger.exception(e)
self.remove(conn)
|
KeyError
|
dataset/ETHPy150Open google/nogotofail/nogotofail/mitm/connection/server.py/Server._on_connection_socket_select
|
4,068 |
def wait(self):
"""Wait until all servers have completed running."""
try:
self._server.wait()
except __HOLE__:
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open openstack/neutron/neutron/wsgi.py/Server.wait
|
4,069 |
def get_body_serializer(self, content_type):
try:
return self.body_serializers[content_type]
except (KeyError, __HOLE__):
raise exception.InvalidContentType(content_type=content_type)
|
TypeError
|
dataset/ETHPy150Open openstack/neutron/neutron/wsgi.py/ResponseSerializer.get_body_serializer
|
4,070 |
def _from_json(self, datastring):
try:
return jsonutils.loads(datastring)
except __HOLE__:
msg = _("Cannot understand JSON")
raise n_exc.MalformedRequestBody(reason=msg)
|
ValueError
|
dataset/ETHPy150Open openstack/neutron/neutron/wsgi.py/JSONDeserializer._from_json
|
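Pattern note: json.loads raises ValueError on malformed input (json.JSONDecodeError subclasses ValueError on Python 3), which is what the row above translates into its own error type. A modern-Python sketch with RuntimeError standing in for the framework-specific MalformedRequestBody:
import json
def from_json(datastring):
    try:
        return json.loads(datastring)
    except ValueError:
        raise RuntimeError('Cannot understand JSON: %r' % datastring)
assert from_json('[1, 2]') == [1, 2]
try:
    from_json('{not json')
except RuntimeError as exc:
    print(exc)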
4,071 |
def get_body_deserializer(self, content_type):
try:
return self.body_deserializers[content_type]
except (__HOLE__, TypeError):
raise exception.InvalidContentType(content_type=content_type)
|
KeyError
|
dataset/ETHPy150Open openstack/neutron/neutron/wsgi.py/RequestDeserializer.get_body_deserializer
|
4,072 |
def get_action_args(self, request_environment):
"""Parse dictionary created by routes library."""
try:
args = request_environment['wsgiorg.routing_args'][1].copy()
except Exception:
return {}
try:
del args['controller']
except __HOLE__:
pass
try:
del args['format']
except KeyError:
pass
return args
|
KeyError
|
dataset/ETHPy150Open openstack/neutron/neutron/wsgi.py/RequestDeserializer.get_action_args
|
4,073 |
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, request):
"""WSGI method that controls (de)serialization and method dispatch."""
LOG.info(_LI("%(method)s %(url)s"),
{"method": request.method, "url": request.url})
try:
action, args, accept = self.deserializer.deserialize(request)
except exception.InvalidContentType:
msg = _("Unsupported Content-Type")
LOG.exception(_LE("InvalidContentType: %s"), msg)
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
except n_exc.MalformedRequestBody:
msg = _("Malformed request body")
LOG.exception(_LE("MalformedRequestBody: %s"), msg)
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
try:
action_result = self.dispatch(request, action, args)
except webob.exc.HTTPException as ex:
LOG.info(_LI("HTTP exception thrown: %s"), ex)
action_result = Fault(ex, self._fault_body_function)
except Exception:
LOG.exception(_LE("Internal error"))
# Do not include the traceback to avoid returning it to clients.
action_result = Fault(webob.exc.HTTPServerError(),
self._fault_body_function)
if isinstance(action_result, dict) or action_result is None:
response = self.serializer.serialize(action_result,
accept,
action=action)
else:
response = action_result
try:
LOG.info(_LI("%(url)s returned with HTTP %(status)d"),
dict(url=request.url, status=response.status_int))
except __HOLE__ as e:
LOG.info(_LI("%(url)s returned a fault: %(exception)s"),
dict(url=request.url, exception=e))
return response
|
AttributeError
|
dataset/ETHPy150Open openstack/neutron/neutron/wsgi.py/Resource.__call__
|
4,074 |
def dispatch(self, request, action, action_args):
"""Find action-specific method on controller and call it."""
controller_method = getattr(self.controller, action)
try:
#NOTE(salvatore-orlando): the controller method must have
# an argument whose name is 'request'
return controller_method(request=request, **action_args)
except __HOLE__ as exc:
LOG.exception(exc)
return Fault(webob.exc.HTTPBadRequest())
|
TypeError
|
dataset/ETHPy150Open openstack/neutron/neutron/wsgi.py/Resource.dispatch
|
4,075 |
def refreshHistory(self):
output = ''
for line in self.history:
try:
output += self.lineFilter( *line )
except __HOLE__: pass
self.bufferLength = len(output)
cmd = 'scrollField -e -text \"%s\" "%s";' % ( output, self.name )
self.executeCommand( cmd )
|
TypeError
|
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/tools/scriptEditor/pymelScrollFieldReporter.py/Reporter.refreshHistory
|
4,076 |
def cmdCallback( nativeMsg, messageType, data ):
global callbackState
#outputFile = open( '/var/tmp/commandOutput', 'a')
#outputFile.write( '============\n%s\n%s %s, length %s \n' % (nativeMsg, messageType, callbackState, len(nativeMsg)) )
#outputFile.close()
if callbackState == 'ignoreCommand':
callbackState = 'ignoreResult'
return
elif callbackState == 'ignoreResult':
callbackState = 'normal'
return
global sourceType
global allHistory
syntaxError = False
convertedMsg = None
# Command History
if messageType == OpenMaya.MCommandMessage.kHistory:
callbackState = 'normal'
#if nativeMsg.rfind(';') == len(nativeMsg)-2 : # and len(nativeMsg) >= 2:
if nativeMsg.endswith(';\n') : # and len(nativeMsg) >= 2:
sourceType = kMel
try:
#convertedMsg = mel2py.mel2pyStr( nativeMsg )
convertedMsg = mparser.parse( nativeMsg )
except Exception, msg:
syntaxError = True
pass
else:
sourceType = kPython
# Display - unaltered strings, such as that printed by the print command
elif messageType == OpenMaya.MCommandMessage.kDisplay and ( nativeMsg.endswith(';\n') or nativeMsg.startswith( '//' ) ):
try:
#convertedMsg = mel2py.mel2pyStr( nativeMsg )
convertedMsg = mparser.parse( nativeMsg )
except Exception, msg:
pass
else:
try:
nativeMsg = {
#OpenMaya.MCommandMessage.kDisplay: 'Output',
OpenMaya.MCommandMessage.kInfo: '',
OpenMaya.MCommandMessage.kWarning: 'Warning: ',
OpenMaya.MCommandMessage.kError: 'Error: ',
OpenMaya.MCommandMessage.kResult: 'Result: '
}[ messageType ] + nativeMsg
if sourceType == kMel:
convertedMsg = '# %s #\n' % nativeMsg
nativeMsg = '// %s //\n' % nativeMsg
else:
nativeMsg = '# %s #\n' % nativeMsg
except __HOLE__:
pass
nativeMsg = encodeString( nativeMsg )
if convertedMsg is not None:
convertedMsg = encodeString( convertedMsg )
#outputFile = open( '/var/tmp/commandOutput', 'a')
#outputFile.write( '---------\n%s %s\n' % ( convertedMsg, sourceType ) )
#outputFile.close()
line = [ messageType, sourceType, nativeMsg, convertedMsg ]
allHistory.append( line )
#if messageType == OpenMaya.MCommandMessage.kError : # and 'Syntax error' in nativeMsg:
# return
for reporter in reporters.values():
reporter.appendHistory( line )
if syntaxError:
callbackState = 'syntax_error'
#elif callbackState == 'syntax_error' and 'Syntax error' in nativeMsg:
# callbackState = 'normal'
#global output
#output += encodeString( message )
#cmd = 'global string $gCommandReporter;cmdScrollFieldReporter -edit -text \"%s\" $gCommandReporter;' % output
#cmd = 'scrollField -e -text \"%s\" %s;\n' % ( output, scrollFieldName )
#OpenMaya.MGlobal.executeCommand( cmd, False, False )
# command
|
KeyError
|
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/tools/scriptEditor/pymelScrollFieldReporter.py/cmdCallback
|
4,077 |
def run_cmd(dir, args, with_retcode=False, with_stderr=False, raise_error=False, input=None, env={}, run_bg=False, setup_askpass=False):
# Check args
if type(args) in [str, unicode]:
args = [args]
args = [str(a) for a in args]
# Check directory
if not os.path.isdir(dir):
raise GitError, 'Directory does not exist: ' + dir
try:
os.chdir(dir)
except __HOLE__, msg:
raise GitError, msg
# Run command
if type(args) != list:
args = [args]
# Setup environment
git_env = dict(os.environ)
if setup_askpass and 'SSH_ASKPASS' not in git_env:
git_env['SSH_ASKPASS'] = '%s-askpass' % os.path.realpath(os.path.abspath(sys.argv[0]))
git_env.update(env)
preexec_fn = os.setsid if setup_askpass else None
p = Popen([git_binary()] + args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, stdin=subprocess.PIPE,
env=git_env, shell=False, preexec_fn=preexec_fn)
if run_bg:
return p
if input == None:
stdout,stderr = p.communicate('')
else:
stdout,stderr = p.communicate(utf8_str(input))
# Return command output in a form given by arguments
ret = []
if p.returncode != 0 and raise_error:
raise GitError, 'git returned with the following error:\n%s' % stderr
if with_retcode:
ret.append(p.returncode)
ret.append(stdout)
if with_stderr:
ret.append(stderr)
if len(ret) == 1:
return ret[0]
else:
return tuple(ret)
|
OSError
|
dataset/ETHPy150Open gyim/stupidgit/stupidgit_gui/git.py/run_cmd
|
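Pattern note: os.chdir raises OSError (e.g. ENOENT, EACCES) rather than returning a status code, so the row above rewraps it as a GitError. A Python 3 sketch of the same wrapping, with RuntimeError as an illustrative stand-in:
import os
def enter_directory(path):
    try:
        os.chdir(path)
    except OSError as exc:  # FileNotFoundError etc. are OSError subclasses
        raise RuntimeError('Cannot enter %s: %s' % (path, exc))
enter_directory(os.getcwd())         # fine
try:
    enter_directory('/no/such/dir')  # raises RuntimeError
except RuntimeError as exc:
    print(exc)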
4,078 |
def __init__(self, repodir, name='Main module', parent=None):
self.name = name
self.parent = parent
# Search for .git directory in repodir ancestors
repodir = os.path.abspath(repodir)
try:
if parent:
if not os.path.isdir(os.path.join(repodir, '.git')):
raise GitError, "Not a git repository: %s" % repodir
else:
while not os.path.isdir(os.path.join(repodir, '.git')):
new_repodir = os.path.abspath(os.path.join(repodir, '..'))
if new_repodir == repodir or (parent and new_repodir == parent.dir):
raise GitError, "Directory is not a git repository"
else:
repodir = new_repodir
except __HOLE__:
raise GitError, "Directory is not a git repository or it is not readable"
self.dir = repodir
# Remotes
self.config = ConfigFile(os.path.join(self.dir, '.git', 'config'))
self.url = self.config.get_option('remote', 'origin', 'url')
self.remotes = {}
for remote, opts in self.config.sections_for_type('remote'):
if 'url' in opts:
self.remotes[remote] = opts['url']
# Run a git status to see whether this is really a git repository
retcode,output = self.run_cmd(['status'], with_retcode=True)
if retcode not in [0,1]:
raise GitError, "Directory is not a git repository"
# Load refs
self.load_refs()
# Get submodule info
self.submodules = self.get_submodules()
self.all_modules = [self] + self.submodules
|
OSError
|
dataset/ETHPy150Open gyim/stupidgit/stupidgit_gui/git.py/Repository.__init__
|
4,079 |
def load_refs(self):
self.refs = {}
self.branches = {}
self.remote_branches = {}
self.tags = {}
# HEAD, current branch
self.head = self.run_cmd(['rev-parse', 'HEAD']).strip()
self.current_branch = None
try:
f = open(os.path.join(self.dir, '.git', 'HEAD'))
head = f.read().strip()
f.close()
if head.startswith('ref: refs/heads/'):
self.current_branch = head[16:]
except __HOLE__:
pass
# Main module references
if self.parent:
self.main_ref = self.parent.get_submodule_version(self.name, 'HEAD')
if os.path.exists(os.path.join(self.parent.dir, '.git', 'MERGE_HEAD')):
self.main_merge_ref = self.parent.get_submodule_version(self.name, 'MERGE_HEAD')
else:
self.main_merge_ref = None
else:
self.main_ref = None
self.main_merge_ref = None
# References
for line in self.run_cmd(['show-ref']).split('\n'):
commit_id, _, refname = line.partition(' ')
self.refs[refname] = commit_id
if refname.startswith('refs/heads/'):
branchname = refname[11:]
self.branches[branchname] = commit_id
elif refname.startswith('refs/remotes/'):
branchname = refname[13:]
self.remote_branches[branchname] = commit_id
elif refname.startswith('refs/tags/'):
# Load the referenced commit for tags
tagname = refname[10:]
try:
self.tags[tagname] = self.run_cmd(['rev-parse', '%s^{commit}' % refname], raise_error=True).strip()
except GitError:
pass
# Inverse reference hashes
self.refs_by_sha1 = invert_hash(self.refs)
self.branches_by_sha1 = invert_hash(self.branches)
self.remote_branches_by_sha1 = invert_hash(self.remote_branches)
self.tags_by_sha1 = invert_hash(self.tags)
|
OSError
|
dataset/ETHPy150Open gyim/stupidgit/stupidgit_gui/git.py/Repository.load_refs
|
4,080 |
def commit(self, author_name, author_email, msg, amend=False):
if amend:
# Get details of current HEAD
is_merge_resolve = False
output = self.run_cmd(['log', '-1', '--pretty=format:%P%n%an%n%ae%n%aD'])
if not output.strip():
raise GitError, "Cannot amend in an empty repository!"
parents, author_name, author_email, author_date = output.split('\n')
parents = parents.split(' ')
else:
author_date = None # Use current date
# Get HEAD sha1 id
if self.head == 'HEAD':
parents = []
else:
head = self.run_cmd(['rev-parse', 'HEAD']).strip()
parents = [head]
# Get merge head if exists
is_merge_resolve = False
try:
merge_head_filename = os.path.join(self.dir, '.git', 'MERGE_HEAD')
if os.path.isfile(merge_head_filename):
f = open(merge_head_filename)
p = f.read().strip()
f.close()
parents.append(p)
is_merge_resolve = True
except OSError:
raise GitError, "Cannot open MERGE_HEAD file"
# Write tree
tree = self.run_cmd(['write-tree'], raise_error=True).strip()
# Write commit
parent_args = []
for parent in parents:
parent_args += ['-p', parent]
env = {}
if author_name: env['GIT_AUTHOR_NAME'] = author_name
if author_email: env['GIT_AUTHOR_EMAIL'] = author_email
if author_date: env['GIT_AUTHOR_DATE'] = author_date
commit = self.run_cmd(
['commit-tree', tree] + parent_args,
raise_error=True,
input=msg,
env=env
).strip()
# Update reference
self.run_cmd(['update-ref', 'HEAD', commit], raise_error=True)
# Remove MERGE_HEAD
if is_merge_resolve:
try:
os.unlink(os.path.join(self.dir, '.git', 'MERGE_HEAD'))
os.unlink(os.path.join(self.dir, '.git', 'MERGE_MODE'))
os.unlink(os.path.join(self.dir, '.git', 'MERGE_MSG'))
os.unlink(os.path.join(self.dir, '.git', 'ORIG_HEAD'))
except __HOLE__:
pass
|
OSError
|
dataset/ETHPy150Open gyim/stupidgit/stupidgit_gui/git.py/Repository.commit
|
4,081 |
def update_head(self, content):
try:
f = open(os.path.join(self.dir, '.git', 'HEAD'), 'w')
f.write(content)
f.close()
except __HOLE__:
raise GitError, "Write error:\nCannot write into .git/HEAD"
|
OSError
|
dataset/ETHPy150Open gyim/stupidgit/stupidgit_gui/git.py/Repository.update_head
|
4,082 |
def diff_for_untracked_file(filename):
# Start "diff" text
diff_text = 'New file: %s\n' % filename
# Detect whether file is binary
if is_binary_file(filename):
diff_text += "@@ File is binary.\n\n"
else:
# Text file => show lines
newfile_text = ''
try:
f = open(filename, 'r')
lines = f.readlines()
f.close()
newfile_text += '@@ -1,0 +1,%d @@\n' % len(lines)
for line in lines:
newfile_text += '+ ' + line
diff_text += newfile_text
except __HOLE__:
diff_text += '@@ Error: Cannot open file\n\n'
return diff_text
|
OSError
|
dataset/ETHPy150Open gyim/stupidgit/stupidgit_gui/git.py/diff_for_untracked_file
|
4,083 |
def to_normalized(self, doc):
# make the new dict actually contain real items
normed = {}
do_not_include = ['docID', 'doc', 'filetype', 'timestamps', 'source', 'versions', 'key']
for key, value in dict(doc).items():
if value and key not in do_not_include:
try:
normed[key] = json.loads(value)
except (ValueError, __HOLE__):
normed[key] = value
if normed.get('versions'):
normed['versions'] = list(map(str, normed['versions']))
# No datetime means the document wasn't normalized (probably wasn't on the approved list)
# TODO - fix odd circular import that makes us import this here
from scrapi.base.helpers import datetime_formatter
if normed.get('providerUpdatedDateTime'):
normed['providerUpdatedDateTime'] = datetime_formatter(normed['providerUpdatedDateTime'].isoformat())
else:
return None
return NormalizedDocument(normed, validate=False, clean=False)
|
TypeError
|
dataset/ETHPy150Open CenterForOpenScience/scrapi/scrapi/processing/cassandra.py/CassandraProcessor.to_normalized
|
4,084 |
def get(self, source, docID):
documents = DocumentModel.objects(source=source, docID=docID)
try:
doc = documents[0]
except __HOLE__:
return None
raw = self.to_raw(doc)
normalized = self.to_normalized(doc)
return DocumentTuple(raw, normalized)
|
IndexError
|
dataset/ETHPy150Open CenterForOpenScience/scrapi/scrapi/processing/cassandra.py/CassandraProcessor.get
|
4,085 |
def user_agent():
"""Return a string representing the user agent."""
_implementation = platform.python_implementation()
if _implementation == 'CPython':
_implementation_version = platform.python_version()
elif _implementation == 'PyPy':
_implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro)
if sys.pypy_version_info.releaselevel != 'final':
_implementation_version = ''.join([
_implementation_version,
sys.pypy_version_info.releaselevel,
])
elif _implementation == 'Jython':
_implementation_version = platform.python_version() # Complete Guess
elif _implementation == 'IronPython':
_implementation_version = platform.python_version() # Complete Guess
else:
_implementation_version = 'Unknown'
try:
p_system = platform.system()
p_release = platform.release()
except __HOLE__:
p_system = 'Unknown'
p_release = 'Unknown'
return " ".join(['pip/%s' % pip.__version__,
'%s/%s' % (_implementation, _implementation_version),
'%s/%s' % (p_system, p_release)])
|
IOError
|
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/pip/download.py/user_agent
|
4,086 |
def get_file_content(url, comes_from=None, session=None):
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode."""
if session is None:
session = PipSession()
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from
and comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
if scheme == 'file':
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
else:
## FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
if six.PY3:
return resp.url, resp.text
else:
return resp.url, resp.content
try:
f = open(url)
content = f.read()
except __HOLE__:
e = sys.exc_info()[1]
raise InstallationError('Could not open requirements file: %s' % str(e))
else:
f.close()
return url, content
|
IOError
|
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/pip/download.py/get_file_content
|
4,087 |
def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, __HOLE__):
logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
return None
fp = open(target_file, 'rb')
while True:
chunk = fp.read(4096)
if not chunk:
break
download_hash.update(chunk)
fp.close()
return download_hash
|
TypeError
|
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/pip/download.py/_get_hash_from_file
|
4,088 |
def _download_url(resp, link, temp_location):
fp = open(temp_location, 'wb')
download_hash = None
if link.hash and link.hash_name:
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
try:
total_length = int(resp.headers['content-length'])
except (__HOLE__, KeyError, TypeError):
total_length = 0
downloaded = 0
show_progress = total_length > 40 * 1000 or not total_length
show_url = link.show_url
try:
if show_progress:
## FIXME: the URL can get really long in this message:
if total_length:
logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
else:
logger.start_progress('Downloading %s (unknown size): ' % show_url)
else:
logger.notify('Downloading %s' % show_url)
logger.info('Downloading from URL %s' % link)
def resp_read(chunk_size):
try:
# Special case for urllib3.
try:
for chunk in resp.raw.stream(
chunk_size, decode_content=False):
yield chunk
except IncompleteRead as e:
raise ChunkedEncodingError(e)
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
for chunk in resp_read(4096):
downloaded += len(chunk)
if show_progress:
if not total_length:
logger.show_progress('%s' % format_size(downloaded))
else:
logger.show_progress('%3i%% %s' % (100 * downloaded / total_length, format_size(downloaded)))
if download_hash is not None:
download_hash.update(chunk)
fp.write(chunk)
fp.close()
finally:
if show_progress:
logger.end_progress('%s downloaded' % format_size(downloaded))
return download_hash
|
ValueError
|
dataset/ETHPy150Open GeekTrainer/Flask/Work/Trivia - Module 5/env/Lib/site-packages/pip/download.py/_download_url
|
4,089 |
def import_module(name, required=True):
"""
Import module by name
:param name:
Module name
:param required:
If set to `True` and module was not found - will throw exception.
If set to `False` and module was not found - will return None.
Default is `True`.
"""
try:
__import__(name, globals(), locals(), [])
except __HOLE__:
if not required and module_not_found():
return None
raise
return sys.modules[name]
|
ImportError
|
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/tools.py/import_module
|
4,090 |
def rec_getattr(obj, attr, default=None):
"""
Recursive getattr.
:param attr:
Dot delimited attribute name
:param default:
Default value
Example::
rec_getattr(obj, 'a.b.c')
"""
try:
return reduce(getattr, attr.split('.'), obj)
except __HOLE__:
return default
|
AttributeError
|
dataset/ETHPy150Open flask-admin/flask-admin/flask_admin/tools.py/rec_getattr
|
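Pattern note: any missing link in the dotted chain makes getattr raise AttributeError, which maps to the default. A Python 3 rendering of the row above with the hole filled, runnable as-is (the functools import is required on Python 3):
from functools import reduce
def rec_getattr(obj, attr, default=None):
    # Walk 'a.b.c' one attribute at a time.
    try:
        return reduce(getattr, attr.split('.'), obj)
    except AttributeError:
        return default
class A: pass
a = A(); a.b = A(); a.b.c = 7
assert rec_getattr(a, 'b.c') == 7
assert rec_getattr(a, 'b.missing', 'n/a') == 'n/a'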
4,091 |
def cleanup(self):
try:
self.shutdown()
self.server_close()
except __HOLE__:
pass
logging.info('Stopped %s server. Total time processing requests: %dms',
self.protocol, self.total_request_time)
|
KeyboardInterrupt
|
dataset/ETHPy150Open chromium/web-page-replay/httpproxy.py/HttpProxyServer.cleanup
|
4,092 |
def cleanup(self):
try:
self.shutdown()
self.server_close()
except __HOLE__:
pass
|
KeyboardInterrupt
|
dataset/ETHPy150Open chromium/web-page-replay/httpproxy.py/HttpsProxyServer.cleanup
|
4,093 |
def __init__(self, var):
self.var = var
try:
# django.template.base.Variable
self.literal = self.var.literal
except __HOLE__:
# django.template.base.FilterExpression
self.literal = self.var.token
|
AttributeError
|
dataset/ETHPy150Open ojii/django-classy-tags/classytags/values.py/StringValue.__init__
|
4,094 |
def clean(self, value):
try:
return int(value)
except __HOLE__:
return self.error(value, "clean")
|
ValueError
|
dataset/ETHPy150Open ojii/django-classy-tags/classytags/values.py/IntegerValue.clean
|
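Pattern note: int() raises ValueError for non-numeric strings ('abc', '1.5', ...), which the row above turns into a validation error. A minimal standalone sketch returning a fallback instead (function name is illustrative):
def clean_int(value, fallback=None):
    try:
        return int(value)
    except ValueError:
        return fallback
assert clean_int('42') == 42
assert clean_int('abc', fallback=0) == 0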
4,095 |
def _open(self, devpath, mode, max_speed, bit_order, bits_per_word, extra_flags):
if not isinstance(devpath, str):
raise TypeError("Invalid devpath type, should be string.")
elif not isinstance(mode, int):
raise TypeError("Invalid mode type, should be integer.")
elif not isinstance(max_speed, int) and not isinstance(max_speed, float):
raise TypeError("Invalid max_speed type, should be integer or float.")
elif not isinstance(bit_order, str):
raise TypeError("Invalid bit_order type, should be string.")
elif not isinstance(bits_per_word, int):
raise TypeError("Invalid bits_per_word type, should be integer.")
elif not isinstance(extra_flags, int):
raise TypeError("Invalid extra_flags type, should be integer.")
if mode not in [0, 1, 2, 3]:
raise ValueError("Invalid mode, can be 0, 1, 2, 3.")
elif bit_order.lower() not in ["msb", "lsb"]:
raise ValueError("Invalid bit_order, can be \"msb\" or \"lsb\".")
elif bits_per_word < 0 or bits_per_word > 255:
raise ValueError("Invalid bits_per_word, must be 0-255.")
elif extra_flags < 0 or extra_flags > 255:
raise ValueError("Invalid extra_flags, must be 0-255.")
# Open spidev
try:
self._fd = os.open(devpath, os.O_RDWR)
except OSError as e:
raise SPIError(e.errno, "Opening SPI device: " + e.strerror)
self._devpath = devpath
bit_order = bit_order.lower()
# Set mode, bit order, extra flags
buf = array.array("B", [mode | (SPI._SPI_LSB_FIRST if bit_order == "lsb" else 0) | extra_flags])
try:
fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_MODE, buf, False)
except __HOLE__ as e:
raise SPIError(e.errno, "Setting SPI mode: " + e.strerror)
# Set max speed
buf = array.array("I", [int(max_speed)])
try:
fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_MAX_SPEED_HZ, buf, False)
except OSError as e:
raise SPIError(e.errno, "Setting SPI max speed: " + e.strerror)
# Set bits per word
buf = array.array("B", [bits_per_word])
try:
fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_BITS_PER_WORD, buf, False)
except OSError as e:
raise SPIError(e.errno, "Setting SPI bits per word: " + e.strerror)
# Methods
|
OSError
|
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._open
|
4,096 |
def transfer(self, data):
"""Shift out `data` and return shifted in data.
Args:
data (bytes, bytearray, list): a byte array or list of 8-bit integers to shift out.
Returns:
bytes, bytearray, list: data shifted in.
Raises:
SPIError: if an I/O or OS error occurs.
TypeError: if `data` type is invalid.
ValueError: if data is not valid bytes.
"""
if not isinstance(data, bytes) and not isinstance(data, bytearray) and not isinstance(data, list):
raise TypeError("Invalid data type, should be bytes, bytearray, or list.")
# Create mutable array
try:
buf = array.array('B', data)
except OverflowError:
raise ValueError("Invalid data bytes.")
buf_addr, buf_len = buf.buffer_info()
# Prepare transfer structure
spi_xfer = _CSpiIocTransfer()
spi_xfer.tx_buf = buf_addr
spi_xfer.rx_buf = buf_addr
spi_xfer.len = buf_len
# Transfer
try:
fcntl.ioctl(self._fd, SPI._SPI_IOC_MESSAGE_1, spi_xfer)
except __HOLE__ as e:
raise SPIError(e.errno, "SPI transfer: " + e.strerror)
# Return shifted out data with the same type as shifted in data
if isinstance(data, bytes):
return bytes(bytearray(buf))
elif isinstance(data, bytearray):
return bytearray(buf)
elif isinstance(data, list):
return buf.tolist()
|
OSError
|
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI.transfer
|
4,097 |
def close(self):
"""Close the spidev SPI device.
Raises:
SPIError: if an I/O or OS error occurs.
"""
if self._fd is None:
return
try:
os.close(self._fd)
except __HOLE__ as e:
raise SPIError(e.errno, "Closing SPI device: " + e.strerror)
self._fd = None
# Immutable properties
|
OSError
|
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI.close
|
4,098 |
def _get_mode(self):
buf = array.array('B', [0])
# Get mode
try:
fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_MODE, buf, True)
except __HOLE__ as e:
raise SPIError(e.errno, "Getting SPI mode: " + e.strerror)
return buf[0] & 0x3
|
OSError
|
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._get_mode
|
4,099 |
def _set_mode(self, mode):
if not isinstance(mode, int):
raise TypeError("Invalid mode type, should be integer.")
if mode not in [0, 1, 2, 3]:
raise ValueError("Invalid mode, can be 0, 1, 2, 3.")
# Read-modify-write mode, because the mode contains bits for other settings
# Get mode
buf = array.array('B', [0])
try:
fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_MODE, buf, True)
except OSError as e:
raise SPIError(e.errno, "Getting SPI mode: " + e.strerror)
buf[0] = (buf[0] & ~(SPI._SPI_CPOL | SPI._SPI_CPHA)) | mode
# Set mode
try:
fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_MODE, buf, False)
except __HOLE__ as e:
raise SPIError(e.errno, "Setting SPI mode: " + e.strerror)
|
OSError
|
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._set_mode
|