Unnamed: 0 (int64, 0–10k) | function (stringlengths 79–138k) | label (stringclasses, 20 values) | info (stringlengths 42–261)
---|---|---|---|
5,800 |
def _dict_from_expr(expr, opt):
    """Transform an expression into a multinomial form. """
    if expr.is_commutative is False:
        raise PolynomialError('non-commutative expressions are not supported')

    def _is_expandable_pow(expr):
        return (expr.is_Pow and expr.exp.is_positive and expr.exp.is_Integer
                and expr.base.is_Add)

    if opt.expand is not False:
        try:
            expr = expr.expand()
        except __HOLE__:
            raise PolynomialError('expression must support expand method')
        # TODO: Integrate this into expand() itself
        while any(_is_expandable_pow(i) or i.is_Mul and
                  any(_is_expandable_pow(j) for j in i.args) for i in
                  Add.make_args(expr)):
            expr = expand_multinomial(expr)
        while any(i.is_Mul and any(j.is_Add for j in i.args) for i in Add.make_args(expr)):
            expr = expand_mul(expr)

    if opt.gens:
        rep, gens = _dict_from_expr_if_gens(expr, opt)
    else:
        rep, gens = _dict_from_expr_no_gens(expr, opt)

    return rep, opt.clone({'gens': gens})
|
AttributeError
|
dataset/ETHPy150Open sympy/sympy/sympy/polys/polyutils.py/_dict_from_expr
|
5,801 |
def _dict_reorder(rep, gens, new_gens):
    """Reorder levels using dict representation. """
    gens = list(gens)

    monoms = rep.keys()
    coeffs = rep.values()

    new_monoms = [ [] for _ in range(len(rep)) ]
    used_indices = set()

    for gen in new_gens:
        try:
            j = gens.index(gen)
            used_indices.add(j)
            for M, new_M in zip(monoms, new_monoms):
                new_M.append(M[j])
        except __HOLE__:
            for new_M in new_monoms:
                new_M.append(0)

    for i, _ in enumerate(gens):
        if i not in used_indices:
            for monom in monoms:
                if monom[i]:
                    raise GeneratorsError("unable to drop generators")

    return map(tuple, new_monoms), coeffs
|
ValueError
|
dataset/ETHPy150Open sympy/sympy/sympy/polys/polyutils.py/_dict_reorder
|
5,802 |
def __setstate__(self, d):
    # All values that were pickled are now assigned to a fresh instance
    for name, value in d.items():
        try:
            setattr(self, name, value)
        except __HOLE__:    # This is needed in cases like Rational :> Half
            pass
|
AttributeError
|
dataset/ETHPy150Open sympy/sympy/sympy/polys/polyutils.py/PicklableWithSlots.__setstate__
|
5,803 |
def started(self, *args):
    try:
        code = int(sys.argv[1])
    except __HOLE__:
        code = sys.argv[1]
    raise SystemExit(code)
|
ValueError
|
dataset/ETHPy150Open circuits/circuits/tests/core/exitcodeapp.py/App.started
|
5,804 |
def is_numeric(s):
    """Determine if a string is numeric"""
    try:
        float(s)
        return True
    except __HOLE__:
        return False;
|
ValueError
|
dataset/ETHPy150Open briandconnelly/BEACONToolkit/plotting/scripts/plot_csv.py/is_numeric
|
5,805 |
def plot_csv():
    """Plot CSV data"""
    parser = argparse.ArgumentParser(description='Create plots from CSV data', version='{v}'.format(v=__version__))
    parser.add_argument('outfile', action='store', help='output file')
    parser.add_argument('-i', '--infile', type=argparse.FileType('rb'), default=sys.stdin, help='input file (default: stdin)')
    parser.add_argument('--title', action='store', help='display the given title')
    parser.add_argument('--legend', action='store_true', default=False, help='display a legend')
    parser.add_argument('--grid', action='store_true', default=False, help='display a grid')
    parser.add_argument('--transparent', action='store_true', default=False, help='use a transparent canvas')
    parser.add_argument('--dpi', action='store', type=float, default=80.0, help='DPI for raster graphics (default: 80)')
    parser.add_argument('-x', '--xcol', type=int, metavar='X', required=True, help='column number of data to be used for X axis (1-based)')
    parser.add_argument('--xlabel', metavar='L', help='label for X axis (default: name from column header)')
    parser.add_argument('--logX', action='store_true', default=False, help='use log scaling along the X axis')
    parser.add_argument('--xlim', type=float, nargs=2, help='minimum and maximum X values to be displayed')
    parser.add_argument('-y', '--ycols', type=int, nargs='+', metavar='Y', required=True, help='column number of data to be used for X axis (1-based)')
    parser.add_argument('--ylabel', metavar='L', help='label for Y axis (default: name from column header)')
    parser.add_argument('--logY', action='store_true', default=False, help='use log scaling along the Y axis')
    parser.add_argument('--ylim', type=float, nargs=2, help='minimum and maximum Y values to be displayed')
    parser.add_argument('--labels', nargs='+', help='labels to be used for plotted columns')
    cmd_args = parser.parse_args()

    if cmd_args.labels and cmd_args.ycols and len(cmd_args.labels) != len(cmd_args.ycols):
        print("Error: number of labels ({nl}) must match number of columns for Y axis ({nc})".format(nl=len(cmd_args.labels), nc=len(cmd_args.ycols)))
        sys.exit(1)

    #----- Read the data -----------------------------------
    reader = csv.reader(cmd_args.infile)
    header = None
    rownum = 0
    xlist = []
    ylist = []

    for row in reader:
        # Skip empty rows
        if len(row) == 0:
            continue
        rownum += 1
        # Skip rows that are not data
        if not is_numeric(row[0]):
            if rownum == 1:
                header = row
            continue
        row = map(float, row)
        xlist.append(row[cmd_args.xcol-1])
        ylist.append([row[r-1] for r in cmd_args.ycols])

    cmd_args.infile.close()
    xvals = np.array(xlist)
    yvals = np.array(ylist)

    #----- Get labels for the legend -----------------------
    if cmd_args.labels:
        labels = cmd_args.labels
    else:
        if header:
            labels = [header[i-1] for i in cmd_args.ycols]
        elif cmd_args.legend:
            print("Error: Must supply labels for legend (data has no header)")
            sys.exit(2)

    #----- Create the plot ---------------------------------
    fig = plt.figure()
    ax = plt.subplot(1,1,1)
    if cmd_args.title:
        plt.title(cmd_args.title)
    if cmd_args.grid:
        ax.grid()
    for c in range(yvals.shape[1]):
        ax.plot(xvals, yvals[:, c], linestyle='solid')
    if cmd_args.xlim:
        ax.set_xlim([cmd_args.xlim[0], cmd_args.xlim[1]])
    if cmd_args.ylim:
        ax.set_ylim([cmd_args.ylim[0], cmd_args.ylim[1]])
    if cmd_args.logX:
        ax.set_xscale('log')
    if cmd_args.logY:
        ax.set_yscale('log')
    if cmd_args.xlabel:
        plt.xlabel(cmd_args.xlabel)
    elif header:
        plt.xlabel(header[cmd_args.xcol-1])
    if cmd_args.ylabel:
        plt.ylabel(cmd_args.ylabel)
    elif len(cmd_args.ycols) == 1:
        plt.ylabel(header[cmd_args.ycols[0]-1])
    if cmd_args.legend:
        plt.legend(labels)
    try:
        plt.tight_layout()
    except __HOLE__:
        warnings.warn("Matplotlib is out-of-date. Consider upgrading.", RuntimeWarning, stacklevel=2)
        pass
    plt.savefig(cmd_args.outfile, transparent=cmd_args.transparent, dpi=cmd_args.dpi)
|
AttributeError
|
dataset/ETHPy150Open briandconnelly/BEACONToolkit/plotting/scripts/plot_csv.py/plot_csv
|
5,806 |
@application.route('/', methods=['GET', 'POST'])
def index():
"""
Main WSGI application entry.
"""
path = normpath(abspath(dirname(__file__)))
hooks = join(path, 'hooks')
# Only POST is implemented
if request.method != 'POST':
abort(501)
# Load config
with open(join(path, 'config.json'), 'r') as cfg:
config = loads(cfg.read())
# Allow Github IPs only
if config.get('github_ips_only', True):
src_ip = ip_address(
u'{}'.format(request.remote_addr) # Fix stupid ipaddress issue
)
whitelist = requests.get('https://api.github.com/meta').json()['hooks']
for valid_ip in whitelist:
if src_ip in ip_network(valid_ip):
break
else:
abort(403)
# Enforce secret
secret = config.get('enforce_secret', '')
if secret:
# Only SHA1 is supported
header_signature = request.headers.get('X-Hub-Signature')
if header_signature is None:
abort(403)
sha_name, signature = header_signature.split('=')
if sha_name != 'sha1':
abort(501)
# HMAC requires the key to be bytes, but data is string
mac = hmac.new(str(secret), msg=request.data, digestmod=sha1)
# Python prior to 2.7.7 does not have hmac.compare_digest
if hexversion >= 0x020707F0:
if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
abort(403)
else:
# What compare_digest provides is protection against timing
# attacks; we can live without this protection for a web-based
# application
if not str(mac.hexdigest()) == str(signature):
abort(403)
# Implement ping
event = request.headers.get('X-GitHub-Event', 'ping')
if event == 'ping':
return dumps({'msg': 'pong'})
# Gather data
try:
payload = loads(request.data)
except:
abort(400)
# Determining the branch is tricky, as it only appears for certain event
# types an at different levels
branch = None
try:
# Case 1: a ref_type indicates the type of ref.
# This true for create and delete events.
if 'ref_type' in payload:
if payload['ref_type'] == 'branch':
branch = payload['ref']
# Case 2: a pull_request object is involved. This is pull_request and
# pull_request_review_comment events.
elif 'pull_request' in payload:
# This is the TARGET branch for the pull-request, not the source
# branch
branch = payload['pull_request']['base']['ref']
elif event in ['push']:
# Push events provide a full Git ref in 'ref' and not a 'ref_type'.
branch = payload['ref'].split('/')[2]
except __HOLE__:
# If the payload structure isn't what we expect, we'll live without
# the branch name
pass
# All current events have a repository, but some legacy events do not,
# so let's be safe
name = payload['repository']['name'] if 'repository' in payload else None
meta = {
'name': name,
'branch': branch,
'event': event
}
logging.info('Metadata:\n{}'.format(dumps(meta)))
# Possible hooks
scripts = []
if branch and name:
scripts.append(join(hooks, '{event}-{name}-{branch}'.format(**meta)))
if name:
scripts.append(join(hooks, '{event}-{name}'.format(**meta)))
scripts.append(join(hooks, '{event}'.format(**meta)))
scripts.append(join(hooks, 'all'))
# Check permissions
scripts = [s for s in scripts if isfile(s) and access(s, X_OK)]
if not scripts:
return ''
# Save payload to temporal file
osfd, tmpfile = mkstemp()
with fdopen(osfd, 'w') as pf:
pf.write(dumps(payload))
# Run scripts
ran = {}
for s in scripts:
proc = Popen(
[s, tmpfile, event],
stdout=PIPE, stderr=PIPE
)
stdout, stderr = proc.communicate()
ran[basename(s)] = {
'returncode': proc.returncode,
'stdout': stdout,
'stderr': stderr,
}
# Log errors if a hook failed
if proc.returncode != 0:
logging.error('{} : {} \n{}'.format(
s, proc.returncode, stderr
))
# Remove temporal file
remove(tmpfile)
info = config.get('return_scripts_info', False)
if not info:
return ''
output = dumps(ran, sort_keys=True, indent=4)
logging.info(output)
return output
|
KeyError
|
dataset/ETHPy150Open carlos-jenkins/python-github-webhooks/webhooks.py/index
|
5,807 |
@click.command()
@click.argument('location', type=click.Path(), default='./chanjo-demo',
                required=False)
@click.pass_context
def demo(context, location):
    """Copy demo files to a directory.

    \b
    LOCATION: directory to add demofiles to (default: ./chanjo-demo)
    """
    user_dir = path(location)
    pkg_dir = __name__.rpartition('.')[0]
    demo_dir = path(resource_filename(pkg_dir, 'files'))

    # make sure we don't overwrite exiting files
    for demo_file in resource_listdir(pkg_dir, 'files'):
        user_file_path = user_dir.joinpath(demo_file)
        if user_file_path.exists():
            click.echo("{} exists, pick a different location"
                       .format(user_file_path))
            context.abort()

    try:
        # we can copy the directory(tree)
        demo_dir.copytree(user_dir)
    except __HOLE__:
        click.echo('The location must be a non-existing directory.')
        context.abort()

    # inform the user
    click.echo("Successfully copied demo files to {}".format(user_dir))
|
OSError
|
dataset/ETHPy150Open robinandeer/chanjo/chanjo/demo/cli.py/demo
|
5,808 |
def _processSingleResult(self,resultType,resultItem):
    if _entryResultTypes.has_key(resultType):
        # Search continuations are ignored
        dn,entry = resultItem
        self.allEntries[dn] = entry
        for a in self.indexed_attrs:
            if entry.has_key(a):
                for v in entry[a]:
                    try:
                        self.index[a][v].append(dn)
                    except __HOLE__:
                        self.index[a][v] = [ dn ]
|
KeyError
|
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/python-ldap-2.3.13/Lib/ldap/async.py/IndexedDict._processSingleResult
|
5,809 |
def download_md5(uri, dest):
    """
    downloads file from uri to file dest
    """
    # Create intermediate directories as necessary, #2970
    dirname = os.path.dirname(dest)
    if len(dirname):
        try:
            os.makedirs(dirname)
        except __HOLE__ as e:
            if e.errno != errno.EEXIST:
                raise

    sys.stdout.write('Downloading %s to %s...' % (uri, dest))
    sys.stdout.flush()
    try:
        download_with_resume(uri, dest)
        sys.stdout.write(' done.\n')
    except Exception as e:
        # delete partially downloaded data
        if os.path.exists(dest):
            os.unlink(dest)
        sys.stdout.write(' failed (%s)!\n' % e)
        raise
|
OSError
|
dataset/ETHPy150Open ros/catkin/cmake/test/download_checkmd5.py/download_md5
|
5,810 |
def get_entity_by_key(self, key):
    try:
        identifier = self.reverse_cache[key][0]
    except __HOLE__:
        return None
    return self.get_entity(identifier)
|
KeyError
|
dataset/ETHPy150Open potatolondon/djangae/djangae/db/backends/appengine/context.py/Context.get_entity_by_key
|
5,811 |
def FromJsonString(self, value):
    """Converts a string to Duration.

    Args:
      value: A string to be converted. The string must end with 's'. Any
          fractional digits (or none) are accepted as long as they fit into
          precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s

    Raises:
      ParseError: On parsing problems.
    """
    if len(value) < 1 or value[-1] != 's':
        raise ParseError(
            'Duration must end with letter "s": {0}.'.format(value))
    try:
        pos = value.find('.')
        if pos == -1:
            self.seconds = int(value[:-1])
            self.nanos = 0
        else:
            self.seconds = int(value[:pos])
            if value[0] == '-':
                self.nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
            else:
                self.nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
    except __HOLE__:
        raise ParseError(
            'Couldn\'t parse duration: {0}.'.format(value))
|
ValueError
|
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/net/proto2/python/internal/well_known_types.py/Duration.FromJsonString
|
5,812 |
def _build(self, build_method):
    """
    build image from provided build_args

    :return: BuildResults
    """
    logger.info("building image '%s'", self.image)
    self._ensure_not_built()
    self.temp_dir = tempfile.mkdtemp()
    temp_path = os.path.join(self.temp_dir, BUILD_JSON)
    try:
        with open(temp_path, 'w') as build_json:
            json.dump(self.build_args, build_json)
        self.build_container_id = build_method(self.build_image, self.temp_dir)
        try:
            logs_gen = self.dt.logs(self.build_container_id, stream=True)
            wait_for_command(logs_gen)
            return_code = self.dt.wait(self.build_container_id)
        except __HOLE__:
            logger.info("killing build container on user's request")
            self.dt.remove_container(self.build_container_id, force=True)
            results = BuildResults()
            results.return_code = 1
            return results
        else:
            results = self._load_results(self.build_container_id)
            results.return_code = return_code
            return results
    finally:
        shutil.rmtree(self.temp_dir)
|
KeyboardInterrupt
|
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/outer.py/BuildManager._build
|
5,813 |
def main():
# Define default configuration location
parser = OptionParser(usage="usage: %prog [options] /path/to/file")
parser.add_option("-d", "--debug",
action="store_true",
dest="debug",
help="enable debug messages to the console.")
parser.add_option("-c", "--config-path",
action="store", type="string",
dest="config_path",
help="path to configuration for laikaboss framework.")
parser.add_option("-o", "--out-path",
action="store", type="string",
dest="save_path",
help="Write all results to the specified path")
parser.add_option("-s", "--source",
action="store", type="string",
dest="source",
help="Set the source (may affect dispatching) [default:laika]")
parser.add_option("-p", "--num_procs",
action="store", type="int",
dest="num_procs",
default=8,
help="Specify the number of CPU's to use for a recursive scan. [default:8]")
parser.add_option("-l", "--log",
action="store_true",
dest="log_result",
help="enable logging to syslog")
parser.add_option("-j", "--log-json",
action="store", type="string",
dest="log_json",
help="enable logging JSON results to file")
parser.add_option("-m", "--module",
action="store", type="string",
dest="scan_modules",
help="Specify individual module(s) to run and their arguments. If multiple, must be a space-separated list.")
parser.add_option("--parent",
action="store", type="string",
dest="parent", default="",
help="Define the parent of the root object")
parser.add_option("-e", "--ephID",
action="store", type="string",
dest="ephID", default="",
help="Specify an ephemeralID to send with the object")
parser.add_option("--metadata",
action="store",
dest="ext_metadata",
help="Define metadata to add to the scan or specify a file containing the metadata.")
parser.add_option("--size-limit",
action="store", type="int", default=10,
dest="sizeLimit",
help="Specify a size limit in MB (default: 10)")
parser.add_option("--file-limit",
action="store", type="int", default=0,
dest="fileLimit",
help="Specify a limited number of files to scan (default: off)")
parser.add_option("--progress",
action="store_true",
dest="progress",
default=False,
help="enable the progress bar")
(options, args) = parser.parse_args()
logger = logging.getLogger()
if options.debug:
# stdout is added by default, we'll capture this object here
#lhStdout = logger.handlers[0]
fileHandler = logging.FileHandler('laika-debug.log', 'w')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
fileHandler.setFormatter(formatter)
logger.addHandler(fileHandler)
# remove stdout from handlers so that debug info is only written to the file
#logger.removeHandler(lhStdout)
logging.basicConfig(level=logging.DEBUG)
logger.setLevel(logging.DEBUG)
global EXT_METADATA
if options.ext_metadata:
if os.path.exists(options.ext_metadata):
with open(options.ext_metadata) as metafile:
EXT_METADATA = json.loads(metafile.read())
else:
EXT_METADATA = json.loads(options.ext_metadata)
else:
EXT_METADATA = getConfig("ext_metadata")
global EPHID
if options.ephID:
EPHID = options.ephID
else:
EPHID = getConfig("ephID")
global SCAN_MODULES
if options.scan_modules:
SCAN_MODULES = options.scan_modules.split()
else:
SCAN_MODULES = None
logging.debug("SCAN_MODULES: %s" % (SCAN_MODULES))
global PROGRESS_BAR
if options.progress:
PROGRESS_BAR = 1
else:
PROGRESS_BAR = strtobool(getConfig('progress_bar'))
logging.debug("PROGRESS_BAR: %s" % (PROGRESS_BAR))
global LOG_RESULT
if options.log_result:
LOG_RESULT = 1
else:
LOG_RESULT = strtobool(getConfig('log_result'))
logging.debug("LOG_RESULT: %s" % (LOG_RESULT))
global LOG_JSON
if options.log_json:
LOG_JSON = options.log_json
else:
LOG_JSON = getConfig('log_json')
global NUM_PROCS
if options.num_procs:
NUM_PROCS = options.num_procs
else:
NUM_PROCS = int(getConfig('num_procs'))
logging.debug("NUM_PROCS: %s" % (NUM_PROCS))
global MAX_BYTES
if options.sizeLimit:
MAX_BYTES = options.sizeLimit * 1024 * 1024
else:
MAX_BYTES = int(getConfig('max_bytes'))
logging.debug("MAX_BYTES: %s" % (MAX_BYTES))
global MAX_FILES
if options.fileLimit:
MAX_FILES = options.fileLimit
else:
MAX_FILES = int(getConfig('max_files'))
logging.debug("MAX_FILES: %s" % (MAX_FILES))
global SOURCE
if options.source:
SOURCE = options.source
else:
SOURCE = getConfig('source')
global SAVE_PATH
if options.save_path:
SAVE_PATH = options.save_path
else:
SAVE_PATH = getConfig('save_path')
global CONFIG_PATH
# Highest priority configuration is via argument
if options.config_path:
CONFIG_PATH = options.config_path
logging.debug("using alternative config path: %s" % options.config_path)
if not os.path.exists(options.config_path):
error("the provided config path is not valid, exiting")
return 1
# Next, check to see if we're in the top level source directory (dev environment)
elif os.path.exists(default_configs['dev_config_path']):
CONFIG_PATH = default_configs['dev_config_path']
# Next, check for an installed copy of the default configuration
elif os.path.exists(default_configs['sys_config_path']):
CONFIG_PATH = default_configs['sys_config_path']
# Exit
else:
error('A valid framework configuration was not found in either of the following locations:\
\n%s\n%s' % (default_configs['dev_config_path'],default_configs['sys_config_path']))
return 1
# Check for stdin in no arguments were provided
if len(args) == 0:
DATA_PATH = []
if not sys.stdin.isatty():
while True:
f = sys.stdin.readline().strip()
if not f:
break
else:
if not os.path.isfile(f):
error("One of the specified files does not exist: %s" % (f))
return 1
if os.path.isdir(f):
error("One of the files you specified is actually a directory: %s" % (f))
return 1
DATA_PATH.append(f)
if not DATA_PATH:
error("You must provide files via stdin when no arguments are provided")
return 1
logging.debug("Loaded %s files from stdin" % (len(DATA_PATH)))
elif len(args) == 1:
if os.path.isdir(args[0]):
DATA_PATH = args[0]
elif os.path.isfile(args[0]):
DATA_PATH = [args[0]]
else:
error("File or directory does not exist: %s" % (args[0]))
return 1
else:
for f in args:
if not os.path.isfile(f):
error("One of the specified files does not exist: %s" % (f))
return 1
if os.path.isdir(f):
error("One of the files you specified is actually a directory: %s" % (f))
return 1
DATA_PATH = args
tasks = multiprocessing.JoinableQueue()
results = multiprocessing.Queue()
fileList = []
if type(DATA_PATH) is str:
for root, dirs, files in os.walk(DATA_PATH):
files = [f for f in files if not f[0] == '.']
dirs[:] = [d for d in dirs if not d[0] == '.']
for fname in files:
fullpath = os.path.join(root, fname)
if not os.path.islink(fullpath) and os.path.isfile(fullpath):
fileList.append(fullpath)
else:
fileList = DATA_PATH
if MAX_FILES:
fileList = fileList[:MAX_FILES]
num_jobs = len(fileList)
logging.debug("Loaded %s files for scanning" % (num_jobs))
# Start consumers
# If there's less files to process than processes, reduce the number of processes
if num_jobs < NUM_PROCS:
NUM_PROCS = num_jobs
logging.debug("Starting %s processes" % (NUM_PROCS))
consumers = [ Consumer(tasks, results)
for i in xrange(NUM_PROCS) ]
try:
for w in consumers:
w.start()
# Enqueue jobs
for fname in fileList:
tasks.put(fname)
# Add a poison pill for each consumer
for i in xrange(NUM_PROCS):
tasks.put(None)
if PROGRESS_BAR:
monitor = QueueMonitor(tasks, num_jobs)
monitor.start()
# Wait for all of the tasks to finish
tasks.join()
if PROGRESS_BAR:
monitor.join()
while num_jobs:
answer = zlib.decompress(results.get())
print(answer)
num_jobs -= 1
except __HOLE__:
error("Cancelled by user.. Shutting down.")
for w in consumers:
w.terminate()
w.join()
return None
except:
raise
|
KeyboardInterrupt
|
dataset/ETHPy150Open lmco/laikaboss/laika.py/main
|
5,814 |
def run(self):
    try:
        from progressbar import ProgressBar, Bar, Counter, Timer, ETA, Percentage, RotatingMarker
        widgets = [Percentage(), Bar(left='[', right=']'), ' Processed: ', Counter(), '/', "%s" % self.task_count, ' total files (', Timer(), ') ', ETA()]
        pb = ProgressBar(widgets=widgets, maxval=self.task_count).start()
        while self.task_queue.qsize():
            pb.update(self.task_count - self.task_queue.qsize())
            time.sleep(0.5)
        pb.finish()
    except KeyboardInterrupt:
        warning("progressbar interrupted by user\n")
        return 1
    except __HOLE__:
        warning("progressbar module not available")
    except:
        warning("unknown error from progress bar")
    return 0
|
ImportError
|
dataset/ETHPy150Open lmco/laikaboss/laika.py/QueueMonitor.run
|
5,815 |
def run(self):
    global CONFIG_PATH
    config.init(path=CONFIG_PATH)
    init_logging()
    ret_value = 0

    # Loop and accept messages from both channels, acting accordingly
    while True:
        next_task = self.task_queue.get()
        if next_task is None:
            # Poison pill means shutdown
            self.task_queue.task_done()
            logging.debug("%s Got poison pill" % (os.getpid()))
            break
        try:
            with open(next_task) as nextfile:
                file_buffer = nextfile.read()
        except __HOLE__:
            logging.debug("Error opening: %s" % (next_task))
            self.task_queue.task_done()
            self.result_queue.put(answer)
            continue
        resultJSON = ""
        try:
            # perform the work
            result = ScanResult()
            result.source = SOURCE
            result.startTime = time.time()
            result.level = level_metadata
            myexternalVars = ExternalVars(filename=next_task,
                                          source=SOURCE,
                                          ephID=EPHID,
                                          extMetaData=EXT_METADATA)
            Dispatch(file_buffer, result, 0, externalVars=myexternalVars, extScanModules=SCAN_MODULES)
            resultJSON = getJSON(result)
            if SAVE_PATH:
                rootObject = getRootObject(result)
                UID_SAVE_PATH = "%s/%s" % (SAVE_PATH, get_scanObjectUID(rootObject))
                if not os.path.exists(UID_SAVE_PATH):
                    try:
                        os.makedirs(UID_SAVE_PATH)
                    except (OSError, IOError) as e:
                        error("\nERROR: unable to write to %s...\n" % (UID_SAVE_PATH))
                        raise
                for uid, scanObject in result.files.iteritems():
                    with open("%s/%s" % (UID_SAVE_PATH, uid), "wb") as f:
                        f.write(scanObject.buffer)
                    if scanObject.filename and scanObject.depth != 0:
                        linkPath = "%s/%s" % (UID_SAVE_PATH, scanObject.filename.replace("/","_"))
                        if not os.path.lexists(linkPath):
                            os.symlink("%s" % (uid), linkPath)
                    elif scanObject.filename:
                        filenameParts = scanObject.filename.split("/")
                        os.symlink("%s" % (uid), "%s/%s" % (UID_SAVE_PATH, filenameParts[-1]))
                with open("%s/%s" % (UID_SAVE_PATH, "result.json"), "wb") as f:
                    f.write(resultJSON)
            if LOG_RESULT:
                log_result(result)
            if LOG_JSON:
                LOCAL_PATH = LOG_JSON
                with open(LOCAL_PATH, "ab") as f:
                    f.write(resultJSON + "\n")
        except:
            logging.exception("Scan worker died, shutting down")
            ret_value = 1
            break
        finally:
            self.task_queue.task_done()
            self.result_queue.put(zlib.compress(resultJSON))

    close_modules()
    return ret_value
|
IOError
|
dataset/ETHPy150Open lmco/laikaboss/laika.py/Consumer.run
|
5,816 |
def test_install_requirements_parsing(self):
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.3'})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed('pep8=1.3.2')
self.assertSaltFalseReturn({'test': ret})
self.assertInSaltComment(
'Invalid version specification in package pep8=1.3.2. '
'\'=\' is not supported, use \'==\' instead.',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.3'})
pip_install = MagicMock(return_value={'retcode': 0})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed('pep8>=1.3.2')
self.assertSaltTrueReturn({'test': ret})
self.assertInSaltComment(
'Python package pep8>=1.3.2 was already installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.3'})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed('pep8<1.3.2')
self.assertSaltNoneReturn({'test': ret})
self.assertInSaltComment(
'Python package pep8<1.3.2 is set to be installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.2'})
pip_install = MagicMock(return_value={'retcode': 0})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed('pep8>1.3.1,<1.3.3')
self.assertSaltTrueReturn({'test': ret})
self.assertInSaltComment(
'Python package pep8>1.3.1,<1.3.3 was already installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.1'})
pip_install = MagicMock(return_value={'retcode': 0})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed('pep8>1.3.1,<1.3.3')
self.assertSaltNoneReturn({'test': ret})
self.assertInSaltComment(
'Python package pep8>1.3.1,<1.3.3 is set to be installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.1'})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed(
'git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting>=0.5.1'
)
self.assertSaltNoneReturn({'test': ret})
self.assertInSaltComment(
'Python package git+https://github.com/saltstack/'
'salt-testing.git#egg=SaltTesting>=0.5.1 is set to be '
'installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.1'})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed(
'git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting'
)
self.assertSaltNoneReturn({'test': ret})
self.assertInSaltComment(
'Python package git+https://github.com/saltstack/'
'salt-testing.git#egg=SaltTesting is set to be '
'installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.1'})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list}):
with patch.dict(pip_state.__opts__, {'test': True}):
ret = pip_state.installed(
'https://pypi.python.org/packages/source/S/SaltTesting/'
'SaltTesting-0.5.0.tar.gz'
'#md5=e6760af92b7165f8be53b5763e40bc24'
)
self.assertSaltNoneReturn({'test': ret})
self.assertInSaltComment(
'Python package https://pypi.python.org/packages/source/'
'S/SaltTesting/SaltTesting-0.5.0.tar.gz'
'#md5=e6760af92b7165f8be53b5763e40bc24 is set to be '
'installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'SaltTesting': '0.5.0'})
pip_install = MagicMock(return_value={
'retcode': 0,
'stderr': '',
'stdout': 'Downloading/unpacking https://pypi.python.org/packages'
'/source/S/SaltTesting/SaltTesting-0.5.0.tar.gz\n '
'Downloading SaltTesting-0.5.0.tar.gz\n Running '
'setup.py egg_info for package from '
'https://pypi.python.org/packages/source/S/SaltTesting/'
'SaltTesting-0.5.0.tar.gz\n \nCleaning up...'
})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
ret = pip_state.installed(
'https://pypi.python.org/packages/source/S/SaltTesting/'
'SaltTesting-0.5.0.tar.gz'
'#md5=e6760af92b7165f8be53b5763e40bc24'
)
self.assertSaltTrueReturn({'test': ret})
self.assertInSaltComment('All packages were successfully installed',
{'test': ret}
)
self.assertInSaltReturn(
'Installed',
{'test': ret},
('changes', 'https://pypi.python.org/packages/source/S/'
'SaltTesting/SaltTesting-0.5.0.tar.gz'
'#md5=e6760af92b7165f8be53b5763e40bc24==???')
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'SaltTesting': '0.5.0'})
pip_install = MagicMock(return_value={
'retcode': 0,
'stderr': '',
'stdout': 'Cloned!'
})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
with patch.dict(pip_state.__opts__, {'test': False}):
ret = pip_state.installed(
'git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting'
)
self.assertSaltTrueReturn({'test': ret})
self.assertInSaltComment(
'successfully installed',
{'test': ret}
)
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'pep8': '1.3.1'})
pip_install = MagicMock(return_value={'retcode': 0})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
with patch.dict(pip_state.__opts__, {'test': False}):
ret = pip_state.installed(
'arbitrary ID that should be ignored due to requirements specified',
requirements='/tmp/non-existing-requirements.txt'
)
self.assertSaltTrueReturn({'test': ret})
# Test VCS installations using git+git://
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'SaltTesting': '0.5.0'})
pip_install = MagicMock(return_value={
'retcode': 0,
'stderr': '',
'stdout': 'Cloned!'
})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
with patch.dict(pip_state.__opts__, {'test': False}):
ret = pip_state.installed(
'git+git://github.com/saltstack/salt-testing.git#egg=SaltTesting'
)
self.assertSaltTrueReturn({'test': ret})
self.assertInSaltComment(
'were successfully installed',
{'test': ret}
)
# Test VCS installations with version info like >= 0.1
try:
original_pip_version = pip.__version__
pip.__version__ = MagicMock(
side_effect=AttributeError(
'Faked missing __version__ attribute'
)
)
except __HOLE__:
# The pip version being used is already < 1.2
pass
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
pip_list = MagicMock(return_value={'SaltTesting': '0.5.0'})
pip_install = MagicMock(return_value={
'retcode': 0,
'stderr': '',
'stdout': 'Cloned!'
})
with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
'pip.list': pip_list,
'pip.install': pip_install}):
with patch.dict(pip_state.__opts__, {'test': False}):
ret = pip_state.installed(
'git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting>=0.5.0'
)
self.assertSaltTrueReturn({'test': ret})
self.assertInSaltComment(
'were successfully installed',
{'test': ret}
)
# Reset the version attribute if existing
if hasattr(pip, '__version__'):
pip.__version__ = original_pip_version
|
AttributeError
|
dataset/ETHPy150Open saltstack/salt/tests/unit/states/pip_test.py/PipStateTest.test_install_requirements_parsing
|
5,817 |
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=__doc__,
epilog=_USAGE_EXAMPLES)
parser.add_argument(
'destination',
metavar="destination",
type=str,
help="address of the Phabricator instance, e.g. "
"https://secure.phabricator.com")
parser.add_argument(
'--count', '-c',
metavar="TIMES",
type=int,
help="number of times to ping, default is unlimited")
parser.add_argument(
'--interval', '-i',
metavar="SECONDS",
type=float,
default=1.0,
help="wait interval seconds between sending each request, default is "
"to wait one second. Minimum wait is 0.2 seconds.")
args = parser.parse_args()
if args.interval < 0.2:
print("interval must be at least 0.2 seconds")
sys.exit(2)
# perform the ping and display the time taken and result
uri = phlsys_conduit.make_conduit_uri(args.destination)
print("conduit.ping " + str(uri))
if args.count is not None:
# pychecker requires sequence to be of same type so we have to wrap
# xrange() in iter() to make it an iterator.
sequence = iter(xrange(args.count))
else:
sequence = itertools.count()
is_first = True
Stats = collections.namedtuple(
"phabping__Stats", ['min', 'max', 'sum', 'count'])
stats = None
try:
for i in sequence:
# pause between requests
if not is_first:
time.sleep(args.interval)
print("request " + str(i + 1) + " :", end=' ')
conduit = phlsys_conduit.Conduit(uri)
start = time.time()
result = conduit.ping()
end = time.time()
msecs = (end - start) * 1000
print(result, ":", str(int(msecs)), "ms")
# atomically update the 'stats' object
# (we may receive KeyboardInterrupt during update)
if stats is None:
stats = Stats(min=msecs, max=msecs, sum=msecs, count=i + 1)
else:
stats = Stats(
min=min(stats.min, msecs),
max=max(stats.max, msecs),
sum=stats.sum + msecs,
count=i + 1)
is_first = False
except __HOLE__:
# print a newline to separate the ^C
print()
if not stats:
print("no requests processed.")
else:
print("---", uri, "conduit.ping statistics", "---")
print(stats.count, "requests processed")
print("min / mean / max =", end=' ')
mean = stats.sum / stats.count
vals = [stats.min, mean, stats.max]
vals_str = ' / '.join(["{0:0.2f}".format(i) for i in vals])
print(vals_str, 'ms')
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2015 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
KeyboardInterrupt
|
dataset/ETHPy150Open bloomberg/phabricator-tools/py/pig/pigcmd_phabping.py/main
|
5,818 |
def _get_post_clean_step_hook(node):
    """Get post clean step hook for the currently executing clean step.

    This method reads node.clean_step and returns the post clean
    step hook for the currently executing clean step.

    :param node: a node object
    :returns: a method if there is a post clean step hook for this clean
        step; None otherwise
    """
    interface = node.clean_step.get('interface')
    step = node.clean_step.get('step')
    try:
        return POST_CLEAN_STEP_HOOKS[interface][step]
    except __HOLE__:
        pass
|
KeyError
|
dataset/ETHPy150Open openstack/ironic/ironic/drivers/modules/agent_base_vendor.py/_get_post_clean_step_hook
|
5,819 |
@base.passthru(['POST'])
@task_manager.require_exclusive_lock
def heartbeat(self, task, **kwargs):
    """Method for agent to periodically check in.

    The agent should be sending its agent_url (so Ironic can talk back)
    as a kwarg. kwargs should have the following format::

        {
            'agent_url': 'http://AGENT_HOST:AGENT_PORT'
        }

    AGENT_PORT defaults to 9999.
    """
    node = task.node
    driver_internal_info = node.driver_internal_info
    LOG.debug(
        'Heartbeat from %(node)s, last heartbeat at %(heartbeat)s.',
        {'node': node.uuid,
         'heartbeat': driver_internal_info.get('agent_last_heartbeat')})
    driver_internal_info['agent_last_heartbeat'] = int(time.time())
    try:
        driver_internal_info['agent_url'] = kwargs['agent_url']
    except __HOLE__:
        raise exception.MissingParameterValue(_('For heartbeat operation, '
                                                '"agent_url" must be '
                                                'specified.'))

    node.driver_internal_info = driver_internal_info
    node.save()

    # Async call backs don't set error state on their own
    # TODO(jimrollenhagen) improve error messages here
    msg = _('Failed checking if deploy is done.')
    try:
        if node.maintenance:
            # this shouldn't happen often, but skip the rest if it does.
            LOG.debug('Heartbeat from node %(node)s in maintenance mode; '
                      'not taking any action.', {'node': node.uuid})
            return
        elif (node.provision_state == states.DEPLOYWAIT and
              not self.deploy_has_started(task)):
            msg = _('Node failed to get image for deploy.')
            self.continue_deploy(task, **kwargs)
        elif (node.provision_state == states.DEPLOYWAIT and
              self.deploy_is_done(task)):
            msg = _('Node failed to move to active state.')
            self.reboot_to_instance(task, **kwargs)
        elif (node.provision_state == states.DEPLOYWAIT and
              self.deploy_has_started(task)):
            node.touch_provisioning()
        elif node.provision_state == states.CLEANWAIT:
            node.touch_provisioning()
            try:
                if not node.clean_step:
                    LOG.debug('Node %s just booted to start cleaning.',
                              node.uuid)
                    msg = _('Node failed to start the first cleaning '
                            'step.')
                    # First, cache the clean steps
                    self._refresh_clean_steps(task)
                    # Then set/verify node clean steps and start cleaning
                    manager_utils.set_node_cleaning_steps(task)
                    self.notify_conductor_resume_clean(task)
                else:
                    msg = _('Node failed to check cleaning progress.')
                    self.continue_cleaning(task, **kwargs)
            except exception.NoFreeConductorWorker:
                # waiting for the next heartbeat, node.last_error and
                # logging message is filled already via conductor's hook
                pass
    except Exception as e:
        err_info = {'node': node.uuid, 'msg': msg, 'e': e}
        last_error = _('Asynchronous exception for node %(node)s: '
                       '%(msg)s Exception: %(e)s') % err_info
        LOG.exception(last_error)
        if node.provision_state in (states.CLEANING, states.CLEANWAIT):
            manager_utils.cleaning_error_handler(task, last_error)
        elif node.provision_state in (states.DEPLOYING, states.DEPLOYWAIT):
            deploy_utils.set_failed_state(task, last_error)
|
KeyError
|
dataset/ETHPy150Open openstack/ironic/ironic/drivers/modules/agent_base_vendor.py/BaseAgentVendor.heartbeat
|
5,820 |
def _get_interfaces(self, inventory):
    interfaces = []
    try:
        interfaces = inventory['interfaces']
    except (__HOLE__, TypeError):
        raise exception.InvalidParameterValue(_(
            'Malformed network interfaces lookup: %s') % inventory)
    return interfaces
|
KeyError
|
dataset/ETHPy150Open openstack/ironic/ironic/drivers/modules/agent_base_vendor.py/BaseAgentVendor._get_interfaces
|
5,821 |
def _request(self, url, method, **kwargs):
    if self.timeout is not None:
        kwargs.setdefault('timeout', self.timeout)
    kwargs.setdefault('headers', kwargs.get('headers', {}))
    kwargs['headers']['User-Agent'] = self.user_agent

    try:
        if kwargs['body'] is 'json':
            kwargs['headers']['Accept'] = 'application/json'
            kwargs['headers']['Content-Type'] = 'application/json'
        elif kwargs['body'] is 'xml':
            kwargs['headers']['Accept'] = 'application/xml'
            kwargs['headers']['Content-Type'] = 'application/xml'
        elif kwargs['body'] is 'text':
            kwargs['headers']['Accept'] = 'text/plain'
            kwargs['headers']['Content-Type'] = 'text/plain'
        elif kwargs['body'] is 'binary':
            kwargs['headers']['Accept'] = 'application/octet-stream'
            kwargs['headers']['Content-Type'] = 'application/octet-stream'
        else:
            raise exceptions.UnsupportedRequestType()
    except KeyError:
        # Default if body type is unspecified is text/plain
        kwargs['headers']['Accept'] = 'text/plain'
        kwargs['headers']['Content-Type'] = 'text/plain'

    # Optionally verify if requested body type is supported
    try:
        if kwargs['body'] not in kwargs['supported_body_types']:
            raise exceptions.UnsupportedBodyType()
        else:
            del kwargs['supported_body_types']
    except __HOLE__:
        pass

    del kwargs['body']

    self.log.debug("{0} URL: {1}{2} - {3}"
                   .format(method, self.endpoint, url, str(kwargs)))

    resp = self.http.request(
        method,
        self.endpoint + url,
        **kwargs)

    if resp.text:
        try:
            if kwargs['headers']['Content-Type'] is 'application/json':
                body = json.loads(resp.text)
            elif kwargs['headers']['Content-Type'] is 'application/xml':
                body = etree.XML(resp.text)
            else:
                body = resp.text
        except ValueError:
            body = None
    else:
        body = None

    return resp, body
|
KeyError
|
dataset/ETHPy150Open dmsimard/python-cephclient/cephclient/client.py/CephClient._request
|
5,822 |
def log_wrapper(self):
    """
    Wrapper to set logging parameters for output
    """
    log = logging.getLogger('client.py')

    # Set the log format and log level
    try:
        debug = self.params["debug"]
        log.setLevel(logging.DEBUG)
    except __HOLE__:
        log.setLevel(logging.INFO)

    # Set the log format.
    stream = logging.StreamHandler()
    logformat = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%b %d %H:%M:%S')
    stream.setFormatter(logformat)
    log.addHandler(stream)

    return log
|
KeyError
|
dataset/ETHPy150Open dmsimard/python-cephclient/cephclient/client.py/CephClient.log_wrapper
|
5,823 |
def _clean (self, *filenames):
    if not filenames:
        filenames = self.temp_files
        self.temp_files = []
    log.info("removing: %s", string.join(filenames))
    for filename in filenames:
        try:
            os.remove(filename)
        except __HOLE__:
            pass

# XXX these ignore the dry-run flag: what to do, what to do? even if
# you want a dry-run build, you still need some sort of configuration
# info. My inclination is to make it up to the real config command to
# consult 'dry_run', and assume a default (minimal) configuration if
# true. The problem with trying to do it here is that you'd have to
# return either true or false from all the 'try' methods, neither of
# which is correct.

# XXX need access to the header search path and maybe default macros.
|
OSError
|
dataset/ETHPy150Open babble/babble/include/jython/Lib/distutils/command/config.py/config._clean
|
5,824 |
def check(validator, value, *args, **kwargs):
    """Call a validator and return True if it's valid, False otherwise.

    The first argument is the validator, the second a value. All other
    arguments are forwarded to the validator function.

    >>> check(is_valid_email, '[email protected]')
    True
    """
    try:
        validator(*args, **kwargs)(None, value)
    except __HOLE__:
        return False
    return True
|
ValidationError
|
dataset/ETHPy150Open IanLewis/kay/kay/utils/validators.py/check
|
5,825 |
@cacheit
def _simplify_delta(expr):
    """
    Rewrite a KroneckerDelta's indices in its simplest form.
    """
    from sympy.solvers import solve
    if isinstance(expr, KroneckerDelta):
        try:
            slns = solve(expr.args[0] - expr.args[1], dict=True)
            if slns and len(slns) == 1:
                return Mul(*[KroneckerDelta(*(key, value))
                             for key, value in slns[0].items()])
        except __HOLE__:
            pass
    return expr
|
NotImplementedError
|
dataset/ETHPy150Open sympy/sympy/sympy/concrete/delta.py/_simplify_delta
|
5,826 |
@cacheit
def deltaproduct(f, limit):
    """
    Handle products containing a KroneckerDelta.

    See Also
    ========

    deltasummation
    sympy.functions.special.tensor_functions.KroneckerDelta
    sympy.concrete.products.product
    """
    from sympy.concrete.products import product

    if ((limit[2] - limit[1]) < 0) == True:
        return S.One

    if not f.has(KroneckerDelta):
        return product(f, limit)

    if f.is_Add:
        # Identify the term in the Add that has a simple KroneckerDelta
        delta = None
        terms = []
        for arg in sorted(f.args, key=default_sort_key):
            if delta is None and _has_simple_delta(arg, limit[0]):
                delta = arg
            else:
                terms.append(arg)
        newexpr = f.func(*terms)
        k = Dummy("kprime", integer=True)
        if isinstance(limit[1], int) and isinstance(limit[2], int):
            result = deltaproduct(newexpr, limit) + sum([
                deltaproduct(newexpr, (limit[0], limit[1], ik - 1)) *
                delta.subs(limit[0], ik) *
                deltaproduct(newexpr, (limit[0], ik + 1, limit[2])) for ik in range(int(limit[1]), int(limit[2] + 1))]
            )
        else:
            result = deltaproduct(newexpr, limit) + deltasummation(
                deltaproduct(newexpr, (limit[0], limit[1], k - 1)) *
                delta.subs(limit[0], k) *
                deltaproduct(newexpr, (limit[0], k + 1, limit[2])),
                (k, limit[1], limit[2]),
                no_piecewise=_has_simple_delta(newexpr, limit[0])
            )
        return _remove_multiple_delta(result)

    delta, _ = _extract_delta(f, limit[0])

    if not delta:
        g = _expand_delta(f, limit[0])
        if f != g:
            from sympy import factor
            try:
                return factor(deltaproduct(g, limit))
            except __HOLE__:
                return deltaproduct(g, limit)
        return product(f, limit)

    from sympy import Eq
    c = Eq(limit[2], limit[1] - 1)
    return _remove_multiple_delta(f.subs(limit[0], limit[1])*KroneckerDelta(limit[2], limit[1])) + \
        S.One*_simplify_delta(KroneckerDelta(limit[2], limit[1] - 1))
|
AssertionError
|
dataset/ETHPy150Open sympy/sympy/sympy/concrete/delta.py/deltaproduct
|
5,827 |
def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses."""
i = 0
hist = [] # keep track of history
while resp.is_redirect:
prepared_request = req.copy()
if i > 0:
# Update history and keep track of redirects.
hist.append(resp)
new_hist = list(hist)
resp.history = new_hist
try:
resp.content # Consume socket so it can be released
except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
resp.raw.read(decode_content=False)
if i >= self.max_redirects:
raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
# Release the connection back into the pool.
resp.close()
url = resp.headers['location']
method = req.method
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(resp.url)
url = '%s:%s' % (parsed_rurl.scheme, url)
# The scheme should be lower case...
parsed = urlparse(url)
url = parsed.geturl()
# Facilitate relative 'location' headers, as allowed by RFC 7231.
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
# Compliant with RFC3986, we percent encode the url.
if not parsed.netloc:
url = urljoin(resp.url, requote_uri(url))
else:
url = requote_uri(url)
prepared_request.url = to_native_string(url)
# Cache the url, unless it redirects to itself.
if resp.is_permanent_redirect and req.url != prepared_request.url:
self.redirect_cache[req.url] = prepared_request.url
# http://tools.ietf.org/html/rfc7231#section-6.4.4
if (resp.status_code == codes.see_other and
method != 'HEAD'):
method = 'GET'
# Do what the browsers do, despite standards...
# First, turn 302s into GETs.
if resp.status_code == codes.found and method != 'HEAD':
method = 'GET'
# Second, if a POST is responded to with a 301, turn it into a GET.
# This bizarre behaviour is explained in Issue 1704.
if resp.status_code == codes.moved and method == 'POST':
method = 'GET'
prepared_request.method = method
# https://github.com/kennethreitz/requests/issues/1084
if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
if 'Content-Length' in prepared_request.headers:
del prepared_request.headers['Content-Length']
prepared_request.body = None
headers = prepared_request.headers
try:
del headers['Cookie']
except __HOLE__:
pass
# Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
# request, use the old one that we haven't yet touched.
extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
prepared_request._cookies.update(self.cookies)
prepared_request.prepare_cookies(prepared_request._cookies)
# Rebuild auth and proxy information.
proxies = self.rebuild_proxies(prepared_request, proxies)
self.rebuild_auth(prepared_request, resp)
# Override the original request.
req = prepared_request
resp = self.send(
req,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
allow_redirects=False,
**adapter_kwargs
)
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
i += 1
yield resp
|
KeyError
|
dataset/ETHPy150Open BergWerkGIS/QGIS-CKAN-Browser/CKAN-Browser/request/sessions.py/SessionRedirectMixin.resolve_redirects
|
5,828 |
def rebuild_proxies(self, prepared_request, proxies):
    """
    This method re-evaluates the proxy configuration by considering the
    environment variables. If we are redirected to a URL covered by
    NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
    proxy keys for this URL (in case they were stripped by a previous
    redirect).

    This method also replaces the Proxy-Authorization header where
    necessary.
    """
    headers = prepared_request.headers
    url = prepared_request.url
    scheme = urlparse(url).scheme
    new_proxies = proxies.copy() if proxies is not None else {}

    if self.trust_env and not should_bypass_proxies(url):
        environ_proxies = get_environ_proxies(url)
        proxy = environ_proxies.get(scheme)
        if proxy:
            new_proxies.setdefault(scheme, environ_proxies[scheme])

    if 'Proxy-Authorization' in headers:
        del headers['Proxy-Authorization']

    try:
        username, password = get_auth_from_url(new_proxies[scheme])
    except __HOLE__:
        username, password = None, None

    if username and password:
        headers['Proxy-Authorization'] = _basic_auth_str(username, password)

    return new_proxies
|
KeyError
|
dataset/ETHPy150Open BergWerkGIS/QGIS-CKAN-Browser/CKAN-Browser/request/sessions.py/SessionRedirectMixin.rebuild_proxies
|
5,829 |
def ask_key():
if Terminal is None:
try:
value = raw_input(
"Type the decryption key, "
"without spaces or other special characters: ")
return value.strip().decode('hex')
except __HOLE__:
return None
sys.__stdin__ = sys.stdin
t = Terminal()
width = 64
lwidth = 16
movement = {
'KEY_LEFT': -1,
'KEY_RIGHT': 1,
'KEY_UP': -lwidth,
'KEY_DOWN': lwidth,
'KEY_HOME': -width,
'KEY_END': width,
'KEY_BACKSPACE': -1
}
value = [" "] * width
rows = ["B", "C", "D", "E"]
def valid():
return " " not in value
with t.cbreak():
val = None
validvalue = None
pos = 0
nrows = width/lwidth
print "Type the decryption key, or press 'q' to cancel"
while val not in (u'q', u'Q',):
for i in range(width/lwidth):
s = lwidth / 2
print rows[i],
print t.underline("".join(value[2*i*s:(2*i+1)*s])),
print t.underline("".join(value[(2*i+1)*s:(2*i+2)*s]))
if valid():
print "key valid, press enter to accept"
else:
print t.clear_eol
if validvalue is not None:
break
sys.stdout.write(t.move_up * (nrows+1))
with t.location():
y = pos/lwidth
x = pos % lwidth
sys.stdout.write(t.move_down * y)
sys.stdout.write(t.move_right * (x + 2 + (2*x)/lwidth))
sys.stdout.flush()
val = t.inkey(timeout=5)
if not val:
pass
elif val.is_sequence:
if val.name in movement:
newpos = pos + movement[val.name]
pos = min(width-1, max(0, newpos))
if val.name in ('KEY_DELETE', 'KEY_BACKSPACE'):
value[pos:width] = value[pos+1:width] + [" "]
elif val.name is 'KEY_ENTER' and valid():
validvalue = value
elif val.lower() in "0123456789abcdef":
if pos < width:
value[pos] = val.upper()
if pos < width - 1:
pos += 1
if validvalue is not None:
return ("".join(validvalue)).decode('hex')
return validvalue
|
TypeError
|
dataset/ETHPy150Open longaccess/longaccess-client/lacli/certinput.py/ask_key
|
5,830 |
def _inTxn(func):
    def wrapped(self, *args, **kwargs):
        return self._callInTransaction(func, self, *args, **kwargs)

    if hasattr(func, '__name__'):
        try:
            wrapped.__name__ = func.__name__[4:]
        except __HOLE__:
            pass
    if hasattr(func, '__doc__'):
        wrapped.__doc__ = func.__doc__

    return wrapped
|
TypeError
|
dataset/ETHPy150Open CollabQ/CollabQ/openid/store/sqlstore.py/_inTxn
|
5,831 |
def _getSQL(self, sql_name):
    try:
        return self._statement_cache[sql_name]
    except __HOLE__:
        sql = getattr(self, sql_name)
        sql %= self._table_names
        self._statement_cache[sql_name] = sql
        return sql
|
KeyError
|
dataset/ETHPy150Open CollabQ/CollabQ/openid/store/sqlstore.py/SQLStore._getSQL
|
5,832 |
def blobEncode(self, blob):
    try:
        from psycopg2 import Binary
    except __HOLE__:
        from psycopg import Binary
    return Binary(blob)
|
ImportError
|
dataset/ETHPy150Open CollabQ/CollabQ/openid/store/sqlstore.py/PostgreSQLStore.blobEncode
|
5,833 |
def get(self, orig_key):
    """Get cache entry for key, or return None."""
    resp = requests.Response()

    key = self._clean_key(orig_key)
    path = os.path.join(self.cache_dir, key)

    try:
        with open(path, 'rb') as f:
            # read lines one at a time
            while True:
                line = f.readline().decode('utf8').strip('\r\n')
                # set headers
                if self.check_last_modified and re.search("last-modified", line, flags=re.I):
                    # line contains last modified header
                    head_resp = requests.head(orig_key)
                    try:
                        new_lm = head_resp.headers['last-modified']
                        old_lm = line[string.find(line, ':') + 1:].strip()
                        if old_lm != new_lm:
                            # last modified timestamps don't match, need to download again
                            return None
                    except KeyError:
                        # no last modified header present, so redownload
                        return None
                header = self._header_re.match(line)
                if header:
                    resp.headers[header.group(1)] = header.group(2)
                else:
                    break
            # everything left is the real content
            resp._content = f.read()

        # status & encoding will be in headers, but are faked
        # need to split spaces out of status to get code (e.g. '200 OK')
        resp.status_code = int(resp.headers.pop('status').split(' ')[0])
        resp.encoding = resp.headers.pop('encoding')
        resp.url = resp.headers.get('content-location', orig_key)
        # TODO: resp.request = request
        return resp
    except __HOLE__:
        return None
|
IOError
|
dataset/ETHPy150Open jamesturk/scrapelib/scrapelib/cache.py/FileCache.get
|
5,834 |
def load_marathon_config(path=PATH_TO_MARATHON_CONFIG):
    try:
        with open(path) as f:
            return MarathonConfig(json.load(f), path)
    except __HOLE__ as e:
        raise PaastaNotConfiguredError("Could not load marathon config file %s: %s" % (e.filename, e.strerror))
|
IOError
|
dataset/ETHPy150Open Yelp/paasta/paasta_tools/marathon_tools.py/load_marathon_config
|
5,835 |
def get_url(self):
    """Get the Marathon API url

    :returns: The Marathon API endpoint"""
    try:
        return self['url']
    except __HOLE__:
        raise MarathonNotConfigured('Could not find marathon url in system marathon config: %s' % self.path)
|
KeyError
|
dataset/ETHPy150Open Yelp/paasta/paasta_tools/marathon_tools.py/MarathonConfig.get_url
|
5,836 |
def get_username(self):
    """Get the Marathon API username

    :returns: The Marathon API username"""
    try:
        return self['user']
    except __HOLE__:
        raise MarathonNotConfigured('Could not find marathon user in system marathon config: %s' % self.path)
|
KeyError
|
dataset/ETHPy150Open Yelp/paasta/paasta_tools/marathon_tools.py/MarathonConfig.get_username
|
5,837 |
def get_password(self):
    """Get the Marathon API password

    :returns: The Marathon API password"""
    try:
        return self['password']
    except __HOLE__:
        raise MarathonNotConfigured('Could not find marathon password in system marathon config: %s' % self.path)
|
KeyError
|
dataset/ETHPy150Open Yelp/paasta/paasta_tools/marathon_tools.py/MarathonConfig.get_password
|
5,838 |
def get_marathon_services_running_here_for_nerve(cluster, soa_dir):
    if not cluster:
        try:
            cluster = load_system_paasta_config().get_cluster()
        # In the cases where there is *no* cluster or in the case
        # where there isn't a Paasta configuration file at *all*, then
        # there must be no marathon services running here, so we catch
        # these custom exceptions and return [].
        except (PaastaNotConfiguredError):
            return []
    # When a cluster is defined in mesos, let's iterate through marathon services
    marathon_services = marathon_services_running_here()
    nerve_list = []
    for name, instance, port in marathon_services:
        try:
            namespace = read_namespace_for_service_instance(name, instance, cluster, soa_dir)
            nerve_dict = load_service_namespace_config(name, namespace, soa_dir)
            if not nerve_dict.is_in_smartstack():
                continue
            nerve_dict['port'] = port
            nerve_name = compose_job_id(name, namespace)
            nerve_list.append((nerve_name, nerve_dict))
        except __HOLE__:
            continue  # SOA configs got deleted for this app, it'll get cleaned up
    return nerve_list
|
KeyError
|
dataset/ETHPy150Open Yelp/paasta/paasta_tools/marathon_tools.py/get_marathon_services_running_here_for_nerve
|
5,839 |
def register_service(name, regtype, port):
def register_callback(sdRef, flags, errorCode, name, regtype, domain):
if errorCode == pybonjour.kDNSServiceErr_NoError:
logger.debug('Registered bonjour service %s.%s', name, regtype)
record = pybonjour.TXTRecord(appletv.DEVICE_INFO)
service = pybonjour.DNSServiceRegister(name = name,
regtype = regtype,
port = port,
txtRecord = record,
callBack = register_callback)
try:
try:
while True:
ready = select.select([service], [], [])
if service in ready[0]:
pybonjour.DNSServiceProcessResult(service)
except __HOLE__:
pass
finally:
service.close()
|
KeyboardInterrupt
|
dataset/ETHPy150Open pascalw/Airplayer/airplayer/bonjour.py/register_service
|
5,840 |
def send_message(self, message):
headers = self._headers.copy()
headers['Content-Length'] = len(message)
self._setup_opener()
request = Request(self.endpoint, data=message, headers=headers)
try:
# install_opener is ignored in > 2.7.9 when an SSLContext is passed to urlopen, so
# we'll have to call open manually with our stored opener chain
response = self.opener.open(request, timeout=self.timeout)
# Version 1.1 of WinRM adds the namespaces in the document instead
# of the envelope so we have to
# add them ourselves here. This should have no effect on version 2.
response_text = response.read()
return response_text
# doc = ElementTree.fromstring(response.read())
# Ruby
# doc = Nokogiri::XML(resp.http_body.content)
# doc.collect_namespaces.each_pair do |k,v|
# doc.root.add_namespace((k.split(/:/).last),v)
# unless doc.namespaces.has_key?(k)
# end
# return doc
# return doc
except __HOLE__ as ex:
if ex.code == 401:
raise UnauthorizedError(transport='plaintext', message=ex.msg)
response_text = ex.read()
# Per http://msdn.microsoft.com/en-us/library/cc251676.aspx rule 3,
# should handle this 500 error and retry receiving command output.
if 'http://schemas.microsoft.com/wbem/wsman/1/windows/shell/Receive' in message and 'Code="2150858793"' in response_text: # NOQA
# TODO raise TimeoutError here instead of just return text
return response_text
error_message = 'Bad HTTP response returned from server. ' \
' Code {0}'.format(ex.code)
if ex.msg:
error_message += ', {0}'.format(ex.msg)
raise WinRMTransportError('http', error_message)
except URLError as ex:
raise WinRMTransportError('http', ex.reason)
|
HTTPError
|
dataset/ETHPy150Open diyan/pywinrm/winrm/transport.py/HttpPlaintext.send_message
|
5,841 |
def send_message(self, message):
# TODO current implementation does negotiation on each HTTP request
# which is not efficient
# TODO support kerberos session with message encryption
krb_ticket = KerberosTicket(self.krb_service)
headers = {'Authorization': krb_ticket.auth_header,
'Connection': 'Keep-Alive',
'Content-Type': 'application/soap+xml;charset=UTF-8',
'User-Agent': 'Python WinRM client'}
request = Request(self.endpoint, data=message, headers=headers)
try:
urlopen_kwargs = dict(timeout=self.timeout)
# it's an error to pass context to non-SSLContext aware urlopen (pre-2.7.9), so don't...
if(self.scheme=='https' and self.sslcontext):
urlopen_kwargs['context'] = self.sslcontext
response = urlopen(request, **urlopen_kwargs)
krb_ticket.verify_response(response.headers['WWW-Authenticate'])
response_text = response.read()
return response_text
except __HOLE__ as ex:
response_text = ex.read()
# Per http://msdn.microsoft.com/en-us/library/cc251676.aspx rule 3,
# should handle this 500 error and retry receiving command output.
if 'http://schemas.microsoft.com/wbem/wsman/1/windows/shell/Receive' in message and 'Code="2150858793"' in response_text: # NOQA
return response_text
# if ex.code == 401 and ex.headers['WWW-Authenticate'] == \
# 'Negotiate, Basic realm="WSMAN"':
error_message = 'Kerberos-based authentication failed. ' \
'Code {0}'.format(ex.code)
if ex.msg:
error_message += ', {0}'.format(ex.msg)
raise WinRMTransportError('kerberos', error_message)
except URLError as ex:
raise WinRMTransportError('kerberos', ex.reason)
|
HTTPError
|
dataset/ETHPy150Open diyan/pywinrm/winrm/transport.py/HttpKerberos.send_message
|
5,842 |
def objGetChildren(self, obj):
"""Return dictionary with attributes or contents of object."""
# print 'objGetChildren ', obj
otype = type(obj)
d = {}
if (obj is None or obj is False or obj is True):
return d
self.ntop = 0
if isinstance(obj, SymbolTable) or isinstance(obj, Group):
d = obj._members()
if isinstance(obj, COMMONTYPES):
d = obj
elif isinstance(obj, h5py.Group):
try:
for key, val in obj.items():
d[key] = val
except __HOLE__:
pass
elif isinstance(obj, h5py.Dataset):
d = obj
elif isinstance(obj, (list, tuple)):
for n in range(len(obj)):
key = '[' + str(n) + ']'
d[key] = obj[n]
elif (not isinstance(obj, wx.Object)
and not hasattr(obj, '__call__')):
d = self.GetAttr(obj)
return d
|
AttributeError
|
dataset/ETHPy150Open xraypy/xraylarch/lib/wxlib/larchfilling.py/FillingTree.objGetChildren
|
5,843 |
def get_installed(method_filter=None):
if method_filter is None:
method_filter = ["max-product", 'ad3', 'qpbo', 'ogm', 'lp']
installed = []
unary = np.zeros((1, 1))
pw = np.zeros((1, 1))
edges = np.empty((0, 2), dtype=np.int)
for method in method_filter:
try:
inference_dispatch(unary, pw, edges, inference_method=method)
installed.append(method)
except __HOLE__:
pass
return installed
|
ImportError
|
dataset/ETHPy150Open pystruct/pystruct/pystruct/inference/inference_methods.py/get_installed
|
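A brief usage sketch for the get_installed helper above, added for illustration only; it assumes pystruct's inference backends are importable at runtime and that the masked exception is restored as the ImportError label shown.
# Report which of the requested inference backends import cleanly on this machine.
available = get_installed(['max-product', 'lp', 'ad3'])
print(available)  # a subset of the requested methods, e.g. ['max-product', 'lp']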
5,844 |
def get_extractor(coarse, fine):
log.debug("getting coarse extractor for '{}'".format(coarse))
# http://stackoverflow.com/questions/301134/dynamic-module-import-in-python
try:
coarse_extractor = importlib.import_module(__package__+'.'+question_types[coarse])
except (ImportError, __HOLE__):
log.warn("Extractor for coarse type '{}' not implemented".format(coarse))
raise NoExtractorError(coarse)
return coarse_extractor.get_extractor(coarse, fine)
|
KeyError
|
dataset/ETHPy150Open jcelliott/inquire/inquire/extraction/extractors.py/get_extractor
|
5,845 |
def parse_graminit_h(self, filename):
"""Parse the .h file written by pgen. (Internal)
This file is a sequence of #define statements defining the
nonterminals of the grammar as numbers. We build two tables
mapping the numbers to names and back.
"""
try:
f = open(filename)
except __HOLE__, err:
print "Can't open %s: %s" % (filename, err)
return False
self.symbol2number = {}
self.number2symbol = {}
lineno = 0
for line in f:
lineno += 1
mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
if not mo and line.strip():
print "%s(%s): can't parse %s" % (filename, lineno,
line.strip())
else:
symbol, number = mo.groups()
number = int(number)
assert symbol not in self.symbol2number
assert number not in self.number2symbol
self.symbol2number[symbol] = number
self.number2symbol[number] = symbol
return True
|
IOError
|
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/lib2to3/pgen2/conv.py/Converter.parse_graminit_h
|
5,846 |
def parse_graminit_c(self, filename):
"""Parse the .c file written by pgen. (Internal)
The file looks as follows. The first two lines are always this:
#include "pgenheaders.h"
#include "grammar.h"
After that come four blocks:
1) one or more state definitions
2) a table defining dfas
3) a table defining labels
4) a struct defining the grammar
A state definition has the following form:
- one or more arc arrays, each of the form:
static arc arcs_<n>_<m>[<k>] = {
{<i>, <j>},
...
};
- followed by a state array, of the form:
static state states_<s>[<t>] = {
{<k>, arcs_<n>_<m>},
...
};
"""
try:
f = open(filename)
except __HOLE__, err:
print "Can't open %s: %s" % (filename, err)
return False
# The code below essentially uses f's iterator-ness!
lineno = 0
# Expect the two #include lines
lineno, line = lineno+1, f.next()
assert line == '#include "pgenheaders.h"\n', (lineno, line)
lineno, line = lineno+1, f.next()
assert line == '#include "grammar.h"\n', (lineno, line)
# Parse the state definitions
lineno, line = lineno+1, f.next()
allarcs = {}
states = []
while line.startswith("static arc "):
while line.startswith("static arc "):
mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
line)
assert mo, (lineno, line)
n, m, k = map(int, mo.groups())
arcs = []
for _ in range(k):
lineno, line = lineno+1, f.next()
mo = re.match(r"\s+{(\d+), (\d+)},$", line)
assert mo, (lineno, line)
i, j = map(int, mo.groups())
arcs.append((i, j))
lineno, line = lineno+1, f.next()
assert line == "};\n", (lineno, line)
allarcs[(n, m)] = arcs
lineno, line = lineno+1, f.next()
mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
assert mo, (lineno, line)
s, t = map(int, mo.groups())
assert s == len(states), (lineno, line)
state = []
for _ in range(t):
lineno, line = lineno+1, f.next()
mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
assert mo, (lineno, line)
k, n, m = map(int, mo.groups())
arcs = allarcs[n, m]
assert k == len(arcs), (lineno, line)
state.append(arcs)
states.append(state)
lineno, line = lineno+1, f.next()
assert line == "};\n", (lineno, line)
lineno, line = lineno+1, f.next()
self.states = states
# Parse the dfas
dfas = {}
mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
assert mo, (lineno, line)
ndfas = int(mo.group(1))
for i in range(ndfas):
lineno, line = lineno+1, f.next()
mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
line)
assert mo, (lineno, line)
symbol = mo.group(2)
number, x, y, z = map(int, mo.group(1, 3, 4, 5))
assert self.symbol2number[symbol] == number, (lineno, line)
assert self.number2symbol[number] == symbol, (lineno, line)
assert x == 0, (lineno, line)
state = states[z]
assert y == len(state), (lineno, line)
lineno, line = lineno+1, f.next()
mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
assert mo, (lineno, line)
first = {}
rawbitset = eval(mo.group(1))
for i, c in enumerate(rawbitset):
byte = ord(c)
for j in range(8):
if byte & (1<<j):
first[i*8 + j] = 1
dfas[number] = (state, first)
lineno, line = lineno+1, f.next()
assert line == "};\n", (lineno, line)
self.dfas = dfas
# Parse the labels
labels = []
lineno, line = lineno+1, f.next()
mo = re.match(r"static label labels\[(\d+)\] = {$", line)
assert mo, (lineno, line)
nlabels = int(mo.group(1))
for i in range(nlabels):
lineno, line = lineno+1, f.next()
mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
assert mo, (lineno, line)
x, y = mo.groups()
x = int(x)
if y == "0":
y = None
else:
y = eval(y)
labels.append((x, y))
lineno, line = lineno+1, f.next()
assert line == "};\n", (lineno, line)
self.labels = labels
# Parse the grammar struct
lineno, line = lineno+1, f.next()
assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
lineno, line = lineno+1, f.next()
mo = re.match(r"\s+(\d+),$", line)
assert mo, (lineno, line)
ndfas = int(mo.group(1))
assert ndfas == len(self.dfas)
lineno, line = lineno+1, f.next()
assert line == "\tdfas,\n", (lineno, line)
lineno, line = lineno+1, f.next()
mo = re.match(r"\s+{(\d+), labels},$", line)
assert mo, (lineno, line)
nlabels = int(mo.group(1))
assert nlabels == len(self.labels), (lineno, line)
lineno, line = lineno+1, f.next()
mo = re.match(r"\s+(\d+)$", line)
assert mo, (lineno, line)
start = int(mo.group(1))
assert start in self.number2symbol, (lineno, line)
self.start = start
lineno, line = lineno+1, f.next()
assert line == "};\n", (lineno, line)
try:
lineno, line = lineno+1, f.next()
except StopIteration:
pass
else:
assert 0, (lineno, line)
|
IOError
|
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/lib2to3/pgen2/conv.py/Converter.parse_graminit_c
|
5,847 |
def unexpose(self, name):
"""Removes an exposed method
@param name: (string)
@return None
"""
#check the method dictionary first
if name in self.exposedMethods:
del self.exposedMethods[name]
info = None #locally scoped
try: #make sure the documentation is up to date
for info in self.exposedMethodInfo:
if info[0] != name: continue
raise StopIteration('Found Method')
except __HOLE__:
self.exposedMethodInfo.remove(info)
|
StopIteration
|
dataset/ETHPy150Open OrbitzWorldwide/droned/droned/lib/droned/models/action.py/AdminAction.unexpose
|
5,848 |
@classmethod
def parse_ansi_colors(cls, source):
# note: strips all control sequences, even if not SGRs.
colors = []
plain = ''
last = 0
curclr = 0
for match in ansi_seq.finditer(source):
prevsegment = source[last:match.start()]
plain += prevsegment
colors.extend([curclr] * len(prevsegment))
if match.group('final') == ansi_cmd_SGR:
try:
curclr = cls.parse_sgr_param(curclr, match.group('params'))
except __HOLE__:
pass
last = match.end()
prevsegment = source[last:]
plain += prevsegment
colors.extend([curclr] * len(prevsegment))
return ''.join(plain), colors
|
ValueError
|
dataset/ETHPy150Open francelabs/datafari/cassandra/pylib/cqlshlib/test/ansi_colors.py/ColoredText.parse_ansi_colors
|
5,849 |
def __str__(self):
if 'formadmin' in settings.INSTALLED_APPS:
from formadmin.forms import as_django_admin
try:
return as_django_admin(self)
except __HOLE__:
pass
return super(UserSuForm, self).__str__()
|
ImportError
|
dataset/ETHPy150Open adamcharnock/django-su/django_su/forms.py/UserSuForm.__str__
|
5,850 |
def parse_revision_spec(self, revisions=[]):
"""Parses the given revision spec.
The 'revisions' argument is a list of revisions as specified by the
user. Items in the list do not necessarily represent a single revision,
since the user can use SCM-native syntaxes such as "r1..r2" or "r1:r2".
SCMTool-specific overrides of this method are expected to deal with
such syntaxes.
This will return a dictionary with the following keys:
'base': A revision to use as the base of the resulting diff.
'tip': A revision to use as the tip of the resulting diff.
These will be used to generate the diffs to upload to Review Board (or
print). The diff for review will include the changes in (base, tip].
If a single revision is passed in, this will return the parent of that
revision for 'base' and the passed-in revision for 'tip'.
If zero revisions are passed in, this will return the most recently
checked-out revision for 'base' and a special string indicating the
working copy for 'tip'.
The SVN SCMClient never fills in the 'parent_base' key. Users of
other patch-stack tools who want to use parent diffs with SVN
will have to generate their diffs by hand.
"""
n_revisions = len(revisions)
if n_revisions == 1 and ':' in revisions[0]:
revisions = revisions[0].split(':')
n_revisions = len(revisions)
if n_revisions == 0:
# Most recent checked-out revision -- working copy
# TODO: this should warn about mixed-revision working copies that
# affect the list of files changed (see bug 2392).
return {
'base': 'BASE',
'tip': self.REVISION_WORKING_COPY,
}
elif n_revisions == 1:
# Either a numeric revision (n-1:n) or a changelist
revision = revisions[0]
try:
revision = self._convert_symbolic_revision(revision)
return {
'base': revision - 1,
'tip': revision,
}
except __HOLE__:
# It's not a revision--let's try a changelist. This only makes
# sense if we have a working copy.
if not self.options.repository_url:
status = self._run_svn(['status', '--cl', str(revision),
'--ignore-externals', '--xml'],
results_unicode=False)
cl = ElementTree.fromstring(status).find('changelist')
if cl is not None:
# TODO: this should warn about mixed-revision working
# copies that affect the list of files changed (see
# bug 2392).
return {
'base': 'BASE',
'tip': self.REVISION_CHANGELIST_PREFIX + revision
}
raise InvalidRevisionSpecError(
'"%s" does not appear to be a valid revision or '
'changelist name' % revision)
elif n_revisions == 2:
# Diff between two numeric revisions
try:
return {
'base': self._convert_symbolic_revision(revisions[0]),
'tip': self._convert_symbolic_revision(revisions[1]),
}
except ValueError:
raise InvalidRevisionSpecError(
'Could not parse specified revisions: %s' % revisions)
else:
raise TooManyRevisionsError
|
ValueError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/clients/svn.py/SVNClient.parse_revision_spec
|
5,851 |
def _convert_symbolic_revision(self, revision):
command = ['-r', six.text_type(revision), '-l', '1']
if getattr(self.options, 'repository_url', None):
command.append(self.options.repository_url)
log = self.svn_log_xml(command)
if log is not None:
try:
root = ElementTree.fromstring(log)
except __HOLE__ as e:
# _convert_symbolic_revision() nominally raises a ValueError to
# indicate any failure to determine the revision number from
# the log entry. Here, we explicitly catch a ValueError from
# ElementTree and raise a generic SCMError so that this
# specific failure to parse the XML log output is
# differentiated from the nominal case.
raise SCMError('Failed to parse svn log - %s.' % e)
logentry = root.find('logentry')
if logentry is not None:
return int(logentry.attrib['revision'])
raise ValueError
|
ValueError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/clients/svn.py/SVNClient._convert_symbolic_revision
|
5,852 |
def history_scheduled_with_commit(self, changelist, include_files,
exclude_patterns):
""" Method to find if any file status has '+' in 4th column"""
status_cmd = ['status', '-q', '--ignore-externals']
if changelist:
status_cmd.extend(['--changelist', changelist])
if include_files:
status_cmd.extend(include_files)
for p in self._run_svn(status_cmd, split_lines=True,
results_unicode=False):
try:
if p[3] == b'+':
if exclude_patterns:
# We found a file with history, but first we must make
# sure that it is not being excluded.
filename = p[8:].rstrip()
should_exclude = filename_match_any_patterns(
filename,
exclude_patterns,
self.get_repository_info().base_path)
if not should_exclude:
return True
else:
return True
except __HOLE__:
# This may be some other output, or just doesn't have the
# data we're looking for. Move along.
pass
return False
|
IndexError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/clients/svn.py/SVNClient.history_scheduled_with_commit
|
5,853 |
def apply_patch(self, patch_file, base_path, base_dir, p=None,
revert=False):
"""Apply the patch and return a PatchResult indicating its success."""
if not is_valid_version(self.subversion_client_version,
self.PATCH_MIN_VERSION):
raise MinimumVersionError(
'Using "rbt patch" with the SVN backend requires at least '
'svn 1.7.0')
if base_dir and not base_dir.startswith(base_path):
# The patch was created in either a higher level directory or a
# directory not under this one. We should exclude files from the
# patch that are not under this directory.
excluded, empty = self._exclude_files_not_in_tree(patch_file,
base_path)
if excluded:
logging.warn('This patch was generated in a different '
'directory. To prevent conflicts, all files '
'not under the current directory have been '
'excluded. To apply all files in this '
'patch, apply this patch from the %s directory.'
% base_dir)
if empty:
logging.warn('All files were excluded from the patch.')
cmd = ['patch']
p_num = p or self._get_p_number(base_path, base_dir)
if p_num >= 0:
cmd.append('--strip=%s' % p_num)
if revert:
cmd.append('--reverse-diff')
cmd.append(six.text_type(patch_file))
rc, patch_output = self._run_svn(cmd, return_error_code=True)
if self.supports_empty_files():
try:
with open(patch_file, 'rb') as f:
patch = f.read()
except __HOLE__ as e:
logging.error('Unable to read file %s: %s', patch_file, e)
return
self.apply_patch_for_empty_files(patch, p_num, revert=revert)
# TODO: What is svn's equivalent of a garbage patch message?
return PatchResult(applied=(rc == 0), patch_output=patch_output)
|
IOError
|
dataset/ETHPy150Open reviewboard/rbtools/rbtools/clients/svn.py/SVNClient.apply_patch
|
5,854 |
def trim_dict(
data,
max_dict_bytes,
percent=50.0,
stepper_size=10,
replace_with='VALUE_TRIMMED',
is_msgpacked=False,
use_bin_type=False):
'''
Takes a dictionary and iterates over its keys, looking for
large values and replacing them with a trimmed string.
If after the first pass over dictionary keys, the dictionary
is not sufficiently small, the stepper_size will be increased
and the dictionary will be rescanned. This allows for progressive
scanning, removing large items first and only making additional
passes for smaller items if necessary.
This function uses msgpack to calculate the size of the dictionary
in question. While this might seem like unnecessary overhead, a
data structure in python must be serialized in order for sys.getsizeof()
to accurately return the items referenced in the structure.
Ex:
>>> salt.utils.trim_dict({'a': 'b', 'c': 'x' * 10000}, 100)
{'a': 'b', 'c': 'VALUE_TRIMMED'}
To improve performance, it is advisable to pass in msgpacked
data structures instead of raw dictionaries. If a msgpack
structure is passed in, it will not be unserialized unless
necessary.
If a msgpack is passed in, it will be repacked if necessary
before being returned.
:param use_bin_type: Set this to true if "is_msgpacked=True"
and the msgpack data has been encoded
with "use_bin_type=True". This also means
that the msgpack data should be decoded with
"encoding='utf-8'".
'''
serializer = salt.payload.Serial({'serial': 'msgpack'})
if is_msgpacked:
dict_size = sys.getsizeof(data)
else:
dict_size = sys.getsizeof(serializer.dumps(data))
if dict_size > max_dict_bytes:
if is_msgpacked:
if use_bin_type:
data = serializer.loads(data, encoding='utf-8')
else:
data = serializer.loads(data)
while True:
percent = float(percent)
max_val_size = float(max_dict_bytes * (percent / 100))
try:
for key in data:
if sys.getsizeof(data[key]) > max_val_size:
data[key] = replace_with
percent = percent - stepper_size
max_val_size = float(max_dict_bytes * (percent / 100))
if use_bin_type:
dump_data = serializer.dumps(data, use_bin_type=True)
else:
dump_data = serializer.dumps(data)
cur_dict_size = sys.getsizeof(dump_data)
if cur_dict_size < max_dict_bytes:
if is_msgpacked: # Repack it
return dump_data
else:
return data
elif max_val_size == 0:
if is_msgpacked:
return dump_data
else:
return data
except __HOLE__:
pass
if is_msgpacked:
if use_bin_type:
return serializer.dumps(data, use_bin_type=True)
else:
return serializer.dumps(data)
else:
return data
else:
return data
|
ValueError
|
dataset/ETHPy150Open saltstack/salt/salt/utils/dicttrim.py/trim_dict
|
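An illustrative call of trim_dict above, mirroring its own docstring example; it is not part of the original record and assumes the masked exception is the ValueError label and that salt.payload is available.
# Oversized values are replaced once the serialized dict exceeds max_dict_bytes.
data = {'a': 'b', 'c': 'x' * 10000}
print(trim_dict(data, max_dict_bytes=100))
# expected shape: {'a': 'b', 'c': 'VALUE_TRIMMED'}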
5,855 |
def update_raid_info(node, raid_config):
"""Update the node's information based on the RAID config.
This method updates the node's information to make use of the configured
RAID for scheduling purposes (through properties['capabilities'] and
properties['local_gb']) and deploying purposes (using
properties['root_device']).
:param node: a node object
:param raid_config: The dictionary containing the current RAID
configuration.
:raises: InvalidParameterValue, if 'raid_config' has more than
one root volume or if node.properties['capabilities'] is malformed.
"""
current = raid_config.copy()
current['last_updated'] = str(datetime.datetime.utcnow())
node.raid_config = current
# Current RAID configuration can have 0 or 1 root volumes. If there
# are > 1 root volumes, then it's invalid. We check for this condition
# while accepting target RAID configuration, but this check is just in
# place, if some drivers pass > 1 root volumes to this method.
root_logical_disk = _check_and_return_root_volumes(raid_config)
if root_logical_disk:
# Update local_gb and root_device_hint
properties = node.properties
properties['local_gb'] = root_logical_disk['size_gb']
try:
properties['root_device'] = (
root_logical_disk['root_device_hint'])
except __HOLE__:
pass
properties['capabilities'] = utils.get_updated_capabilities(
properties.get('capabilities', ''),
{'raid_level': root_logical_disk['raid_level']})
node.properties = properties
node.save()
|
KeyError
|
dataset/ETHPy150Open openstack/ironic/ironic/common/raid.py/update_raid_info
|
5,856 |
def get_job_url( self, str_job_name ):
try:
job_dict = self.get_job_dict()
return job_dict[ str_job_name ]
except __HOLE__:
#noinspection PyUnboundLocalVariable
all_views = ", ".join( job_dict.keys() )
raise KeyError("Job %s is not known - available: %s" % ( str_job_name, all_views ) )
|
KeyError
|
dataset/ETHPy150Open ramonvanalteren/jenkinsapi/jenkinsapi/view.py/View.get_job_url
|
5,857 |
@property
def docusign_parser(self):
"""Parser for DocuSign's request.
This is a shortcut property using a cache.
If you want to adapt the implementation, consider overriding
:meth:`get_docusign_parser`.
"""
try:
return self._docusign_parser
except __HOLE__:
self._docusign_parser = self.get_docusign_parser()
return self._docusign_parser
|
AttributeError
|
dataset/ETHPy150Open novafloss/django-docusign/django_docusign/views.py/SignatureCallbackView.docusign_parser
|
5,858 |
@property
def signature(self):
"""Signature model instance.
This is a shortcut property using a cache.
If you want to adapt the implementation, consider overriding
:meth:`get_signature`.
"""
try:
return self._signature
except __HOLE__:
self._signature = self.get_signature()
return self._signature
|
AttributeError
|
dataset/ETHPy150Open novafloss/django-docusign/django_docusign/views.py/SignatureCallbackView.signature
|
5,859 |
@property
def signature_backend(self):
"""Signature backend instance.
This is a shortcut property using a cache.
If you want to adapt the implementation, consider overriding
:meth:`get_signature_backend`.
"""
try:
return self._signature_backend
except __HOLE__:
self._signature_backend = self.get_signature_backend()
return self._signature_backend
|
AttributeError
|
dataset/ETHPy150Open novafloss/django-docusign/django_docusign/views.py/SignatureCallbackView.signature_backend
|
5,860 |
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_FORM_DATA, NOT_LOGGED_IN,
PERMISSION_DENIED, TOKEN_GENERATION_FAILED)
@webapi_request_fields(
required={
'note': {
'type': six.text_type,
'description': 'The note explaining the purpose of '
'this token.',
},
'policy': {
'type': six.text_type,
'description': 'The token access policy, encoded as a '
'JSON string.',
},
},
allow_unknown=True
)
def create(self, request, note, policy, extra_fields={},
local_site_name=None, *args, **kwargs):
"""Registers a new API token.
The token value will be generated and returned in the payload.
Callers are expected to provide a note and a policy.
Note that this may, in theory, fail due to too many token collisions.
If that happens, please re-try the request.
"""
try:
user = resources.user.get_object(request, *args, **kwargs)
except __HOLE__:
return DOES_NOT_EXIST
if not self.has_list_access_permissions(request, *args, **kwargs):
return self.get_no_access_error(request)
try:
self._validate_policy(policy)
except ValueError as e:
return INVALID_FORM_DATA, {
'fields': {
'policy': six.text_type(e),
},
}
local_site = self._get_local_site(local_site_name)
try:
token = WebAPIToken.objects.generate_token(user,
note=note,
policy=policy,
local_site=local_site)
except WebAPITokenGenerationError as e:
return TOKEN_GENERATION_FAILED.with_message(six.text_type(e))
if extra_fields:
self.import_extra_data(token, token.extra_data, extra_fields)
token.save()
return 201, {
self.item_result_key: token,
}
|
ObjectDoesNotExist
|
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/api_token.py/APITokenResource.create
|
5,861 |
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_FORM_DATA, NOT_LOGGED_IN,
PERMISSION_DENIED)
@webapi_request_fields(
optional={
'note': {
'type': six.text_type,
'description': 'The note explaining the purpose of '
'this token.',
},
'policy': {
'type': six.text_type,
'description': 'The token access policy, encoded as a '
'JSON string.',
},
},
allow_unknown=True
)
def update(self, request, extra_fields={}, *args, **kwargs):
"""Updates the information on an existing API token.
The note, policy, and extra data on the token may be updated.
"""
try:
token = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
if not self.has_access_permissions(request, token, *args, **kwargs):
return self.get_no_access_error(request)
if 'note' in kwargs:
token.note = kwargs['note']
if 'policy' in kwargs:
try:
token.policy = self._validate_policy(kwargs['policy'])
except __HOLE__ as e:
return INVALID_FORM_DATA, {
'fields': {
'policy': e.message,
},
}
if extra_fields:
self.import_extra_data(token, token.extra_data, extra_fields)
token.save()
return 200, {
self.item_result_key: token,
}
|
ValidationError
|
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/webapi/resources/api_token.py/APITokenResource.update
|
5,862 |
def detect_location_info():
"""Detect location information."""
success = None
for source in DATA_SOURCE:
try:
raw_info = requests.get(source, timeout=5).json()
success = source
break
except (requests.RequestException, __HOLE__):
success = False
if success is False:
return None
else:
data = {key: raw_info.get(key) for key in LocationInfo._fields}
if success is DATA_SOURCE[1]:
data['ip'] = raw_info.get('query')
data['country_code'] = raw_info.get('countryCode')
data['country_name'] = raw_info.get('country')
data['region_code'] = raw_info.get('region')
data['region_name'] = raw_info.get('regionName')
data['zip_code'] = raw_info.get('zip')
data['time_zone'] = raw_info.get('timezone')
data['latitude'] = raw_info.get('lat')
data['longitude'] = raw_info.get('lon')
# From Wikipedia: Fahrenheit is used in the Bahamas, Belize,
# the Cayman Islands, Palau, and the United States and associated
# territories of American Samoa and the U.S. Virgin Islands
data['use_fahrenheit'] = data['country_code'] in (
'BS', 'BZ', 'KY', 'PW', 'US', 'AS', 'VI')
return LocationInfo(**data)
|
ValueError
|
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/util/location.py/detect_location_info
|
5,863 |
def elevation(latitude, longitude):
"""Return elevation for given latitude and longitude."""
req = requests.get(ELEVATION_URL, params={
'locations': '{},{}'.format(latitude, longitude),
'sensor': 'false',
})
if req.status_code != 200:
return 0
try:
return int(float(req.json()['results'][0]['elevation']))
except (ValueError, __HOLE__):
return 0
|
KeyError
|
dataset/ETHPy150Open home-assistant/home-assistant/homeassistant/util/location.py/elevation
|
5,864 |
def get(tree, name):
""" Return a float value attribute NAME from TREE.
"""
if name in tree:
value = tree[name]
else:
return float("nan")
try:
a = float(value)
except __HOLE__:
a = float("nan")
return a
|
ValueError
|
dataset/ETHPy150Open BD2KGenomics/toil/src/toil/utils/toilStats.py/get
|
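A small worked example of the get helper above, added for illustration; apart from the masked ValueError the function is pure, so no toil setup is needed.
tree = {"total_time": "1.5", "total_clock": "oops"}
assert get(tree, "total_time") == 1.5
nan = get(tree, "total_clock")   # unparseable value -> float("nan")
assert nan != nan                # NaN is the only value unequal to itself
assert get(tree, "missing") != get(tree, "missing")  # absent keys also yield NaN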
5,865 |
def getStats(options):
""" Collect and return the stats and config data.
"""
def aggregateStats(fileHandle,aggregateObject):
try:
stats = json.load(fileHandle, object_hook=Expando)
for key in stats.keys():
if key in aggregateObject:
aggregateObject[key].append(stats[key])
else:
aggregateObject[key]=[stats[key]]
except __HOLE__:
logger.critical("File %s contains corrupted json. Skipping file." % fileHandle)
pass # The file is corrupted.
jobStore = Toil.loadOrCreateJobStore(options.jobStore)
aggregateObject = Expando()
callBack = partial(aggregateStats, aggregateObject=aggregateObject)
jobStore.readStatsAndLogging(callBack, readAll=True)
return aggregateObject
|
ValueError
|
dataset/ETHPy150Open BD2KGenomics/toil/src/toil/utils/toilStats.py/getStats
|
5,866 |
def processData(config, stats, options):
##########################################
# Collate the stats and report
##########################################
if stats.get("total_time", None) is None: # Hack to allow unfinished toils.
stats.total_time = {"total_time": "0.0", "total_clock": "0.0"}
else:
stats.total_time = sum([float(number) for number in stats.total_time])
stats.total_clock = sum([float(number) for number in stats.total_clock])
collatedStatsTag = Expando(total_run_time=stats.total_time,
total_clock=stats.total_clock,
batch_system=config.batchSystem,
default_memory=str(config.defaultMemory),
default_cores=str(config.defaultCores),
max_cores=str(config.maxCores)
)
# Add worker info
worker = filter(None, stats.workers)
jobs = filter(None, stats.jobs)
jobs = [item for sublist in jobs for item in sublist]
def fn4(job):
try:
return list(jobs)
except __HOLE__:
return []
buildElement(collatedStatsTag, worker, "worker")
createSummary(buildElement(collatedStatsTag, jobs, "jobs"),
stats.workers, "worker", fn4)
# Get info for each job
jobNames = set()
for job in jobs:
jobNames.add(job.class_name)
jobTypesTag = Expando()
collatedStatsTag.job_types = jobTypesTag
for jobName in jobNames:
jobTypes = [ job for job in jobs if job.class_name == jobName ]
buildElement(jobTypesTag, jobTypes, jobName)
collatedStatsTag.name = "collatedStatsTag"
return collatedStatsTag
|
TypeError
|
dataset/ETHPy150Open BD2KGenomics/toil/src/toil/utils/toilStats.py/processData
|
5,867 |
def touch(self, key, cost=None):
""" Update score for key
Provide a cost the first time and optionally thereafter.
"""
time = self._base
if cost is not None:
self.cost[key] = cost
self.time[key] += self._base
time = self.time[key]
else:
try:
cost = self.cost[key]
self.time[key] += self._base
time = self.time[key]
except __HOLE__:
return
self._base *= self._base_multiplier
return cost * time
|
KeyError
|
dataset/ETHPy150Open blaze/cachey/cachey/score.py/Scorer.touch
|
5,868 |
def _unpack_args(item_f, src_queue, link, args):
if isinstance(item_f, FSQWorkItem):
item_id = item_f.id
src_queue = item_f.queue
item_f = item_f.item
elif src_queue:
item_id = coerce_unicode(item_f, _c.FSQ_CHARSET)
item_f = None
else:
args = list(args)
if link:
raise ValueError('Incorrect arguments')
try:
item_id = args.pop(0)
except __HOLE__:
raise ValueError('Insufficient arguments')
return item_f, src_queue, item_id, args, link
####### EXPOSED METHODS #######
|
IndexError
|
dataset/ETHPy150Open axialmarket/fsq/fsq/enqueue.py/_unpack_args
|
5,869 |
def venqueue(trg_queue, item_f, args, user=None, group=None, mode=None):
'''Enqueue the contents of a file, or file-like object, file-descriptor or
the contents of a file at an address (e.g. '/my/file') queue with
an argument list, venqueue is to enqueue what vprintf is to printf
If entropy is passed in, failure on duplicates is raised to the caller,
if entropy is not passed in, venqueue will increment entropy until it
can create the queue item.
'''
# setup defaults
trg_fd = name = None
user = _c.FSQ_ITEM_USER if user is None else user
group = _c.FSQ_ITEM_GROUP if group is None else group
mode = _c.FSQ_ITEM_MODE if mode is None else mode
now = fmt_time(datetime.datetime.now(), _c.FSQ_TIMEFMT, _c.FSQ_CHARSET)
pid = coerce_unicode(os.getpid(), _c.FSQ_CHARSET)
host = coerce_unicode(_HOSTNAME, _c.FSQ_CHARSET)
tries = u'0'
entropy = _mkentropy(pid, now, host)
# open source file
try:
src_file = rationalize_file(item_f, _c.FSQ_CHARSET)
except (OSError, IOError, ), e:
raise FSQEnqueueError(e.errno, wrap_io_os_err(e))
try:
real_file = True if hasattr(src_file, 'fileno') else False
# get low, so we can use some handy options; man 2 open
try:
item_name = construct(( now, entropy, pid, host,
tries, ) + tuple(args))
tmp_name = os.path.join(fsq_path.tmp(trg_queue), item_name)
trg_fd = os.open(tmp_name, os.O_WRONLY|os.O_CREAT|os.O_EXCL, mode)
except (OSError, IOError, ), e:
if isinstance(e, FSQError):
raise e
raise FSQEnqueueError(e.errno, wrap_io_os_err(e))
try:
if user is not None or group is not None:
# set user/group ownership for file; man 2 fchown
os.fchown(trg_fd, *uid_gid(user, group, fd=trg_fd))
with closing(os.fdopen(trg_fd, 'wb', 1)) as trg_file:
# i/o time ... assume line-buffered
while True:
if real_file:
reads, dis, card = select.select([src_file], [], [])
try:
msg = os.read(reads[0].fileno(), 2048)
if 0 == len(msg):
break
except (OSError, IOError, ), e:
if e.errno in (errno.EWOULDBLOCK, errno.EAGAIN,):
continue
raise e
trg_file.write(msg)
else:
line = src_file.readline()
if not line:
break
trg_file.write(line)
# flush buffers, and force write to disk pre mv.
trg_file.flush()
os.fsync(trg_file.fileno())
# hard-link into queue, unlink tmp, failure case here leaves
# cruft in tmp, but no race condition into queue
os.link(tmp_name, os.path.join(fsq_path.item(trg_queue,
item_name)))
os.unlink(tmp_name)
# return the queue item id (filename)
return item_name
except Exception, e:
try:
os.close(trg_fd)
except (OSError, IOError, ), err:
if err.errno != errno.EBADF:
raise FSQEnqueueError(err.errno, wrap_io_os_err(err))
try:
if tmp_name is not None:
os.unlink(tmp_name)
except (OSError, __HOLE__, ), err:
if err.errno != errno.ENOENT:
raise FSQEnqueueError(err.errno, wrap_io_os_err(err))
try:
if name is not None:
os.unlink(name)
except OSError, err:
if err.errno != errno.ENOENT:
raise FSQEnqueueError(err.errno, wrap_io_os_err(err))
if (isinstance(e, OSError) or isinstance(e, IOError)) and\
not isinstance(e, FSQError):
raise FSQEnqueueError(e.errno, wrap_io_os_err(e))
raise e
finally:
src_file.close()
|
IOError
|
dataset/ETHPy150Open axialmarket/fsq/fsq/enqueue.py/venqueue
|
5,870 |
def vreenqueue(item_f, *args, **kwargs):
'''Enqueue the contents of a file, or file-like object, FSQWorkItem,
file-descriptor, or the contents of a file at an address
(e.g. '/my/file') queue with arbitrary arguments from one queue to
other queues, reenqueue is to vreenqueue what printf is to vprintf
Uses include:
vreenqueue(FSQWorkItem, [trg_queue, ...], link=, kwargs)
vreenqueue(fileish, file_name, [trg_queue, ...], kwargs)
vreenqueue(fd, file_name, [trg_queue, ...], kwargs)
'''
item_id = None
src_queue = kwargs.pop('src_queue', None)
link = kwargs.pop('link', False)
hosts = kwargs.pop('hosts', None)
all_hosts = kwargs.pop('all_hosts', False)
item_f, src_queue, item_id, args, link = _unpack_args(item_f, src_queue,
link, args)
if 1 < len(args):
raise ValueError('Too many arguments')
try:
args = args[0]
except IndexError:
raise ValueError('Insufficient arguments')
try:
if item_f is None:
item_f = fsq_path.item(src_queue, item_id)
if link:
src_file = item_f
else:
src_file = rationalize_file(item_f, _c.FSQ_CHARSET)
except (OSError, IOError, ), e:
raise FSQReenqueueError(e.errno, wrap_io_os_err(e))
tmp_names = []
try:
paths = _formhostpath(args, hosts, all_hosts)
if link:
tmp_name = os.path.join(fsq_path.tmp(src_queue), item_id)
# hard link directly to tmp
try:
try:
os.link(fsq_path.item(src_queue, item_id), tmp_name)
except (OSError, IOError, ), e:
if not e.errno == errno.EEXIST:
raise FSQReenqueueError(e.errno, wrap_io_os_err(e))
for queue, host in paths:
try:
os.link(tmp_name, os.path.join(fsq_path.item(queue,
item_id, host=host)))
except (OSError, IOError, ), e:
if not e.errno == errno.EEXIST:
raise FSQReenqueueError(e.errno, wrap_io_os_err(e))
finally:
os.unlink(tmp_name)
else:
tmp_fos = []
try:
for queue, host in paths:
try:
tmp_name = os.path.join(fsq_path.tmp(queue, host=host),
item_id)
tmp_names.append(tmp_name)
# copy to n trg_queues
tmp_fo = os.open(tmp_name, os.O_RDWR|os.O_CREAT|\
os.O_TRUNC, _c.FSQ_ITEM_MODE)
tmp_fos.append(os.fdopen(tmp_fo, 'wb', 1))
except Exception, e:
raise FSQReenqueueError(wrap_io_os_err(e))
real_file = True if hasattr(src_file, 'fileno') else False
# read src_file once
while True:
if real_file:
reads, dis, card = select.select([src_file], [], [])
try:
chunk = os.read(reads[0].fileno(), 2048)
except (OSError, IOError, ), e:
if e.errno in (errno.EWOULDBLOCK, errno.EAGAIN,):
continue
raise
else:
chunk = src_file.readline()
if 0 == len(chunk):
break
for tmp_fo in tmp_fos:
tmp_fo.write(chunk)
# flush buffers, and force write to disk pre mv.
tmp_fo.flush()
os.fsync(tmp_fo.fileno())
for queue, host in paths:
tmp_name = os.path.join(fsq_path.tmp(queue, host=host),
item_id)
# hard-link into queue, unlink tmp, failure case here
# leaves cruft in tmp, but no race condition into queue
try:
os.link(tmp_name, os.path.join(fsq_path.item(queue,
item_id, host=host)))
except (__HOLE__, IOError, ), e:
if link and not e.errno == errno.EEXIST:
raise FSQReenqueueError(e.errno, wrap_io_os_err(e))
finally:
os.unlink(tmp_name)
finally:
for tmp_fo in tmp_fos:
tmp_fo.close()
return item_id
except Exception, e:
try:
if link:
tmp_name = os.path.join(fsq_path.tmp(src_queue, item_id))
try:
os.unlink(tmp_name)
except (OSError, IOError, ), err:
if err.errno == errno.ENOENT:
pass
raise FSQReenqueueError(err.errno, wrap_io_os_err(err))
else:
for tmp_name in tmp_names:
try:
os.unlink(tmp_name)
except (OSError, IOError, ), err:
if err.errno == errno.ENOENT:
pass
raise FSQReenqueueError(err.errno, wrap_io_os_err(err))
except (OSError, IOError, ), err:
if err.errno != errno.ENOENT:
raise FSQReenqueueError(err.errno, wrap_io_os_err(err))
except OSError, err:
if err.errno != errno.ENOENT:
raise FSQReenqueueError(err.errno, wrap_io_os_err(err))
if (isinstance(e, OSError) or isinstance(e, IOError)) and\
not isinstance(e, FSQError):
raise FSQReenqueueError(e.errno, wrap_io_os_err(e))
raise e
finally:
if not link:
src_file.close()
|
OSError
|
dataset/ETHPy150Open axialmarket/fsq/fsq/enqueue.py/vreenqueue
|
5,871 |
def import_app(app_label, verbosity):
# We get the app_path, necessary to use imp module find function
try:
app_path = __import__(app_label, {}, {}, [app_label.split('.')[-1]]).__path__
except AttributeError:
return
except __HOLE__:
print "Unknown application: %s" % app_label
print "Stopping synchronization"
sys.exit(1)
# imp.find_module looks for rules.py within the app
# It does not import the module, but raises and ImportError
# if rules.py does not exist, so we continue to next app
try:
imp.find_module('rules', app_path)
except ImportError:
return
if verbosity >= 1:
sys.stderr.write('Syncing rules from %s\n' % app_label)
# Now we import the module, this should bubble up errors
# if there are any in rules.py Warning the user
generator = import_module('.rules', app_label)
|
ImportError
|
dataset/ETHPy150Open maraujop/django-rules/django_rules/management/commands/sync_rules.py/import_app
|
5,872 |
def safeint(value):
try:
return int(value)
except __HOLE__:
return value
|
ValueError
|
dataset/ETHPy150Open ionelmc/django-redisboard/src/redisboard/views.py/safeint
|
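A minimal illustration of safeint above (not from the source record); it only needs the masked ValueError restored.
assert safeint("42") == 42        # numeric strings are converted to int
assert safeint("n/a") == "n/a"    # anything unparseable is passed through unchanged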
5,873 |
def main():
"""
Get the market cap!
"""
btc = Decimal(0)
try:
for balance in get_balances():
try:
btc += get_amt_in_btc(balance)
except ValueError:
sys.stderr.write(
'WARNING: Cannot convert {0} to btc\n'.format(
balance.currency.abbreviation
)
)
# there isn't an exchange for this coin to BTC, ignore it
pass
except __HOLE__ as e:
print("Error: status code {0}".format(e.code))
print("Is coinex down?")
return
btc_str = '{0:12.8f}'.format(btc)
print('{0} BTC'.format(btc_str))
try:
usd = get_amt_in_usd(btc)
except HTTPError as e:
print("Error: status code {0}".format(e.code))
print("Is bitstamp down?")
return
usd_str = '{0:12.8f}'.format(usd)
print('{0} USD'.format(usd_str))
|
HTTPError
|
dataset/ETHPy150Open robmcl4/Coinex/market_cap.py/main
|
5,874 |
def __hash__(self):
try:
return self._cached_hash
except __HOLE__:
h = self._cached_hash = hash(repr(self))
return h
|
AttributeError
|
dataset/ETHPy150Open datamade/dedupe/dedupe/predicates.py/Predicate.__hash__
|
5,875 |
def __call__(self, record) :
column = record[self.field]
if column :
try :
centers = self.index.search(self.preprocess(column),
self.threshold)
except __HOLE__ :
raise AttributeError("Attempting to block with an index "
"predicate without indexing records")
l_str = str
return [l_str(center) for center in centers]
else :
return ()
|
AttributeError
|
dataset/ETHPy150Open datamade/dedupe/dedupe/predicates.py/TfidfSearchPredicate.__call__
|
5,876 |
def __call__(self, record) :
block_key = None
column = record[self.field]
if column :
doc = self.preprocess(column)
try :
doc_id = self.index._doc_to_id[doc]
except __HOLE__ :
raise AttributeError("Attempting to block with an index "
"predicate without indexing records")
if doc_id in self.canopy :
block_key = self.canopy[doc_id]
else :
canopy_members = self.index.search(doc,
self.threshold)
for member in canopy_members :
if member not in self.canopy :
self.canopy[member] = doc_id
if canopy_members :
block_key = doc_id
self.canopy[doc_id] = doc_id
else:
self.canopy[doc_id] = None
if block_key is None :
return []
else :
return [str(block_key)]
|
AttributeError
|
dataset/ETHPy150Open datamade/dedupe/dedupe/predicates.py/TfidfCanopyPredicate.__call__
|
5,877 |
def existsPredicate(field) :
try :
if any(field) :
return (u'1',)
else :
return (u'0',)
except __HOLE__ :
if field :
return (u'1',)
else :
return (u'0',)
|
TypeError
|
dataset/ETHPy150Open datamade/dedupe/dedupe/predicates.py/existsPredicate
|
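A short behavioural sketch of existsPredicate above, illustrative only, with the masked exception filled in as the TypeError label shown.
assert existsPredicate("abc") == (u'1',)  # iterable with truthy items
assert existsPredicate("") == (u'0',)     # empty iterable
assert existsPredicate(7) == (u'1',)      # non-iterable: any() raises TypeError, falls back to truthiness
assert existsPredicate(0) == (u'0',)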
5,878 |
def __call__(self, fd, events):
if not (events & ioloop.IOLoop.READ):
if events == ioloop.IOLoop.ERROR:
self.redirector.remove_fd(fd)
return
try:
data = os.read(fd, self.redirector.buffer)
if len(data) == 0:
self.redirector.remove_fd(fd)
else:
datamap = {'data': data, 'pid': self.process.pid,
'name': self.name}
self.redirector.redirect[self.name](datamap)
except __HOLE__ as ex:
if ex.args[0] != errno.EAGAIN:
raise
try:
sys.exc_clear()
except Exception:
pass
|
IOError
|
dataset/ETHPy150Open circus-tent/circus/circus/stream/redirector.py/Redirector.Handler.__call__
|
5,879 |
@classmethod
def _gen_memd_wrappers(cls, factory):
"""Generates wrappers for all the memcached operations.
:param factory: A function to be called to return the wrapped
method. It will be called with two arguments; the first is
the unbound method being wrapped, and the second is the name
of such a method.
The factory shall return a new unbound method
:return: A dictionary of names mapping the API calls to the
wrapped functions
"""
d = {}
for n in cls._MEMCACHED_OPERATIONS:
for variant in (n, n + "_multi"):
try:
d[variant] = factory(getattr(cls, variant), variant)
except __HOLE__:
if n in cls._MEMCACHED_NOMULTI:
continue
raise
return d
|
AttributeError
|
dataset/ETHPy150Open couchbase/couchbase-python-client/couchbase/bucket.py/Bucket._gen_memd_wrappers
|
5,880 |
def __init__(self, params):
timeout = params.get('timeout', params.get('TIMEOUT', 300))
try:
timeout = int(timeout)
except (__HOLE__, TypeError):
timeout = 300
self.default_timeout = timeout
options = params.get('OPTIONS', {})
max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300))
try:
self._max_entries = int(max_entries)
except (ValueError, TypeError):
self._max_entries = 300
cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3))
try:
self._cull_frequency = int(cull_frequency)
except (ValueError, TypeError):
self._cull_frequency = 3
self.key_prefix = params.get('KEY_PREFIX', '')
self.version = params.get('VERSION', 1)
self.key_func = get_key_func(params.get('KEY_FUNCTION', None))
|
ValueError
|
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/core/cache/backends/base.py/BaseCache.__init__
|
5,881 |
def clean(self, value):
if value:
try:
map(int, value)
except (TypeError, __HOLE__):
raise ValidationError('You must enter valid integer values.')
return '-'.join(value)
|
ValueError
|
dataset/ETHPy150Open disqus/gargoyle/gargoyle/conditions.py/Range.clean
|
5,882 |
def clean(self, value):
try:
date = self.str_to_date(value)
except __HOLE__, e:
raise ValidationError("Date must be a valid date in the format YYYY-MM-DD.\n(%s)" % e.message)
return date.strftime(self.DATE_FORMAT)
|
ValueError
|
dataset/ETHPy150Open disqus/gargoyle/gargoyle/conditions.py/AbstractDate.clean
|
5,883 |
def describe(self, identifiers):
if not identifiers:
raise MissingParameterValue('', 'identifier')
identifier_elements = []
# 'all' keyword means all processes
if 'all' in (ident.lower() for ident in identifiers):
for process in self.processes:
try:
identifier_elements.append(
self.processes[process].describe_xml())
except Exception as e:
raise NoApplicableCode(e)
else:
for identifier in identifiers:
try:
process = self.processes[identifier]
except __HOLE__:
raise InvalidParameterValue(
"Unknown process %r" % identifier, "identifier")
else:
try:
identifier_elements.append(process.describe_xml())
except Exception as e:
raise NoApplicableCode(e)
doc = WPS.ProcessDescriptions(
*identifier_elements
)
doc.attrib[
'{http://www.w3.org/2001/XMLSchema-instance}schemaLocation'] = 'http://www.opengis.net/wps/1.0.0 http://schemas.opengis.net/wps/1.0.0/wpsDescribeProcess_response.xsd'
doc.attrib['service'] = 'WPS'
doc.attrib['version'] = '1.0.0'
doc.attrib['{http://www.w3.org/XML/1998/namespace}lang'] = 'en-US'
return xml_response(doc)
|
KeyError
|
dataset/ETHPy150Open geopython/pywps/pywps/app/Service.py/Service.describe
|
5,884 |
def execute(self, identifier, wps_request, uuid):
"""Parse and perform Execute WPS request call
:param identifier: process identifier string
:param wps_request: pywps.WPSRequest structure with parsed inputs, still in memory
:param uuid: string identifier of the request
"""
response = None
try:
process = self.processes[identifier]
workdir = config.get_config_value('server', 'workdir')
tempdir = tempfile.mkdtemp(prefix='pywps_process_', dir=workdir)
process.set_workdir(tempdir)
except __HOLE__:
raise InvalidParameterValue("Unknown process '%r'" % identifier, 'Identifier')
olddir = os.path.abspath(os.curdir)
try:
os.chdir(process.workdir)
response = self._parse_and_execute(process, wps_request, uuid)
finally:
os.chdir(olddir)
shutil.rmtree(process.workdir)
return response
|
KeyError
|
dataset/ETHPy150Open geopython/pywps/pywps/app/Service.py/Service.execute
|
5,885 |
def __init__(self, filename, sorted=False, header=False):
if filename.endswith(".delta"):
coordsfile = filename.rsplit(".", 1)[0] + ".coords"
if need_update(filename, coordsfile):
fromdelta([filename])
filename = coordsfile
super(Coords, self).__init__(filename)
fp = open(filename)
if header:
self.cmd = fp.next()
for row in fp:
try:
self.append(CoordsLine(row))
except __HOLE__:
pass
if sorted:
self.ref_sort()
|
AssertionError
|
dataset/ETHPy150Open tanghaibao/jcvi/formats/coords.py/Coords.__init__
|
5,886 |
def coverage(args):
"""
%prog coverage coordsfile
Report the coverage per query record, useful to see which query matches
reference. The coords file MUST be filtered with supermap::
jcvi.algorithms.supermap --filter query
"""
p = OptionParser(coverage.__doc__)
p.add_option("-c", dest="cutoff", default=0.5, type="float",
help="only report query with coverage greater than [default: %default]")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
coordsfile, = args
fp = open(coordsfile)
coords = []
for row in fp:
try:
c = CoordsLine(row)
except __HOLE__:
continue
coords.append(c)
coords.sort(key=lambda x: x.query)
coverages = []
for query, lines in groupby(coords, key=lambda x: x.query):
cumulative_cutoff = sum(x.querycov for x in lines)
coverages.append((query, cumulative_cutoff))
coverages.sort(key=lambda x: (-x[1], x[0]))
for query, cumulative_cutoff in coverages:
if cumulative_cutoff < opts.cutoff:
break
print "{0}\t{1:.2f}".format(query, cumulative_cutoff)
|
AssertionError
|
dataset/ETHPy150Open tanghaibao/jcvi/formats/coords.py/coverage
|
5,887 |
def annotate(args):
"""
%prog annotate coordsfile
Annotate coordsfile to append an additional column, with the following
overlaps: {0}.
"""
p = OptionParser(annotate.__doc__.format(", ".join(Overlap_types)))
p.add_option("--maxhang", default=100, type="int",
help="Max hang to call dovetail overlap [default: %default]")
p.add_option("--all", default=False, action="store_true",
help="Output all lines [default: terminal/containment]")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
coordsfile, = args
fp = open(coordsfile)
for row in fp:
try:
c = CoordsLine(row)
except __HOLE__:
continue
ov = c.overlap(opts.maxhang)
if not opts.all and ov == 0:
continue
print "{0}\t{1}".format(row.strip(), Overlap_types[ov])
|
AssertionError
|
dataset/ETHPy150Open tanghaibao/jcvi/formats/coords.py/annotate
|
5,888 |
def filter(args):
"""
%prog filter <deltafile|coordsfile>
Produce a new delta/coords file and filter based on id% or cov%.
Use `delta-filter` for .delta file.
"""
p = OptionParser(filter.__doc__)
p.set_align(pctid=0, hitlen=0)
p.add_option("--overlap", default=False, action="store_true",
help="Print overlap status (e.g. terminal, contained)")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
pctid = opts.pctid
hitlen = opts.hitlen
filename, = args
if pctid == 0 and hitlen == 0:
return filename
pf, suffix = filename.rsplit(".", 1)
outfile = "".join((pf, ".P{0}L{1}.".format(int(pctid), int(hitlen)), suffix))
if not need_update(filename, outfile):
return outfile
if suffix == "delta":
cmd = "delta-filter -i {0} -l {1} {2}".format(pctid, hitlen, filename)
sh(cmd, outfile=outfile)
return outfile
fp = open(filename)
fw = must_open(outfile, "w")
for row in fp:
try:
c = CoordsLine(row)
except __HOLE__:
continue
if c.identity < pctid:
continue
if c.len2 < hitlen:
continue
if opts.overlap and not c.overlap:
continue
outrow = row.rstrip()
if opts.overlap:
ov = Overlap_types[c.overlap]
outrow += "\t" + ov
print >> fw, outrow
return outfile
|
AssertionError
|
dataset/ETHPy150Open tanghaibao/jcvi/formats/coords.py/filter
|
5,889 |
@memoize
def get_package(command):
try:
c = CommandNotFound.CommandNotFound()
cmd = command.split(' ')
pkgs = c.getPackages(cmd[0] if cmd[0] != 'sudo' else cmd[1])
name, _ = pkgs[0]
return name
except __HOLE__:
# IndexError is thrown when no matching package is found
return None
|
IndexError
|
dataset/ETHPy150Open nvbn/thefuck/thefuck/rules/apt_get.py/get_package
|
5,890 |
def create_archive():
ravello_dir = util.get_ravello_dir()
try:
st = os.stat(ravello_dir)
except OSError:
st = None
if st and not stat.S_ISDIR(st.st_mode):
error.raise_error("Path `{0}` exists but is not a directory.",
ravello_dir)
elif st is None:
os.mkdir(ravello_dir)
distfile = os.path.join(ravello_dir, 'dist.tar.gz')
try:
st = os.stat(distfile)
except __HOLE__:
st = None
if st and st.st_mtime >= env.start_time:
return distfile
archive = tarfile.TarFile.open(distfile, 'w:gz')
repotype = env.manifest['repository']['type']
files = versioncontrol.walk_repository('.', repotype)
for fname in files:
if fname.startswith(ravello_dir):
continue
archive.add(fname, recursive=False)
archive.close()
return distfile
|
OSError
|
dataset/ETHPy150Open ravello/testmill/lib/testmill/tasks.py/create_archive
|
5,891 |
def __init__(self, positions, end=None):
"""
Construct a Feature which may apply at C{positions}.
#For instance, importing some concrete subclasses (Feature is abstract)
>>> from nltk.tag.brill import Word, Pos
#Feature Word, applying at one of [-2, -1]
>>> Word([-2,-1])
Word([-2, -1])
#Positions need not be contiguous
>>> Word([-2,-1, 1])
Word([-2, -1, 1])
#Contiguous ranges can alternatively be specified giving the
#two endpoints (inclusive)
>>> Pos(-3, -1)
Pos([-3, -2, -1])
#In two-arg form, start <= end is enforced
>>> Pos(2, 1)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "nltk/tbl/template.py", line 306, in __init__
raise TypeError
ValueError: illegal interval specification: (start=2, end=1)
:type positions: list of int
:param positions: the positions at which this features should apply
:raises ValueError: illegal position specifications
An alternative calling convention, for contiguous positions only,
is Feature(start, end):
:type start: int
:param start: start of range where this feature should apply
:type end: int
:param end: end of range (NOTE: inclusive!) where this feature should apply
"""
self.positions = None # to avoid warnings
if end is None:
self.positions = tuple(sorted(set([int(i) for i in positions])))
else: # positions was actually not a list, but only the start index
try:
if positions > end:
raise TypeError
self.positions = tuple(range(positions, end+1))
except __HOLE__:
# let any kind of erroneous spec raise ValueError
raise ValueError("illegal interval specification: (start={0}, end={1})".format(positions, end))
# set property name given in subclass, or otherwise name of subclass
self.PROPERTY_NAME = self.__class__.PROPERTY_NAME or self.__class__.__name__
|
TypeError
|
dataset/ETHPy150Open nltk/nltk/nltk/tbl/feature.py/Feature.__init__
|
5,892 |
@staticmethod
def load(response, encoding):
response = response.decode(encoding)
try:
return json.loads(response)
except __HOLE__:
return response
except json.decoder.JSONDecodeError:
return response
|
ValueError
|
dataset/ETHPy150Open haikuginger/beekeeper/beekeeper/data_handlers.py/PlainText.load
|
5,893 |
def command(self, cmd, *args):
    func = getattr(self.__class__, cmd, False)
    # silently ignore nonsensical calls because the logger loops over each
    # writer and passes the command separately to all of them
    if func and func.__dict__.get('_callable', False):
        try:
            return func(self, *args)
        except __HOLE__:
            # Probably the wrong number of arguments; make it
            # possible to debug the problem
            self.freetext('\nERROR: failed attempt to call'
                          ' function %s with arguments %s\n' %(cmd, repr(args)))
        except Exception, e:
            # Unknown problem; provide information so that we can
            # debug it and fix it later
            self.freetext('\nERROR: unhandled exception %s: %s'
                          ' calling function %s with arguments %s\n' %(
                              str(e.__class__), str(e), cmd, repr(args)))
|
TypeError
|
dataset/ETHPy150Open sassoftware/conary/conary/lib/logger.py/LogWriter.command
|
5,894 |
def close(self):
    """ Reassert control of tty. Closing stdin, stderr, and stdout
        will get rid of the last pointer to the slave fd of the pseudo
        tty, which should cause the logging process to stop. We wait
        for it to die before continuing
    """
    if not self.logging:
        return
    self.closed = True
    # restore old terminal settings before quitting
    if self.oldStdin != 0:
        os.dup2(self.oldStdin, 0)
    os.dup2(self.oldStdout, 1)
    os.dup2(self.oldStderr, 2)
    if self.oldTermios is not None:
        termios.tcsetattr(0, termios.TCSADRAIN, self.oldTermios)
    if self.oldStdin != 0:
        os.close(self.oldStdin)
    os.close(self.oldStdout)
    os.close(self.oldStderr)
    try:
        # control stdin -- if stdin is a tty
        # that can be controlled
        if sys.stdin.isatty() and self.restoreTerminalControl:
            os.tcsetpgrp(0, os.getpgrp())
    except __HOLE__:
        # stdin might not even have an isatty method
        pass
    # Wait for child logging process to die. Send successively ruder
    # signals if it does not do so within a reasonable time. The primary
    # reason that it would not die immediately is that a process has forked
    # while holding the TTY file descriptor, and thus the logger is still
    # polling it for output.
    signals = [signal.SIGTERM, signal.SIGKILL]
    while signals:
        start = time.time()
        while time.time() - start < 10:
            pid, status = os.waitpid(self.loggerPid, os.WNOHANG)
            if pid:
                break
            time.sleep(0.1)
        else:
            # Child process did not die.
            signum = signals.pop(0)
            os.kill(self.loggerPid, signum)
            continue
        break
    else:
        # Last signal was a KILL, so wait indefinitely.
        os.waitpid(self.loggerPid, 0)
|
AttributeError
|
dataset/ETHPy150Open sassoftware/conary/conary/lib/logger.py/Logger.close
|
5,895 |
def _controlTerminal(self):
    try:
        # the child should control stdin -- if stdin is a tty
        # that can be controlled
        if sys.stdin.isatty():
            os.tcsetpgrp(0, os.getpgrp())
    except __HOLE__:
        # stdin might not even have an isatty method
        pass
|
AttributeError
|
dataset/ETHPy150Open sassoftware/conary/conary/lib/logger.py/_ChildLogger._controlTerminal
|
5,896 |
def log(self):
    if self.shouldControlTerminal:
        self._controlTerminal()
    # standardize terminal size at 24, 80 for those programs that
    # access it. This should ensure that programs that look at
    # terminal size for displaying log info will look similar across
    # runs.
    self._setTerminalSize(24, 80)
    # set some local variables that are reused often within the loop
    ptyFd = self.ptyFd
    lexer = self.lexer
    stdin = sys.stdin.fileno()
    unLogged = ''
    pollObj = select.poll()
    pollObj.register(ptyFd, select.POLLIN)
    if self.withStdin and os.isatty(stdin):
        pollObj.register(stdin, select.POLLIN)
    # sigwinch is called when the window size is changed
    sigwinch = []
    def sigwinch_handler(s, f):
        sigwinch.append(True)
    # disable to ensure window size is standardized
    #signal.signal(signal.SIGWINCH, sigwinch_handler)
    while True:
        try:
            read = [ x[0] for x in pollObj.poll() ]
        except select.error, msg:
            if msg.args[0] != 4:
                raise
            read = []
        if ptyFd in read:
            # read output from pseudo terminal stdout/stderr, and pass to
            # terminal and log
            try:
                output = os.read(ptyFd, BUFFER)
            except __HOLE__, msg:
                if msg.errno == errno.EIO:
                    # input/output error - pty closed
                    # shut down logger
                    break
                elif msg.errno != errno.EINTR:
                    # EINTR is due to an interrupted read - that could be
                    # due to a SIGWINCH signal. Raise any other error
                    raise
            else:
                lexer.write(output)
        if stdin in read:
            # read input from stdin, and pass to
            # pseudo tty
            try:
                input = os.read(stdin, BUFFER)
            except OSError, msg:
                if msg.errno == errno.EIO:
                    # input/output error - stdin closed
                    # shut down logger
                    break
                elif msg.errno != errno.EINTR:
                    # EINTR is due to an interrupted read - that could be
                    # due to a SIGWINCH signal. Raise any other error
                    raise
            else:
                os.write(ptyFd, input)
        if sigwinch:
            # disable sigwinch to ensure the window width expected in logs is standardized
            # self._resizeTerminal()
            sigwinch = []
|
OSError
|
dataset/ETHPy150Open sassoftware/conary/conary/lib/logger.py/_ChildLogger.log
|
5,897 |
def process_image_diff(diff_path, before_path, after_path):
    cmd = ["perceptualdiff", "-output", diff_path, before_path, after_path]
    try:
        proc = subprocess.Popen(cmd)
        code = proc.wait()
    except __HOLE__:
        fail("Failed to run: %s" % " ".join(cmd))
        sys.exit(1)
    return code
|
OSError
|
dataset/ETHPy150Open bokeh/bokeh/tests/plugins/image_diff.py/process_image_diff
|
5,898 |
def _update_extents(self):
    inp = self.plane.input
    extents = list(_get_extent(inp))
    pos = self.position
    axis = self._get_axis_index()
    extents[2*axis] = pos
    extents[2*axis+1] = pos
    try:
        self.plane.set_extent(extents)
    except __HOLE__:
        self.plane.extent = extents
|
AttributeError
|
dataset/ETHPy150Open enthought/mayavi/mayavi/components/grid_plane.py/GridPlane._update_extents
|
5,899 |
def validateEmail(email):
    try:
        validate_email(email)
        return True
    except __HOLE__:
        return False
|
ValidationError
|
dataset/ETHPy150Open haystack/eyebrowse-server/common/view_helpers.py/validateEmail
|