_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q279600
|
mappable
|
test
|
def mappable(obj):
    """Return True if *obj* can be treated as a mappable sequence.

    Tuples and lists always qualify; any other object is checked
    against each entry of the module-level ``arrayModules`` registry.
    """
    if isinstance(obj, (tuple, list)):
        return True
    return any(isinstance(obj, entry['type']) for entry in arrayModules)
|
python
|
{
"resource": ""
}
|
q279601
|
Map.getPartition
|
test
|
def getPartition(self, seq, p, q):
    """Return the pth partition of q partitions of seq.

    Returns None (after printing a message) when p is out of range.
    Works for sliceable sequences and plain iterators alike.
    """
    # Reject out-of-range partition indices up front.
    if p < 0 or p >= q:
        print("No partition exists.")
        return
    remainder = len(seq) % q
    basesize = len(seq) // q
    # Compute the [lo, hi) bounds of every partition; the first
    # `remainder` partitions each receive one extra element.
    lo = []
    hi = []
    for n in range(q):
        if n < remainder:
            lo.append(n * (basesize + 1))
            hi.append(lo[-1] + basesize + 1)
        else:
            lo.append(n * basesize + remainder)
            hi.append(lo[-1] + basesize)
    try:
        return seq[lo[p]:hi[p]]
    except TypeError:
        # Some objects (iterators) can't be sliced; use islice instead.
        return list(islice(seq, lo[p], hi[p]))
|
python
|
{
"resource": ""
}
|
q279602
|
pexpect_monkeypatch
|
test
|
def pexpect_monkeypatch():
    """Patch pexpect to prevent unhandled exceptions at VM teardown.
    Calling this function will monkeypatch the pexpect.spawn class and modify
    its __del__ method to make it more robust in the face of failures that can
    occur if it is called when the Python VM is shutting down.
    Since Python may fire __del__ methods arbitrarily late, it's possible for
    them to execute during the teardown of the Python VM itself.  At this
    point, various builtin modules have been reset to None.  Thus, the call to
    self.close() will trigger an exception because it tries to call os.close(),
    and os is now None.
    """
    # NOTE(review): lexicographic comparison of version strings is fragile
    # for multi-digit components (e.g. '10.'[:3] < '2.2'); it works for the
    # 2.x series this targets -- confirm before relying on newer versions.
    if pexpect.__version__[:3] >= '2.2':
        # No need to patch, fix is already the upstream version.
        return
    def __del__(self):
        """This makes sure that no system resources are left open.
        Python only garbage collects Python objects. OS file descriptors
        are not Python objects, so they must be handled explicitly.
        If the child file descriptor was opened outside of this class
        (passed to the constructor) then this does not close it.
        """
        if not self.closed:
            # During interpreter shutdown close() may touch modules that
            # have already been torn down, raising AttributeError.
            try:
                self.close()
            except AttributeError:
                pass
    pexpect.spawn.__del__ = __del__
|
python
|
{
"resource": ""
}
|
q279603
|
InteractiveRunner.run_file
|
test
|
def run_file(self, fname, interact=False, get_output=False):
    """Execute the file named *fname* interactively.

    *interact* and *get_output* are forwarded to run_source; see its
    docstring for their meaning.  When *get_output* is true, the
    captured output string is returned.
    """
    with open(fname, 'r') as fobj:
        out = self.run_source(fobj, interact, get_output)
    if get_output:
        return out
|
python
|
{
"resource": ""
}
|
q279604
|
InteractiveRunner.run_source
|
test
|
def run_source(self,source,interact=False,get_output=False):
    """Run the given source code interactively.
    Inputs:
    - source: a string of code to be executed, or an open file object we
    can iterate over.
    Optional inputs:
    - interact(False): if true, start to interact with the running
    program at the end of the script. Otherwise, just exit.
    - get_output(False): if true, capture the output of the child process
    (filtering the input commands out) and return it as a string.
    Returns:
    A string containing the process output, but only if requested.
    """
    # if the source is a string, chop it up in lines so we can iterate
    # over it just as if it were an open file.
    if isinstance(source, basestring):
        source = source.splitlines(True)
    if self.echo:
        # normalize all strings we write to use the native OS line
        # separators.
        linesep = os.linesep
        stdwrite = self.out.write
        write = lambda s: stdwrite(s.replace('\r\n',linesep))
    else:
        # Quiet mode, all writes are no-ops
        write = lambda s: None
    c = self.child
    prompts = c.compile_pattern_list(self.prompts)
    # Wait for the child's first prompt before sending anything.
    prompt_idx = c.expect_list(prompts)
    # Flag whether the script ends normally or not, to know whether we can
    # do anything further with the underlying process.
    end_normal = True
    # If the output was requested, store it in a list for return at the end
    if get_output:
        output = []
        store_output = output.append
    for cmd in source:
        # skip blank lines for all matches to the 'main' prompt, while the
        # secondary prompts do not
        if prompt_idx==0 and \
               (cmd.isspace() or cmd.lstrip().startswith('#')):
            write(cmd)
            continue
        # write('AFTER: '+c.after) # dbg
        write(c.after)
        c.send(cmd)
        try:
            prompt_idx = c.expect_list(prompts)
        except pexpect.EOF:
            # this will happen if the child dies unexpectedly
            write(c.before)
            end_normal = False
            break
        write(c.before)
        # With an echoing process, the output we get in c.before contains
        # the command sent, a newline, and then the actual process output
        if get_output:
            store_output(c.before[len(cmd+'\n'):])
            #write('CMD: <<%s>>' % cmd) # dbg
            #write('OUTPUT: <<%s>>' % output[-1]) # dbg
    self.out.flush()
    if end_normal:
        if interact:
            c.send('\n')
            print '<< Starting interactive mode >>',
            try:
                c.interact()
            except OSError:
                # This is what fires when the child stops. Simply print a
                # newline so the system prompt is aligned. The extra
                # space is there to make sure it gets printed, otherwise
                # OS buffering sometimes just suppresses it.
                write(' \n')
                self.out.flush()
    else:
        if interact:
            e="Further interaction is not possible: child process is dead."
            print >> sys.stderr, e
    # Leave the child ready for more input later on, otherwise select just
    # hangs on the second invocation.
    if c.isalive():
        c.send('\n')
    # Return any requested output
    if get_output:
        return ''.join(output)
|
python
|
{
"resource": ""
}
|
q279605
|
XmlReporter.report
|
test
|
def report(self, morfs, outfile=None):
    """Generate a Cobertura-compatible XML report for `morfs`.
    `morfs` is a list of modules or filenames.
    `outfile` is a file object to write the XML to; defaults to stdout.
    Returns the combined statement+branch coverage percentage (float).
    """
    # Initial setup.
    outfile = outfile or sys.stdout
    # Create the DOM that will store the data.
    impl = xml.dom.minidom.getDOMImplementation()
    docType = impl.createDocumentType(
        "coverage", None,
        "http://cobertura.sourceforge.net/xml/coverage-03.dtd"
        )
    self.xml_out = impl.createDocument(None, "coverage", docType)
    # Write header stuff.
    xcoverage = self.xml_out.documentElement
    xcoverage.setAttribute("version", __version__)
    # Cobertura timestamps are in milliseconds.
    xcoverage.setAttribute("timestamp", str(int(time.time()*1000)))
    xcoverage.appendChild(self.xml_out.createComment(
        " Generated by coverage.py: %s " % __url__
        ))
    xpackages = self.xml_out.createElement("packages")
    xcoverage.appendChild(xpackages)
    # Call xml_file for each file in the data; it fills self.packages.
    self.packages = {}
    self.report_files(self.xml_file, morfs)
    lnum_tot, lhits_tot = 0, 0
    bnum_tot, bhits_tot = 0, 0
    # Populate the XML DOM with the package info.
    for pkg_name in sorted(self.packages.keys()):
        pkg_data = self.packages[pkg_name]
        # pkg_data layout (built by xml_file): [class elements dict,
        # line hits, line count, branch hits, branch count].
        class_elts, lhits, lnum, bhits, bnum = pkg_data
        xpackage = self.xml_out.createElement("package")
        xpackages.appendChild(xpackage)
        xclasses = self.xml_out.createElement("classes")
        xpackage.appendChild(xclasses)
        for class_name in sorted(class_elts.keys()):
            xclasses.appendChild(class_elts[class_name])
        xpackage.setAttribute("name", pkg_name.replace(os.sep, '.'))
        xpackage.setAttribute("line-rate", rate(lhits, lnum))
        xpackage.setAttribute("branch-rate", rate(bhits, bnum))
        xpackage.setAttribute("complexity", "0")
        lnum_tot += lnum
        lhits_tot += lhits
        bnum_tot += bnum
        bhits_tot += bhits
    xcoverage.setAttribute("line-rate", rate(lhits_tot, lnum_tot))
    xcoverage.setAttribute("branch-rate", rate(bhits_tot, bnum_tot))
    # Use the DOM to write the output file.
    outfile.write(self.xml_out.toprettyxml())
    # Return the total percentage.
    denom = lnum_tot + bnum_tot
    if denom == 0:
        pct = 0.0
    else:
        pct = 100.0 * (lhits_tot + bhits_tot) / denom
    return pct
|
python
|
{
"resource": ""
}
|
q279606
|
XmlReporter.xml_file
|
test
|
def xml_file(self, cu, analysis):
    """Add to the XML report for a single file.
    `cu` is the code unit being reported; `analysis` holds its
    statement/branch coverage results.  Updates self.packages in place.
    """
    # Create the 'lines' and 'package' XML elements, which
    # are populated later. Note that a package == a directory.
    package_name = rpartition(cu.name, ".")[0]
    className = cu.name
    # Package accumulator layout: [class elements dict, line hits,
    # line count, branch hits, branch count] -- read back by report().
    package = self.packages.setdefault(package_name, [{}, 0, 0, 0, 0])
    xclass = self.xml_out.createElement("class")
    xclass.appendChild(self.xml_out.createElement("methods"))
    xlines = self.xml_out.createElement("lines")
    xclass.appendChild(xlines)
    xclass.setAttribute("name", className)
    filename = cu.file_locator.relative_filename(cu.filename)
    # Cobertura expects forward slashes regardless of platform.
    xclass.setAttribute("filename", filename.replace("\\", "/"))
    xclass.setAttribute("complexity", "0")
    branch_stats = analysis.branch_stats()
    # For each statement, create an XML 'line' element.
    for line in sorted(analysis.statements):
        xline = self.xml_out.createElement("line")
        xline.setAttribute("number", str(line))
        # Q: can we get info about the number of times a statement is
        # executed? If so, that should be recorded here.
        xline.setAttribute("hits", str(int(line not in analysis.missing)))
        if self.arcs:
            if line in branch_stats:
                total, taken = branch_stats[line]
                xline.setAttribute("branch", "true")
                xline.setAttribute("condition-coverage",
                    "%d%% (%d/%d)" % (100*taken/total, taken, total)
                    )
        xlines.appendChild(xline)
    class_lines = len(analysis.statements)
    class_hits = class_lines - len(analysis.missing)
    if self.arcs:
        class_branches = sum([t for t,k in branch_stats.values()])
        missing_branches = sum([t-k for t,k in branch_stats.values()])
        class_br_hits = class_branches - missing_branches
    else:
        class_branches = 0.0
        class_br_hits = 0.0
    # Finalize the statistics that are collected in the XML DOM.
    xclass.setAttribute("line-rate", rate(class_hits, class_lines))
    xclass.setAttribute("branch-rate", rate(class_br_hits, class_branches))
    package[0][className] = xclass
    package[1] += class_hits
    package[2] += class_lines
    package[3] += class_br_hits
    package[4] += class_branches
|
python
|
{
"resource": ""
}
|
q279607
|
fetch_pi_file
|
test
|
def fetch_pi_file(filename):
    """Download a segment of pi from super-computing.org unless the
    file named *filename* is already present locally."""
    import os, urllib
    ftpdir = "ftp://pi.super-computing.org/.2/pi200m/"
    if not os.path.exists(filename):
        # Not cached locally yet: fetch it from the FTP archive.
        urllib.urlretrieve(ftpdir + filename, filename)
|
python
|
{
"resource": ""
}
|
q279608
|
reduce_freqs
|
test
|
def reduce_freqs(freqlist):
    """Add up a list of per-chunk frequency counts.

    Returns an array shaped (and typed) like the first entry, holding
    the element-wise total of every count array in *freqlist*.
    """
    totals = np.zeros_like(freqlist[0])
    for counts in freqlist:
        # In-place accumulation keeps the original dtype.
        np.add(totals, counts, out=totals)
    return totals
|
python
|
{
"resource": ""
}
|
q279609
|
compute_n_digit_freqs
|
test
|
def compute_n_digit_freqs(filename, n):
    """
    Compute the n-digit frequency counts for the digits of pi stored
    in the text file *filename*.
    """
    digits = txt_file_to_digits(filename)
    return n_digit_freqs(digits, n)
|
python
|
{
"resource": ""
}
|
q279610
|
txt_file_to_digits
|
test
|
def txt_file_to_digits(filename, the_type=str):
    """
    Generator yielding each digit of pi read from a .txt file,
    converted with *the_type* (str by default).
    """
    with open(filename, 'r') as f:
        for line in f:
            for ch in line:
                # Skip the whitespace used to format the file.
                if ch != '\n' and ch != ' ':
                    yield the_type(ch)
|
python
|
{
"resource": ""
}
|
q279611
|
one_digit_freqs
|
test
|
def one_digit_freqs(digits, normalize=False):
    """
    Consume digits of pi and compute single-digit frequency counts.

    Parameters
    ----------
    digits : iterable
        Digits as ints or single-character strings.
    normalize : bool
        If True, return relative frequencies (floats summing to 1)
        instead of raw integer counts.

    Returns
    -------
    numpy array of length 10: counts ('i4') or frequencies (float).
    """
    freqs = np.zeros(10, dtype='i4')
    for d in digits:
        freqs[int(d)] += 1
    if normalize:
        # Force true division: both freqs and its sum are integer
        # dtype, so a plain '/' floor-divides the counts down to all
        # zeros under Python 2 semantics.
        freqs = freqs / float(freqs.sum())
    return freqs
|
python
|
{
"resource": ""
}
|
q279612
|
two_digit_freqs
|
test
|
def two_digit_freqs(digits, normalize=False):
    """
    Consume digits of pi and compute overlapping two-digit frequency
    counts.

    *digits* must be an iterator yielding digit characters.  If
    *normalize* is true, relative float frequencies are returned
    instead of raw counts.

    NOTE(review): the final two-digit window is never counted (the
    loop stops when the iterator is exhausted, before tallying the
    last pair); preserved here since n_digit_freqs shares the same
    convention -- confirm whether that is intended.
    """
    freqs = np.zeros(100, dtype='i4')
    # Builtin next() instead of the Python-2-only .next() method;
    # works on Python 2.6+ and 3.x alike.
    last = next(digits)
    this = next(digits)
    for d in digits:
        index = int(last + this)
        freqs[index] += 1
        last = this
        this = d
    if normalize:
        # Force true division; integer '/' would floor the counts to
        # zero under Python 2 semantics.
        freqs = freqs / float(freqs.sum())
    return freqs
|
python
|
{
"resource": ""
}
|
q279613
|
n_digit_freqs
|
test
|
def n_digit_freqs(digits, n, normalize=False):
    """
    Consume digits of pi and compute overlapping n-digit frequency
    counts.  Intended for n in the range 1-6; the result has 10**n
    slots.

    *digits* must be an iterator of digits.  If *normalize* is true,
    relative float frequencies are returned instead of raw counts.
    """
    freqs = np.zeros(pow(10, n), dtype='i4')
    # Prime the sliding window with the first n digits.  Builtin
    # next() replaces the Python-2-only .next() method (2.6+/3.x).
    current = np.zeros(n, dtype=int)
    for i in range(n):
        current[i] = next(digits)
    for d in digits:
        index = int(''.join(map(str, current)))
        freqs[index] += 1
        # Slide the window one digit to the right.
        current[0:-1] = current[1:]
        current[-1] = d
    if normalize:
        # Force true division; integer '/' would floor the counts to
        # zero under Python 2 semantics.
        freqs = freqs / float(freqs.sum())
    return freqs
|
python
|
{
"resource": ""
}
|
q279614
|
plot_two_digit_freqs
|
test
|
def plot_two_digit_freqs(f2):
    """
    Plot two digit frequency counts using matplotlib.
    `f2` is a flat length-100 count array; a copy is reshaped to
    10x10 and shown with matshow.  Returns the matshow image object.
    """
    # Work on a copy so the caller's array shape is untouched.
    f2_copy = f2.copy()
    f2_copy.shape = (10,10)
    ax = plt.matshow(f2_copy)
    plt.colorbar()
    # Label every cell with the digit pair it represents.
    for i in range(10):
        for j in range(10):
            plt.text(i-0.2, j+0.2, str(j)+str(i))
    plt.ylabel('First digit')
    plt.xlabel('Second digit')
    return ax
|
python
|
{
"resource": ""
}
|
q279615
|
plot_one_digit_freqs
|
test
|
def plot_one_digit_freqs(f1):
    """
    Plot one digit frequency counts using matplotlib.
    `f1` is a length-10 count array, plotted as blue circles joined
    by lines ('bo-').  Returns the value of plt.plot.
    """
    ax = plt.plot(f1,'bo-')
    plt.title('Single digit counts in pi')
    plt.xlabel('Digit')
    plt.ylabel('Count')
    return ax
|
python
|
{
"resource": ""
}
|
q279616
|
debugx
|
test
|
def debugx(expr, pre_msg=''):
    """Print the value of an expression from the caller's frame.

    *expr* is evaluated with eval() in the caller's globals/locals and
    printed together with the calling function's name.  An optional
    *pre_msg* is prepended to the printed expr->value pair.
    """
    caller = sys._getframe(1)
    value = eval(expr, caller.f_globals, caller.f_locals)
    print('[DBG:%s] %s%s -> %r' % (caller.f_code.co_name, pre_msg, expr, value))
|
python
|
{
"resource": ""
}
|
q279617
|
reverse
|
test
|
def reverse(view, *args, **kwargs):
    '''
    Convenience wrapper around Django's `reverse`.

    Positional and keyword arguments become the `args` and `kwargs`
    arguments of django.core.urlresolvers.reverse.  The special
    optional keyword argument `query` is a dictionary of query (GET)
    parameters appended to the resolved URL.

    Example:
        reverse('products:category', categoryId = 5, query = {'page': 2})
    is equivalent to
        django.core.urlresolvers.reverse('products:category', kwargs = {'categoryId': 5}) + '?page=2'
    '''
    query = kwargs.pop('query', None)
    base = urlresolvers.reverse(view, args=args, kwargs=kwargs)
    if not query:
        return base
    return '{}?{}'.format(base, django.utils.http.urlencode(query))
|
python
|
{
"resource": ""
}
|
q279618
|
is_private
|
test
|
def is_private(prefix, base):
    """prefix, base -> True iff name prefix + "." + base is "private".

    Deprecated helper: the prefix is ignored (functions conforming to
    this protocol may use it).  A base name is private when it starts
    with at least one underscore but does not both start and end with
    two underscores (dunder names are not private).
    """
    warnings.warn("is_private is deprecated; it wasn't useful; "
                  "examine DocTestFinder.find() lists instead",
                  DeprecationWarning, stacklevel=2)
    return base.startswith("_") and not (base.startswith("__") and base.endswith("__"))
|
python
|
{
"resource": ""
}
|
q279619
|
DocFileSuite
|
test
|
def DocFileSuite(*paths, **kw):
    """Build a unittest suite from one or more doctest files.

    Each path is given as a string; its interpretation depends on the
    keyword arguments, all optional:

    module_relative
        True (the default): paths are os-independent and
        module-relative, must use "/" separators and may not be
        absolute; they are resolved against the calling module's
        directory unless "package" is given.  False: paths are
        os-specific, absolute or relative to the current working
        directory (and specifying "package" is an error).
    package
        A Python package, or package name, whose directory anchors
        module-relative paths.
    setUp, tearDown
        Called before / after running the tests in each file, with
        the DocTest object as argument; the test globals are
        available through its globs attribute.
    globs
        A dictionary of initial global variables for the tests.
    optionflags
        Doctest option flags expressed as an integer.
    parser
        A DocTestParser (or subclass) used to extract tests from the
        files.
    """
    # Normalize the package here rather than in DocFileTest so the
    # calling-module guess is made at the correct stack level.
    if kw.get('module_relative', True):
        kw['package'] = _normalize_module(kw.get('package'))
    suite = unittest.TestSuite()
    for path in paths:
        suite.addTest(DocFileTest(path, **kw))
    return suite
|
python
|
{
"resource": ""
}
|
q279620
|
debug_src
|
test
|
def debug_src(src, pm=False, globs=None):
    """Debug a single doctest docstring given as the string `src`."""
    debug_script(script_from_examples(src), pm, globs)
|
python
|
{
"resource": ""
}
|
q279621
|
debug_script
|
test
|
def debug_script(src, pm=False, globs=None):
"Debug a test script. `src` is the script, as a string."
import pdb
# Note that tempfile.NameTemporaryFile() cannot be used. As the
# docs say, a file so created cannot be opened by name a second time
# on modern Windows boxes, and execfile() needs to open it.
srcfilename = tempfile.mktemp(".py", "doctestdebug")
f = open(srcfilename, 'w')
f.write(src)
f.close()
try:
if globs:
globs = globs.copy()
else:
globs = {}
if pm:
try:
execfile(srcfilename, globs, globs)
except:
print sys.exc_info()[1]
pdb.post_mortem(sys.exc_info()[2])
else:
# Note that %r is vital here. '%s' instead can, e.g., cause
# backslashes to get treated as metacharacters on Windows.
pdb.run("execfile(%r)" % srcfilename, globs, globs)
finally:
os.remove(srcfilename)
|
python
|
{
"resource": ""
}
|
q279622
|
debug
|
test
|
def debug(module, name, pm=False):
    """Debug a single doctest docstring.

    `module` is a module (or its dotted name) and `name` identifies
    the object within that module whose docstring tests are debugged.
    """
    mod = _normalize_module(module)
    debug_script(testsource(mod, name), pm, mod.__dict__)
|
python
|
{
"resource": ""
}
|
q279623
|
PickleShareDB.hdict
|
test
|
def hdict(self, hashroot):
    """ Get all data contained in hashed category 'hashroot' as dict """
    hfiles = self.keys(hashroot + "/*")
    hfiles.sort()
    # Empty list -> '' so the endswith() test below is safe.
    last = len(hfiles) and hfiles[-1] or ''
    if last.endswith('xx'):
        # The compressed 'xx' file (written by hcompress) is loaded
        # first so later per-key files can override its entries.
        hfiles = [last] + hfiles[:-1]
    all = {}
    for f in hfiles:
        # print "using",f
        try:
            all.update(self[f])
        except KeyError:
            # Unreadable entry: drop it and move on (best effort).
            print "Corrupt",f,"deleted - hset is not threadsafe!"
            del self[f]
        self.uncache(f)
    return all
|
python
|
{
"resource": ""
}
|
q279624
|
PickleShareDB.hcompress
|
test
|
def hcompress(self, hashroot):
    """ Compress category 'hashroot', so hset is fast again
    hget will fail if fast_only is True for compressed items (that were
    hset before hcompress).
    """
    hfiles = self.keys(hashroot + "/*")
    all = {}
    # Merge every per-key file into one dict, evicting each from the
    # in-memory cache as we go.
    for f in hfiles:
        # print "using",f
        all.update(self[f])
        self.uncache(f)
    # Store the merged data in a single compressed 'xx' entry...
    self[hashroot + '/xx'] = all
    # ...then delete the now-redundant individual files.
    for f in hfiles:
        p = self.root / f
        if p.basename() == 'xx':
            continue
        p.remove()
|
python
|
{
"resource": ""
}
|
q279625
|
PickleShareDB.keys
|
test
|
def keys(self, globpat=None):
    """Return all keys in the DB, or only those matching *globpat*."""
    if globpat is None:
        candidates = self.root.walkfiles()
    else:
        candidates = [Path(match) for match in glob.glob(self.root / globpat)]
    return [self._normalized(item) for item in candidates if item.isfile()]
|
python
|
{
"resource": ""
}
|
q279626
|
FilterSet.allow
|
test
|
def allow(self, record):
    """Return whether *record* should be printed by this filter set."""
    if not self:
        # Empty filter set: nothing to filter, everything passes.
        return True
    allowed = self._allow(record)
    return allowed and not self._deny(record)
|
python
|
{
"resource": ""
}
|
q279627
|
FilterSet._any_match
|
test
|
def _any_match(matchers, record):
    """Return the bool of whether `record` equals, or is a dotted
    child of, any item in `matchers`."""
    def matches(candidate):
        return record == candidate or record.startswith(candidate + '.')
    return anyp(bool, map(matches, matchers))
|
python
|
{
"resource": ""
}
|
q279628
|
LogCapture.formatError
|
test
|
def formatError(self, test, err):
    """Attach captured log messages to *test* and fold them into the
    error tuple's value (logic mirrors Capture.formatError)."""
    records = self.formatLogRecords()
    test.capturedLogging = records
    if not records:
        # Nothing captured: hand the error back untouched.
        return err
    ec, ev, tb = err
    return (ec, self.addCaptureToErr(ev, records), tb)
|
python
|
{
"resource": ""
}
|
q279629
|
embed
|
test
|
def embed(**kwargs):
    """Call this to embed IPython at the current point in your program.
    The first invocation of this will create an :class:`InteractiveShellEmbed`
    instance and then call it.  Consecutive calls just call the already
    created instance (later keyword arguments other than `header` are
    therefore ignored).
    Here is a simple example::
        from IPython import embed
        a = 10
        b = 20
        embed('First time')
        c = 30
        d = 40
        embed
    Full customization can be done by passing a :class:`Struct` in as the
    config argument.
    """
    config = kwargs.get('config')
    # 'header' is consumed here and passed to the shell call, not to
    # the shell constructor.
    header = kwargs.pop('header', u'')
    if config is None:
        config = load_default_config()
        config.InteractiveShellEmbed = config.TerminalInteractiveShell
        kwargs['config'] = config
    # A single shell instance is reused across calls.
    global _embedded_shell
    if _embedded_shell is None:
        _embedded_shell = InteractiveShellEmbed(**kwargs)
    # stack_depth=2 so the shell picks up the caller's namespace.
    _embedded_shell(header=header, stack_depth=2)
|
python
|
{
"resource": ""
}
|
q279630
|
InteractiveShellEmbed.mainloop
|
test
|
def mainloop(self, local_ns=None, module=None, stack_depth=0,
             display_banner=None, global_ns=None):
    """Embeds IPython into a running python program.
    Input:
    - header: An optional header message can be specified.
    - local_ns, module: working local namespace (a dict) and module (a
    module or similar object). If given as None, they are automatically
    taken from the scope where the shell was called, so that
    program variables become visible.
    - stack_depth: specifies how many levels in the stack to go to
    looking for namespaces (when local_ns or module is None).  This
    allows an intermediate caller to make sure that this function gets
    the namespace from the intended level in the stack.  By default (0)
    it will get its locals and globals from the immediate caller.
    Warning: it's possible to use this in a program which is being run by
    IPython itself (via %run), but some funny things will happen (a few
    globals get overwritten). In the future this will be cleaned up, as
    there is no fundamental reason why it can't work perfectly."""
    # Deprecated path: wrap a raw global-namespace dict in a dummy
    # module object so the rest of the code can treat it uniformly.
    if (global_ns is not None) and (module is None):
        class DummyMod(object):
            """A dummy module object for embedded IPython."""
            pass
        warnings.warn("global_ns is deprecated, use module instead.", DeprecationWarning)
        module = DummyMod()
        module.__dict__ = global_ns
    # Get locals and globals from caller
    if (local_ns is None or module is None) and self.default_user_namespaces:
        call_frame = sys._getframe(stack_depth).f_back
        if local_ns is None:
            local_ns = call_frame.f_locals
        if module is None:
            global_ns = call_frame.f_globals
            module = sys.modules[global_ns['__name__']]
    # Save original namespace and module so we can restore them after
    # embedding; otherwise the shell doesn't shut down correctly.
    orig_user_module = self.user_module
    orig_user_ns = self.user_ns
    # Update namespaces and fire up interpreter
    # The global one is easy, we can just throw it in
    if module is not None:
        self.user_module = module
    # But the user/local one is tricky: ipython needs it to store internal
    # data, but we also need the locals. We'll throw our hidden variables
    # like _ih and get_ipython() into the local namespace, but delete them
    # later.
    if local_ns is not None:
        self.user_ns = local_ns
        self.init_user_ns()
    # Patch for global embedding to make sure that things don't overwrite
    # user globals accidentally. Thanks to Richard <[email protected]>
    # FIXME. Test this a bit more carefully (the if.. is new)
    # N.B. This can't now ever be called. Not sure what it was for.
    # And now, since it wasn't called in the previous version, I'm
    # commenting out these lines so they can't be called with my new changes
    # --TK, 2011-12-10
    #if local_ns is None and module is None:
    #    self.user_global_ns.update(__main__.__dict__)
    # make sure the tab-completer has the correct frame information, so it
    # actually completes using the frame's locals/globals
    self.set_completer_frame()
    with nested(self.builtin_trap, self.display_trap):
        self.interact(display_banner=display_banner)
    # now, purge out the local namespace of IPython's hidden variables.
    if local_ns is not None:
        for name in self.user_ns_hidden:
            local_ns.pop(name, None)
    # Restore original namespace so shell can shut down when we exit.
    self.user_module = orig_user_module
    self.user_ns = orig_user_ns
|
python
|
{
"resource": ""
}
|
q279631
|
_get_new_csv_writers
|
test
|
def _get_new_csv_writers(trans_title, meta_title,
                         trans_csv_path, meta_csv_path):
    """
    Create csv writers for the translation and meta files, write the
    title rows, and return the pair (trans_writer, meta_writer).
    """
    writers = []
    for title, path in ((trans_title, trans_csv_path),
                        (meta_title, meta_csv_path)):
        writer = UnicodeWriter(path)
        writer.writerow(title)
        writers.append(writer)
    return tuple(writers)
|
python
|
{
"resource": ""
}
|
q279632
|
_prepare_locale_dirs
|
test
|
def _prepare_locale_dirs(languages, locale_root):
"""
Prepare locale dirs for writing po files.
Create new directories if they doesn't exist.
"""
trans_languages = []
for i, t in enumerate(languages):
lang = t.split(':')[0]
trans_languages.append(lang)
lang_path = os.path.join(locale_root, lang)
if not os.path.exists(lang_path):
os.makedirs(lang_path)
return trans_languages
|
python
|
{
"resource": ""
}
|
q279633
|
_write_entries
|
test
|
def _write_entries(po_files, languages, msgid, msgstrs, metadata, comment):
    """
    Append a PO entry for *msgid* to every language's po file.

    Metadata arrive as a string and are parsed with ast.literal_eval
    into a dict, so read them only from gdocs.  The msgid's leading
    and trailing whitespace is copied onto each translated msgstr.
    """
    leading_ws = re.compile(r'^[\s]+')
    trailing_ws = re.compile(r'[\s]+$')
    for idx, lang in enumerate(languages):
        entry = polib.POEntry(**ast.literal_eval(metadata))
        entry.tcomment = comment
        entry.msgid = msgid
        translation = msgstrs[idx]
        if translation:
            head = leading_ws.search(msgid)
            tail = trailing_ws.search(msgid)
            entry.msgstr = str(head.group() if head else '') + \
                unicode(translation.strip()) + \
                str(tail.group() if tail else '')
        else:
            entry.msgstr = ''
        po_files[lang].append(entry)
|
python
|
{
"resource": ""
}
|
q279634
|
_write_header
|
test
|
def _write_header(po_path, lang, header):
    """
    Write header into po file for specific lang.
    Metadata are read from settings file.
    """
    po_file = open(po_path, 'w')
    po_file.write(header + '\n')
    # Emit the PO metadata block: an empty msgid/msgstr pair followed
    # by the standard header fields.  Note the mix of escapes: the
    # raw r'\n"' pieces are literal backslash-n characters inside the
    # quoted PO strings, while plain '\n' escapes are real newlines.
    po_file.write(
        'msgid ""' +
        '\nmsgstr ""' +
        '\n"MIME-Version: ' + settings.METADATA['MIME-Version'] + r'\n"'
        '\n"Content-Type: ' + settings.METADATA['Content-Type'] + r'\n"'
        '\n"Content-Transfer-Encoding: ' +
        settings.METADATA['Content-Transfer-Encoding'] + r'\n"'
        '\n"Language: ' + lang + r'\n"' + '\n')
    po_file.close()
|
python
|
{
"resource": ""
}
|
q279635
|
Notifo.subscribe_user
|
test
|
def subscribe_user(self, user):
    """Subscribe *user* to a service via the notifo API.

    Returns whatever the underlying _query call returns.
    """
    url = self.root_url + "subscribe_user"
    payload = {"username": user}
    return self._query(url, payload)
|
python
|
{
"resource": ""
}
|
q279636
|
init_parser
|
test
|
def init_parser():
    """Create the command line option parser and parse sys.argv.

    Returns the tuple (parser, options, args).
    """
    usage = "usage: %prog -u user -s secret -n name [-l label] \
[-t title] [-c callback] [TEXT]"
    parser = OptionParser(usage, version="%prog " + notifo.__version__)
    # Simple string-valued options share the same shape; declare them
    # in one table.
    for short_flag, long_flag, dest, help_text in (
            ("-u", "--user", "user", "your notifo username"),
            ("-s", "--secret", "secret", "your notifo API secret"),
            ("-n", "--name", "name", "recipient for the notification"),
            ("-l", "--label", "label", "label for the notification"),
            ("-t", "--title", "title", "title of the notification"),
            ("-c", "--callback", "callback", "callback URL to call")):
        parser.add_option(short_flag, long_flag, action="store", dest=dest,
                          help=help_text)
    parser.add_option("-m", "--message", action="store_true", dest="message",
                      default=False, help="send message instead of notification")
    (options, args) = parser.parse_args()
    return (parser, options, args)
|
python
|
{
"resource": ""
}
|
q279637
|
run_python_module
|
test
|
def run_python_module(modulename, args):
    """Run a python module, as though with ``python -m name args...``.
    `modulename` is the name of the module, possibly a dot-separated name.
    `args` is the argument array to present as sys.argv, including the first
    element naming the module being executed.
    Raises NoSource if the module cannot be found or does not live in a
    file.
    """
    openfile = None
    glo, loc = globals(), locals()
    try:
        try:
            # Search for the module - inside its parent package, if any - using
            # standard import mechanics.
            if '.' in modulename:
                packagename, name = rsplit1(modulename, '.')
                package = __import__(packagename, glo, loc, ['__path__'])
                searchpath = package.__path__
            else:
                packagename, name = None, modulename
                searchpath = None  # "top-level search" in imp.find_module()
            openfile, pathname, _ = imp.find_module(name, searchpath)
            # Complain if this is a magic non-file module.
            if openfile is None and pathname is None:
                raise NoSource(
                    "module does not live in a file: %r" % modulename
                    )
            # If `modulename` is actually a package, not a mere module, then we
            # pretend to be Python 2.7 and try running its __main__.py script.
            if openfile is None:
                packagename = modulename
                name = '__main__'
                package = __import__(packagename, glo, loc, ['__path__'])
                searchpath = package.__path__
                openfile, pathname, _ = imp.find_module(name, searchpath)
        except ImportError:
            # Re-raise import failures in our own exception type.
            _, err, _ = sys.exc_info()
            raise NoSource(str(err))
    finally:
        # imp.find_module leaves the file open; always close it.
        if openfile:
            openfile.close()
    # Finally, hand the file off to run_python_file for execution.
    pathname = os.path.abspath(pathname)
    args[0] = pathname
    run_python_file(pathname, args, package=packagename)
|
python
|
{
"resource": ""
}
|
q279638
|
run_python_file
|
test
|
def run_python_file(filename, args, package=None):
    """Run a python file as if it were the main program on the command line.
    `filename` is the path to the file to execute, it need not be a .py file.
    `args` is the argument array to present as sys.argv, including the first
    element naming the file being executed.  `package` is the name of the
    enclosing package, if any.
    sys.modules['__main__'] and sys.argv are swapped out for the duration
    of the run and restored afterwards.
    """
    # Create a module to serve as __main__
    old_main_mod = sys.modules['__main__']
    main_mod = imp.new_module('__main__')
    sys.modules['__main__'] = main_mod
    main_mod.__file__ = filename
    if package:
        main_mod.__package__ = package
    main_mod.__builtins__ = BUILTINS
    # Set sys.argv properly.
    old_argv = sys.argv
    sys.argv = args
    try:
        # Make a code object somehow.
        if filename.endswith(".pyc") or filename.endswith(".pyo"):
            code = make_code_from_pyc(filename)
        else:
            code = make_code_from_py(filename)
        # Execute the code object.
        try:
            exec_code_object(code, main_mod.__dict__)
        except SystemExit:
            # The user called sys.exit(). Just pass it along to the upper
            # layers, where it will be handled.
            raise
        except:
            # Something went wrong while executing the user code.
            # Get the exc_info, and pack them into an exception that we can
            # throw up to the outer loop. We peel two layers off the traceback
            # so that the coverage.py code doesn't appear in the final printed
            # traceback.
            typ, err, tb = sys.exc_info()
            raise ExceptionDuringRun(typ, err, tb.tb_next.tb_next)
    finally:
        # Restore the old __main__
        sys.modules['__main__'] = old_main_mod
        # Restore the old argv and path
        sys.argv = old_argv
|
python
|
{
"resource": ""
}
|
q279639
|
make_code_from_py
|
test
|
def make_code_from_py(filename):
    """Read the Python source at `filename` and compile it to a code object.

    Raises NoSource when the file cannot be opened.
    """
    # Fetch the source text, translating a missing/unreadable file into
    # the package's NoSource error.
    try:
        src_file = open_source(filename)
    except IOError:
        raise NoSource("No file to run: %r" % filename)
    try:
        src_text = src_file.read()
    finally:
        src_file.close()
    # `compile` requires the source to end with a newline; append one when
    # the file is empty or its last line is unterminated.
    if not src_text.endswith('\n'):
        src_text += '\n'
    return compile(src_text, filename, "exec")
|
python
|
{
"resource": ""
}
|
q279640
|
make_code_from_pyc
|
test
|
def make_code_from_pyc(filename):
    """Get a code object from a .pyc file.

    Raises NoCode if the file is missing or was compiled by a different
    Python version (magic-number mismatch).
    """
    try:
        fpyc = open(filename, "rb")
    except IOError:
        raise NoCode("No file to run: %r" % filename)
    try:
        # First four bytes are a version-specific magic number. It has to
        # match or we won't run the file.
        magic = fpyc.read(4)
        if magic != imp.get_magic():
            raise NoCode("Bad magic number in .pyc file")
        # Skip the junk in the header that we don't need.
        fpyc.read(4)    # Skip the moddate.
        if sys.version_info >= (3, 3):
            # 3.3 added another long to the header (size), skip it.
            fpyc.read(4)
        # The rest of the file is the marshalled code object we want.
        code = marshal.load(fpyc)
    finally:
        fpyc.close()
    return code
|
python
|
{
"resource": ""
}
|
q279641
|
html_tableify
|
test
|
def html_tableify(item_matrix, select=None, header=None, footer=None):
    """Return an HTML table string for a matrix of cell strings.

    :param item_matrix: list of rows, each a list of strings.
    :param select: optional (row, col) of the cell to mark with the
        "inverted" CSS class.
    :param header: optional string repeated in a header cell per column.
    :param footer: optional string repeated in a footer cell per column.
    :return: the HTML markup, or '' for an empty matrix.
    """
    if not item_matrix:
        return ''
    tds = lambda text: u'<td>' + text + u' </td>'
    trs = lambda text: u'<tr>' + text + u'</tr>'
    # BUG FIX: wrap map() in list() so cell assignment below works on
    # Python 3, where map returns a lazy iterator (on Python 2 map already
    # produced a list, so behavior there is unchanged).
    tds_items = [list(map(tds, row)) for row in item_matrix]
    if select:
        row, col = select
        # Re-render the selected cell with the "inverted" class.
        tds_items[row][col] = (u'<td class="inverted">'
                               + item_matrix[row][col]
                               + u' </td>')
    html_cols = map(trs, (u''.join(row) for row in tds_items))
    head = ''
    foot = ''
    if header:
        # One header cell per column of the first row.
        head = u'<tr>' + (u'<td>' + header + u'</td>') * len(item_matrix[0]) + u'</tr>'
    if footer:
        foot = u'<tr>' + (u'<td>' + footer + u'</td>') * len(item_matrix[0]) + u'</tr>'
    return (u'<table class="completion" style="white-space:pre">'
            + head + u''.join(html_cols) + foot + u'</table>')
|
python
|
{
"resource": ""
}
|
q279642
|
SlidingInterval.current
|
test
|
def current(self, value):
    """Set the cursor position, clamped into [min, max].

    Slides the visible [start, stop] window so the cursor stays inside it,
    then snaps the window edges to the extremes when they come within the
    sticky margin.
    """
    clamped = max(self._min, min(value, self._max))
    self._current = clamped
    # Slide the window when the cursor escapes it on either side.
    if clamped > self._stop:
        self._stop = clamped
        self._start = clamped - self._width
    elif clamped < self._start:
        self._start = clamped
        self._stop = clamped + self._width
    # Snap to the extremes when close enough ("sticky" edges).
    if abs(self._start - self._min) <= self._sticky_lenght:
        self._start = self._min
    if abs(self._stop - self._max) <= self._sticky_lenght:
        self._stop = self._max
|
python
|
{
"resource": ""
}
|
q279643
|
CompletionHtml.cancel_completion
|
test
|
def cancel_completion(self):
    """Dismiss the completer.

    Should be called when the completion popup has to go away: resets the
    internal cycling/selection state and clears the console's temporary
    buffer where the completions were shown.
    """
    # Reset tab-cycling and selection state.
    self._consecutive_tab = 0
    self._slice_start = 0
    self._index = (0, 0)
    # Drop the sliding window, if one was active.
    if self._sliding_interval:
        self._sliding_interval = None
    # Wipe the completions currently shown in the console.
    self._console_widget._clear_temporary_buffer()
|
python
|
{
"resource": ""
}
|
q279644
|
CompletionHtml._select_index
|
test
|
def _select_index(self, row, col):
    """Change the selection index, and make sure it stays in the right range.

    A little more complicated than just doing modulo the number of rows and
    columns, to be sure to cycle through every element.
    Horizontally, the elements are mapped like this:
        to r <-- a b c d e f --> to g
        to f <-- g h i j k l --> to m
        to l <-- m n o p q r --> to a
    and vertically
        a d g j m p
        b e h k n q
        c f i l o r

    Out-of-range indices are fixed up by a single recursive call per step.
    """
    nr, nc = self._size
    nr = nr-1
    nc = nc-1

    # case 1: walked past the bottom-right corner -> wrap to first cell
    if (row > nr and col >= nc) or (row >= nr and col > nc):
        self._select_index(0, 0)
    # case 2: walked before the top-left corner -> wrap to last cell
    elif (row <= 0 and col < 0) or (row < 0 and col <= 0):
        self._select_index(nr, nc)
    # case 3: fell off the bottom -> top of next column
    elif row > nr :
        self._select_index(0, col+1)
    # case 4: fell off the top -> bottom of previous column
    elif row < 0 :
        self._select_index(nr, col-1)
    # case 5: fell off the right -> start of next row
    elif col > nc :
        self._select_index(row+1, 0)
    # case 6: fell off the left -> end of previous row
    elif col < 0 :
        self._select_index(row-1, nc)
    elif 0 <= row and row <= nr and 0 <= col and col <= nc :
        # in range: commit the selection
        self._index = (row, col)
    else :
        raise NotImplementedError("you'r trying to go where no completion\
 have gone before : %d:%d (%d:%d)"%(row, col, nr, nc) )
|
python
|
{
"resource": ""
}
|
q279645
|
CompletionHtml.select_up
|
test
|
def select_up(self):
    """Move the completion cursor one row up."""
    row, col = self._index
    self._select_index(row - 1, col)
|
python
|
{
"resource": ""
}
|
q279646
|
CompletionHtml.select_down
|
test
|
def select_down(self):
    """Move the completion cursor one row down."""
    row, col = self._index
    self._select_index(row + 1, col)
|
python
|
{
"resource": ""
}
|
q279647
|
CompletionHtml.select_left
|
test
|
def select_left(self):
    """Move the completion cursor one column to the left."""
    row, col = self._index
    self._select_index(row, col - 1)
|
python
|
{
"resource": ""
}
|
q279648
|
CompletionHtml.select_right
|
test
|
def select_right(self):
    """Move the completion cursor one column to the right."""
    row, col = self._index
    self._select_index(row, col + 1)
|
python
|
{
"resource": ""
}
|
q279649
|
CompletionHtml._update_list
|
test
|
def _update_list(self, hilight=True):
    """Redraw the list of completions, optionally highlighting the current one.

    Scrolls the sliding interval to the current row, adds '...' markers when
    rows are clipped above/below, and re-renders the HTML table into the
    console's temporary buffer.
    """
    # Keep the visible window centered on the current row.
    self._sliding_interval.current = self._index[0]
    head = None
    foot = None
    # '...' markers signal clipped rows above/below the window.
    if self._sliding_interval.start > 0 :
        head = '...'
    if self._sliding_interval.stop < self._sliding_interval._max:
        foot = '...'
    # Only the rows inside the sliding window are rendered.
    items_m = self._justified_items[\
                   self._sliding_interval.start:\
                   self._sliding_interval.stop+1\
                   ]

    self._console_widget._clear_temporary_buffer()
    if(hilight):
        # Highlight position is relative to the window, not the full list.
        sel = (self._sliding_interval.nth, self._index[1])
    else :
        sel = None

    strng = html_tableify(items_m, select=sel, header=head, footer=foot)
    self._console_widget._fill_temporary_buffer(self._old_cursor, strng, html=True)
|
python
|
{
"resource": ""
}
|
q279650
|
wordfreq
|
test
|
def wordfreq(text, is_filename=False):
    """Return a dict mapping lowercased words to their counts in a string.

    If `is_filename` is true, `text` is treated as a path and the file's
    contents are counted instead.
    """
    if is_filename:
        with open(text) as fh:
            text = fh.read()
    counts = {}
    for token in text.split():
        key = token.lower()
        counts[key] = counts.get(key, 0) + 1
    return counts
|
python
|
{
"resource": ""
}
|
q279651
|
print_wordfreq
|
test
|
def print_wordfreq(freqs, n=10):
    """Print the n most common words and counts in the freqs dict.

    Ties in count are broken by reverse-alphabetical word order, matching
    the original (count, word) tuple sort.
    """
    # BUG FIX: the original did `items = zip(counts, words); items.sort(...)`,
    # which fails on Python 3 where zip() returns an iterator with no .sort().
    # Pairing via items() also avoids relying on separate keys()/values()
    # calls lining up.  sorted() works on both Python 2 and 3.
    items = sorted(((count, word) for word, count in freqs.items()),
                   reverse=True)
    for (count, word) in items[:n]:
        print(word, count)
|
python
|
{
"resource": ""
}
|
q279652
|
WinHPCJob.tostring
|
test
|
def tostring(self):
    """Return the string representation of the job description XML,
    with an XML declaration prepended."""
    root = self.as_element()
    # Pretty-print the tree in place before serializing.
    indent(root)
    txt = ET.tostring(root, encoding="utf-8")
    # Now remove the tokens used to order the attributes.
    # (Presumably `_A_`-style prefixes injected by as_element so ElementTree
    # emits attributes in a fixed order — verify against as_element.)
    txt = re.sub(r'_[A-Z]_','',txt)
    txt = '<?xml version="1.0" encoding="utf-8"?>\n' + txt
    return txt
|
python
|
{
"resource": ""
}
|
q279653
|
WinHPCJob.write
|
test
|
def write(self, filename):
    """Serialize the XML job description and write it to `filename`."""
    with open(filename, 'w') as fh:
        fh.write(self.tostring())
|
python
|
{
"resource": ""
}
|
q279654
|
validate_pin
|
test
|
def validate_pin(pin):
    """ Validate the given pin against the schema.

    :param dict pin: The pin to validate.
    :raises pypebbleapi.schemas.DocumentError: If the pin is not valid.
    """
    validator = _Validator(schemas.pin)
    # Guard clause: only an invalid pin raises; a valid one returns None.
    if not validator.validate(pin):
        raise schemas.DocumentError(errors=validator.errors)
|
python
|
{
"resource": ""
}
|
q279655
|
Timeline.send_shared_pin
|
test
|
def send_shared_pin(self, topics, pin, skip_validation=False):
    """
    Send a shared pin for the given topics.

    :param list topics: The list of topics.
    :param dict pin: The pin.  Must contain an 'id' key (used in the URL).
    :param bool skip_validation: Whether to skip the validation.
    :raises pypebbleapi.schemas.DocumentError: If the validation process failed.
    :raises `requests.exceptions.HTTPError`: If an HTTP error occurred.
    :raises ValueError: If no api_key is configured on this Timeline.
    """
    # Shared pins are managed with the application's API key, not a user token.
    if not self.api_key:
        raise ValueError("You need to specify an api_key.")

    if not skip_validation:
        validate_pin(pin)

    # PUT is idempotent: sending the same pin id again updates it.
    response = _request('PUT',
        url=self.url_v1('/shared/pins/' + pin['id']),
        user_agent=self.user_agent,
        api_key=self.api_key,
        topics_list=topics,
        json=pin,
    )
    _raise_for_status(response)
|
python
|
{
"resource": ""
}
|
q279656
|
Timeline.delete_shared_pin
|
test
|
def delete_shared_pin(self, pin_id):
    """
    Delete a shared pin.

    :param str pin_id: The id of the pin to delete.
    :raises `requests.exceptions.HTTPError`: If an HTTP error occurred.
    :raises ValueError: If no api_key is configured on this Timeline.
    """
    # Shared pins are managed with the application's API key, not a user token.
    if not self.api_key:
        raise ValueError("You need to specify an api_key.")

    response = _request('DELETE',
        url=self.url_v1('/shared/pins/' + pin_id),
        user_agent=self.user_agent,
        api_key=self.api_key,
    )
    _raise_for_status(response)
|
python
|
{
"resource": ""
}
|
q279657
|
Timeline.send_user_pin
|
test
|
def send_user_pin(self, user_token, pin, skip_validation=False):
    """
    Send a user pin.

    :param str user_token: The token of the user.
    :param dict pin: The pin.  Must contain an 'id' key (used in the URL).
    :param bool skip_validation: Whether to skip the validation.
    :raises pypebbleapi.schemas.DocumentError: If the validation process failed.
    :raises `requests.exceptions.HTTPError`: If an HTTP error occurred.
    """
    if not skip_validation:
        validate_pin(pin)

    # User pins are authenticated with the user's token; no api_key needed.
    response = _request('PUT',
        url=self.url_v1('/user/pins/' + pin['id']),
        user_agent=self.user_agent,
        user_token=user_token,
        json=pin,
    )
    _raise_for_status(response)
|
python
|
{
"resource": ""
}
|
q279658
|
Timeline.delete_user_pin
|
test
|
def delete_user_pin(self, user_token, pin_id):
    """
    Delete a user pin.

    :param str user_token: The token of the user.
    :param str pin_id: The id of the pin to delete.
    :raises `requests.exceptions.HTTPError`: If an HTTP error occurred.
    """
    # User pins are authenticated with the user's token; no api_key needed.
    response = _request('DELETE',
        url=self.url_v1('/user/pins/' + pin_id),
        user_agent=self.user_agent,
        user_token=user_token,
    )
    _raise_for_status(response)
|
python
|
{
"resource": ""
}
|
q279659
|
Timeline.subscribe
|
test
|
def subscribe(self, user_token, topic):
    """
    Subscribe a user to the given topic.

    :param str user_token: The token of the user.
    :param str topic: The topic.
    :raises `requests.exceptions.HTTPError`: If an HTTP error occurred.
    """
    response = _request('POST',
        url=self.url_v1('/user/subscriptions/' + topic),
        user_agent=self.user_agent,
        user_token=user_token,
    )
    _raise_for_status(response)
|
python
|
{
"resource": ""
}
|
q279660
|
Timeline.list_subscriptions
|
test
|
def list_subscriptions(self, user_token):
    """
    Get the list of the topics which a user is subscribed to.

    :param str user_token: The token of the user.
    :return: The list of the topics.
    :rtype: list
    :raises `requests.exceptions.HTTPError`: If an HTTP error occurred.
    """
    response = _request('GET',
        url=self.url_v1('/user/subscriptions'),
        user_agent=self.user_agent,
        user_token=user_token,
    )
    _raise_for_status(response)

    # The API wraps the list in a JSON object under the 'topics' key.
    return response.json()['topics']
|
python
|
{
"resource": ""
}
|
q279661
|
monitored
|
test
|
def monitored(total: int, name=None, message=None):
    """
    Decorate a function to automatically begin and end a task on the progressmonitor.

    The function must have a parameter called 'monitor'.  If the caller does
    not supply one (positionally or by keyword), a NullMonitor is injected.

    :param total: total number of work units reported to the monitor.
    :param name: task name; defaults to the decorated function's __name__.
    :param message: optional initial message passed to monitor.task.
    """
    def decorator(f):
        nonlocal name
        # Position of the 'monitor' parameter so positional calls are found too.
        monitor_index = list(inspect.signature(f).parameters.keys()).index('monitor')
        if name is None:
            name = f.__name__
        @wraps(f)
        def wrapper(*args, **kargs):
            if len(args) > monitor_index:
                monitor = args[monitor_index]
            elif 'monitor' in kargs:
                monitor = kargs['monitor']
            else:
                monitor = kargs['monitor'] = NullMonitor()
            with monitor.task(total, name, message):
                # BUG FIX: the original discarded f's return value, making
                # every decorated function return None.  Propagate it.
                return f(*args, **kargs)
        return wrapper
    return decorator
|
python
|
{
"resource": ""
}
|
q279662
|
ProgressMonitor.begin
|
test
|
def begin(self, total: int, name=None, message=None):
    """Start work on this monitor.

    Records the task size and name, then emits a zero-progress update
    carrying the initial message.
    """
    self.total = total
    self.name = name or "ProgressMonitor"
    # Initial message falls back to the name, then a generic placeholder.
    self.update(0, message or name or "Working...")
|
python
|
{
"resource": ""
}
|
q279663
|
ProgressMonitor.task
|
test
|
def task(self, total: int, name=None, message=None):
    """Wrap code into a begin and end call on this monitor.

    Context manager: yields this monitor, and guarantees done() runs even
    when the wrapped code raises.
    """
    self.begin(total, name, message)
    try:
        yield self
    finally:
        self.done()
|
python
|
{
"resource": ""
}
|
q279664
|
ProgressMonitor.subtask
|
test
|
def subtask(self, units: int):
    """Create a submonitor worth `units` of this monitor's work (context manager)."""
    sm = self.submonitor(units)
    try:
        yield sm
    finally:
        if sm.total is None:
            # begin was never called, so the subtask cannot be closed;
            # credit its units directly to this monitor instead.
            self.update(units)
        else:
            sm.done()
|
python
|
{
"resource": ""
}
|
q279665
|
ProgressMonitor.update
|
test
|
def update(self, units: int=1, message: str=None):
    """Record `units` of completed work (capped at total) and notify listeners.

    :raises Exception: if begin() was never called on this monitor.
    """
    if self.total is None:
        raise Exception("Cannot call progressmonitor.update before calling begin")
    advanced = self.worked + units
    # Never report more work than the declared total.
    self.worked = self.total if advanced > self.total else advanced
    if message:
        self.message = message
    for notify in self.listeners:
        notify(self)
|
python
|
{
"resource": ""
}
|
q279666
|
ProgressMonitor.submonitor
|
test
|
def submonitor(self, units: int, *args, **kargs) -> 'ProgressMonitor':
    """
    Create a sub monitor that stands for N units of work in this monitor.

    The sub task should call .begin (or use @monitored / with .task) before
    calling updates.  Extra args/kargs are forwarded to the ProgressMonitor
    constructor.
    """
    submonitor = ProgressMonitor(*args, **kargs)
    # Remember how many of our units this child represents, and listen to
    # its updates so its progress is folded into ours.
    self.sub_monitors[submonitor] = units
    submonitor.add_listener(self._submonitor_update)
    return submonitor
|
python
|
{
"resource": ""
}
|
q279667
|
ProgressMonitor.done
|
test
|
def done(self, message: str=None):
    """
    Signal that this task is done.

    Completely optional: simply reports the remaining units via update(),
    with a "<name> done" message by default.
    """
    remaining = self.total - self.worked
    if message is None:
        message = "{} done".format(self.name) if self.name else "Done"
    self.update(units=remaining, message=message)
|
python
|
{
"resource": ""
}
|
q279668
|
page
|
test
|
def page(strng, start=0, screen_lines=0, pager_cmd=None,
         html=None, auto_html=False):
    """Print a string, piping through a pager.

    This version ignores the screen_lines and pager_cmd arguments and uses
    IPython's payload system instead.

    Parameters
    ----------
    strng : str
        Text to page.

    start : int
        Starting line at which to place the display.

    html : str, optional
        If given, an html string to send as well.

    auto_html : bool, optional
        If true, the input string is assumed to be valid reStructuredText and is
        converted to HTML with docutils.  Note that if docutils is not found,
        this option is silently ignored.

    Note
    ----
    Only one of the ``html`` and ``auto_html`` options can be given, not
    both.
    """
    # Some routines may auto-compute start offsets incorrectly and pass a
    # negative value.  Offset to 0 for robustness.
    start = max(0, start)
    shell = InteractiveShell.instance()

    if auto_html:
        try:
            # These defaults ensure user configuration variables for docutils
            # are not loaded, only our config is used here.
            defaults = {'file_insertion_enabled': 0,
                        'raw_enabled': 0,
                        '_disable_config': 1}
            html = publish_string(strng, writer_name='html',
                                  settings_overrides=defaults)
        except:
            # Best-effort: any docutils failure (including it being absent)
            # silently falls back to plain text.
            pass

    # Hand the text off to the frontend via the payload system.
    payload = dict(
        source='IPython.zmq.page.page',
        text=strng,
        html=html,
        start_line_number=start
        )
    shell.payload_manager.write_payload(payload)
|
python
|
{
"resource": ""
}
|
q279669
|
InstallRequirement.correct_build_location
|
test
|
def correct_build_location(self):
    """If the build location was a temporary directory, this will move it
    to a new more permanent location.

    No-op when source_dir is already set.  Requires that self.req and
    self._temp_build_dir have been established.

    :raises InstallationError: if a package already occupies the target
        directory.
    """
    if self.source_dir is not None:
        return
    assert self.req is not None
    assert self._temp_build_dir
    old_location = self._temp_build_dir
    new_build_dir = self._ideal_build_dir
    del self._ideal_build_dir
    # Editable installs are keyed by lowercased name on disk.
    if self.editable:
        name = self.name.lower()
    else:
        name = self.name
    new_location = os.path.join(new_build_dir, name)
    if not os.path.exists(new_build_dir):
        logger.debug('Creating directory %s', new_build_dir)
        _make_build_dir(new_build_dir)
    if os.path.exists(new_location):
        raise InstallationError(
            'A package already exists in %s; please remove it to continue'
            % display_path(new_location))
    logger.debug(
        'Moving package %s from %s to new location %s',
        self, display_path(old_location), display_path(new_location),
    )
    shutil.move(old_location, new_location)
    self._temp_build_dir = new_location
    self.source_dir = new_location
    # Cached egg-info path is stale after the move; force re-discovery.
    self._egg_info_path = None
|
python
|
{
"resource": ""
}
|
q279670
|
load_pyconfig_files
|
test
|
def load_pyconfig_files(config_files, path):
    """Load multiple Python config files, merging each of them in turn.

    Files that are missing are skipped silently; any other error while
    loading a file propagates to the caller.

    Parameters
    ==========
    config_files : list of str
        List of config files names to load and merge into the config.
    path : unicode
        The full path to the location of the config files.
    """
    config = Config()
    for cf in config_files:
        loader = PyFileConfigLoader(cf, path=path)
        try:
            next_config = loader.load_config()
        except ConfigFileNotFound:
            # Missing files are expected (e.g. optional profile configs).
            pass
        # CLEANUP: the original had a bare `except: raise` clause here,
        # which is a no-op — other exceptions propagate naturally.
        else:
            config._merge(next_config)
    return config
|
python
|
{
"resource": ""
}
|
q279671
|
PyFileConfigLoader.load_config
|
test
|
def load_config(self):
    """Load the config from a file and return it as a Struct.

    :raises ConfigFileNotFound: when the file cannot be located/opened
        (IOError is translated into this loader-level error).
    """
    self.clear()
    try:
        self._find_file()
    except IOError as e:
        raise ConfigFileNotFound(str(e))
    # Execute the file, then convert the resulting dict to a Config.
    self._read_file_as_dict()
    self._convert_to_config()
    return self.config
|
python
|
{
"resource": ""
}
|
q279672
|
PyFileConfigLoader._read_file_as_dict
|
test
|
def _read_file_as_dict(self):
    """Load the config file into self.config, with recursive loading."""
    # This closure is made available in the namespace that is used
    # to exec the config file.  It allows users to call
    # load_subconfig('myconfig.py') to load config files recursively.
    # It needs to be a closure because it has references to self.path
    # and self.config.  The sub-config is loaded with the same path
    # as the parent, but it uses an empty config which is then merged
    # with the parents.

    # If a profile is specified, the config file will be loaded
    # from that profile

    def load_subconfig(fname, profile=None):
        # import here to prevent circular imports
        from IPython.core.profiledir import ProfileDir, ProfileDirError
        if profile is not None:
            try:
                profile_dir = ProfileDir.find_profile_dir_by_name(
                        get_ipython_dir(),
                        profile,
                )
            except ProfileDirError:
                # Unknown profile: silently skip the sub-config.
                return
            path = profile_dir.location
        else:
            path = self.path
        loader = PyFileConfigLoader(fname, path)
        try:
            sub_config = loader.load_config()
        except ConfigFileNotFound:
            # Pass silently if the sub config is not there.  This happens
            # when a user is using a profile, but not the default config.
            pass
        else:
            self.config._merge(sub_config)

    # Again, this needs to be a closure and should be used in config
    # files to get the config being loaded.
    def get_config():
        return self.config

    namespace = dict(load_subconfig=load_subconfig, get_config=get_config)
    fs_encoding = sys.getfilesystemencoding() or 'ascii'
    conf_filename = self.full_filename.encode(fs_encoding)
    # Execute the user's config file with the two helpers in scope.
    py3compat.execfile(conf_filename, namespace)
|
python
|
{
"resource": ""
}
|
q279673
|
CommandLineConfigLoader._load_flag
|
test
|
def _load_flag(self, cfg):
    """Update self.config from a flag, which can be a dict or Config.

    :raises TypeError: if `cfg` is neither a dict nor a Config.
    """
    if isinstance(cfg, (dict, Config)):
        # don't clobber whole config sections, update
        # each section from config:
        # (iteritems: this module targets Python 2)
        for sec,c in cfg.iteritems():
            self.config[sec].update(c)
    else:
        raise TypeError("Invalid flag: %r" % cfg)
|
python
|
{
"resource": ""
}
|
q279674
|
KeyValueConfigLoader._decode_argv
|
test
|
def _decode_argv(self, argv, enc=None):
    """Decode each argv element to unicode, skipping already-decoded ones.

    Uses `enc` when given, otherwise DEFAULT_ENCODING (stdin's encoding).
    """
    encoding = DEFAULT_ENCODING if enc is None else enc
    # only decode if not already decoded
    return [arg if isinstance(arg, unicode) else arg.decode(encoding)
            for arg in argv]
|
python
|
{
"resource": ""
}
|
q279675
|
KeyValueConfigLoader.load_config
|
test
|
def load_config(self, argv=None, aliases=None, flags=None):
    """Parse the configuration and generate the Config object.

    After loading, any arguments that are not key-value or
    flags will be stored in self.extra_args - a list of
    unparsed command-line arguments.  This is used for
    arguments such as input files or subcommands.

    Parameters
    ----------
    argv : list, optional
        A list that has the form of sys.argv[1:] which has unicode
        elements of the form u"key=value". If this is None (default),
        then self.argv will be used.
    aliases : dict
        A dict of aliases for configurable traits.
        Keys are the short aliases, Values are the resolved trait.
        Of the form: `{'alias' : 'Configurable.trait'}`
    flags : dict
        A dict of flags, keyed by str name. Values can be Config objects
        or dicts.  When the flag is triggered, The config is loaded as
        `self.config.update(cfg)`.
    """
    from IPython.config.configurable import Configurable

    self.clear()
    if argv is None:
        argv = self.argv
    if aliases is None:
        aliases = self.aliases
    if flags is None:
        flags = self.flags

    # ensure argv is a list of unicode strings:
    uargv = self._decode_argv(argv)
    for idx,raw in enumerate(uargv):
        # strip leading '-'
        item = raw.lstrip('-')

        if raw == '--':
            # don't parse arguments after '--'
            # this is useful for relaying arguments to scripts, e.g.
            # ipython -i foo.py --pylab=qt -- args after '--' go-to-foo.py
            self.extra_args.extend(uargv[idx+1:])
            break

        if kv_pattern.match(raw):
            lhs,rhs = item.split('=',1)
            # Substitute longnames for aliases.
            if lhs in aliases:
                lhs = aliases[lhs]
            if '.' not in lhs:
                # probably a mistyped alias, but not technically illegal
                warn.warn("Unrecognized alias: '%s', it will probably have no effect."%lhs)
            try:
                self._exec_config_str(lhs, rhs)
            except Exception:
                raise ArgumentError("Invalid argument: '%s'" % raw)

        elif flag_pattern.match(raw):
            if item in flags:
                cfg,help = flags[item]
                self._load_flag(cfg)
            else:
                raise ArgumentError("Unrecognized flag: '%s'"%raw)
        elif raw.startswith('-'):
            # Single-dash unknowns: suggest the double-dash form if it
            # would be a valid key=value assignment.
            kv = '--'+item
            if kv_pattern.match(kv):
                raise ArgumentError("Invalid argument: '%s', did you mean '%s'?"%(raw, kv))
            else:
                raise ArgumentError("Invalid argument: '%s'"%raw)
        else:
            # keep all args that aren't valid in a list,
            # in case our parent knows what to do with them.
            self.extra_args.append(item)
    return self.config
|
python
|
{
"resource": ""
}
|
q279676
|
ArgParseConfigLoader.load_config
|
test
|
def load_config(self, argv=None, aliases=None, flags=None):
    """Parse command line arguments and return as a Config object.

    Parameters
    ----------

    args : optional, list
        If given, a list with the structure of sys.argv[1:] to parse
        arguments from. If not given, the instance's self.argv attribute
        (given at construction time) is used.

    aliases / flags : optional dicts; fall back to the instance's
        configured self.aliases / self.flags when omitted.
    """
    self.clear()
    if argv is None:
        argv = self.argv
    if aliases is None:
        aliases = self.aliases
    if flags is None:
        flags = self.flags
    # Build the argparse parser, run it, then translate the namespace
    # into self.config.
    self._create_parser(aliases, flags)
    self._parse_args(argv)
    self._convert_to_config()
    return self.config
|
python
|
{
"resource": ""
}
|
q279677
|
ArgParseConfigLoader._parse_args
|
test
|
def _parse_args(self, args):
    """self.parser->self.parsed_data

    Unrecognized arguments land in self.extra_args (parse_known_args).
    """
    # decode sys.argv to support unicode command-line options
    enc = DEFAULT_ENCODING
    uargs = [py3compat.cast_unicode(a, enc) for a in args]
    self.parsed_data, self.extra_args = self.parser.parse_known_args(uargs)
|
python
|
{
"resource": ""
}
|
q279678
|
KVArgParseConfigLoader._convert_to_config
|
test
|
def _convert_to_config(self):
    """self.parsed_data->self.config, parse unrecognized extra args via KVLoader."""
    # remove subconfigs list from namespace before transforming the Namespace
    if '_flags' in self.parsed_data:
        subcs = self.parsed_data._flags
        del self.parsed_data._flags
    else:
        subcs = []

    for k, v in vars(self.parsed_data).iteritems():
        if v is None:
            # it was a flag that shares the name of an alias
            subcs.append(self.alias_flags[k])
        else:
            # eval the KV assignment
            self._exec_config_str(k, v)

    # Apply accumulated flag configs after all KV assignments.
    for subc in subcs:
        self._load_flag(subc)

    if self.extra_args:
        # Anything argparse didn't recognize gets a second chance as
        # key=value pairs; leftovers become our extra_args.
        sub_parser = KeyValueConfigLoader()
        sub_parser.load_config(self.extra_args)
        self.config._merge(sub_parser.config)
        self.extra_args = sub_parser.extra_args
|
python
|
{
"resource": ""
}
|
q279679
|
find_module
|
test
|
def find_module(name, path=None):
    """imp.find_module variant that only returns the path of a module.

    The `imp.find_module` returns a filehandle that we are not interested in.
    Also we ignore any bytecode files that `imp.find_module` finds.

    Parameters
    ----------
    name : str
        name of module to locate
    path : list of str
        list of paths to search for `name`. If path=None then search sys.path

    Returns
    -------
    filename : str
        Return full path of module or None if module is missing or does not have
        .py or .pyw extension
    """
    if name is None:
        return None
    try:
        file, filename, _ = imp.find_module(name, path)
    except ImportError:
        return None
    if file is None:
        # Package (directory): return its path as-is.
        return filename
    else:
        file.close()
    # BUG FIX: the original tested `in [".py", "pyc"]` — "pyc" lacks the
    # leading dot os.path.splitext produces, so it could never match, and
    # the documented ".pyw" case was wrongly rejected.  Accept only the
    # source extensions the docstring promises.
    if os.path.splitext(filename)[1] in (".py", ".pyw"):
        return filename
    else:
        return None
|
python
|
{
"resource": ""
}
|
q279680
|
BaseLauncher.on_stop
|
test
|
def on_stop(self, f):
    """Register a callback to be called with this Launcher's stop_data
    when the process actually finishes.

    If the process has already stopped, `f` is invoked immediately and
    its result returned.
    """
    # Already stopped: fire right away instead of queueing.
    if self.state == 'after':
        return f(self.stop_data)
    self.stop_callbacks.append(f)
|
python
|
{
"resource": ""
}
|
q279681
|
BaseLauncher.notify_start
|
test
|
def notify_start(self, data):
    """Trigger startup actions.

    Logs the process startup, stores `data` as start_data, and flips the
    state to 'running'.  Returns `data` unchanged so it can be used as a
    pass-through callback.
    """
    self.log.debug('Process %r started: %r', self.args[0], data)
    self.start_data = data
    self.state = 'running'
    return data
|
python
|
{
"resource": ""
}
|
q279682
|
BaseLauncher.notify_stop
|
test
|
def notify_stop(self, data):
    """Trigger process stop actions.

    Logs the stop, stores `data` as stop_data, sets state to 'after', and
    fires every callback registered via on_stop (most recent first).
    Returns `data` unchanged.
    """
    self.log.debug('Process %r stopped: %r', self.args[0], data)
    self.stop_data = data
    self.state = 'after'
    # Drain callbacks LIFO, emptying the list as we go (matches the
    # original pop()-based loop).
    while self.stop_callbacks:
        callback = self.stop_callbacks.pop()
        callback(data)
    return data
|
python
|
{
"resource": ""
}
|
q279683
|
LocalProcessLauncher.interrupt_then_kill
|
test
|
def interrupt_then_kill(self, delay=2.0):
    """Send INT, wait a delay and then send KILL.

    :param delay: seconds to wait between SIGINT and SIGKILL.
    """
    try:
        self.signal(SIGINT)
    except Exception:
        # Best-effort: the process may already be gone.
        self.log.debug("interrupt failed")
        pass
    # Schedule the SIGKILL on the ioloop (delay is in milliseconds there).
    self.killer  = ioloop.DelayedCallback(lambda : self.signal(SIGKILL), delay*1000, self.loop)
    self.killer.start()
|
python
|
{
"resource": ""
}
|
q279684
|
MPILauncher.find_args
|
test
|
def find_args(self):
    """Assemble the full mpiexec command line from the configured fields:
    mpi_cmd, -n <n>, mpi_args, program, program_args."""
    cmd = list(self.mpi_cmd)
    cmd += ['-n', str(self.n)]
    cmd += self.mpi_args
    cmd += self.program
    cmd += self.program_args
    return cmd
|
python
|
{
"resource": ""
}
|
q279685
|
MPILauncher.start
|
test
|
def start(self, n):
    """Start n instances of the program using mpiexec.

    Stores n so find_args() can build the '-n' argument, then delegates
    to the base launcher's start().
    """
    self.n = n
    return super(MPILauncher, self).start()
|
python
|
{
"resource": ""
}
|
q279686
|
SSHLauncher._send_file
|
test
|
def _send_file(self, local, remote):
    """Send a single file to the remote host via scp.

    Waits up to ~10 seconds for the local file to appear before copying.
    """
    remote = "%s:%s" % (self.location, remote)
    # Poll for the local file; it may still be in the process of being
    # written by another component.
    for i in range(10):
        if not os.path.exists(local):
            self.log.debug("waiting for %s" % local)
            time.sleep(1)
        else:
            break
    # NOTE(review): after 10 misses the copy is attempted anyway and scp
    # will fail — presumably acceptable best-effort behavior.
    self.log.info("sending %s to %s", local, remote)
    check_output(self.scp_cmd + [local, remote])
|
python
|
{
"resource": ""
}
|
q279687
|
SSHLauncher._fetch_file
|
test
|
def _fetch_file(self, remote, local):
    """Fetch a single file from the remote host via scp.

    Polls the remote host (up to ~10s) for the file's existence before
    copying it down.
    """
    full_remote = "%s:%s" % (self.location, remote)
    self.log.info("fetching %s from %s", local, full_remote)
    for i in range(10):
        # wait up to 10s for remote file to exist
        # 'test -e' over ssh echoes yes/no depending on existence.
        check = check_output(self.ssh_cmd + self.ssh_args + \
            [self.location, 'test -e', remote, "&& echo 'yes' || echo 'no'"])
        check = check.strip()
        if check == 'no':
            time.sleep(1)
        elif check == 'yes':
            break
    check_output(self.scp_cmd + [full_remote, local])
|
python
|
{
"resource": ""
}
|
q279688
|
SSHEngineSetLauncher.engine_count
|
test
|
def engine_count(self):
    """Determine the total engine count from the `engines` dict.

    Values are either an int (count) or a (count, args) tuple/list.
    """
    count = 0
    for n in self.engines.itervalues():
        if isinstance(n, (tuple,list)):
            # unpack (count, per-host args); only the count matters here
            n,args = n
        count += n
    return count
|
python
|
{
"resource": ""
}
|
q279689
|
SSHEngineSetLauncher.start
|
test
|
def start(self, n):
    """Start engines by profile or profile_dir.

    `n` is ignored, and the `engines` config property is used instead.
    `engines` maps 'user@host' (user optional) to either an int count or
    a (count, args) pair.  Returns the list of per-engine start results.
    """
    dlist = []
    for host, n in self.engines.iteritems():
        if isinstance(n, (tuple, list)):
            n, args = n
        else:
            # No per-host args: copy the defaults so hosts don't share state.
            args = copy.deepcopy(self.engine_args)

        if '@' in host:
            user,host = host.split('@',1)
        else:
            user=None
        for i in range(n):
            if i > 0:
                # Stagger engine launches on the same host.
                time.sleep(self.delay)
            el = self.launcher_class(work_dir=self.work_dir, config=self.config, log=self.log,
                                    profile_dir=self.profile_dir, cluster_id=self.cluster_id,
            )
            if i > 0:
                # only send files for the first engine on each host
                el.to_send = []

            # Copy the engine args over to each engine launcher.
            el.engine_cmd = self.engine_cmd
            el.engine_args = args
            el.on_stop(self._notice_engine_stopped)
            d = el.start(user=user, hostname=host)
            self.launchers[ "%s/%i" % (host,i) ] = el
            dlist.append(d)
    self.notify_start(dlist)
    return dlist
|
python
|
{
"resource": ""
}
|
q279690
|
WindowsHPCLauncher.start
|
test
|
def start(self, n):
    """Start n copies of the process using the Win HPC job scheduler.

    Writes the job XML, submits it with the `job` command-line tool, and
    returns the parsed job id.
    """
    self.write_job_file(n)
    args = [
        'submit',
        '/jobfile:%s' % self.job_file,
        '/scheduler:%s' % self.scheduler
    ]
    self.log.debug("Starting Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))

    output = check_output([self.job_cmd]+args,
        env=os.environ,
        cwd=self.work_dir,
        stderr=STDOUT
    )
    # The scheduler prints the job id; extract it from the output.
    job_id = self.parse_job_id(output)
    self.notify_start(job_id)
    return job_id
|
python
|
{
"resource": ""
}
|
q279691
|
BatchSystemLauncher._context_default
|
test
|
def _context_default(self):
"""load the default context with the default values for the basic keys
because the _trait_changed methods only load the context if they
are set to something other than the default value.
"""
return dict(n=1, queue=u'', profile_dir=u'', cluster_id=u'')
|
python
|
{
"resource": ""
}
|
q279692
|
BatchSystemLauncher.parse_job_id
|
test
|
def parse_job_id(self, output):
    """Extract the job id from the submit command's output.

    Matches self.job_id_regexp against `output`, records the id on the
    instance, logs it, and returns it.

    :raises LauncherError: if no job id can be found in the output.
    """
    match = self.job_id_regexp.search(output)
    if match is None:
        raise LauncherError("Job id couldn't be determined: %s" % output)
    job_id = match.group()
    self.job_id = job_id
    self.log.info('Job submitted with job id: %r', job_id)
    return job_id
|
python
|
{
"resource": ""
}
|
q279693
|
BatchSystemLauncher.write_batch_script
|
test
|
def write_batch_script(self, n):
    """Instantiate and write the batch script to the work_dir.

    Template resolution order: batch_template, then batch_template_file,
    then default_template.  Job-array and queue directives are injected
    only when the user did not provide a template containing them.
    """
    self.n = n
    # first priority is batch_template if set
    if self.batch_template_file and not self.batch_template:
        # second priority is batch_template_file
        with open(self.batch_template_file) as f:
            self.batch_template = f.read()
    if not self.batch_template:
        # third (last) priority is default_template
        self.batch_template = self.default_template

        # add jobarray or queue lines to user-specified template
        # note that this is *only* when user did not specify a template.
        if not self.job_array_regexp.search(self.batch_template):
            self.log.debug("adding job array settings to batch script")
            # Insert directly after the shebang/first line.
            firstline, rest = self.batch_template.split('\n',1)
            self.batch_template = u'\n'.join([firstline, self.job_array_template, rest])

        if self.queue and not self.queue_regexp.search(self.batch_template):
            self.log.debug("adding PBS queue settings to batch script")
            firstline, rest = self.batch_template.split('\n',1)
            self.batch_template = u'\n'.join([firstline, self.queue_template, rest])

    # Fill in {n}, {queue}, {profile_dir}, ... from the context.
    script_as_string = self.formatter.format(self.batch_template, **self.context)
    self.log.debug('Writing batch script: %s', self.batch_file)

    with open(self.batch_file, 'w') as f:
        f.write(script_as_string)
    # Make the script executable by its owner.
    os.chmod(self.batch_file, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
python
|
{
"resource": ""
}
|
q279694
|
BatchSystemLauncher.start
|
test
|
def start(self, n):
    """Start n copies of the process using a batch system.

    Writes the batch script, runs the submit command, and returns the
    parsed job id.
    """
    self.log.debug("Starting %s: %r", self.__class__.__name__, self.args)
    # Here we save profile_dir in the context so they
    # can be used in the batch script template as {profile_dir}
    self.write_batch_script(n)
    output = check_output(self.args, env=os.environ)

    job_id = self.parse_job_id(output)
    self.notify_start(job_id)
    return job_id
|
python
|
{
"resource": ""
}
|
q279695
|
RichIPythonWidget._context_menu_make
|
test
|
def _context_menu_make(self, pos):
    """ Reimplemented to return a custom context menu for images.

    Falls back to the base class menu when the click is not over an image.
    """
    format = self._control.cursorForPosition(pos).charFormat()
    # Non-empty ImageName means the cursor is over an embedded image.
    name = format.stringProperty(QtGui.QTextFormat.ImageName)
    if name:
        menu = QtGui.QMenu()

        menu.addAction('Copy Image', lambda: self._copy_image(name))
        menu.addAction('Save Image As...', lambda: self._save_image(name))
        menu.addSeparator()

        # Offer SVG entries only when the image was rendered from SVG.
        svg = self._name_to_svg_map.get(name, None)
        if svg is not None:
            menu.addSeparator()
            menu.addAction('Copy SVG', lambda: svg_to_clipboard(svg))
            menu.addAction('Save SVG As...',
                           lambda: save_svg(svg, self._control))
    else:
        menu = super(RichIPythonWidget, self)._context_menu_make(pos)
    return menu
|
python
|
{
"resource": ""
}
|
q279696
|
RichIPythonWidget._append_jpg
|
test
|
def _append_jpg(self, jpg, before_prompt=False):
    """ Append raw JPG data to the widget.

    Delegates to _append_custom with the JPG insertion routine.
    """
    self._append_custom(self._insert_jpg, jpg, before_prompt)
|
python
|
{
"resource": ""
}
|
q279697
|
RichIPythonWidget._append_png
|
test
|
def _append_png(self, png, before_prompt=False):
    """ Append raw PNG data to the widget.

    Delegates to _append_custom with the PNG insertion routine.
    """
    self._append_custom(self._insert_png, png, before_prompt)
|
python
|
{
"resource": ""
}
|
q279698
|
RichIPythonWidget._append_svg
|
test
|
def _append_svg(self, svg, before_prompt=False):
    """ Append raw SVG data to the widget.

    Delegates to _append_custom with the SVG insertion routine.
    """
    self._append_custom(self._insert_svg, svg, before_prompt)
|
python
|
{
"resource": ""
}
|
q279699
|
RichIPythonWidget._add_image
|
test
|
def _add_image(self, image):
    """ Adds the specified QImage to the document and returns a
    QTextImageFormat that references it.
    """
    document = self._control.document()
    # The cache key doubles as a unique resource name for the document.
    name = str(image.cacheKey())
    document.addResource(QtGui.QTextDocument.ImageResource,
                         QtCore.QUrl(name), image)
    format = QtGui.QTextImageFormat()
    format.setName(name)
    return format
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.