_id (stringlengths 2–7) | title (stringlengths 1–88) | partition (stringclasses, 3 values) | text (stringlengths 75–19.8k) | language (stringclasses, 1 value) | meta_information (dict)
---|---|---|---|---|---|
q279200
|
Index.search
|
test
|
def search(self, query, verbose=0):
"""Searches files satisfying query
It first decompose the query in ngrams, then score each document containing
at least one ngram with the number. The ten document having the most ngrams
in common with the query are selected.
Args:
query (str): what to search;
results_number (int): number of results to return (default: 10)
"""
if verbose > 0:
print("searching " + query)
query = query.lower()
qgram = ng(query, self.slb)
qocument = set()
for q in qgram:
if q in self.ngrams.keys():
for i in self.ngrams[q]:
qocument.add(i)
self.qocument = qocument
results = {}
for i in qocument:
for j in self.D[i].keys():
if not j in results.keys():
results[j] = 0
results[j] = results[j] + self.D[i][j]
sorted_results = sorted(results.items(), key=operator.itemgetter(1), reverse=True)
return [self.elements[f[0]] for f in sorted_results]
|
python
|
{
"resource": ""
}
|
q279201
|
partition
|
test
|
def partition(condition, collection) -> Tuple[List, List]:
"""Partitions a list into two based on a condition."""
succeed, fail = [], []
for x in collection:
if condition(x):
succeed.append(x)
else:
fail.append(x)
return succeed, fail
|
python
|
{
"resource": ""
}
|
q279202
|
run
|
test
|
def run(locations, random, bikes, crime, nearby, json, update_bikes, api_server, cross_origin, host, port, db_path,
verbose):
"""
Runs the program. Takes a list of postcodes or coordinates and
returns various information about them. If using the cli, make
sure to update the bikes database with the -u command.
Locations can be either a specific postcode, or a pair of coordinates.
Coordinates are passed in the form "55.948824,-3.196425".
:param locations: The list of postcodes or coordinates to search.
:param random: The number of random postcodes to include.
:param bikes: Includes a list of stolen bikes in that area.
:param crime: Includes a list of committed crimes in that area.
:param nearby: Includes a list of wikipedia articles in that area.
:param json: Returns the data in json format.
:param update_bikes: Whether to force update bikes.
:param api_server: If given, the program will instead run a rest api.
:param cross_origin: Whether to allow cross-origin requests on the rest api.
:param host: The host to run the rest api on.
:param port: Defines the port to run the rest api on.
:param db_path: The path to the sqlite db to use.
:param verbose: The verbosity.
"""
log_levels = [logging.WARNING, logging.INFO, logging.DEBUG]
logging.basicConfig(level=log_levels[min(verbose, 2)])
initialize_database(db_path)
loop = get_event_loop()
if update_bikes:
logger.info("Force updating bikes.")
loop.run_until_complete(util.update_bikes())
if api_server:
if cross_origin:
enable_cross_origin(app)
try:
web.run_app(app, host=host, port=port)
except CancelledError as e:
if e.__context__ is not None:
click.echo(Fore.RED + (
f"Could not bind to address {host}:{port}" if e.__context__.errno == 48 else e.__context__))
exit(1)
else:
click.echo("Goodbye!")
elif len(locations) > 0 or random > 0:
exit(loop.run_until_complete(cli(locations, random, bikes=bikes, crime=crime, nearby=nearby, as_json=json)))
else:
click.echo(Fore.RED + "Either include a post code, or the --api-server flag.")
|
python
|
{
"resource": ""
}
|
q279203
|
bidi
|
test
|
def bidi(request):
"""Adds to the context BiDi related variables
LANGUAGE_DIRECTION -- Direction of current language ('ltr' or 'rtl')
LANGUAGE_START -- Start of language layout ('right' for rtl, 'left'
for 'ltr')
LANGUAGE_END -- End of language layout ('left' for rtl, 'right'
for 'ltr')
LANGUAGE_MARKER -- Language marker entity ('&rlm;' for rtl, '&lrm;'
for ltr)
"""
from django.utils import translation
from django.utils.safestring import mark_safe
if translation.get_language_bidi():
extra_context = {
'LANGUAGE_DIRECTION':'rtl',
'LANGUAGE_START':'right',
'LANGUAGE_END':'left',
'LANGUAGE_MARKER': mark_safe('&rlm;'),
}
else:
extra_context = {
'LANGUAGE_DIRECTION':'ltr',
'LANGUAGE_START':'left',
'LANGUAGE_END':'right',
'LANGUAGE_MARKER': mark_safe('&lrm;')
}
return extra_context
|
python
|
{
"resource": ""
}
|
q279204
|
_find_link
|
test
|
def _find_link(inst1, inst2, rel_id, phrase):
'''
Find links that correspond to the given arguments.
'''
metaclass1 = get_metaclass(inst1)
metaclass2 = get_metaclass(inst2)
if isinstance(rel_id, int):
rel_id = 'R%d' % rel_id
for ass in metaclass1.metamodel.associations:
if ass.rel_id != rel_id:
continue
if (ass.source_link.from_metaclass.kind == metaclass1.kind and
ass.source_link.to_metaclass.kind == metaclass2.kind and
ass.source_link.phrase == phrase):
return inst1, inst2, ass
if (ass.target_link.from_metaclass.kind == metaclass1.kind and
ass.target_link.to_metaclass.kind == metaclass2.kind and
ass.target_link.phrase == phrase):
return inst2, inst1, ass
raise UnknownLinkException(metaclass1.kind, metaclass2.kind, rel_id, phrase)
|
python
|
{
"resource": ""
}
|
q279205
|
Association.formalize
|
test
|
def formalize(self):
'''
Formalize the association and expose referential attributes
on instances.
'''
source_class = self.source_link.to_metaclass
target_class = self.target_link.to_metaclass
source_class.referential_attributes |= set(self.source_keys)
target_class.identifying_attributes |= set(self.target_keys)
def fget(inst, ref_name, alt_prop):
other_inst = self.target_link.navigate_one(inst)
if other_inst is None and alt_prop:
return alt_prop.fget(inst)
return getattr(other_inst, ref_name, None)
def fset(inst, value, name, ref_name, alt_prop):
kind = get_metaclass(inst).kind
raise MetaException('%s.%s is a referential attribute '\
'and cannot be assigned directly'% (kind, name))
#other_inst = self.target_link.navigate_one(inst)
#if other_inst is None and alt_prop:
# return alt_prop.fset(inst, value)
#
#elif other_inst:
# return setattr(other_inst, ref_name, value)
for ref_key, primary_key in zip(self.source_keys, self.target_keys):
prop = getattr(source_class.clazz, ref_key, None)
prop = property(partial(fget, ref_name=primary_key, alt_prop=prop),
partial(fset, name=ref_key, ref_name=primary_key, alt_prop=prop))
setattr(source_class.clazz, ref_key, prop)
|
python
|
{
"resource": ""
}
|
q279206
|
Link.compute_lookup_key
|
test
|
def compute_lookup_key(self, from_instance):
'''
Compute the lookup key for an instance, i.e. a foreign key that
can be used to identify an instance at the end of the link.
'''
kwargs = dict()
for attr, other_attr in self.key_map.items():
if _is_null(from_instance, attr):
return None
if attr in from_instance.__dict__:
kwargs[other_attr] = from_instance.__dict__[attr]
else:
kwargs[other_attr] = getattr(from_instance, attr)
return frozenset(tuple(kwargs.items()))
|
python
|
{
"resource": ""
}
|
q279207
|
Link.compute_index_key
|
test
|
def compute_index_key(self, to_instance):
'''
Compute the index key that can be used to identify an instance
on the link.
'''
kwargs = dict()
for attr in self.key_map.values():
if _is_null(to_instance, attr):
return None
if attr in to_instance.__dict__:
kwargs[attr] = to_instance.__dict__[attr]
else:
kwargs[attr] = getattr(to_instance, attr)
return frozenset(tuple(kwargs.items()))
|
python
|
{
"resource": ""
}
|
q279208
|
MetaClass.attribute_type
|
test
|
def attribute_type(self, attribute_name):
'''
Obtain the type of an attribute.
'''
attribute_name = attribute_name.upper()
for name, ty in self.attributes:
if name.upper() == attribute_name:
return ty
|
python
|
{
"resource": ""
}
|
q279209
|
MetaClass.new
|
test
|
def new(self, *args, **kwargs):
'''
Create and return a new instance.
'''
inst = self.clazz()
self.storage.append(inst)
# set all attributes with an initial default value
referential_attributes = dict()
for name, ty in self.attributes:
if name not in self.referential_attributes:
value = self.default_value(ty)
setattr(inst, name, value)
# set all positional arguments
for attr, value in zip(self.attributes, args):
name, ty = attr
if name not in self.referential_attributes:
setattr(inst, name, value)
else:
referential_attributes[name] = value
# set all named arguments
for name, value in kwargs.items():
if name not in self.referential_attributes:
setattr(inst, name, value)
else:
referential_attributes[name] = value
if not referential_attributes:
return inst
# batch relate referential attributes
for link in self.links.values():
if set(link.key_map.values()) - set(referential_attributes.keys()):
continue
kwargs = dict()
for key, value in link.key_map.items():
kwargs[key] = referential_attributes[value]
if not kwargs:
continue
for other_inst in link.to_metaclass.query(kwargs):
relate(other_inst, inst, link.rel_id, link.phrase)
for name, value in referential_attributes.items():
if getattr(inst, name) != value:
logger.warning('unable to assign %s to %s', name, inst)
return inst
|
python
|
{
"resource": ""
}
|
q279210
|
MetaModel.instances
|
test
|
def instances(self):
'''
Obtain a sequence of all instances in the metamodel.
'''
for metaclass in self.metaclasses.values():
for inst in metaclass.storage:
yield inst
|
python
|
{
"resource": ""
}
|
q279211
|
MetaModel.define_class
|
test
|
def define_class(self, kind, attributes, doc=''):
'''
Define a new class in the metamodel, and return its metaclass.
'''
ukind = kind.upper()
if ukind in self.metaclasses:
raise MetaModelException('A class with the name %s is already defined' % kind)
metaclass = MetaClass(kind, self)
for name, ty in attributes:
metaclass.append_attribute(name, ty)
self.metaclasses[ukind] = metaclass
return metaclass
|
python
|
{
"resource": ""
}
|
q279212
|
send
|
test
|
def send(socket, header, payload, topics=(), flags=0):
"""Sends header, payload, and topics through a ZeroMQ socket.
:param socket: a zmq socket.
:param header: a list of byte strings which represent a message header.
:param payload: the serialized byte string of a payload.
:param topics: a chain of topics.
:param flags: zmq flags to send messages.
"""
msgs = []
msgs.extend(topics)
msgs.append(SEAM)
msgs.extend(header)
msgs.append(payload)
return eintr_retry_zmq(socket.send_multipart, msgs, flags)
|
python
|
{
"resource": ""
}
|
q279213
|
recv
|
test
|
def recv(socket, flags=0, capture=(lambda msgs: None)):
"""Receives header, payload, and topics through a ZeroMQ socket.
:param socket: a zmq socket.
:param flags: zmq flags to receive messages.
:param capture: a function to capture received messages.
"""
msgs = eintr_retry_zmq(socket.recv_multipart, flags)
capture(msgs)
return parse(msgs)
|
python
|
{
"resource": ""
}
|
q279214
|
dead_code
|
test
|
def dead_code():
"""
This also finds code you are working on today!
"""
with safe_cd(SRC):
if IS_TRAVIS:
command = "{0} vulture {1}".format(PYTHON, PROJECT_NAME).strip().split()
else:
command = "{0} vulture {1}".format(PIPENV, PROJECT_NAME).strip().split()
output_file_name = "dead_code.txt"
with open(output_file_name, "w") as outfile:
env = config_pythonpath()
subprocess.call(command, stdout=outfile, env=env)
cutoff = 20
num_lines = sum(1 for line in open(output_file_name) if line)
if num_lines > cutoff:
print("Too many lines of dead code : {0}, max {1}".format(num_lines, cutoff))
exit(-1)
|
python
|
{
"resource": ""
}
|
q279215
|
parse_emails
|
test
|
def parse_emails(values):
'''
Take a string or list of strings and try to extract all the emails
'''
emails = []
if isinstance(values, str):
values = [values]
# now we know we have a list of strings
for value in values:
matches = re_emails.findall(value)
emails.extend([match[2] for match in matches])
return emails
|
python
|
{
"resource": ""
}
|
q279216
|
rpc
|
test
|
def rpc(f=None, **kwargs):
"""Marks a method as RPC."""
if f is not None:
if isinstance(f, six.string_types):
if 'name' in kwargs:
raise ValueError('name option duplicated')
kwargs['name'] = f
else:
return rpc(**kwargs)(f)
return functools.partial(_rpc, **kwargs)
|
python
|
{
"resource": ""
}
|
q279217
|
rpc_spec_table
|
test
|
def rpc_spec_table(app):
"""Collects methods which are speced as RPC."""
table = {}
for attr, value in inspect.getmembers(app):
rpc_spec = get_rpc_spec(value, default=None)
if rpc_spec is None:
continue
table[rpc_spec.name] = (value, rpc_spec)
return table
|
python
|
{
"resource": ""
}
|
q279218
|
normalize_postcode_middleware
|
test
|
async def normalize_postcode_middleware(request, handler):
"""
If there is a postcode in the url it validates and normalizes it.
"""
postcode: Optional[str] = request.match_info.get('postcode', None)
if postcode is None or postcode == "random":
return await handler(request)
elif not is_uk_postcode(postcode):
raise web.HTTPNotFound(text="Invalid Postcode")
postcode_processed = postcode.upper().replace(" ", "")
if postcode_processed == postcode:
return await handler(request)
else:
url_name = request.match_info.route.name
url = request.app.router[url_name]
params = dict(request.match_info)
params['postcode'] = postcode_processed
raise web.HTTPMovedPermanently(str(url.url_for(**params)))
|
python
|
{
"resource": ""
}
|
q279219
|
IdGenerator.next
|
test
|
def next(self):
'''
Progress to the next identifier, and return the current one.
'''
val = self._current
self._current = self.readfunc()
return val
|
python
|
{
"resource": ""
}
|
q279220
|
MyWalker.accept_S_SYS
|
test
|
def accept_S_SYS(self, inst):
'''
A System Model contains top-level packages
'''
for child in many(inst).EP_PKG[1401]():
self.accept(child)
|
python
|
{
"resource": ""
}
|
q279221
|
MyWalker.accept_C_C
|
test
|
def accept_C_C(self, inst):
'''
A Component contains packageable elements
'''
for child in many(inst).PE_PE[8003]():
self.accept(child)
|
python
|
{
"resource": ""
}
|
q279222
|
MyWalker.accept_EP_PKG
|
test
|
def accept_EP_PKG(self, inst):
'''
A Package contains packageable elements
'''
for child in many(inst).PE_PE[8000]():
self.accept(child)
|
python
|
{
"resource": ""
}
|
q279223
|
LightSensor.get_brightness
|
test
|
def get_brightness(self):
"""
Return the average brightness of the image.
"""
# Only download the image if it has changed
if not self.connection.has_changed():
return self.image_brightness
image_path = self.connection.download_image()
converted_image = Image.open(image_path).convert('L')
statistics = ImageStat.Stat(converted_image)
self.image_brightness = statistics.mean[0]
return self.image_brightness
|
python
|
{
"resource": ""
}
|
q279224
|
switch.match
|
test
|
def match(self, *args):
"""
Indicate whether or not to enter a case suite.
usage:
``` py
for case in switch(value):
if case('A'):
pass
elif case(1, 3):
pass # for multi-match.
else:
pass # for default.
```
"""
if not args:
raise SyntaxError('cannot case empty pattern.')
return self.match_args(self._value, args)
|
python
|
{
"resource": ""
}
|
q279225
|
BracketMatcher._find_match
|
test
|
def _find_match(self, position):
""" Given a valid position in the text document, try to find the
position of the matching bracket. Returns -1 if unsuccessful.
"""
# Decide what character to search for and what direction to search in.
document = self._text_edit.document()
start_char = document.characterAt(position)
search_char = self._opening_map.get(start_char)
if search_char:
increment = 1
else:
search_char = self._closing_map.get(start_char)
if search_char:
increment = -1
else:
return -1
# Search for the character.
char = start_char
depth = 0
while position >= 0 and position < document.characterCount():
if char == start_char:
depth += 1
elif char == search_char:
depth -= 1
if depth == 0:
break
position += increment
char = document.characterAt(position)
else:
position = -1
return position
|
python
|
{
"resource": ""
}
|
q279226
|
BracketMatcher._selection_for_character
|
test
|
def _selection_for_character(self, position):
""" Convenience method for selecting a character.
"""
selection = QtGui.QTextEdit.ExtraSelection()
cursor = self._text_edit.textCursor()
cursor.setPosition(position)
cursor.movePosition(QtGui.QTextCursor.NextCharacter,
QtGui.QTextCursor.KeepAnchor)
selection.cursor = cursor
selection.format = self.format
return selection
|
python
|
{
"resource": ""
}
|
q279227
|
BracketMatcher._cursor_position_changed
|
test
|
def _cursor_position_changed(self):
""" Updates the document formatting based on the new cursor position.
"""
# Clear out the old formatting.
self._text_edit.setExtraSelections([])
# Attempt to match a bracket for the new cursor position.
cursor = self._text_edit.textCursor()
if not cursor.hasSelection():
position = cursor.position() - 1
match_position = self._find_match(position)
if match_position != -1:
extra_selections = [ self._selection_for_character(pos)
for pos in (position, match_position) ]
self._text_edit.setExtraSelections(extra_selections)
|
python
|
{
"resource": ""
}
|
q279228
|
ContextSuite._exc_info
|
test
|
def _exc_info(self):
"""Bottleneck to fix up IronPython string exceptions
"""
e = self.exc_info()
if sys.platform == 'cli':
if isinstance(e[0], StringException):
# IronPython throws these StringExceptions, but
# traceback checks type(etype) == str. Make a real
# string here.
e = (str(e[0]), e[1], e[2])
return e
|
python
|
{
"resource": ""
}
|
q279229
|
create_inputhook_qt4
|
test
|
def create_inputhook_qt4(mgr, app=None):
"""Create an input hook for running the Qt4 application event loop.
Parameters
----------
mgr : an InputHookManager
app : Qt Application, optional.
Running application to use. If not given, we probe Qt for an
existing application object, and create a new one if none is found.
Returns
-------
A pair consisting of a Qt Application (either the one given or the
one found or created) and an inputhook.
Notes
-----
We use a custom input hook instead of PyQt4's default one, as it
interacts better with the readline packages (issue #481).
The inputhook function works in tandem with a 'pre_prompt_hook'
which automatically restores the hook as an inputhook in case the
latter has been temporarily disabled after having intercepted a
KeyboardInterrupt.
"""
if app is None:
app = QtCore.QCoreApplication.instance()
if app is None:
app = QtGui.QApplication([" "])
# Re-use previously created inputhook if any
ip = InteractiveShell.instance()
if hasattr(ip, '_inputhook_qt4'):
return app, ip._inputhook_qt4
# Otherwise create the inputhook_qt4/preprompthook_qt4 pair of
# hooks (they both share the got_kbdint flag)
got_kbdint = [False]
def inputhook_qt4():
"""PyOS_InputHook python hook for Qt4.
Process pending Qt events and if there's no pending keyboard
input, spend a short slice of time (50ms) running the Qt event
loop.
As a Python ctypes callback can't raise an exception, we catch
the KeyboardInterrupt and temporarily deactivate the hook,
which will let a *second* CTRL+C be processed normally and go
back to a clean prompt line.
"""
try:
allow_CTRL_C()
app = QtCore.QCoreApplication.instance()
if not app: # shouldn't happen, but safer if it happens anyway...
return 0
app.processEvents(QtCore.QEventLoop.AllEvents, 300)
if not stdin_ready():
timer = QtCore.QTimer()
timer.timeout.connect(app.quit)
while not stdin_ready():
timer.start(50)
app.exec_()
timer.stop()
except KeyboardInterrupt:
ignore_CTRL_C()
got_kbdint[0] = True
print("\nKeyboardInterrupt - Ctrl-C again for new prompt")
mgr.clear_inputhook()
except: # NO exceptions are allowed to escape from a ctypes callback
ignore_CTRL_C()
from traceback import print_exc
print_exc()
print("Got exception from inputhook_qt4, unregistering.")
mgr.clear_inputhook()
finally:
allow_CTRL_C()
return 0
def preprompthook_qt4(ishell):
"""'pre_prompt_hook' used to restore the Qt4 input hook
(in case the latter was temporarily deactivated after a
CTRL+C)
"""
if got_kbdint[0]:
mgr.set_inputhook(inputhook_qt4)
got_kbdint[0] = False
ip._inputhook_qt4 = inputhook_qt4
ip.set_hook('pre_prompt_hook', preprompthook_qt4)
return app, inputhook_qt4
|
python
|
{
"resource": ""
}
|
q279230
|
Mapper.get
|
test
|
def get(cls, name=__name__):
"""Return a Mapper instance with the given name.
If the name already exists, return its instance.
Does not work if a Mapper was created via its constructor.
Using `Mapper.get()`_ is the preferred way.
Args:
name (str, optional): Name for the newly created instance.
Defaults to `__name__`.
Returns:
Mapper: A mapper instance for the given name.
Raises:
TypeError: If an invalid name was given.
"""
if not isinstance(name, str):
raise TypeError('A mapper name must be a string')
if name not in cls.__instances:
cls.__instances[name] = cls()
cls.__instances[name]._name = name
return cls.__instances[name]
|
python
|
{
"resource": ""
}
|
q279231
|
Mapper.url
|
test
|
def url(self, pattern, method=None, type_cast=None):
"""Decorator for registering a path pattern.
Args:
pattern (str): Regex pattern to match a certain path
method (str, optional): Usually used to define one of GET, POST,
PUT, DELETE. You may use whatever fits your situation though.
Defaults to None.
type_cast (dict, optional): Mapping between the param name and
one of `int`, `float` or `bool`. The value reflected by the
provided param name will then be cast to the given type.
Defaults to None.
"""
if not type_cast:
type_cast = {}
def decorator(function):
self.add(pattern, function, method, type_cast)
return function
return decorator
|
python
|
{
"resource": ""
}
|
q279232
|
Mapper.s_url
|
test
|
def s_url(self, path, method=None, type_cast=None):
"""Decorator for registering a simple path.
Args:
path (str): Path to be matched.
method (str, optional): Usually used to define one of GET, POST,
PUT, DELETE. You may use whatever fits your situation though.
Defaults to None.
type_cast (dict, optional): Mapping between the param name and
one of `int`, `float` or `bool`. The value reflected by the
provided param name will then be cast to the given type.
Defaults to None.
"""
if not type_cast:
type_cast = {}
def decorator(function):
self.s_add(path, function, method, type_cast)
return function
return decorator
|
python
|
{
"resource": ""
}
|
q279233
|
Mapper.add
|
test
|
def add(self, pattern, function, method=None, type_cast=None):
"""Function for registering a path pattern.
Args:
pattern (str): Regex pattern to match a certain path.
function (function): Function to associate with this path.
method (str, optional): Usually used to define one of GET, POST,
PUT, DELETE. You may use whatever fits your situation though.
Defaults to None.
type_cast (dict, optional): Mapping between the param name and
one of `int`, `float` or `bool`. The value reflected by the
provided param name will then be cast to the given type.
Defaults to None.
"""
if not type_cast:
type_cast = {}
with self._lock:
self._data_store.append({
'pattern': pattern,
'function': function,
'method': method,
'type_cast': type_cast,
})
|
python
|
{
"resource": ""
}
|
q279234
|
Mapper.s_add
|
test
|
def s_add(self, path, function, method=None, type_cast=None):
"""Function for registering a simple path.
Args:
path (str): Path to be matched.
function (function): Function to associate with this path.
method (str, optional): Usually used to define one of GET, POST,
PUT, DELETE. You may use whatever fits your situation though.
Defaults to None.
type_cast (dict, optional): Mapping between the param name and
one of `int`, `float` or `bool`. The value reflected by the
provided param name will then be cast to the given type.
Defaults to None.
"""
with self._lock:
try:
path = '^/{}'.format(path.lstrip('/'))
path = '{}/$'.format(path.rstrip('/'))
path = path.replace('<', '(?P<')
path = path.replace('>', '>[^/]*)')
self.add(path, function, method, type_cast)
except Exception:
pass
|
python
|
{
"resource": ""
}
|
q279235
|
Mapper.call
|
test
|
def call(self, url, method=None, args=None):
"""Calls the first function matching the urls pattern and method.
Args:
url (str): Url for which to call a matching function.
method (str, optional): The method used while registering a
function.
Defaults to None
args (dict, optional): Additional args to be passed to the
matching function.
Returns:
The functions return value or `None` if no function was called.
"""
if not args:
args = {}
if sys.version_info.major == 3:
data = urllib.parse.urlparse(url)
path = data.path.rstrip('/') + '/'
_args = dict(urllib.parse.parse_qs(data.query,
keep_blank_values=True))
elif sys.version_info.major == 2:
data = urlparse.urlparse(url)
path = data.path.rstrip('/') + '/'
_args = dict(urlparse.parse_qs(data.query,
keep_blank_values=True))
for elem in self._data_store:
pattern = elem['pattern']
function = elem['function']
_method = elem['method']
type_cast = elem['type_cast']
result = re.match(pattern, path)
# Found matching method
if result and _method == method:
_args = dict(_args, **result.groupdict())
# Unpack value lists (due to urllib.parse.parse_qs) in case
# there's only one value available
for key, val in _args.items():
if isinstance(_args[key], list) and len(_args[key]) == 1:
_args[key] = _args[key][0]
# Apply type-casting if necessary
for key, val in type_cast.items():
# Not within available _args, no type-cast required
if key not in _args:
continue
# Is None or empty, no type-cast required
if not _args[key]:
continue
# Try and cast the values
if isinstance(_args[key], list):
for i, _val in enumerate(_args[key]):
_args[key][i] = self._cast(_val, val)
else:
_args[key] = self._cast(_args[key], val)
required_args = self._get_function_args(function)
for key, val in args.items():
if key in required_args:
_args[key] = val
return function(**_args)
return None
|
python
|
{
"resource": ""
}
|
q279236
|
HistoryConsoleWidget.execute
|
test
|
def execute(self, source=None, hidden=False, interactive=False):
""" Reimplemented to the store history.
"""
if not hidden:
history = self.input_buffer if source is None else source
executed = super(HistoryConsoleWidget, self).execute(
source, hidden, interactive)
if executed and not hidden:
# Save the command unless it was an empty string or was identical
# to the previous command.
history = history.rstrip()
if history and (not self._history or self._history[-1] != history):
self._history.append(history)
# Emulate readline: reset all history edits.
self._history_edits = {}
# Move the history index to the most recent item.
self._history_index = len(self._history)
return executed
|
python
|
{
"resource": ""
}
|
q279237
|
HistoryConsoleWidget._up_pressed
|
test
|
def _up_pressed(self, shift_modifier):
""" Called when the up key is pressed. Returns whether to continue
processing the event.
"""
prompt_cursor = self._get_prompt_cursor()
if self._get_cursor().blockNumber() == prompt_cursor.blockNumber():
# Bail out if we're locked.
if self._history_locked() and not shift_modifier:
return False
# Set a search prefix based on the cursor position.
col = self._get_input_buffer_cursor_column()
input_buffer = self.input_buffer
if self._history_index == len(self._history) or \
(self._history_prefix and col != len(self._history_prefix)):
self._history_index = len(self._history)
self._history_prefix = input_buffer[:col]
# Perform the search.
self.history_previous(self._history_prefix,
as_prefix=not shift_modifier)
# Go to the first line of the prompt for seamless history scrolling.
# Emulate readline: keep the cursor position fixed for a prefix
# search.
cursor = self._get_prompt_cursor()
if self._history_prefix:
cursor.movePosition(QtGui.QTextCursor.Right,
n=len(self._history_prefix))
else:
cursor.movePosition(QtGui.QTextCursor.EndOfLine)
self._set_cursor(cursor)
return False
return True
|
python
|
{
"resource": ""
}
|
q279238
|
HistoryConsoleWidget._down_pressed
|
test
|
def _down_pressed(self, shift_modifier):
""" Called when the down key is pressed. Returns whether to continue
processing the event.
"""
end_cursor = self._get_end_cursor()
if self._get_cursor().blockNumber() == end_cursor.blockNumber():
# Bail out if we're locked.
if self._history_locked() and not shift_modifier:
return False
# Perform the search.
replaced = self.history_next(self._history_prefix,
as_prefix=not shift_modifier)
# Emulate readline: keep the cursor position fixed for a prefix
# search. (We don't need to move the cursor to the end of the buffer
# in the other case because this happens automatically when the
# input buffer is set.)
if self._history_prefix and replaced:
cursor = self._get_prompt_cursor()
cursor.movePosition(QtGui.QTextCursor.Right,
n=len(self._history_prefix))
self._set_cursor(cursor)
return False
return True
|
python
|
{
"resource": ""
}
|
q279239
|
HistoryConsoleWidget.history_previous
|
test
|
def history_previous(self, substring='', as_prefix=True):
""" If possible, set the input buffer to a previous history item.
Parameters:
-----------
substring : str, optional
If specified, search for an item with this substring.
as_prefix : bool, optional
If True, the substring must match at the beginning (default).
Returns:
--------
Whether the input buffer was changed.
"""
index = self._history_index
replace = False
while index > 0:
index -= 1
history = self._get_edited_history(index)
if (as_prefix and history.startswith(substring)) \
or (not as_prefix and substring in history):
replace = True
break
if replace:
self._store_edits()
self._history_index = index
self.input_buffer = history
return replace
|
python
|
{
"resource": ""
}
|
q279240
|
HistoryConsoleWidget.history_next
|
test
|
def history_next(self, substring='', as_prefix=True):
""" If possible, set the input buffer to a subsequent history item.
Parameters:
-----------
substring : str, optional
If specified, search for an item with this substring.
as_prefix : bool, optional
If True, the substring must match at the beginning (default).
Returns:
--------
Whether the input buffer was changed.
"""
index = self._history_index
replace = False
while self._history_index < len(self._history):
index += 1
history = self._get_edited_history(index)
if (as_prefix and history.startswith(substring)) \
or (not as_prefix and substring in history):
replace = True
break
if replace:
self._store_edits()
self._history_index = index
self.input_buffer = history
return replace
|
python
|
{
"resource": ""
}
|
q279241
|
HistoryConsoleWidget._handle_execute_reply
|
test
|
def _handle_execute_reply(self, msg):
""" Handles replies for code execution, here only session history length
"""
msg_id = msg['parent_header']['msg_id']
info = self._request_info['execute'].pop(msg_id,None)
if info and info.kind == 'save_magic' and not self._hidden:
content = msg['content']
status = content['status']
if status == 'ok':
self._max_session_history=(int(content['user_expressions']['hlen']))
|
python
|
{
"resource": ""
}
|
q279242
|
HistoryConsoleWidget._history_locked
|
test
|
def _history_locked(self):
""" Returns whether history movement is locked.
"""
return (self.history_lock and
(self._get_edited_history(self._history_index) !=
self.input_buffer) and
(self._get_prompt_cursor().blockNumber() !=
self._get_end_cursor().blockNumber()))
|
python
|
{
"resource": ""
}
|
q279243
|
HistoryConsoleWidget._get_edited_history
|
test
|
def _get_edited_history(self, index):
""" Retrieves a history item, possibly with temporary edits.
"""
if index in self._history_edits:
return self._history_edits[index]
elif index == len(self._history):
return unicode()
return self._history[index]
|
python
|
{
"resource": ""
}
|
q279244
|
HistoryConsoleWidget._set_history
|
test
|
def _set_history(self, history):
""" Replace the current history with a sequence of history items.
"""
self._history = list(history)
self._history_edits = {}
self._history_index = len(self._history)
|
python
|
{
"resource": ""
}
|
q279245
|
HistoryConsoleWidget._store_edits
|
test
|
def _store_edits(self):
""" If there are edits to the current input buffer, store them.
"""
current = self.input_buffer
if self._history_index == len(self._history) or \
self._history[self._history_index] != current:
self._history_edits[self._history_index] = current
|
python
|
{
"resource": ""
}
|
q279246
|
MyFrame.OnTimeToClose
|
test
|
def OnTimeToClose(self, evt):
"""Event handler for the button click."""
print("See ya later!")
sys.stdout.flush()
self.cleanup_consoles(evt)
self.Close()
# Not sure why, but our IPython kernel seems to prevent normal WX
# shutdown, so an explicit exit() call is needed.
sys.exit()
|
python
|
{
"resource": ""
}
|
q279247
|
build_collection
|
test
|
def build_collection(df, **kwargs):
'''
Generates a list of Record objects given a DataFrame.
Each Record instance has a series attribute which is a pandas.Series of the same attributes
in the DataFrame.
Optional data can be passed in through kwargs which will be included by the name of each object.
parameters
----------
df : pandas.DataFrame
kwargs : alternate arguments to be saved by name to the series of each object
Returns
-------
collection : list
list of Record objects where each Record represents one row from a dataframe
Examples
--------
This is how we generate a Record Collection from a DataFrame.
>>> import pandas as pd
>>> import turntable
>>>
>>> df = pd.DataFrame({'Artist':"""Michael Jackson, Pink Floyd, Whitney Houston, Meat Loaf,
Eagles, Fleetwood Mac, Bee Gees, AC/DC""".split(', '),
>>> 'Album' :"""Thriller, The Dark Side of the Moon, The Bodyguard, Bat Out of Hell,
Their Greatest Hits (1971-1975), Rumours, Saturday Night Fever, Back in Black""".split(', ')})
>>> collection = turntable.press.build_collection(df, my_favorite_record = 'nevermind')
>>> record = collection[0]
>>> print record.series
'''
print 'Generating the Record Collection...\n'
df['index_original'] = df.index
df.reset_index(drop=True, inplace=True)
if pd.__version__ >= '0.15.0':
d = df.T.to_dict(orient='series')
else:
d = df.T.to_dict(outtype='series')
collection = [load_record(item, kwargs) for item in d.items()]
return collection
|
python
|
{
"resource": ""
}
|
q279248
|
collection_to_df
|
test
|
def collection_to_df(collection):
'''
Converts a collection back into a pandas DataFrame
parameters
----------
collection : list
list of Record objects where each Record represents one row from a dataframe
Returns
-------
df : pandas.DataFrame
DataFrame of length=len(collection) where each row represents one Record
'''
return pd.concat([record.series for record in collection], axis=1).T
|
python
|
{
"resource": ""
}
|
q279249
|
spin_frame
|
test
|
def spin_frame(df, method):
'''
Runs the full turntable process on a pandas DataFrame
parameters
----------
df : pandas.DataFrame
each row represents a record
method : def method(record)
function used to process each row
Returns
-------
df : pandas.DataFrame
DataFrame processed by method
Example
-------
>>> import pandas as pd
>>> import turntable
>>>
>>> df = pd.DataFrame({'Artist':"""Michael Jackson, Pink Floyd, Whitney Houston, Meat Loaf, Eagles, Fleetwood Mac, Bee Gees, AC/DC""".split(', '), 'Album':"""Thriller, The Dark Side of the Moon, The Bodyguard, Bat Out of Hell, Their Greatest Hits (1971–1975), Rumours, Saturday Night Fever, Back in Black""".split(', ')})
>>>
>>> def method(record):
>>> record.cost = 40
>>> return record
>>>
>>> turntable.press.spin_frame(df, method)
'''
collection = build_collection(df)
collection = turntable.spin.batch(collection, method)
return collection_to_df(collection)
|
python
|
{
"resource": ""
}
|
q279250
|
RecordSetter.set_attributes
|
test
|
def set_attributes(self, kwargs):
'''
Initializes the given argument structure as properties of the class
to be used by name in specific method execution.
Parameters
----------
kwargs : dictionary
Dictionary of extra attributes,
where keys are attributes names and values attributes values.
'''
for key, value in kwargs.items():
setattr(self, key, value)
|
python
|
{
"resource": ""
}
|
q279251
|
LogWatcher.subscribe
|
test
|
def subscribe(self):
"""Update our SUB socket's subscriptions."""
self.stream.setsockopt(zmq.UNSUBSCRIBE, '')
if '' in self.topics:
self.log.debug("Subscribing to: everything")
self.stream.setsockopt(zmq.SUBSCRIBE, '')
else:
for topic in self.topics:
self.log.debug("Subscribing to: %r"%(topic))
self.stream.setsockopt(zmq.SUBSCRIBE, topic)
|
python
|
{
"resource": ""
}
|
q279252
|
LogWatcher.log_message
|
test
|
def log_message(self, raw):
"""receive and parse a message, then log it."""
if len(raw) != 2 or '.' not in raw[0]:
self.log.error("Invalid log message: %s"%raw)
return
else:
topic, msg = raw
# don't newline, since log messages always newline:
topic,level_name = topic.rsplit('.',1)
level,topic = self._extract_level(topic)
if msg[-1] == '\n':
msg = msg[:-1]
self.log.log(level, "[%s] %s" % (topic, msg))
|
python
|
{
"resource": ""
}
|
q279253
|
mergesort
|
test
|
def mergesort(list_of_lists, key=None):
""" Perform an N-way merge operation on sorted lists.
@param list_of_lists: (really iterable of iterable) of sorted elements
(either naturally or by C{key})
@param key: specify sort key function (like C{sort()}, C{sorted()})
Yields tuples of the form C{(item, iterator)}, where the iterator is the
built-in list iterator or something you pass in, if you pre-generate the
iterators.
This is a stable merge; complexity O(N lg N)
Examples::
>>> print list(mergesort([[1,2,3,4],
... [2,3.25,3.75,4.5,6,7],
... [2.625,3.625,6.625,9]]))
[1, 2, 2, 2.625, 3, 3.25, 3.625, 3.75, 4, 4.5, 6, 6.625, 7, 9]
# note stability
>>> print list(mergesort([[1,2,3,4],
... [2,3.25,3.75,4.5,6,7],
... [2.625,3.625,6.625,9]],
... key=int))
[1, 2, 2, 2.625, 3, 3.25, 3.75, 3.625, 4, 4.5, 6, 6.625, 7, 9]
>>> print list(mergesort([[4, 3, 2, 1],
... [7, 6, 4.5, 3.75, 3.25, 2],
... [9, 6.625, 3.625, 2.625]],
... key=lambda x: -x))
[9, 7, 6.625, 6, 4.5, 4, 3.75, 3.625, 3.25, 3, 2.625, 2, 2, 1]
"""
heap = []
for i, itr in enumerate(iter(pl) for pl in list_of_lists):
try:
item = itr.next()
if key:
toadd = (key(item), i, item, itr)
else:
toadd = (item, i, itr)
heap.append(toadd)
except StopIteration:
pass
heapq.heapify(heap)
if key:
while heap:
_, idx, item, itr = heap[0]
yield item
try:
item = itr.next()
heapq.heapreplace(heap, (key(item), idx, item, itr) )
except StopIteration:
heapq.heappop(heap)
else:
while heap:
item, idx, itr = heap[0]
yield item
try:
heapq.heapreplace(heap, (itr.next(), idx, itr))
except StopIteration:
heapq.heappop(heap)
|
python
|
{
"resource": ""
}
|
q279254
|
remote_iterator
|
test
|
def remote_iterator(view,name):
"""Return an iterator on an object living on a remote engine.
"""
view.execute('it%s=iter(%s)'%(name,name), block=True)
while True:
try:
result = view.apply_sync(lambda x: x.next(), Reference('it'+name))
# This causes the StopIteration exception to be raised.
except RemoteError as e:
if e.ename == 'StopIteration':
raise StopIteration
else:
raise e
else:
yield result
|
python
|
{
"resource": ""
}
|
q279255
|
convert_to_this_nbformat
|
test
|
def convert_to_this_nbformat(nb, orig_version=1):
"""Convert a notebook to the v2 format.
Parameters
----------
nb : NotebookNode
The Python representation of the notebook to convert.
orig_version : int
The original version of the notebook to convert.
"""
if orig_version == 1:
newnb = new_notebook()
ws = new_worksheet()
for cell in nb.cells:
if cell.cell_type == u'code':
newcell = new_code_cell(input=cell.get('code'),prompt_number=cell.get('prompt_number'))
elif cell.cell_type == u'text':
newcell = new_text_cell(u'markdown',source=cell.get('text'))
ws.cells.append(newcell)
newnb.worksheets.append(ws)
return newnb
else:
raise ValueError('Cannot convert a notebook from v%s to v2' % orig_version)
|
python
|
{
"resource": ""
}
|
q279256
|
get_supported_platform
|
test
|
def get_supported_platform():
"""Return this platform's maximum compatible version.
distutils.util.get_platform() normally reports the minimum version
of Mac OS X that would be required to *use* extensions produced by
distutils. But what we want when checking compatibility is to know the
version of Mac OS X that we are *running*. To allow usage of packages that
explicitly require a newer version of Mac OS X, we must also know the
current version of the OS.
If this condition occurs for any other platform with a version in its
platform strings, this function should be extended accordingly.
"""
plat = get_build_platform(); m = macosVersionString.match(plat)
if m is not None and sys.platform == "darwin":
try:
plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
except ValueError:
pass # not Mac OS X
return plat
|
python
|
{
"resource": ""
}
|
q279257
|
get_importer
|
test
|
def get_importer(path_item):
"""Retrieve a PEP 302 "importer" for the given path item
If there is no importer, this returns a wrapper around the builtin import
machinery. The returned importer is only cached if it was created by a
path hook.
"""
try:
importer = sys.path_importer_cache[path_item]
except KeyError:
for hook in sys.path_hooks:
try:
importer = hook(path_item)
except ImportError:
pass
else:
break
else:
importer = None
sys.path_importer_cache.setdefault(path_item,importer)
if importer is None:
try:
importer = ImpWrapper(path_item)
except ImportError:
pass
return importer
|
python
|
{
"resource": ""
}
|
q279258
|
StringIO
|
test
|
def StringIO(*args, **kw):
"""Thunk to load the real StringIO on demand"""
global StringIO
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
return StringIO(*args,**kw)
|
python
|
{
"resource": ""
}
|
q279259
|
parse_version
|
test
|
def parse_version(s):
"""Convert a version string to a chronologically-sortable key
This is a rough cross between distutils' StrictVersion and LooseVersion;
if you give it versions that would work with StrictVersion, then it behaves
the same; otherwise it acts like a slightly-smarter LooseVersion. It is
*possible* to create pathological version coding schemes that will fool
this parser, but they should be very rare in practice.
The returned value will be a tuple of strings. Numeric portions of the
version are padded to 8 digits so they will compare numerically, but
without relying on how numbers compare relative to strings. Dots are
dropped, but dashes are retained. Trailing zeros between alpha segments
or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
"2.4". Alphanumeric parts are lower-cased.
The algorithm assumes that strings like "-" and any alpha string that
alphabetically follows "final" represents a "patch level". So, "2.4-1"
is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
considered newer than "2.4-1", which in turn is newer than "2.4".
Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
come before "final" alphabetically) are assumed to be pre-release versions,
so that the version "2.4" is considered newer than "2.4a1".
Finally, to handle miscellaneous cases, the strings "pre", "preview", and
"rc" are treated as if they were "c", i.e. as though they were release
candidates, and therefore are not as new as a version string that does not
contain them, and "dev" is replaced with an '@' so that it sorts lower than
than any other pre-release tag.
"""
parts = []
for part in _parse_version_parts(s.lower()):
if part.startswith('*'):
# remove trailing zeros from each series of numeric parts
while parts and parts[-1]=='00000000':
parts.pop()
parts.append(part)
return tuple(parts)
|
python
|
{
"resource": ""
}
|
q279260
|
_override_setuptools
|
test
|
def _override_setuptools(req):
"""Return True when distribute wants to override a setuptools dependency.
We want to override when the requirement is setuptools and the version is
a variant of 0.6.
"""
if req.project_name == 'setuptools':
if not len(req.specs):
# Just setuptools: ok
return True
for comparator, version in req.specs:
if comparator in ['==', '>=', '>']:
if '0.7' in version:
# We want some setuptools not from the 0.6 series.
return False
return True
return False
|
python
|
{
"resource": ""
}
|
q279261
|
WorkingSet.add
|
test
|
def add(self, dist, entry=None, insert=True, replace=False):
"""Add `dist` to working set, associated with `entry`
If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
On exit from this routine, `entry` is added to the end of the working
set's ``.entries`` (if it wasn't already present).
`dist` is only added to the working set if it's for a project that
doesn't already have a distribution in the set, unless `replace=True`.
If it's added, any callbacks registered with the ``subscribe()`` method
will be called.
"""
if insert:
dist.insert_on(self.entries, entry)
if entry is None:
entry = dist.location
keys = self.entry_keys.setdefault(entry,[])
keys2 = self.entry_keys.setdefault(dist.location,[])
if not replace and dist.key in self.by_key:
return # ignore hidden distros
self.by_key[dist.key] = dist
if dist.key not in keys:
keys.append(dist.key)
if dist.key not in keys2:
keys2.append(dist.key)
self._added_new(dist)
|
python
|
{
"resource": ""
}
|
q279262
|
WorkingSet.find_plugins
|
test
|
def find_plugins(self,
plugin_env, full_env=None, installer=None, fallback=True
):
"""Find all activatable distributions in `plugin_env`
Example usage::
distributions, errors = working_set.find_plugins(
Environment(plugin_dirlist)
)
map(working_set.add, distributions) # add plugins+libs to sys.path
print 'Could not load', errors # display errors
The `plugin_env` should be an ``Environment`` instance that contains
only distributions that are in the project's "plugin directory" or
directories. The `full_env`, if supplied, should be an ``Environment`` that
contains all currently-available distributions. If `full_env` is not
supplied, one is created automatically from the ``WorkingSet`` this
method is called on, which will typically mean that every directory on
``sys.path`` will be scanned for distributions.
`installer` is a standard installer callback as used by the
``resolve()`` method. The `fallback` flag indicates whether we should
attempt to resolve older versions of a plugin if the newest version
cannot be resolved.
This method returns a 2-tuple: (`distributions`, `error_info`), where
`distributions` is a list of the distributions found in `plugin_env`
that were loadable, along with any other distributions that are needed
to resolve their dependencies. `error_info` is a dictionary mapping
unloadable plugin distributions to an exception instance describing the
error that occurred. Usually this will be a ``DistributionNotFound`` or
``VersionConflict`` instance.
"""
plugin_projects = list(plugin_env)
plugin_projects.sort() # scan project names in alphabetic order
error_info = {}
distributions = {}
if full_env is None:
env = Environment(self.entries)
env += plugin_env
else:
env = full_env + plugin_env
shadow_set = self.__class__([])
map(shadow_set.add, self) # put all our entries in shadow_set
for project_name in plugin_projects:
for dist in plugin_env[project_name]:
req = [dist.as_requirement()]
try:
resolvees = shadow_set.resolve(req, env, installer)
except ResolutionError,v:
error_info[dist] = v # save error info
if fallback:
continue # try the next older version of project
else:
break # give up on this project, keep going
else:
map(shadow_set.add, resolvees)
distributions.update(dict.fromkeys(resolvees))
# success, no need to try any more versions of this project
break
distributions = list(distributions)
distributions.sort()
return distributions, error_info
|
python
|
{
"resource": ""
}
|
q279263
|
ResourceManager.get_cache_path
|
test
|
def get_cache_path(self, archive_name, names=()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
not already exist. `archive_name` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
including its ".egg" extension. `names`, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
obtain an extraction location, and only for names they intend to
extract, as it tracks the generated names for possible cleanup later.
"""
extract_path = self.extraction_path or get_default_cache()
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
try:
_bypass_ensure_directory(target_path)
except:
self.extraction_error()
self.cached_files[target_path] = 1
return target_path
|
python
|
{
"resource": ""
}
|
q279264
|
EntryPoint.parse
|
test
|
def parse(cls, src, dist=None):
"""Parse a single entry point from string `src`
Entry point syntax follows the form::
name = some.module:some.attr [extra1,extra2]
The entry name and module name are required, but the ``:attrs`` and
``[extras]`` parts are optional
"""
try:
attrs = extras = ()
name,value = src.split('=',1)
if '[' in value:
value,extras = value.split('[',1)
req = Requirement.parse("x["+extras)
if req.specs: raise ValueError
extras = req.extras
if ':' in value:
value,attrs = value.split(':',1)
if not MODULE(attrs.rstrip()):
raise ValueError
attrs = attrs.rstrip().split('.')
except ValueError:
raise ValueError(
"EntryPoint must be in 'name=module:attrs [extras]' format",
src
)
else:
return cls(name.strip(), value.strip(), attrs, extras, dist)
|
python
|
{
"resource": ""
}
|
q279265
|
DistInfoDistribution._parsed_pkg_info
|
test
|
def _parsed_pkg_info(self):
"""Parse and cache metadata"""
try:
return self._pkg_info
except AttributeError:
from email.parser import Parser
self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO))
return self._pkg_info
|
python
|
{
"resource": ""
}
|
q279266
|
DistInfoDistribution._compute_dependencies
|
test
|
def _compute_dependencies(self):
"""Recompute this distribution's dependencies."""
from _markerlib import compile as compile_marker
dm = self.__dep_map = {None: []}
reqs = []
# Including any condition expressions
for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
distvers, mark = self._preparse_requirement(req)
parsed = parse_requirements(distvers).next()
parsed.marker_fn = compile_marker(mark)
reqs.append(parsed)
def reqs_for_extra(extra):
for req in reqs:
if req.marker_fn(override={'extra':extra}):
yield req
common = frozenset(reqs_for_extra(None))
dm[None].extend(common)
for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
extra = safe_extra(extra.strip())
dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
return dm
|
python
|
{
"resource": ""
}
|
q279267
|
parse_filename
|
test
|
def parse_filename(fname):
"""Parse a notebook filename.
This function takes a notebook filename and returns the notebook
format (json/py) and the notebook name. This logic can be
summarized as follows:
* notebook.ipynb -> (notebook.ipynb, notebook, json)
* notebook.json -> (notebook.json, notebook, json)
* notebook.py -> (notebook.py, notebook, py)
* notebook -> (notebook.ipynb, notebook, json)
Parameters
----------
fname : unicode
The notebook filename. The filename can use a specific filename
extension (.ipynb, .json, .py) or none, in which case .ipynb will
be assumed.
Returns
-------
(fname, name, format) : (unicode, unicode, unicode)
The filename, notebook name and format.
"""
if fname.endswith(u'.ipynb'):
format = u'json'
elif fname.endswith(u'.json'):
format = u'json'
elif fname.endswith(u'.py'):
format = u'py'
else:
fname = fname + u'.ipynb'
format = u'json'
name = fname.split('.')[0]
return fname, name, format
|
python
|
{
"resource": ""
}
|
q279268
|
_collapse_leading_ws
|
test
|
def _collapse_leading_ws(header, txt):
"""
``Description`` header must preserve newlines; all others need not
"""
if header.lower() == 'description': # preserve newlines
return '\n'.join([x[8:] if x.startswith(' ' * 8) else x
for x in txt.strip().splitlines()])
else:
return ' '.join([x.strip() for x in txt.splitlines()])
|
python
|
{
"resource": ""
}
|
q279269
|
CompletionWidget.hideEvent
|
test
|
def hideEvent(self, event):
""" Reimplemented to disconnect signal handlers and event filter.
"""
super(CompletionWidget, self).hideEvent(event)
self._text_edit.cursorPositionChanged.disconnect(self._update_current)
self._text_edit.removeEventFilter(self)
|
python
|
{
"resource": ""
}
|
q279270
|
CompletionWidget.showEvent
|
test
|
def showEvent(self, event):
""" Reimplemented to connect signal handlers and event filter.
"""
super(CompletionWidget, self).showEvent(event)
self._text_edit.cursorPositionChanged.connect(self._update_current)
self._text_edit.installEventFilter(self)
|
python
|
{
"resource": ""
}
|
q279271
|
CompletionWidget._current_text_cursor
|
test
|
def _current_text_cursor(self):
""" Returns a cursor with text between the start position and the
current position selected.
"""
cursor = self._text_edit.textCursor()
if cursor.position() >= self._start_position:
cursor.setPosition(self._start_position,
QtGui.QTextCursor.KeepAnchor)
return cursor
|
python
|
{
"resource": ""
}
|
q279272
|
CompletionWidget._update_current
|
test
|
def _update_current(self):
""" Updates the current item based on the current text.
"""
prefix = self._current_text_cursor().selection().toPlainText()
if prefix:
items = self.findItems(prefix, (QtCore.Qt.MatchStartsWith |
QtCore.Qt.MatchCaseSensitive))
if items:
self.setCurrentItem(items[0])
else:
self.hide()
else:
self.hide()
|
python
|
{
"resource": ""
}
|
q279273
|
registerAdminSite
|
test
|
def registerAdminSite(appName, excludeModels=[]):
"""Registers the models of the app with the given "appName" for the admin site"""
for model in apps.get_app_config(appName).get_models():
if model not in excludeModels:
admin.site.register(model)
|
python
|
{
"resource": ""
}
|
q279274
|
disk_partitions
|
test
|
def disk_partitions(all):
"""Return disk partitions."""
rawlist = _psutil_mswindows.get_disk_partitions(all)
return [nt_partition(*x) for x in rawlist]
|
python
|
{
"resource": ""
}
|
q279275
|
get_system_cpu_times
|
test
|
def get_system_cpu_times():
"""Return system CPU times as a named tuple."""
user, system, idle = 0, 0, 0
# computes system global times summing each processor value
for cpu_time in _psutil_mswindows.get_system_cpu_times():
user += cpu_time[0]
system += cpu_time[1]
idle += cpu_time[2]
return _cputimes_ntuple(user, system, idle)
|
python
|
{
"resource": ""
}
|
q279276
|
get_system_per_cpu_times
|
test
|
def get_system_per_cpu_times():
"""Return system per-CPU times as a list of named tuples."""
ret = []
for cpu_t in _psutil_mswindows.get_system_cpu_times():
user, system, idle = cpu_t
item = _cputimes_ntuple(user, system, idle)
ret.append(item)
return ret
|
python
|
{
"resource": ""
}
|
q279277
|
Win32ShellCommandController._stdin_raw_nonblock
|
test
|
def _stdin_raw_nonblock(self):
"""Use the raw Win32 handle of sys.stdin to do non-blocking reads"""
# WARNING: This is experimental, and produces inconsistent results.
# It's possible for the handle not to be appropriate for use
# with WaitForSingleObject, among other things.
handle = msvcrt.get_osfhandle(sys.stdin.fileno())
result = WaitForSingleObject(handle, 100)
if result == WAIT_FAILED:
raise ctypes.WinError()
elif result == WAIT_TIMEOUT:
print(".", end='')
return None
else:
data = ctypes.create_string_buffer(256)
bytesRead = DWORD(0)
print('?', end='')
if not ReadFile(handle, data, 256,
ctypes.byref(bytesRead), None):
raise ctypes.WinError()
# This ensures the non-blocking works with an actual console
# Not checking the error, so the processing will still work with
# other handle types
FlushConsoleInputBuffer(handle)
data = data.value
data = data.replace('\r\n', '\n')
data = data.replace('\r', '\n')
print(repr(data) + " ", end='')
return data
|
python
|
{
"resource": ""
}
|
q279278
|
Win32ShellCommandController._stdin_raw_block
|
test
|
def _stdin_raw_block(self):
"""Use a blocking stdin read"""
# The big problem with the blocking read is that it doesn't
# exit when it's supposed to in all contexts. An extra
# key-press may be required to trigger the exit.
try:
data = sys.stdin.read(1)
data = data.replace('\r', '\n')
return data
except WindowsError as we:
if we.winerror == ERROR_NO_DATA:
# This error occurs when the pipe is closed
return None
else:
# Otherwise let the error propagate
raise we
|
python
|
{
"resource": ""
}
|
q279279
|
MainWindow.update_tab_bar_visibility
|
test
|
def update_tab_bar_visibility(self):
""" update visibility of the tabBar depending of the number of tab
0 or 1 tab, tabBar hidden
2+ tabs, tabBar visible
send a self.close if number of tab ==0
need to be called explicitly, or be connected to tabInserted/tabRemoved
"""
if self.tab_widget.count() <= 1:
self.tab_widget.tabBar().setVisible(False)
else:
self.tab_widget.tabBar().setVisible(True)
if self.tab_widget.count()==0 :
self.close()
|
python
|
{
"resource": ""
}
|
q279280
|
MainWindow.create_tab_with_current_kernel
|
test
|
def create_tab_with_current_kernel(self):
"""create a new frontend attached to the same kernel as the current tab"""
current_widget = self.tab_widget.currentWidget()
current_widget_index = self.tab_widget.indexOf(current_widget)
current_widget_name = self.tab_widget.tabText(current_widget_index)
widget = self.slave_frontend_factory(current_widget)
if 'slave' in current_widget_name:
# don't keep stacking slaves
name = current_widget_name
else:
name = '(%s) slave' % current_widget_name
self.add_tab_with_frontend(widget,name=name)
|
python
|
{
"resource": ""
}
|
q279281
|
MainWindow.add_tab_with_frontend
|
test
|
def add_tab_with_frontend(self,frontend,name=None):
""" insert a tab with a given frontend in the tab bar, and give it a name
"""
if not name:
name = 'kernel %i' % self.next_kernel_id
self.tab_widget.addTab(frontend,name)
self.update_tab_bar_visibility()
self.make_frontend_visible(frontend)
frontend.exit_requested.connect(self.close_tab)
|
python
|
{
"resource": ""
}
|
q279282
|
MainWindow.add_menu_action
|
test
|
def add_menu_action(self, menu, action, defer_shortcut=False):
"""Add action to menu as well as self
So that when the menu bar is invisible, its actions are still available.
If defer_shortcut is True, set the shortcut context to widget-only,
where it will avoid conflict with shortcuts already bound to the
widgets themselves.
"""
menu.addAction(action)
self.addAction(action)
if defer_shortcut:
action.setShortcutContext(QtCore.Qt.WidgetShortcut)
|
python
|
{
"resource": ""
}
|
q279283
|
MainWindow._make_dynamic_magic
|
test
|
def _make_dynamic_magic(self,magic):
"""Return a function `fun` that will execute `magic` on active frontend.
Parameters
----------
magic : string
string that will be executed as is when the returned function is called
Returns
-------
fun : function
function with no parameters, when called will execute `magic` on the
current active frontend at call time
See Also
--------
populate_all_magic_menu : generate the "All Magics..." menu
Notes
-----
`fun` executes `magic` in active frontend at the moment it is triggered,
not the active frontend at the moment it was created.
This function is mostly used to create the "All Magics..." Menu at run time.
"""
# need two level nested function to be sure to pass magic
# to active frontend **at run time**.
def inner_dynamic_magic():
self.active_frontend.execute(magic)
inner_dynamic_magic.__name__ = "dynamics_magic_s"
return inner_dynamic_magic
|
python
|
{
"resource": ""
}
|
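Note: the reason for the nested function above is easiest to see without Qt. A minimal sketch follows; the Frontend and Window classes are hypothetical stand-ins, not IPython classes.

class Frontend(object):
    def __init__(self, name):
        self.name = name
    def execute(self, magic):
        print('%s runs %s' % (self.name, magic))

class Window(object):
    def __init__(self):
        self.active_frontend = Frontend('console 1')
    def make_dynamic_magic(self, magic):
        # `magic` is captured now; `self.active_frontend` is looked up only
        # when the returned function is finally called.
        def inner_dynamic_magic():
            self.active_frontend.execute(magic)
        return inner_dynamic_magic

w = Window()
run_timeit = w.make_dynamic_magic('%timeit')
w.active_frontend = Frontend('console 2')   # switch the active tab afterwards
run_timeit()                                 # prints: console 2 runs %timeit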
q279284
|
MainWindow.populate_all_magic_menu
|
test
|
def populate_all_magic_menu(self, listofmagic=None):
"""Clean "All Magics..." menu and repopulate it with `listofmagic`
Parameters
----------
        listofmagic : string
            repr() of a list of strings, sent back by the kernel
        Notes
        -----
        `listofmagic` is a repr() of a list because it is fed with the result of
        a 'user_expression'
"""
for k,v in self._magic_menu_dict.items():
v.clear()
self.all_magic_menu.clear()
protected_magic = set(["more","less","load_ext","pycat","loadpy","load","save","psource"])
mlist=ast.literal_eval(listofmagic)
for magic in mlist:
cell = (magic['type'] == 'cell')
name = magic['name']
mclass = magic['class']
if cell :
prefix='%%'
else :
prefix='%'
magic_menu = self._get_magic_menu(mclass)
if name in protected_magic:
suffix = '?'
else :
suffix = ''
pmagic = '%s%s%s'%(prefix,name,suffix)
xaction = QtGui.QAction(pmagic,
self,
triggered=self._make_dynamic_magic(pmagic)
)
magic_menu.addAction(xaction)
self.all_magic_menu.addAction(xaction)
|
python
|
{
"resource": ""
}
|
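Note: stripped of the Qt menu plumbing, the parsing and prefixing logic in populate_all_magic_menu can be exercised on its own. The sample `listofmagic` string below is an assumption about the shape the kernel sends back (a repr() of a list of dicts with 'name', 'type' and 'class' keys).

import ast

listofmagic = repr([
    {'name': 'timeit', 'type': 'line', 'class': 'ExecutionMagics'},
    {'name': 'writefile', 'type': 'cell', 'class': 'OSMagics'},
    {'name': 'load', 'type': 'line', 'class': 'CodeMagics'},
])

protected_magic = set(["more", "less", "load_ext", "pycat", "loadpy", "load", "save", "psource"])

for magic in ast.literal_eval(listofmagic):
    prefix = '%%' if magic['type'] == 'cell' else '%'
    suffix = '?' if magic['name'] in protected_magic else ''
    print('%s -> %s%s%s' % (magic['class'], prefix, magic['name'], suffix))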
q279285
|
MainWindow.closeEvent
|
test
|
def closeEvent(self, event):
""" Forward the close event to every tabs contained by the windows
"""
if self.tab_widget.count() == 0:
# no tabs, just close
event.accept()
return
        # Do not loop on the widget count as it changes while closing
title = self.window().windowTitle()
cancel = QtGui.QMessageBox.Cancel
okay = QtGui.QMessageBox.Ok
if self.confirm_exit:
if self.tab_widget.count() > 1:
msg = "Close all tabs, stop all kernels, and Quit?"
else:
msg = "Close console, stop kernel, and Quit?"
info = "Kernels not started here (e.g. notebooks) will be left alone."
closeall = QtGui.QPushButton("&Quit", self)
closeall.setShortcut('Q')
box = QtGui.QMessageBox(QtGui.QMessageBox.Question,
title, msg)
box.setInformativeText(info)
box.addButton(cancel)
box.addButton(closeall, QtGui.QMessageBox.YesRole)
box.setDefaultButton(closeall)
box.setEscapeButton(cancel)
pixmap = QtGui.QPixmap(self._app.icon.pixmap(QtCore.QSize(64,64)))
box.setIconPixmap(pixmap)
reply = box.exec_()
else:
reply = okay
if reply == cancel:
event.ignore()
return
if reply == okay:
while self.tab_widget.count() >= 1:
# prevent further confirmations:
widget = self.active_frontend
widget._confirm_exit = False
self.close_tab(widget)
event.accept()
|
python
|
{
"resource": ""
}
|
q279286
|
passwd
|
test
|
def passwd(passphrase=None, algorithm='sha1'):
"""Generate hashed password and salt for use in notebook configuration.
In the notebook configuration, set `c.NotebookApp.password` to
the generated string.
Parameters
----------
passphrase : str
Password to hash. If unspecified, the user is asked to input
and verify a password.
algorithm : str
Hashing algorithm to use (e.g, 'sha1' or any argument supported
by :func:`hashlib.new`).
Returns
-------
hashed_passphrase : str
Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'.
Examples
--------
In [1]: passwd('mypassword')
Out[1]: 'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12'
"""
if passphrase is None:
for i in range(3):
p0 = getpass.getpass('Enter password: ')
p1 = getpass.getpass('Verify password: ')
if p0 == p1:
passphrase = p0
break
else:
print('Passwords do not match.')
else:
raise UsageError('No matching passwords found. Giving up.')
h = hashlib.new(algorithm)
salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len)
h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii'))
return ':'.join((algorithm, salt, h.hexdigest()))
|
python
|
{
"resource": ""
}
|
q279287
|
passwd_check
|
test
|
def passwd_check(hashed_passphrase, passphrase):
"""Verify that a given passphrase matches its hashed version.
Parameters
----------
hashed_passphrase : str
Hashed password, in the format returned by `passwd`.
passphrase : str
Passphrase to validate.
Returns
-------
valid : bool
True if the passphrase matches the hash.
Examples
--------
In [1]: from IPython.lib.security import passwd_check
In [2]: passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
...: 'mypassword')
Out[2]: True
In [3]: passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a',
...: 'anotherpassword')
Out[3]: False
"""
try:
algorithm, salt, pw_digest = hashed_passphrase.split(':', 2)
except (ValueError, TypeError):
return False
try:
h = hashlib.new(algorithm)
except ValueError:
return False
if len(pw_digest) == 0:
return False
h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii'))
return h.hexdigest() == pw_digest
|
python
|
{
"resource": ""
}
|
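Note: the 'algorithm:salt:digest' format shared by passwd and passwd_check can be reproduced with the standard library alone. The salt length below mirrors the module-level `salt_len` the code relies on; 12 hex digits is an assumption, not a value taken from the module.

import hashlib
import random

salt_len = 12  # assumed value of the module-level constant

def make_hash(passphrase, algorithm='sha1'):
    salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len)
    h = hashlib.new(algorithm)
    h.update(passphrase.encode('utf-8') + salt.encode('ascii'))
    return ':'.join((algorithm, salt, h.hexdigest()))

def check_hash(hashed_passphrase, passphrase):
    algorithm, salt, pw_digest = hashed_passphrase.split(':', 2)
    h = hashlib.new(algorithm)
    h.update(passphrase.encode('utf-8') + salt.encode('ascii'))
    return h.hexdigest() == pw_digest

token = make_hash('mypassword')
print(check_hash(token, 'mypassword'))        # True
print(check_hash(token, 'anotherpassword'))   # False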
q279288
|
ajax_editable_boolean_cell
|
test
|
def ajax_editable_boolean_cell(item, attr, text='', override=None):
"""
Generate a html snippet for showing a boolean value on the admin page.
Item is an object, attr is the attribute name we should display. Text
is an optional explanatory text to be included in the output.
This function will emit code to produce a checkbox input with its state
corresponding to the item.attr attribute if no override value is passed.
This input is wired to run a JS ajax updater to toggle the value.
If override is passed in, ignores the attr attribute and returns a
static image for the override boolean with no user interaction possible
(useful for "disabled and you can't change it" situations).
"""
if text:
text = ' (%s)' % unicode(text)
if override is not None:
a = [django_boolean_icon(override, text), text]
else:
value = getattr(item, attr)
a = [
'<input type="checkbox"',
value and ' checked="checked"' or '',
' onclick="return inplace_toggle_boolean(%d, \'%s\')";' % (item.id, attr),
' />',
text,
]
a.insert(0, '<div id="wrap_%s_%d">' % ( attr, item.id ))
a.append('</div>')
return unicode(''.join(a))
|
python
|
{
"resource": ""
}
|
q279289
|
TreeEditor.indented_short_title
|
test
|
def indented_short_title(self, item):
r = ""
"""
Generate a short title for an object, indent it depending on
the object's depth in the hierarchy.
"""
if hasattr(item, 'get_absolute_url'):
r = '<input type="hidden" class="medialibrary_file_path" value="%s" />' % item.get_absolute_url()
editable_class = ''
if not getattr(item, 'feincms_editable', True):
editable_class = ' tree-item-not-editable'
r += '<span id="page_marker-%d" class="page_marker%s" style="width: %dpx;"> </span> ' % (
item.id, editable_class, 14 + item.level * 18)
# r += '<span tabindex="0">'
if hasattr(item, 'short_title'):
r += item.short_title()
else:
r += unicode(item)
# r += '</span>'
return mark_safe(r)
|
python
|
{
"resource": ""
}
|
q279290
|
TreeEditor._collect_editable_booleans
|
test
|
def _collect_editable_booleans(self):
"""
Collect all fields marked as editable booleans. We do not
want the user to be able to edit arbitrary fields by crafting
an AJAX request by hand.
"""
if hasattr(self, '_ajax_editable_booleans'):
return
self._ajax_editable_booleans = {}
for field in self.list_display:
# The ajax_editable_boolean return value has to be assigned
# to the ModelAdmin class
item = getattr(self.__class__, field, None)
if not item:
continue
attr = getattr(item, 'editable_boolean_field', None)
if attr:
def _fn(self, instance):
return [ajax_editable_boolean_cell(instance, _fn.attr)]
_fn.attr = attr
result_func = getattr(item, 'editable_boolean_result', _fn)
self._ajax_editable_booleans[attr] = result_func
|
python
|
{
"resource": ""
}
|
q279291
|
TreeEditor._toggle_boolean
|
test
|
def _toggle_boolean(self, request):
"""
Handle an AJAX toggle_boolean request
"""
try:
item_id = int(request.POST.get('item_id', None))
attr = str(request.POST.get('attr', None))
        except (TypeError, ValueError):
return HttpResponseBadRequest("Malformed request")
if not request.user.is_staff:
logging.warning("Denied AJAX request by non-staff %s to toggle boolean %s for object #%s", request.user,
attr, item_id)
return HttpResponseForbidden("You do not have permission to access this object")
self._collect_editable_booleans()
        if attr not in self._ajax_editable_booleans:
return HttpResponseBadRequest("not a valid attribute %s" % attr)
try:
obj = self.model._default_manager.get(pk=item_id)
except self.model.DoesNotExist:
return HttpResponseNotFound("Object does not exist")
can_change = False
if hasattr(obj, "user_can") and obj.user_can(request.user, change_page=True):
# Was added in c7f04dfb5d, but I've no idea what user_can is about.
can_change = True
else:
can_change = self.has_change_permission(request, obj=obj)
if not can_change:
logging.warning("Denied AJAX request by %s to toggle boolean %s for object %s", request.user, attr, item_id)
return HttpResponseForbidden("You do not have permission to access this object")
logging.info("Processing request by %s to toggle %s on %s", request.user, attr, obj)
try:
before_data = self._ajax_editable_booleans[attr](self, obj)
setattr(obj, attr, not getattr(obj, attr))
obj.save()
self._refresh_changelist_caches() # ???: Perhaps better a post_save signal?
# Construct html snippets to send back to client for status update
data = self._ajax_editable_booleans[attr](self, obj)
        except Exception:
logging.exception("Unhandled exception while toggling %s on %s", attr, obj)
return HttpResponseServerError("Unable to toggle %s on %s" % (attr, obj))
# Weed out unchanged cells to keep the updates small. This assumes
        # that the order a possible get_descendants() returns does not change
# before and after toggling this attribute. Unlikely, but still...
d = []
for a, b in zip(before_data, data):
if a != b:
d.append(b)
# TODO: Shorter: [ y for x,y in zip(a,b) if x!=y ]
return HttpResponse(json.dumps(d), mimetype="application/json")
|
python
|
{
"resource": ""
}
|
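Note: the final "weed out unchanged cells" step in _toggle_boolean, together with the shorter form hinted at in its TODO comment, can be checked in isolation; the HTML strings below are made-up sample data.

before_data = ['<div>on</div>', '<div>off</div>', '<div>on</div>']
data        = ['<div>on</div>', '<div>on</div>',  '<div>on</div>']

# Loop form, as written in _toggle_boolean: keep only the cells that changed.
d = []
for a, b in zip(before_data, data):
    if a != b:
        d.append(b)

# Equivalent comprehension, as suggested by the TODO.
d_short = [b for a, b in zip(before_data, data) if a != b]

assert d == d_short == ['<div>on</div>']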
q279292
|
TreeEditor.has_change_permission
|
test
|
def has_change_permission(self, request, obj=None):
"""
Implement a lookup for object level permissions. Basically the same as
ModelAdmin.has_change_permission, but also passes the obj parameter in.
"""
if settings.TREE_EDITOR_OBJECT_PERMISSIONS:
opts = self.opts
r = request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)
else:
r = True
return r and super(TreeEditor, self).has_change_permission(request, obj)
|
python
|
{
"resource": ""
}
|
q279293
|
TreeEditor.has_delete_permission
|
test
|
def has_delete_permission(self, request, obj=None):
"""
Implement a lookup for object level permissions. Basically the same as
ModelAdmin.has_delete_permission, but also passes the obj parameter in.
"""
if settings.TREE_EDITOR_OBJECT_PERMISSIONS:
opts = self.opts
r = request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission(), obj)
else:
r = True
return r and super(TreeEditor, self).has_delete_permission(request, obj)
|
python
|
{
"resource": ""
}
|
q279294
|
add_children
|
test
|
def add_children(G, parent, level, n=2):
"""Add children recursively to a binary tree."""
if level == 0:
return
for i in range(n):
child = parent+str(i)
G.add_node(child)
G.add_edge(parent,child)
add_children(G, child, level-1, n)
|
python
|
{
"resource": ""
}
|
q279295
|
make_bintree
|
test
|
def make_bintree(levels):
"""Make a symmetrical binary tree with @levels"""
G = nx.DiGraph()
root = '0'
G.add_node(root)
add_children(G, root, levels, 2)
return G
|
python
|
{
"resource": ""
}
|
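Note: a quick sanity check of the two tree builders above; it needs networkx installed, and add_children is restated here (with the recursive call inside the loop) so the snippet runs on its own. A tree built with `levels` levels should contain 2**(levels+1) - 1 nodes, and the root is the only node with no predecessors.

import networkx as nx

def add_children(G, parent, level, n=2):
    if level == 0:
        return
    for i in range(n):
        child = parent + str(i)
        G.add_node(child)
        G.add_edge(parent, child)
        add_children(G, child, level - 1, n)

G = nx.DiGraph()
G.add_node('0')
add_children(G, '0', 3, 2)

print(G.number_of_nodes())                # 15 == 2**(3+1) - 1
print(list(nx.topological_sort(G))[0])    # '0' -- the root comes first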
q279296
|
submit_jobs
|
test
|
def submit_jobs(view, G, jobs):
"""Submit jobs via client where G describes the time dependencies."""
results = {}
for node in nx.topological_sort(G):
with view.temp_flags(after=[ results[n] for n in G.predecessors(node) ]):
results[node] = view.apply(jobs[node])
return results
|
python
|
{
"resource": ""
}
|
q279297
|
validate_tree
|
test
|
def validate_tree(G, results):
"""Validate that jobs executed after their dependencies."""
for node in G:
started = results[node].metadata.started
for parent in G.predecessors(node):
finished = results[parent].metadata.completed
assert started > finished, "%s should have happened after %s"%(node, parent)
|
python
|
{
"resource": ""
}
|
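Note: submit_jobs and validate_tree need a running IPython parallel cluster, but the property they encode -- a job starts only after all of its predecessors have finished -- can be simulated with plain counters. networkx is assumed available, as in the snippets above; the diamond-shaped graph is made-up sample data.

import networkx as nx

# 'a' must run before 'b' and 'c', and both must finish before 'd'.
G = nx.DiGraph([('a', 'b'), ('a', 'c'), ('b', 'd'), ('c', 'd')])

clock = 0
started, finished = {}, {}
for node in nx.topological_sort(G):
    started[node] = clock
    clock += 1
    finished[node] = clock
    clock += 1

# The same check validate_tree performs on the real task metadata.
for node in G:
    for parent in G.predecessors(node):
        assert started[node] > finished[parent], \
            "%s should have happened after %s" % (node, parent)
print("dependency order respected")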
q279298
|
make_color_table
|
test
|
def make_color_table(in_class):
"""Build a set of color attributes in a class.
Helper function for building the *TermColors classes."""
for name,value in color_templates:
setattr(in_class,name,in_class._base % value)
|
python
|
{
"resource": ""
}
|
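Note: make_color_table depends on a module-level `color_templates` sequence and a `_base` format string on the target class; both are stand-ins below, chosen only to make the sketch runnable.

# Hypothetical subset of the (name, value) pairs in color_templates.
color_templates = (
    ("Black", "0;30"),
    ("Red", "0;31"),
    ("Green", "0;32"),
)

class TermColors(object):
    _base = '\033[%sm'  # assumed ANSI escape template

def make_color_table(in_class):
    for name, value in color_templates:
        setattr(in_class, name, in_class._base % value)

make_color_table(TermColors)
print(repr(TermColors.Red))   # '\x1b[0;31m'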
q279299
|
ColorScheme.copy
|
test
|
def copy(self,name=None):
"""Return a full copy of the object, optionally renaming it."""
if name is None:
name = self.name
return ColorScheme(name, self.colors.dict())
|
python
|
{
"resource": ""
}
|