_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q277800
|
infer_gaps_in_tree
|
test
|
def infer_gaps_in_tree(df_seq, tree, id_col='id', sequence_col='sequence'):
    """Adds a character matrix to DendroPy tree and infers gaps using
    Fitch's algorithm.
    Infer gaps in sequences at ancestral nodes.

    df_seq: dataframe exposing a `.phylo.to_fasta` accessor; assumed to
        contain `id_col` and `sequence_col` columns — TODO confirm.
    tree: DendroPy tree whose taxon namespace must match the sequence ids.
    Returns the same `tree` object, with parsimony state sets assigned
    to its nodes by the down/up Fitch passes.
    """
    taxa = tree.taxon_namespace
    # Get alignment as fasta
    alignment = df_seq.phylo.to_fasta(id_col=id_col, id_only=True,
                                      sequence_col=sequence_col)
    # Build a Sequence data matrix from Dendropy
    data = dendropy.ProteinCharacterMatrix.get(
        data=alignment,
        schema="fasta",
        taxon_namespace=taxa)
    # Construct a map object between sequence data and tree data.
    # gaps_as_missing=False: gaps are treated as a real state so Fitch
    # can place them at ancestral nodes.
    taxon_state_sets_map = data.taxon_state_sets_map(gaps_as_missing=False)
    # Fitch algorithm to determine placement of gaps: down pass first
    # (postorder), then up pass (preorder) to finalize ancestral states.
    dendropy.model.parsimony.fitch_down_pass(tree.postorder_node_iter(),
                                             taxon_state_sets_map=taxon_state_sets_map)
    dendropy.model.parsimony.fitch_up_pass(tree.preorder_node_iter())
    return tree
|
python
|
{
"resource": ""
}
|
q277801
|
eval_step.nvim_io_recover
|
test
|
def nvim_io_recover(self, io: NvimIORecover[A]) -> NvimIO[B]:
    '''calls `map` to shift the recover execution to flat_map_nvim_io

    The identity `map(lambda a: a)` wraps the recover step so its
    evaluation is deferred to the flat-map machinery instead of being
    executed directly in this step.
    '''
    return eval_step(self.vim)(io.map(lambda a: a))
|
python
|
{
"resource": ""
}
|
q277802
|
install_jinja_translations
|
test
|
def install_jinja_translations():
    """
    Install our gettext and ngettext functions into Jinja2's environment.
    """
    class Translation(object):
        """
        We pass this object to jinja so it can find our gettext implementation.
        If we pass the GNUTranslation object directly, it won't have our
        context and whitespace stripping action.
        """
        ugettext = staticmethod(ugettext)
        ungettext = staticmethod(ungettext)

    import jingo
    jingo.env.install_gettext_translations(Translation)
|
python
|
{
"resource": ""
}
|
q277803
|
exclusive_ns
|
test
|
def exclusive_ns(guard: StateGuard[A], desc: str, thunk: Callable[..., NS[A, B]], *a: Any) -> Do:
    '''this is the central unsafe function, using a lock and updating the state in `guard` in-place.

    Acquires the guard's lock, runs `thunk` against the guarded state,
    writes the resulting state back in-place, then releases the lock.
    The `N.ensure_failure(..., guard.release)` step releases the lock
    when the thunk fails, so the lock is not leaked on error.
    Yields the thunk's response as the final result.
    '''
    yield guard.acquire()
    log.debug2(lambda: f'exclusive: {desc}')
    # run the state transition; on failure, release the lock before propagating
    state, response = yield N.ensure_failure(thunk(*a).run(guard.state), guard.release)
    # commit the new state in-place (the "unsafe" part of this function)
    yield N.delay(lambda v: unsafe_update_state(guard, state))
    yield guard.release()
    log.debug2(lambda: f'release: {desc}')
    yield N.pure(response)
|
python
|
{
"resource": ""
}
|
q277804
|
_percent
|
test
|
def _percent(data, part, total):
"""
Calculate a percentage.
"""
try:
return round(100 * float(data[part]) / float(data[total]), 1)
except ZeroDivisionError:
return 0
|
python
|
{
"resource": ""
}
|
q277805
|
_get_cache_stats
|
test
|
def _get_cache_stats(server_name=None):
    """
    Collect stats from every memcache server, augmented with derived
    percentage fields. When `server_name` is given, return only that
    server's stats dict; otherwise return a mapping of name -> stats.
    """
    all_stats = {}
    for entry in mc_client.get_stats():
        name = entry[0].split(' ')[0]
        stats = entry[1]
        stats['bytes_percent'] = _percent(stats, 'bytes', 'limit_maxbytes')
        stats['get_hit_rate'] = _percent(stats, 'get_hits', 'cmd_get')
        stats['get_miss_rate'] = _percent(stats, 'get_misses', 'cmd_get')
        if server_name and server_name == name:
            return stats
        all_stats[name] = stats
    return all_stats
|
python
|
{
"resource": ""
}
|
q277806
|
_get_cache_slabs
|
test
|
def _get_cache_slabs(server_name=None):
    """
    Collect slab info for all servers; when `server_name` is given,
    return only that server's slab data.
    """
    all_slabs = {}
    for entry in mc_client.get_slabs():
        name = entry[0].split(' ')[0]
        if server_name and server_name == name:
            return entry[1]
        all_slabs[name] = entry[1]
    return all_slabs
|
python
|
{
"resource": ""
}
|
q277807
|
_context_data
|
test
|
def _context_data(data, request=None):
    """
    Merge Django's admin global context (``site.each_context``) into
    ``data``, for compatibility with Django 1.7.

    Keys in ``data`` take precedence over the admin context. On Django
    versions where ``site.each_context`` does not exist, ``data`` is
    returned unchanged.
    """
    try:
        context = dict(site.each_context(request))
    except AttributeError:
        # site.each_context() only exists on Django >= 1.7
        return data
    # The previous implementation concatenated .items() results, which
    # only works on Python 2 (dict views don't support + on Python 3,
    # raising an uncaught TypeError).
    context.update(data)
    return context
|
python
|
{
"resource": ""
}
|
q277808
|
server_status
|
test
|
def server_status(request):
    """
    Render the status page covering all memcache servers.
    """
    context = {
        'cache_stats': _get_cache_stats(),
        'can_get_slabs': hasattr(mc_client, 'get_slabs'),
    }
    return render_to_response('memcache_admin/server_status.html',
                              context, RequestContext(request))
|
python
|
{
"resource": ""
}
|
q277809
|
dashboard
|
test
|
def dashboard(request):
    """
    Render the dashboard, or an error page when no memcache server is
    reachable.
    """
    # mc_client will be a dict if memcached is not configured
    cache_stats = None if isinstance(mc_client, dict) else _get_cache_stats()
    if cache_stats:
        template = 'memcache_admin/dashboard.html'
        context = {
            'title': _('Memcache Dashboard'),
            'cache_stats': cache_stats,
            'can_get_slabs': hasattr(mc_client, 'get_slabs'),
            'REFRESH_RATE': SETTINGS['REFRESH_RATE'],
        }
    else:
        template = 'memcache_admin/dashboard_error.html'
        context = {
            'title': _('Memcache Dashboard - Error'),
            'error_message': _('Unable to connect to a memcache server.'),
        }
    data = _context_data(context, request)
    return render_to_response(template, data, RequestContext(request))
|
python
|
{
"resource": ""
}
|
q277810
|
stats
|
test
|
def stats(request, server_name):
    """
    Render the statistics page for a single memcache server.
    """
    server_name = server_name.strip('/')
    context = _context_data(
        {'title': _('Memcache Statistics for %s') % server_name,
         'cache_stats': _get_cache_stats(server_name)},
        request)
    return render_to_response('memcache_admin/stats.html', context,
                              RequestContext(request))
|
python
|
{
"resource": ""
}
|
q277811
|
slabs
|
test
|
def slabs(request, server_name):
    """
    Render the slabs page for a single memcache server.
    """
    context = _context_data(
        {'title': _('Memcache Slabs for %s') % server_name,
         'cache_slabs': _get_cache_slabs(server_name)},
        request)
    return render_to_response('memcache_admin/slabs.html', context,
                              RequestContext(request))
|
python
|
{
"resource": ""
}
|
q277812
|
human_bytes
|
test
|
def human_bytes(value):
    """
    Render a byte count as a human-readable string (B/KB/MB/GB), always
    with two decimal places. Accepts anything `float()` can convert.
    """
    value = float(value)
    # largest unit whose threshold the value reaches wins
    for factor, unit in ((1024 ** 3, 'GB'), (1024 ** 2, 'MB'), (1024, 'KB')):
        if value >= factor:
            return '%.2f %s' % (value / factor, unit)
    return '%.2f B' % value
|
python
|
{
"resource": ""
}
|
q277813
|
Environment.find_config
|
test
|
def find_config(self, children):
    """
    Find a config in our children so we can fill in variables in our other
    children with its data.

    Search order:
      1. a kwarg literally named 'config' (coerced: str -> ConfigFile,
         dict -> Config; an existing Config passes through);
      2. any other child that is already a Config instance;
      3. a child named 'config' (or of type Config) inside any Directory child.
    A config found under the name 'config' wins over one found by type.
    Returns None when nothing matches.

    Raises TypeError when the 'config' kwarg has an unusable type.
    """
    named_config = None
    found_config = None
    # first see if we got a kwarg named 'config', as this guy is special
    if 'config' in children:
        if type(children['config']) == str:
            children['config'] = ConfigFile(children['config'])
        elif isinstance(children['config'], Config):
            children['config'] = children['config']  # already usable as-is
        elif type(children['config']) == dict:
            children['config'] = Config(data=children['config'])
        else:
            raise TypeError("Don't know how to turn {} into a Config".format(type(children['config'])))
        named_config = children['config']
    # next check the other kwargs; the last Config seen wins
    for k in children:
        if isinstance(children[k], Config):
            found_config = children[k]
    # if we still don't have a config, see if there's a directory with one
    for k in children:
        if isinstance(children[k], Directory):
            for j in children[k]._children:
                if j == 'config' and not named_config:
                    # NOTE(review): taken regardless of the child's type —
                    # confirm a non-Config child named 'config' is intended
                    named_config = children[k]._children[j]
                if isinstance(children[k]._children[j], Config):
                    found_config = children[k]._children[j]
    if named_config:
        return named_config
    else:
        return found_config
|
python
|
{
"resource": ""
}
|
q277814
|
Environment.add
|
test
|
def add(self, **kwargs):
    """
    Add named objects to the environment.

    String values are treated as paths and wrapped in a Directory;
    anything else is stored as-is. Each child is linked back to this
    environment, has config tokens applied, then is prepared for use.
    """
    for name, obj in kwargs.items():
        child = Directory(obj) if type(obj) == str else obj
        self._children[name] = child
        child._env = self
        child.apply_config(ConfigApplicator(self.config))
        child.prepare()
|
python
|
{
"resource": ""
}
|
q277815
|
File.apply_config
|
test
|
def apply_config(self, applicator):
    """
    Substitute config tokens in this file's path, when the path is a
    plain string; other path values are left untouched.
    """
    fpath = self._fpath
    if type(fpath) == str:
        self._fpath = applicator.apply(fpath)
|
python
|
{
"resource": ""
}
|
q277816
|
File.path
|
test
|
def path(self):
    """
    Path of this file: joined onto the parent's path when a parent
    exists, otherwise the raw file path.
    """
    parent = self._parent
    if not parent:
        return self._fpath
    return os.path.join(parent.path, self._fpath)
|
python
|
{
"resource": ""
}
|
q277817
|
File.read
|
test
|
def read(self):
    """
    Return the entire contents of the file as a string.
    """
    with open(self.path) as fh:
        return fh.read()
|
python
|
{
"resource": ""
}
|
q277818
|
File.write
|
test
|
def write(self, data, mode='w'):
    """
    Write `data` to the file.

    `mode` is forwarded unchanged to `open()` (e.g. 'a' to append).
    """
    with open(self.path, mode) as fh:
        fh.write(data)
|
python
|
{
"resource": ""
}
|
q277819
|
LogFile.configure
|
test
|
def configure(self):
    """
    Configure the Python logging module for this file.

    Builds a FileHandler for this file's path, attaches a formatter
    (from an explicit format string, a named formatter in the
    environment's dict_config, or a dict of Formatter kwargs), and adds
    the handler to the configured loggers — or to the root logger when
    none were specified.
    """
    # build a file handler for this file (delay=True: the file is not
    # opened until the first record is emitted)
    handler = logging.FileHandler(self.path, delay=True)
    # if we got a format string, create a formatter with it
    if self._format:
        handler.setFormatter(logging.Formatter(self._format))
    # if we got a string for the formatter, assume it's the name of a
    # formatter in the environment's config
    if type(self._formatter) == str:
        if self._env and self._env.config.logging.dict_config.formatters[self._formatter]:
            d = self._env.config.logging.dict_config.formatters[self._formatter].to_dict()
            handler.setFormatter(logging.Formatter(**d))
    elif type(self._formatter) == dict:
        # if it's a dict it must be the actual formatter params
        handler.setFormatter(logging.Formatter(**self._formatter))
    # add the file handler to whatever loggers were specified
    if len(self._loggers):
        for name in self._loggers:
            logging.getLogger(name).addHandler(handler)
    else:
        # none specified, just add it to the root logger
        logging.getLogger().addHandler(handler)
|
python
|
{
"resource": ""
}
|
q277820
|
LockFile.create
|
test
|
def create(self):
    """
    Create the file as an empty file.

    Raises an Exception when the file already exists.
    """
    if os.path.exists(self.path):
        raise Exception("File exists: {}".format(self.path))
    open(self.path, 'a').close()
|
python
|
{
"resource": ""
}
|
q277821
|
Directory.apply_config
|
test
|
def apply_config(self, applicator):
    """
    Substitute config tokens in this directory's path, then recurse
    into every child.
    """
    if type(self._path) == str:
        self._path = applicator.apply(self._path)
    for child in self._children.values():
        child.apply_config(applicator)
|
python
|
{
"resource": ""
}
|
q277822
|
Directory.path
|
test
|
def path(self):
    """
    Build this directory's path from the parent path, base, and own
    path component, skipping any that are unset. Empty string when
    none are set.
    """
    parts = []
    if self._parent and self._parent.path:
        parts.append(self._parent.path)
    if self._base:
        parts.append(self._base)
    if self._path:
        parts.append(self._path)
    return os.path.join('', *parts)
|
python
|
{
"resource": ""
}
|
q277823
|
Directory.remove
|
test
|
def remove(self, recursive=True, ignore_error=True):
    """
    Delete the directory from disk.

    Recursive removal is used when requested, or when this directory's
    cleanup policy is 'recursive'. Errors are swallowed unless
    `ignore_error` is False.
    """
    try:
        if not recursive and self._cleanup != 'recursive':
            os.rmdir(self.path)
        else:
            shutil.rmtree(self.path)
    except Exception as e:
        if not ignore_error:
            raise e
|
python
|
{
"resource": ""
}
|
q277824
|
Directory.prepare
|
test
|
def prepare(self):
    """
    Ready this directory for use in an Environment: create it on disk
    when the create flag is set, then propagate the environment down to
    every child and prepare each one.
    """
    if self._create:
        self.create()
    for child in self._children.values():
        child._env = self._env
        child.prepare()
|
python
|
{
"resource": ""
}
|
q277825
|
Directory.cleanup
|
test
|
def cleanup(self):
    """
    Clean up every child first, then remove this directory itself when
    its cleanup flag is set (removal is always recursive).
    """
    for child in self._children.values():
        child.cleanup()
    if self._cleanup:
        self.remove(True)
|
python
|
{
"resource": ""
}
|
q277826
|
Directory.path_to
|
test
|
def path_to(self, path):
    """
    Return the full path to `path` inside this directory; `path` is
    coerced to a string first.
    """
    target = str(path)
    return os.path.join(self.path, target)
|
python
|
{
"resource": ""
}
|
q277827
|
Directory.list
|
test
|
def list(self):
    """
    Return the directory's entries as File objects parented here.
    """
    entries = os.listdir(self.path)
    return [File(name, parent=self) for name in entries]
|
python
|
{
"resource": ""
}
|
q277828
|
Directory.write
|
test
|
def write(self, filename, data, mode='w'):
    """
    Write `data` to `filename` inside this directory.

    `mode` is forwarded unchanged to `open()`.
    """
    target = self.path_to(str(filename))
    with open(target, mode) as fh:
        fh.write(data)
|
python
|
{
"resource": ""
}
|
q277829
|
Directory.read
|
test
|
def read(self, filename):
    """
    Return the contents of `filename` inside this directory.
    """
    with open(self.path_to(str(filename))) as fh:
        return fh.read()
|
python
|
{
"resource": ""
}
|
q277830
|
Directory.add
|
test
|
def add(self, *args, **kwargs):
    """
    Add files/objects to the directory.

    Keyword arguments are stored under their keyword name (strings are
    wrapped in File objects). Positional arguments must be File
    instances or filename strings; anything else raises TypeError.
    When exactly one file/filename was passed positionally, the
    corresponding File object is returned for convenience.
    """
    def _attach(name, child):
        # register the child and link it to this directory/environment
        self._children[name] = child
        self._children[name]._parent = self
        self._children[name]._env = self._env

    for key, value in kwargs.items():
        _attach(key, File(value) if isinstance(value, str) else value)
    added = []
    for arg in args:
        if isinstance(arg, File):
            _attach(arg.name, arg)
        elif isinstance(arg, str):
            new_file = File(arg)
            added.append(new_file)
            _attach(arg, new_file)
        else:
            raise TypeError(type(arg))
    # if we were passed a single file/filename, return the File object
    # for convenience
    if len(added) == 1:
        return added[0]
    if len(args) == 1:
        return args[0]
|
python
|
{
"resource": ""
}
|
q277831
|
State.save
|
test
|
def save(self):
    """
    Serialize the state dict to its YAML file.
    """
    serialized = yaml.dump(dict(self.d))
    with open(self.path, 'w') as fh:
        fh.write(serialized)
|
python
|
{
"resource": ""
}
|
q277832
|
State.load
|
test
|
def load(self):
    """
    Restore state from the YAML file, if one exists.

    Tabs are replaced with four spaces so hand-edited files with tab
    indentation still parse as YAML.
    """
    if not os.path.exists(self.path):
        return
    with open(self.path, 'r') as fh:
        raw = fh.read()
    self.d = yaml.safe_load(raw.replace('\t', ' ' * 4))
|
python
|
{
"resource": ""
}
|
q277833
|
State.cleanup
|
test
|
def cleanup(self):
    """
    Delete the saved state file when one exists on disk; no-op otherwise.
    """
    state_file = self.path
    if os.path.exists(state_file):
        os.remove(state_file)
|
python
|
{
"resource": ""
}
|
q277834
|
PluginManager.load_plugins
|
test
|
def load_plugins(self, directory):
    """
    Loads plugins from the specified directory.

    `directory` is the full path to a directory containing python modules
    which each contain a subclass of the Plugin class.
    There is no criteria for a valid plugin at this level - any python
    module found in the directory will be loaded. Only modules that
    implement a subclass of the Plugin class above will be collected.
    The directory will be traversed recursively.
    """
    # walk directory
    for filename in os.listdir(directory):
        # path to file
        filepath = os.path.join(directory, filename)
        # if it's a file, load it
        modname, ext = os.path.splitext(filename)
        if os.path.isfile(filepath) and ext == '.py':
            file, path, descr = imp.find_module(modname, [directory])
            if file:
                try:
                    mod = imp.load_module(modname, file, path, descr)
                finally:
                    # imp.find_module returns an OPEN file object; the
                    # previous code never closed it, leaking a handle
                    # per plugin module.
                    file.close()
        # if it's a directory, recurse into it
        if os.path.isdir(filepath):
            self.load_plugins(filepath)
|
python
|
{
"resource": ""
}
|
q277835
|
update_dict
|
test
|
def update_dict(target, source):
    """
    Recursively merge values from a nested dictionary into another nested
    dictionary, in place.

    Nested dicts present on BOTH sides are merged key-by-key; any other
    value in `source` overwrites the corresponding `target` entry.

    For example:
    >>> target = {
    ...     'thing': 123,
    ...     'thang': {
    ...         'a': 1,
    ...         'b': 2
    ...     }
    ... }
    >>> source = {
    ...     'thang': {
    ...         'a': 666,
    ...         'c': 777
    ...     }
    ... }
    >>> update_dict(target, source)
    >>> target
    {
        'thing': 123,
        'thang': {
            'a': 666,
            'b': 2,
            'c': 777
        }
    }
    """
    for k, v in source.items():
        # Merge only when BOTH sides hold a dict. The previous condition
        # tested isinstance(source[k], dict) twice (never target[k]) and
        # crashed when target[k] was a non-dict being overridden by a dict.
        if isinstance(v, dict) and isinstance(target.get(k), dict):
            update_dict(target[k], v)
        else:
            target[k] = v
|
python
|
{
"resource": ""
}
|
q277836
|
ConfigNode._child
|
test
|
def _child(self, path):
    """
    Return a ConfigNode for the child at `path`, relative to this node.
    """
    child_path = path if not self._path else '{}.{}'.format(self._path, path)
    return ConfigNode(root=self._root, path=child_path)
|
python
|
{
"resource": ""
}
|
q277837
|
ConfigNode._resolve_path
|
test
|
def _resolve_path(self, create=False):
"""
Returns a tuple of a reference to the last container in the path, and
the last component in the key path.
For example, with a self._value like this:
{
'thing': {
'another': {
'some_leaf': 5,
'one_more': {
'other_leaf': 'x'
}
}
}
}
And a self._path of: 'thing.another.some_leaf'
This will return a tuple of a reference to the 'another' dict, and
'some_leaf', allowing the setter and casting methods to directly access
the item referred to by the key path.
"""
# Split up the key path
if type(self._path) == str:
key_path = self._path.split('.')
else:
key_path = [self._path]
# Start at the root node
node = self._root._data
nodes = [self._root._data]
# Traverse along key path
while len(key_path):
# Get the next key in the key path
key = key_path.pop(0)
# See if the test could be an int for array access, if so assume it is
try:
key = int(key)
except:
pass
# If the next level doesn't exist, create it
if create:
if type(node) == dict and key not in node:
node[key] = {}
elif type(node) == list and type(key) == int and len(node) < key:
node.append([None for i in range(key-len(node))])
# Store the last node and traverse down the hierarchy
nodes.append(node)
try:
node = node[key]
except TypeError:
if type(key) == int:
raise IndexError(key)
else:
raise KeyError(key)
return (nodes[-1], key)
|
python
|
{
"resource": ""
}
|
q277838
|
ConfigNode._get_value
|
test
|
def _get_value(self):
"""
Get the value represented by this node.
"""
if self._path:
try:
container, last = self._resolve_path()
return container[last]
except KeyError:
return None
except IndexError:
return None
else:
return self._data
|
python
|
{
"resource": ""
}
|
q277839
|
ConfigNode.update
|
test
|
def update(self, data=None, options=None):
    """
    Update the configuration with new data.

    This can be passed either or both `data` and `options`.
    `options` is a dict of keypath/value pairs like this (similar to
    CherryPy's config mechanism:
    >>> c.update(options={
    ...     'server.port': 8080,
    ...     'server.host': 'localhost',
    ...     'admin.email': 'admin@lol'
    ... })
    `data` is a dict of actual config data, like this:
    >>> c.update(data={
    ...     'server': {
    ...         'port': 8080,
    ...         'host': 'localhost'
    ...     },
    ...     'admin': {
    ...         'email': 'admin@lol'
    ...     }
    ... })

    Defaults are None instead of shared `{}` literals (the classic
    mutable-default-argument pitfall); passing {} behaves identically.
    """
    if options is None:
        options = {}
    if data is None:
        data = {}
    # Handle an update with a set of options like CherryPy does
    for key in options:
        self[key] = options[key]
    # Merge in any data in `data`
    if isinstance(data, ConfigNode):
        data = data._get_value()
    update_dict(self._get_value(), data)
|
python
|
{
"resource": ""
}
|
q277840
|
ConfigFile.load
|
test
|
def load(self, reload=False):
    """
    Load the config and defaults from files.

    No-op when already loaded unless `reload` is set. Defaults (when a
    defaults file is configured) are read first and deep-copied, the
    main config data is merged over them, and environment variables are
    applied last when enabled. Tabs are replaced with spaces before
    YAML parsing. Returns self for chaining.
    """
    if reload or not self._loaded:
        # load defaults (a string defaults-file path is promoted to a File)
        if self._defaults_file and type(self._defaults_file) == str:
            self._defaults_file = File(self._defaults_file, parent=self._parent)
        defaults = {}
        if self._defaults_file:
            defaults = yaml.safe_load(self._defaults_file.read().replace('\t', ' '))
        # load data
        data = {}
        if self.exists:
            data = yaml.safe_load(self.read().replace('\t', ' '))
        # initialise with the loaded data; deep copy so later updates
        # never mutate the stored defaults
        self._defaults = defaults
        self._data = copy.deepcopy(self._defaults)
        self.update(data=data)
        # if specified, apply environment variables
        if self._apply_env:
            self.update(ConfigEnv(self._env_prefix))
        self._loaded = True
    return self
|
python
|
{
"resource": ""
}
|
q277841
|
ConfigApplicator.apply_to_str
|
test
|
def apply_to_str(self, obj):
    """
    Apply the config to a string.

    Replaces every '{config:VAR}' occurrence in `obj` with the value of
    VAR from the config. Raises KeyError for an unknown variable; an
    unterminated token runs the token list dry, and the IndexError path
    returns the original string unmodified.
    """
    # split into alternating literal text and '{config:' / '}' delimiters
    toks = re.split('({config:|})', obj)
    newtoks = []
    try:
        while len(toks):
            tok = toks.pop(0)
            if tok == '{config:':
                # pop the config variable, look it up
                var = toks.pop(0)
                val = self.config[var]
                # if we got an empty node, then it didn't exist
                if type(val) == ConfigNode and val == None:
                    raise KeyError("No such config variable '{}'".format(var))
                # add the value to the list
                newtoks.append(str(val))
                # pop the '}'
                toks.pop(0)
            else:
                # not the start of a config block, just append it to the list
                newtoks.append(tok)
        return ''.join(newtoks)
    except IndexError:
        # ran out of tokens mid-variable: treat as "no substitution"
        pass
    return obj
|
python
|
{
"resource": ""
}
|
q277842
|
build_callback_url
|
test
|
def build_callback_url(request, urlname, message):
    """
    Build Twilio callback url for confirming message delivery status.

    Prefers the TWILIO_CALLBACK_DOMAIN setting (with scheme chosen by
    TWILIO_CALLBACK_USE_HTTPS); falls back to building an absolute URI
    from the request. Raises ValueError when neither is available.
    :type message: OutgoingSMS
    """
    location = reverse(urlname, kwargs={"pk": message.pk})
    domain = getattr(settings, "TWILIO_CALLBACK_DOMAIN", None)
    if domain:
        use_https = getattr(settings, "TWILIO_CALLBACK_USE_HTTPS", False)
        scheme = "https" if use_https else "http"
        return "{}://{}{}".format(scheme, domain, location)
    if request is not None:
        return request.build_absolute_uri(location)
    raise ValueError(
        "Unable to build callback url. Configure TWILIO_CALLBACK_DOMAIN "
        "or pass request object to function call"
    )
|
python
|
{
"resource": ""
}
|
q277843
|
SocketConnection.process_input
|
test
|
def process_input(self):
    """Called when socket is read-ready.

    Feeds available socket bytes into the pyngus connection; on any
    read failure the inbound side is closed and the connection shut
    down. The connection state machine is always advanced afterwards.
    """
    try:
        pyngus.read_socket_input(self.connection, self.socket)
    except Exception as e:
        LOG.error("Exception on socket read: %s", str(e))
        # a failed read is treated as unrecoverable: close inbound,
        # then the whole connection
        self.connection.close_input()
        self.connection.close()
    # let the engine act on whatever was read (or on the close)
    self.connection.process(time.time())
|
python
|
{
"resource": ""
}
|
q277844
|
SocketConnection.send_output
|
test
|
def send_output(self):
    """Called when socket is write-ready.

    Flushes pending engine output to the socket; on any write failure
    the outbound side is closed and the connection shut down. The
    connection state machine is always advanced afterwards.
    """
    try:
        pyngus.write_socket_output(self.connection,
                                   self.socket)
    except Exception as e:
        LOG.error("Exception on socket write: %s", str(e))
        # a failed write is treated as unrecoverable: close outbound,
        # then the whole connection
        self.connection.close_output()
        self.connection.close()
    # let the engine act on the completed write (or on the close)
    self.connection.process(time.time())
|
python
|
{
"resource": ""
}
|
q277845
|
MyCaller._send_request
|
test
|
def _send_request(self):
    """Compose and send a message carrying the RPC method call.
    """
    request = Message()
    request.subject = "An RPC call!"
    request.address = self._to
    request.reply_to = self._reply_to
    request.body = self._method
    request.correlation_id = 5  # whatever...
    print("sending RPC call request: %s" % str(self._method))
    # @todo send timeout self._sender.send(msg, self, None, time.time() +
    # 10)
    self._sender.send(request, self)
|
python
|
{
"resource": ""
}
|
q277846
|
read_socket_input
|
test
|
def read_socket_input(connection, socket_obj):
    """Read from the network layer and processes all data read. Can
    support both blocking and non-blocking sockets.
    Returns the number of input bytes processed, or EOS if input processing
    is done. Any exceptions raised by the socket are re-raised.
    """
    # ask the engine how many bytes it can accept right now
    count = connection.needs_input
    if count <= 0:
        return count  # 0 or EOS
    while True:
        try:
            sock_data = socket_obj.recv(count)
            break
        except socket.timeout as e:
            LOG.debug("Socket timeout exception %s", str(e))
            raise  # caller must handle
        except socket.error as e:
            err = e.errno
            if err in [errno.EAGAIN,
                       errno.EWOULDBLOCK,
                       errno.EINTR]:
                # try again later
                return 0
            # otherwise, unrecoverable, caller must handle
            LOG.debug("Socket error exception %s", str(e))
            raise
        except Exception as e:  # beats me... assume fatal
            LOG.debug("unknown socket exception %s", str(e))
            raise  # caller must handle
    if len(sock_data) > 0:
        count = connection.process_input(sock_data)
    else:
        # recv() returned no data: the peer performed an orderly shutdown
        LOG.debug("Socket closed")
        count = Connection.EOS
        connection.close_input()
        connection.close_output()
    return count
|
python
|
{
"resource": ""
}
|
q277847
|
write_socket_output
|
test
|
def write_socket_output(connection, socket_obj):
    """Write data to the network layer. Can support both blocking and
    non-blocking sockets.
    Returns the number of output bytes sent, or EOS if output processing
    is done. Any exceptions raised by the socket are re-raised.
    """
    # ask the engine how many bytes are pending for output
    count = connection.has_output
    if count <= 0:
        return count  # 0 or EOS
    data = connection.output_data()
    if not data:
        # error - has_output > 0, but no data?
        return Connection.EOS
    while True:
        try:
            count = socket_obj.send(data)
            break
        except socket.timeout as e:
            LOG.debug("Socket timeout exception %s", str(e))
            raise  # caller must handle
        except socket.error as e:
            err = e.errno
            if err in [errno.EAGAIN,
                       errno.EWOULDBLOCK,
                       errno.EINTR]:
                # try again later
                return 0
            # else assume fatal let caller handle it:
            LOG.debug("Socket error exception %s", str(e))
            raise
        except Exception as e:  # beats me... assume fatal
            LOG.debug("unknown socket exception %s", str(e))
            raise
    if count > 0:
        # tell the engine how many bytes were actually consumed
        connection.output_written(count)
    elif data:
        # send() returned 0 with data pending: treat the socket as closed
        LOG.debug("Socket closed")
        count = Connection.EOS
        connection.close_output()
        connection.close_input()
    return count
|
python
|
{
"resource": ""
}
|
q277848
|
_not_reentrant
|
test
|
def _not_reentrant(func):
"""Decorator that prevents callbacks from calling into link methods that
are not reentrant """
def wrap(*args, **kws):
link = args[0]
if link._callback_lock.in_callback:
m = "Link %s cannot be invoked from a callback!" % func
raise RuntimeError(m)
return func(*args, **kws)
return wrap
|
python
|
{
"resource": ""
}
|
q277849
|
_get_remote_settle_modes
|
test
|
def _get_remote_settle_modes(pn_link):
    """Return a map containing the settle modes as provided by the
    remote peer, omitting any that carry the default value.
    """
    modes = {}
    snd_mode = pn_link.remote_snd_settle_mode
    if snd_mode == proton.Link.SND_SETTLED:
        modes['snd-settle-mode'] = 'settled'
    elif snd_mode == proton.Link.SND_UNSETTLED:
        modes['snd-settle-mode'] = 'unsettled'
    if pn_link.remote_rcv_settle_mode == proton.Link.RCV_SECOND:
        modes['rcv-settle-mode'] = 'second'
    return modes
|
python
|
{
"resource": ""
}
|
q277850
|
_Link.configure
|
test
|
def configure(self, target_address, source_address, handler, properties):
    """Assign addresses, properties, etc.

    A None target/source address requests a dynamic node (only valid on
    the matching link direction, otherwise an Exception is raised); an
    empty-string address leaves that terminus untouched. Distribution
    and settle modes are looked up from `properties` when present.
    """
    self._handler = handler
    self._properties = properties
    dynamic_props = None
    if properties:
        dynamic_props = properties.get("dynamic-node-properties")
        # translate the property strings into proton mode constants;
        # unknown/absent values leave the engine defaults in place
        mode = _dist_modes.get(properties.get("distribution-mode"))
        if mode is not None:
            self._pn_link.source.distribution_mode = mode
        mode = _snd_settle_modes.get(properties.get("snd-settle-mode"))
        if mode is not None:
            self._pn_link.snd_settle_mode = mode
        mode = _rcv_settle_modes.get(properties.get("rcv-settle-mode"))
        if mode is not None:
            self._pn_link.rcv_settle_mode = mode
    if target_address is None:
        # dynamic target: only valid when this link is the sender
        if not self._pn_link.is_sender:
            raise Exception("Dynamic target not allowed")
        self._pn_link.target.dynamic = True
        if dynamic_props:
            self._pn_link.target.properties.clear()
            self._pn_link.target.properties.put_dict(dynamic_props)
    elif target_address:
        self._pn_link.target.address = target_address
    if source_address is None:
        # dynamic source: only valid when this link is the receiver
        if not self._pn_link.is_receiver:
            raise Exception("Dynamic source not allowed")
        self._pn_link.source.dynamic = True
        if dynamic_props:
            self._pn_link.source.properties.clear()
            self._pn_link.source.properties.put_dict(dynamic_props)
    elif source_address:
        self._pn_link.source.address = source_address
|
python
|
{
"resource": ""
}
|
q277851
|
_Link.source_address
|
test
|
def source_address(self):
    """Return the authorative source of the link.

    A sender owns its source, so the locally-set value is used;
    otherwise the remote peer's source is authoritative.
    """
    link = self._pn_link
    if link.is_sender:
        return link.source.address
    return link.remote_source.address
|
python
|
{
"resource": ""
}
|
q277852
|
_Link.target_address
|
test
|
def target_address(self):
    """Return the authorative target of the link.

    A receiver owns its target, so the locally-set value is used;
    otherwise the remote peer's target is authoritative.
    """
    link = self._pn_link
    if link.is_receiver:
        return link.target.address
    return link.remote_target.address
|
python
|
{
"resource": ""
}
|
q277853
|
_Link._session_closed
|
test
|
def _session_closed(self):
    """Remote has closed the session used by this link.

    If the link's remote end was still active, a close is simulated so
    the normal teardown path runs; if the remote end never initialized
    (a locally created link that will never come up), the link is
    marked failed instead.
    """
    # if link not already closed:
    if self._endpoint_state & proton.Endpoint.REMOTE_ACTIVE:
        # simulate close received
        self._process_remote_state()
    elif self._endpoint_state & proton.Endpoint.REMOTE_UNINIT:
        # locally created link, will never come up
        self._failed = True
        self._link_failed("Parent session closed.")
|
python
|
{
"resource": ""
}
|
q277854
|
_SessionProxy.new_sender
|
test
|
def new_sender(self, name):
    """Create a new sender link on this session."""
    return self.request_sender(self._pn_session.sender(name))
|
python
|
{
"resource": ""
}
|
q277855
|
_SessionProxy.request_sender
|
test
|
def request_sender(self, pn_link):
    """Create link from request for a sender: wrap it in a SenderLink
    and track it on this session."""
    sender = SenderLink(self._connection, pn_link)
    self._links.add(sender)
    return sender
|
python
|
{
"resource": ""
}
|
q277856
|
_SessionProxy.new_receiver
|
test
|
def new_receiver(self, name):
    """Create a new receiver link on this session."""
    return self.request_receiver(self._pn_session.receiver(name))
|
python
|
{
"resource": ""
}
|
q277857
|
_SessionProxy.request_receiver
|
test
|
def request_receiver(self, pn_link):
    """Create link from request for a receiver: wrap it in a
    ReceiverLink and track it on this session."""
    receiver = ReceiverLink(self._connection, pn_link)
    self._links.add(receiver)
    return receiver
|
python
|
{
"resource": ""
}
|
q277858
|
_SessionProxy.link_destroyed
|
test
|
def link_destroyed(self, link):
    """Forget a destroyed link; tear the session down once no links
    remain on it."""
    self._links.discard(link)
    if self._links:
        return
    # no more links
    LOG.debug("destroying unneeded session")
    self._pn_session.close()
    self._pn_session.free()
    self._pn_session = None
    self._connection = None
|
python
|
{
"resource": ""
}
|
q277859
|
_SessionProxy._ep_need_close
|
test
|
def _ep_need_close(self):
    """Peer has closed its end of the session; notify every link."""
    LOG.debug("Session %s close requested - closing...",
              self._name)
    # iterate a snapshot: _session_closed may mutate _links
    for link in self._links.copy():
        link._session_closed()
|
python
|
{
"resource": ""
}
|
q277860
|
Endpoint._process_endpoint_event
|
test
|
def _process_endpoint_event(self, event):
    """Called when the Proton Engine generates an endpoint state change
    event.

    Looks up the (next-state, action) entry for `event` in the FSM
    table for the current state; an event with no entry is a protocol
    error and moves the endpoint to STATE_ERROR.
    """
    state_fsm = Endpoint._FSM[self._state]
    entry = state_fsm.get(event)
    if not entry:
        # protocol error: invalid event for current state
        old_state = self._state
        self._state = Endpoint.STATE_ERROR
        self._ep_error("invalid event=%s in state=%s" %
                       (Endpoint.EVENT_NAMES[event],
                        Endpoint.STATE_NAMES[old_state]))
        return
    # transition first, then run the entry's action callback (if any)
    self._state = entry[0]
    if entry[1]:
        entry[1](self)
|
python
|
{
"resource": ""
}
|
q277861
|
HighlightExtension.extendMarkdown
|
test
|
def extendMarkdown(self, md, md_globals):
    """Modifies inline patterns: registers the 'mark' tag pattern at
    the start of the inline pattern chain."""
    md.inlinePatterns.add('mark', SimpleTagPattern(MARK_RE, 'mark'), '_begin')
|
python
|
{
"resource": ""
}
|
q277862
|
ReceiverEventHandler.receiver_remote_closed
|
test
|
def receiver_remote_closed(self, receiver_link, pn_condition):
    """Peer has closed its end of the link; close ours and flag the
    handler as done."""
    LOG.debug("receiver_remote_closed condition=%s", pn_condition)
    receiver_link.close()
    self.done = True
|
python
|
{
"resource": ""
}
|
q277863
|
ReceiverEventHandler.receiver_failed
|
test
|
def receiver_failed(self, receiver_link, error):
    """Protocol error occurred; close the link and flag the handler as
    done."""
    # Logger.warn() is a deprecated alias for warning()
    LOG.warning("receiver_failed error=%s", error)
    receiver_link.close()
    self.done = True
|
python
|
{
"resource": ""
}
|
q277864
|
get_host_port
|
test
|
def get_host_port(server_address):
    """Parse the hostname and optional port out of the server_address.

    Returns (host, port) where port is None when absent; raises on a
    malformed address.
    """
    match = re.match(r"^amqp://([a-zA-Z0-9.]+)(:([\d]+))?$", server_address)
    if match is None:
        raise Exception("Bad address syntax: %s" % server_address)
    host, _, port_text = match.groups()
    return host, int(port_text) if port_text else None
|
python
|
{
"resource": ""
}
|
q277865
|
connect_socket
|
test
|
def connect_socket(host, port, blocking=True):
    """Create a TCP connection to the server, optionally non-blocking.

    In non-blocking mode an in-progress connect (EINPROGRESS) is not
    treated as an error.
    """
    addr = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
    if not addr:
        raise Exception("Could not translate address '%s:%s'"
                        % (host, str(port)))
    family, socktype, proto = addr[0][0], addr[0][1], addr[0][2]
    new_socket = socket.socket(family, socktype, proto)
    if not blocking:
        new_socket.setblocking(0)
    try:
        new_socket.connect(addr[0][4])
    except socket.error as e:
        if e.errno != errno.EINPROGRESS:
            raise
    return new_socket
|
python
|
{
"resource": ""
}
|
q277866
|
server_socket
|
test
|
def server_socket(host, port, backlog=10):
    """Create a non-blocking TCP listening socket for a server."""
    addr = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
    if not addr:
        raise Exception("Could not translate address '%s:%s'"
                        % (host, str(port)))
    listener = socket.socket(addr[0][0], addr[0][1], addr[0][2])
    listener.setblocking(0)  # 0=non-blocking
    try:
        listener.bind(addr[0][4])
        listener.listen(backlog)
    except socket.error as e:
        if e.errno != errno.EINPROGRESS:
            raise
    return listener
|
python
|
{
"resource": ""
}
|
q277867
|
Container.need_processing
|
test
|
def need_processing(self):
"""A utility to help determine which connections need
processing. Returns a triple of lists containing those connections that
0) need to read from the network, 1) need to write to the network, 2)
waiting for pending timers to expire. The timer list is sorted with
the connection next expiring at index 0.
"""
readers = []
writers = []
timer_heap = []
for c in iter(self._connections.values()):
if c.needs_input > 0:
readers.append(c)
if c.has_output > 0:
writers.append(c)
if c.deadline:
heapq.heappush(timer_heap, (c.next_tick, c))
timers = []
while timer_heap:
x = heapq.heappop(timer_heap)
timers.append(x[1])
return (readers, writers, timers)
|
python
|
{
"resource": ""
}
|
q277868
|
Connection._not_reentrant
|
test
|
def _not_reentrant(func):
"""Decorator that prevents callbacks from calling into methods that are
not reentrant
"""
def wrap(self, *args, **kws):
if self._callback_lock and self._callback_lock.in_callback:
m = "Connection %s cannot be invoked from a callback!" % func
raise RuntimeError(m)
return func(self, *args, **kws)
return wrap
|
python
|
{
"resource": ""
}
|
q277869
|
Connection.process
|
test
|
def process(self, now):
"""Perform connection state processing."""
if self._pn_connection is None:
LOG.error("Connection.process() called on destroyed connection!")
return 0
# do nothing until the connection has been opened
if self._pn_connection.state & proton.Endpoint.LOCAL_UNINIT:
return 0
if self._pn_sasl and not self._sasl_done:
# wait until SASL has authenticated
if (_PROTON_VERSION < (0, 10)):
if self._pn_sasl.state not in (proton.SASL.STATE_PASS,
proton.SASL.STATE_FAIL):
LOG.debug("SASL in progress. State=%s",
str(self._pn_sasl.state))
if self._handler:
with self._callback_lock:
self._handler.sasl_step(self, self._pn_sasl)
return self._next_deadline
self._sasl_done = True
if self._handler:
with self._callback_lock:
self._handler.sasl_done(self, self._pn_sasl,
self._pn_sasl.outcome)
else:
if self._pn_sasl.outcome is not None:
self._sasl_done = True
if self._handler:
with self._callback_lock:
self._handler.sasl_done(self, self._pn_sasl,
self._pn_sasl.outcome)
# process timer events:
timer_deadline = self._expire_timers(now)
transport_deadline = self._pn_transport.tick(now)
if timer_deadline and transport_deadline:
self._next_deadline = min(timer_deadline, transport_deadline)
else:
self._next_deadline = timer_deadline or transport_deadline
# process events from proton:
pn_event = self._pn_collector.peek()
while pn_event:
# LOG.debug("pn_event: %s received", pn_event.type)
if _Link._handle_proton_event(pn_event, self):
pass
elif self._handle_proton_event(pn_event):
pass
elif _SessionProxy._handle_proton_event(pn_event, self):
pass
self._pn_collector.pop()
pn_event = self._pn_collector.peek()
# check for connection failure after processing all pending
# engine events:
if self._error:
if self._handler:
# nag application until connection is destroyed
self._next_deadline = now
with self._callback_lock:
self._handler.connection_failed(self, self._error)
elif (self._endpoint_state == self._CLOSED and
self._read_done and self._write_done):
# invoke closed callback after endpoint has fully closed and
# all pending I/O has completed:
if self._handler:
with self._callback_lock:
self._handler.connection_closed(self)
return self._next_deadline
|
python
|
{
"resource": ""
}
|
q277870
|
Connection.output_data
|
test
|
def output_data(self):
"""Get a buffer of data that needs to be written to the network.
"""
c = self.has_output
if c <= 0:
return None
try:
buf = self._pn_transport.peek(c)
except Exception as e:
self._connection_failed(str(e))
return None
return buf
|
python
|
{
"resource": ""
}
|
q277871
|
Connection.create_sender
|
test
|
def create_sender(self, source_address, target_address=None,
event_handler=None, name=None, properties=None):
"""Factory method for Sender links."""
ident = name or str(source_address)
if ident in self._sender_links:
raise KeyError("Sender %s already exists!" % ident)
session = _SessionProxy("session-%s" % ident, self)
session.open()
sl = session.new_sender(ident)
sl.configure(target_address, source_address, event_handler, properties)
self._sender_links[ident] = sl
return sl
|
python
|
{
"resource": ""
}
|
q277872
|
Connection.reject_sender
|
test
|
def reject_sender(self, link_handle, pn_condition=None):
"""Rejects the SenderLink, and destroys the handle."""
link = self._sender_links.get(link_handle)
if not link:
raise Exception("Invalid link_handle: %s" % link_handle)
link.reject(pn_condition)
# note: normally, link.destroy() cannot be called from a callback,
# but this link was never made available to the application so this
# link is only referenced by the connection
link.destroy()
|
python
|
{
"resource": ""
}
|
q277873
|
Connection.create_receiver
|
test
|
def create_receiver(self, target_address, source_address=None,
event_handler=None, name=None, properties=None):
"""Factory method for creating Receive links."""
ident = name or str(target_address)
if ident in self._receiver_links:
raise KeyError("Receiver %s already exists!" % ident)
session = _SessionProxy("session-%s" % ident, self)
session.open()
rl = session.new_receiver(ident)
rl.configure(target_address, source_address, event_handler, properties)
self._receiver_links[ident] = rl
return rl
|
python
|
{
"resource": ""
}
|
q277874
|
Connection._connection_failed
|
test
|
def _connection_failed(self, error="Error not specified!"):
"""Clean up after connection failure detected."""
if not self._error:
LOG.error("Connection failed: %s", str(error))
self._error = error
|
python
|
{
"resource": ""
}
|
q277875
|
Connection._ep_active
|
test
|
def _ep_active(self):
"""Both ends of the Endpoint have become active."""
LOG.debug("Connection is up")
if self._handler:
with self._callback_lock:
self._handler.connection_active(self)
|
python
|
{
"resource": ""
}
|
q277876
|
Connection._ep_need_close
|
test
|
def _ep_need_close(self):
"""The remote has closed its end of the endpoint."""
LOG.debug("Connection remotely closed")
if self._handler:
cond = self._pn_connection.remote_condition
with self._callback_lock:
self._handler.connection_remote_closed(self, cond)
|
python
|
{
"resource": ""
}
|
q277877
|
Connection._ep_error
|
test
|
def _ep_error(self, error):
"""The endpoint state machine failed due to protocol error."""
super(Connection, self)._ep_error(error)
self._connection_failed("Protocol error occurred.")
|
python
|
{
"resource": ""
}
|
q277878
|
twilio_view
|
test
|
def twilio_view(f):
"""This decorator provides several helpful shortcuts for writing Twilio
views.
- It ensures that only requests from Twilio are passed through. This
helps protect you from forged requests.
- It ensures your view is exempt from CSRF checks via Django's
@csrf_exempt decorator. This is necessary for any view that accepts
POST requests from outside the local domain (eg: Twilio's servers).
- It allows your view to (optionally) return TwiML to pass back to
Twilio's servers instead of building a ``HttpResponse`` object
manually.
- It allows your view to (optionally) return any ``twilio.Verb`` object
instead of building a ``HttpResponse`` object manually.
Usage::
from twilio.twiml import Response
@twilio_view
def my_view(request):
r = Response()
r.sms("Thanks for the SMS message!")
return r
"""
@csrf_exempt
@wraps(f)
def decorator(request, *args, **kwargs):
# Attempt to gather all required information to allow us to check the
# incoming HTTP request for forgery. If any of this information is not
# available, then we'll throw a HTTP 403 error (forbidden).
# Ensure the request method is POST
if request.method != "POST":
logger.error("Twilio: Expected POST request", extra={"request": request})
return HttpResponseNotAllowed(request.method)
if not getattr(settings, "TWILIO_SKIP_SIGNATURE_VALIDATION"):
# Validate the request
try:
validator = RequestValidator(settings.TWILIO_AUTH_TOKEN)
url = request.build_absolute_uri()
# Ensure the original requested url is tested for validation
# Prevents breakage when processed behind a proxy server
if "HTTP_X_FORWARDED_SERVER" in request.META:
protocol = "https" if request.META["HTTP_X_TWILIO_SSL"] == "Enabled" else "http"
url = "{0}://{1}{2}".format(
protocol, request.META["HTTP_X_FORWARDED_SERVER"], request.META["REQUEST_URI"]
)
signature = request.META["HTTP_X_TWILIO_SIGNATURE"]
except (AttributeError, KeyError) as e:
logger.exception("Twilio: Missing META param", extra={"request": request})
return HttpResponseForbidden("Missing META param: %s" % e)
# Now that we have all the required information to perform forgery
# checks, we'll actually do the forgery check.
if not validator.validate(url, request.POST, signature):
logger.error(
"Twilio: Invalid url signature %s - %s - %s",
url, request.POST, signature, extra={"request": request}
)
return HttpResponseForbidden("Invalid signature")
# Run the wrapped view, and capture the data returned.
response = f(request, *args, **kwargs)
# If the view returns a string (or a ``twilio.Verb`` object), we'll
# assume it is XML TwilML data and pass it back with the appropriate
# mimetype. We won't check the XML data because that would be too time
# consuming for every request. Instead, we'll let the errors pass
# through to be dealt with by the developer.
if isinstance(response, six.text_type):
return HttpResponse(response, mimetype="application/xml")
elif isinstance(response, Verb):
return HttpResponse(force_text(response), mimetype="application/xml")
else:
return response
return decorator
|
python
|
{
"resource": ""
}
|
q277879
|
PDFColor._get_color_string
|
test
|
def _get_color_string(self):
"""Adobe output string for defining colors"""
s = ''
if self.color_type == 'd':
if self.name is "black":
s = '%.3f G' % 0
else:
s = '%.3f %.3f %.3f RG' % (
self.red / 255.0, self.green / 255.0, self.blue / 255.0)
elif self.color_type == 'f' or self.color_type == 't':
if self.name is "black":
s = '%.3f g' % 0
else:
s = '%.3f %.3f %.3f rg' % (
self.red / 255.0, self.green / 255.0, self.blue / 255.0)
return s
|
python
|
{
"resource": ""
}
|
q277880
|
FontLoader.get_ttf
|
test
|
def get_ttf(self):
""" Given a search path, find file with requested extension """
font_dict = {}
families = []
rootdirlist = string.split(self.search_path, os.pathsep)
#for rootdir in rootdirlist:
# rootdir = os.path.expanduser(rootdir)
for dirName, subdirList, filelist in itertools.chain.from_iterable(os.walk(path) for path in rootdirlist):
for item in filelist:
root, ext = os.path.splitext(item)
if ext == '.ttf':
if root[0].lower() in english:
source = os.path.join(dirName, item)
name = root.lower().replace('_', ' ')
if ' bold' in name:
name = name.replace(' bold', '_bold')
if ' italic' in name:
name = name.replace(' italic', '_italic')
elif 'bold' in name:
name = name.replace('bold', '_bold')
if 'italic' in name:
name = name.replace('italic', '_italic')
elif ' italic' in name:
name = name.replace(' italic', '_italic')
elif 'italic' in name:
name = name.replace('italic', '_italic')
elif 'oblique' in name:
name = name.replace('oblique', '_italic')
else:
families.append(name)
font_dict[name] = source
else:
source = os.path.join(dirName, item)
name = root.lower().replace('_', ' ')
font_dict[name] = source
families.append(name)
self.font_dict = font_dict
self.families = families
|
python
|
{
"resource": ""
}
|
q277881
|
_Session._set_compression
|
test
|
def _set_compression(self, value):
""" May be used to compress PDF files. Code is more readable
for testing and inspection if not compressed. Requires a boolean. """
if isinstance(value, bool):
self.compression = value
else:
raise Exception(
TypeError, "%s is not a valid option for compression" % value)
|
python
|
{
"resource": ""
}
|
q277882
|
_Session._add_object
|
test
|
def _add_object(self, flag=None):
""" The flag is a simple integer to force the placement
of the object into position in the object array.
Used for overwriting the placeholder objects.
"""
self.offset = len(self.buffer)
if flag is None:
objnum = len(self.objects)
obj = _PDFObject(objnum, self.offset)
self.objects.append(obj)
else:
objnum = flag
obj = _PDFObject(objnum, self.offset)
self.objects[flag] = obj
self._out(str(objnum) + ' 0 obj')
return obj
|
python
|
{
"resource": ""
}
|
q277883
|
_Session._out
|
test
|
def _out(self, stream, page=None):
""" Stores the pdf code in a buffer. If it is page related,
provide the page object.
"""
if page is not None:
page.buffer += str(stream) + "\n"
else:
self.buffer += str(stream) + "\n"
|
python
|
{
"resource": ""
}
|
q277884
|
_Session._put_stream
|
test
|
def _put_stream(self, stream):
""" Creates a PDF text stream sandwich.
"""
self._out('stream')
self._out(stream)
self._out('endstream')
|
python
|
{
"resource": ""
}
|
q277885
|
_Session._add_page
|
test
|
def _add_page(self, text):
""" Helper function for PDFText, to have the document
add a page, and retry adding a large block of
text that would otherwise have been to long for the
page.
"""
save_cursor = self.parent.document.page.cursor.copy()
save_cursor.x_reset()
save_cursor.y_reset()
self.parent.document.add_page()
self.parent.document.set_cursor(save_cursor)
self.parent.document.add_text(text)
|
python
|
{
"resource": ""
}
|
q277886
|
PDFDocument._set_color_scheme
|
test
|
def _set_color_scheme(self, draw_color=None, fill_color=None, text_color=None):
""" Default color object is black letters
& black lines."""
if draw_color is None:
draw_color = PDFColor()
draw_color._set_type('d')
if fill_color is None:
fill_color = PDFColor()
fill_color._set_type('f')
if text_color is None:
text_color = PDFColor()
text_color._set_type('t')
self.draw_color = draw_color
self.fill_color = fill_color
self.text_color = text_color
|
python
|
{
"resource": ""
}
|
q277887
|
PDFDocument._set_default_font
|
test
|
def _set_default_font(self):
""" Internal method to set the initial default font. Change
the font using set_font method."""
self.font = PDFFont(self.session)
self.font._set_index()
self.fonts.append(self.font)
self.fontkeys.append(self.font.font_key)
|
python
|
{
"resource": ""
}
|
q277888
|
PDFDocument.add_page
|
test
|
def add_page(self, page=None):
""" May generate and add a PDFPage separately, or use this to generate
a default page."""
if page is None:
self.page = PDFPage(self.orientation_default, self.layout_default, self.margins)
else:
self.page = page
self.page._set_index(len(self.pages))
self.pages.append(self.page)
currentfont = self.font
self.set_font(font=currentfont)
self.session._reset_colors()
|
python
|
{
"resource": ""
}
|
q277889
|
PDFDocument.set_font_size
|
test
|
def set_font_size(self, size):
"""Convenience method for just changing font size."""
if self.font.font_size == size:
pass
else:
self.font._set_size(size)
|
python
|
{
"resource": ""
}
|
q277890
|
PDFDocument.add_text
|
test
|
def add_text(self, text, cursor=None, justification=None):
""" Input text, short or long. Writes in order, within the defined page boundaries. Sequential add_text commands will print without
additional whitespace. """
if cursor is None:
cursor = self.page.cursor
text = re.sub("\s\s+" , " ", text)
if justification is None:
justification = self.justification
if '\n' in text:
text_list = text.split('\n')
for text in text_list:
PDFText(self.session, self.page, text, self.font, self.text_color, cursor, justification, self.double_spacing)
self.add_newline()
else:
PDFText(self.session, self.page, text, self.font, self.text_color, cursor, justification, self.double_spacing)
|
python
|
{
"resource": ""
}
|
q277891
|
PDFDocument.add_newline
|
test
|
def add_newline(self, number=1):
""" Starts over again at the new line. If number is specified,
it will leave multiple lines."""
if isinstance(number, int):
try:
self.page._add_newline(self.font, number, self.double_spacing)
except ValueError:
self.add_page()
else:
raise TypeError("Number of newlines must be an integer.")
|
python
|
{
"resource": ""
}
|
q277892
|
PDFDocument.add_pie_chart
|
test
|
def add_pie_chart(self, data, cursor, width, height, title=None, data_type="raw", fill_colors=None, labels=False, background=None, legend=None):
""" Data type may be "raw" or "percent" """
save_draw_color = self.draw_color
save_fill_color = self.fill_color
chart = PDFPieChart(self.session, self.page, data, cursor, width, height, title, data_type, fill_colors, labels, background, legend)
self.set_draw_color(save_draw_color)
self.set_fill_color(save_fill_color)
|
python
|
{
"resource": ""
}
|
q277893
|
PDFDocument._output_pages
|
test
|
def _output_pages(self):
""" Called by the PDFLite object to prompt creating
the page objects."""
if not self.orientation_changes:
self._get_orientation_changes()
# Page
for page in self.pages:
obj = self.session._add_object()
self.session._out('<</Type /Page')
self.session._out('/Parent 1 0 R')
if self.orientation_changes:
self.session._out('/MediaBox [0 0 %.2f %.2f]' % (page.width, page.height))
self.session._out('/Resources 2 0 R')
self.session._out('/Group <</Type /Group /S /Transparency /CS /DeviceRGB>>')
self.session._out('/Contents %s 0 R>>' % (obj.id + 1))
self.session._out('endobj')
# Page content
self.session._add_object()
if self.session.compression is True:
textfilter = ' /Filter /FlateDecode '
page._compress()
else:
textfilter = ''
self.session._out('<<%s/Length %s >>' % (textfilter, len(page.buffer)))
self.session._put_stream(page.buffer)
self.session._out('endobj')
|
python
|
{
"resource": ""
}
|
q277894
|
PDFDocument._get_orientation_changes
|
test
|
def _get_orientation_changes(self):
""" Returns a list of the pages that have
orientation changes."""
self.orientation_changes = []
for page in self.pages:
if page.orientation_change is True:
self.orientation_changes.append(page.index)
else:
pass
return self.orientation_changes
|
python
|
{
"resource": ""
}
|
q277895
|
PDFDocument._output_fonts
|
test
|
def _output_fonts(self):
""" Called by the PDFLite object to prompt creating
the font objects."""
self.session._save_object_number()
self._output_encoding_diffs()
self._output_font_files()
for font in self.fonts:
obj = self.session._add_object()
font._set_number(obj.id)
font._output()
|
python
|
{
"resource": ""
}
|
q277896
|
PDFDocument._output_images
|
test
|
def _output_images(self):
""" Creates reference images, that can be
drawn throughout the document."""
for image in self.images:
obj = self.session._add_object()
image._set_number(obj.id)
image._output()
|
python
|
{
"resource": ""
}
|
q277897
|
PDFImage._output
|
test
|
def _output(self):
""" Prompts the creating of image objects.
"""
self.session._out('<</Type /XObject')
self.session._out('/Subtype /Image')
self.session._out('/Width %s' % self.width)
self.session._out('/Height %s' % self.height)
if self.colorspace is 'Indexed':
self.session._out('/ColorSpace [/Indexed /DeviceRGB %s %s 0 R' %
(self.pal, self.number + 1))
else:
self.session._out('/ColorSpace /%s' % self.colorspace)
if self.colorspace is 'DeviceCMYK':
self.session._out('/Decode [1 0 1 0 1 0 1 0]')
self.session._out('/BitsPerComponent %s' % self.bits_per_component)
if self.filter:
self.session._out('/Filter /%s' % self.filter)
if self.decode:
self.session._out('/DecodeParms << %s >>' % self.decode)
if self.transparent:
self.session._out('/Mask [%s]' % self.transparent_string)
if self.soft_mask:
self.session._out('/SMask %s 0 R' % (self.number + 1))
self.session._out('/Length %s >>' % self.size)
self.session._put_stream(self.image_data)
self.session._out('endobj')
if self.colorspace is 'Indexed':
self.session._out('<<%s /Length %s >>' % (self.palette_filter, self.palette_length))
self.session._put_stream(self.palette)
self.session._out('endobj')
if isinstance(self.soft_mask, PDFImage):
obj = self.session._add_object()
self.soft_mask._set_number(obj.id)
self.soft_mask._output()
|
python
|
{
"resource": ""
}
|
q277898
|
PDFTransform.transform
|
test
|
def transform(self, a, b, c, d, e, f):
""" Adjust the current transformation state of the current graphics state
matrix. Not recommended for the faint of heart.
"""
a0, b0, c0, d0, e0, f0 = self._currentMatrix
self._currentMatrix = (a0 * a + c0 * b, b0 * a + d0 * b,
a0 * c + c0 * d, b0 * c + d0 * d,
a0 * e + c0 * f + e0, b0 * e + d0 * f + f0)
a1, b1, c1, d1, e1, f1 = self._currentMatrix
self.session._out('%.2f %.2f %.2f %.2f %.2f %.2f cm' % (a1, b1, c1, d1, e1, f1), self.page)
|
python
|
{
"resource": ""
}
|
q277899
|
PDFTransform.absolute_position
|
test
|
def absolute_position(self, x, y):
"""return the absolute position of x,y in user space w.r.t. default user space"""
(a, b, c, d, e, f) = self._currentMatrix
xp = a * x + c * y + e
yp = b * x + d * y + f
return xp, yp
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.