text_prompt
stringlengths 100
17.7k
⌀ | code_prompt
stringlengths 7
9.86k
⌀ |
---|---|
<SYSTEM_TASK:>
Shortcut for making a response to the client's request.
<END_TASK>
<USER_TASK:>
Description:
def make_response(self, status, content_type, response):
    """Shortcut for making a response to the client's request.

    Emits the status line plus permissive CORS headers through the WSGI
    ``start_response`` callable and returns the body as the one-element
    list of bytes that WSGI expects.
    """
    response_headers = [
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Methods', 'GET, POST, OPTIONS'),
        ('Access-Control-Allow-Headers', 'Content-Type'),
        ('Access-Control-Max-Age', '86400'),
        ('Content-type', content_type),
    ]
    self.start_response(status, response_headers)
    return [response.encode()]
<SYSTEM_TASK:>
Extracts the request, feeds the module, and returns the response.
<END_TASK>
<USER_TASK:>
Description:
def on_post(self):
    """Extracts the request, feeds the module, and returns the response.

    Known failure modes get dedicated replies; a misconfiguration is
    re-raised so it reaches the operator; anything unexpected is logged
    and reported as an internal error.
    """
    request = self.environ['wsgi.input']
    try:
        return self.process_request(request)
    except ClientError as exc:
        # The client sent an invalid request - reply with a client-error response.
        return self.on_client_error(exc)
    except BadGateway as exc:
        # An upstream dependency failed - reply with a bad-gateway response.
        return self.on_bad_gateway(exc)
    except InvalidConfig:
        # Server-side misconfiguration is fatal; let it propagate.
        raise
    except Exception as exc:  # pragma: no cover # pylint: disable=W0703
        # Last-resort boundary: log the unknown failure, return a generic error.
        logging.error('Unknown exception: ', exc_info=exc)
        return self.on_internal_error()
<SYSTEM_TASK:>
Handles dispatching of the request.
<END_TASK>
<USER_TASK:>
Description:
def dispatch(self):
    """Handles dispatching of the request.

    Routes to the ``on_<verb>`` handler matching the WSGI
    ``REQUEST_METHOD``, falling back to ``on_bad_method`` when no such
    handler is defined.
    """
    handler_name = 'on_' + self.environ['REQUEST_METHOD'].lower()
    handler = getattr(self, handler_name, None)
    if handler is None:
        return self.on_bad_method()
    return handler()
<SYSTEM_TASK:>
Tuple containing the contents of the Block.
<END_TASK>
<USER_TASK:>
Description:
def serialised( self ):
    """Tuple containing the contents of the Block.

    Returns ((module, class name), ((field name, serialised value), ...)),
    a hashable snapshot of this Block's identity and field data.
    """
    klass = self.__class__
    ident = (klass.__module__, klass.__name__)
    payload = tuple(
        (name, field.serialise( self._field_data[name], parent=self ))
        for name, field in klass._fields.items()
    )
    return (ident, payload)
<SYSTEM_TASK:>
Clone data from another Block.
<END_TASK>
<USER_TASK:>
Description:
def clone_data( self, source ):
    """Clone data from another Block.

    source
        Block instance to copy from. Must be an instance of the same
        Block subclass as self.
    """
    klass = self.__class__
    assert isinstance( source, klass )
    self._field_data.update(
        (name, getattr( source, name )) for name in klass._fields
    )
<SYSTEM_TASK:>
Import data from a byte array.
<END_TASK>
<USER_TASK:>
Description:
def import_data( self, raw_buffer ):
    """Import data from a byte array.

    raw_buffer
        Byte array to import from. When empty or None, every field is
        reset to its default value instead of being parsed.
    """
    klass = self.__class__
    if raw_buffer:
        assert common.is_bytes( raw_buffer )
    # raw_buffer = memoryview( raw_buffer )
    # Rebuild the field store from scratch.
    self._field_data = {}
    for name in klass._fields:
        if raw_buffer:
            self._field_data[name] = klass._fields[name].get_from_buffer(
                raw_buffer, parent=self
            )
        else:
            # No source data: fall back to the field's declared default.
            self._field_data[name] = klass._fields[name].default
    if raw_buffer:
        # Run declared consistency checks against the source bytes.
        for name, check in klass._checks.items():
            check.check_buffer( raw_buffer, parent=self )
        # if we have debug logging on, check the roundtrip works
        if logger.isEnabledFor( logging.INFO ):
            test = self.export_data()
            if logger.getEffectiveLevel() <= logging.DEBUG:
                logger.debug( 'Stats for {}:'.format( self ) )
                logger.debug( 'Import buffer size: {}'.format( len( raw_buffer ) ) )
                logger.debug( 'Export size: {}'.format( len( test ) ) )
                if test == raw_buffer:
                    logger.debug( 'Content: exact match!' )
                elif test == raw_buffer[:len( test )]:
                    # Export may legitimately be a prefix of the import buffer.
                    logger.debug( 'Content: partial match!' )
                else:
                    logger.debug( 'Content: different!' )
                    for x in utils.hexdump_diff_iter( raw_buffer[:len( test )], test ):
                        logger.debug( x )
            elif test != raw_buffer[:len( test )]:
                logger.info( '{} export produced changed output from import'.format( self ) )
    # if raw_buffer:
    #     raw_buffer.release()
    return
<SYSTEM_TASK:>
Export data to a byte array.
<END_TASK>
<USER_TASK:>
Description:
def export_data( self ):
    """Export data to a byte array.

    Returns a bytearray sized by get_size() with every field value and
    every declared check rendered into it.
    """
    klass = self.__class__
    output = bytearray( b'\x00'*self.get_size() )
    # prevalidate all data before export.
    # this is important to ensure that any dependent fields
    # are updated beforehand, e.g. a count referenced
    # in a BlockField
    queue = []  # NOTE(review): unused - candidate for removal
    for name in klass._fields:
        self.scrub_field( name )
        self.validate_field( name )
    self.update_deps()
    # Render each field's value into the output buffer.
    for name in klass._fields:
        klass._fields[name].update_buffer_with_value(
            self._field_data[name], output, parent=self
        )
    # Let checks (e.g. checksums) stamp their values into the buffer last.
    for name, check in klass._checks.items():
        check.update_buffer( output, parent=self )
    return output
<SYSTEM_TASK:>
Update dependencies on all the fields on this Block instance.
<END_TASK>
<USER_TASK:>
Description:
def update_deps( self ):
    """Update dependencies on all the fields on this Block instance."""
    for field_name in self.__class__._fields:
        self.update_deps_on_field( field_name )
<SYSTEM_TASK:>
Validate all the fields on this Block instance.
<END_TASK>
<USER_TASK:>
Description:
def validate( self ):
    """Validate all the fields on this Block instance."""
    for field_name in self.__class__._fields:
        self.validate_field( field_name )
<SYSTEM_TASK:>
Save the GADDAG to file.
<END_TASK>
<USER_TASK:>
Description:
def save(self, path, compressed=True, exist_ok=False):
    """
    Save the GADDAG to file.

    Args:
        path: path to save the GADDAG to.
        compressed: compress the saved GADDAG using gzip.
        exist_ok: overwrite existing file at `path`.

    Returns:
        The number of bytes written, as reported by the C library.

    Raises:
        OSError: if `path` is an existing file and `exist_ok` is False,
            or if the underlying C library reported a write failure.
    """
    path = os.path.expandvars(os.path.expanduser(path))
    if os.path.isfile(path) and not exist_ok:
        # errno 17 == EEXIST
        raise OSError(17, os.strerror(17), path)
    if os.path.isdir(path):
        # NOTE(review): the derived "out.gdg" path is not re-checked against
        # exist_ok, so an existing out.gdg is silently overwritten - confirm
        # this is intended.
        path = os.path.join(path, "out.gdg")
    if compressed:
        bytes_written = cgaddag.gdg_save_compressed(self.gdg, path.encode("ascii"))
    else:
        bytes_written = cgaddag.gdg_save(self.gdg, path.encode("ascii"))
    if bytes_written == -1:
        # Recover errno from the C runtime to raise a meaningful OSError.
        # NOTE(review): reading errno through ctypes.pythonapi is fragile and
        # platform-dependent - verify on all supported platforms.
        errno = ctypes.c_int.in_dll(ctypes.pythonapi, "errno").value
        raise OSError(errno, os.strerror(errno), path)
    return bytes_written
<SYSTEM_TASK:>
Load a GADDAG from file, replacing the words currently in this GADDAG.
<END_TASK>
<USER_TASK:>
Description:
def load(self, path):
    """
    Load a GADDAG from file, replacing the words currently in this GADDAG.

    Args:
        path: path to saved GADDAG to be loaded.

    Raises:
        OSError: if the file could not be loaded by the C library.
    """
    path = os.path.expandvars(os.path.expanduser(path))
    gdg = cgaddag.gdg_load(path.encode("ascii"))
    if not gdg:
        # NOTE(review): errno via ctypes.pythonapi is fragile and
        # platform-dependent - verify on all supported platforms.
        errno = ctypes.c_int.in_dll(ctypes.pythonapi, "errno").value
        raise OSError(errno, os.strerror(errno), path)
    # Explicitly free the current underlying C structure before
    # adopting the newly loaded one.
    self.__del__()
    self.gdg = gdg.contents
<SYSTEM_TASK:>
Find all words starting with a prefix.
<END_TASK>
<USER_TASK:>
Description:
def starts_with(self, prefix):
    """
    Find all words starting with a prefix.

    Args:
        prefix: A prefix to be searched for.

    Returns:
        A list of all words found.
    """
    words = []
    result = cgaddag.gdg_starts_with(self.gdg, prefix.lower().encode(encoding="ascii"))
    node = result
    # Walk the C linked list of results.
    while node:
        words.append(node.contents.str.decode("ascii"))
        node = node.contents.next
    cgaddag.gdg_destroy_result(result)
    return words
<SYSTEM_TASK:>
Find all words containing a substring.
<END_TASK>
<USER_TASK:>
Description:
def contains(self, sub):
    """
    Find all words containing a substring.

    Args:
        sub: A substring to be searched for.

    Returns:
        A list of all words found.
    """
    # A set is used here (unlike starts_with/ends_with) because the same
    # word can match the substring in more than one position.
    unique_words = set()
    result = cgaddag.gdg_contains(self.gdg, sub.lower().encode(encoding="ascii"))
    node = result
    while node:
        unique_words.add(node.contents.str.decode("ascii"))
        node = node.contents.next
    cgaddag.gdg_destroy_result(result)
    return list(unique_words)
<SYSTEM_TASK:>
Find all words ending with a suffix.
<END_TASK>
<USER_TASK:>
Description:
def ends_with(self, suffix):
    """
    Find all words ending with a suffix.

    Args:
        suffix: A suffix to be searched for.

    Returns:
        A list of all words found.
    """
    words = []
    result = cgaddag.gdg_ends_with(self.gdg, suffix.lower().encode(encoding="ascii"))
    node = result
    # Walk the C linked list of results.
    while node:
        words.append(node.contents.str.decode("ascii"))
        node = node.contents.next
    cgaddag.gdg_destroy_result(result)
    return words
<SYSTEM_TASK:>
Add a word to the GADDAG.
<END_TASK>
<USER_TASK:>
Description:
def add_word(self, word):
    """
    Add a word to the GADDAG.

    Args:
        word: A word to be added to the GADDAG. Must consist solely of
            ASCII letters; it is lowercased before insertion.

    Raises:
        ValueError: if `word` contains a non-ASCII-alphabetic character.
        MemoryError: if the underlying C library ran out of memory.
    """
    word = word.lower()
    if not (word.isascii() and word.isalpha()):
        raise ValueError("Invalid character in word '{}'".format(word))
    # Keep the text form for error messages; previously the encoded bytes
    # object was formatted, producing messages like "word 'b'foo''".
    encoded = word.encode(encoding="ascii")
    result = cgaddag.gdg_add_word(self.gdg, encoded)
    if result == 1:
        raise ValueError("Invalid character in word '{}'".format(word))
    elif result == 2:
        raise MemoryError("Out of memory, GADDAG is in an undefined state")
<SYSTEM_TASK:>
Similar to format, but takes additional reserved params to promote logging best-practices
<END_TASK>
<USER_TASK:>
Description:
def formatLog(source="", level="", title="", data=None):
    """ Similar to format, but takes additional reserved params to promote logging best-practices

    :param level - severity of message - how bad is it?
    :param source - application context - where did it come from?
    :param title - brief description - what kind of event happened?
    :param data - additional information - what details help to investigate?
    """
    # consistently output empty string for unset params, because null values differ by language
    source = "" if source is None else source
    level = "" if level is None else level
    title = "" if title is None else title
    # Work on a copy: the original implementation used a mutable default
    # argument (data={}) and wrote into the caller's dict, so reserved keys
    # leaked back to the caller and persisted across calls.
    if type(data) is dict:
        data = dict(data)
    else:
        data = {}
    data['source'] = source
    data['level'] = level
    data['title'] = title
    return format(data)
<SYSTEM_TASK:>
Convert a string to a list with sanitization.
<END_TASK>
<USER_TASK:>
Description:
def _str_to_list(value, separator):
"""Convert a string to a list with sanitization.""" |
value_list = [item.strip() for item in value.split(separator)]
value_list_sanitized = builtins.list(filter(None, value_list))
if len(value_list_sanitized) > 0:
return value_list_sanitized
else:
raise ValueError('Invalid list variable.') |
<SYSTEM_TASK:>
Write a raw env value.
<END_TASK>
<USER_TASK:>
Description:
def write(name, value):
    """Write a raw env value.

    A ``None`` value clears the environment variable.

    Args:
        name: The environment variable name
        value: The value to write; converted to str before storing
    """
    if value is not None:
        environ[name] = builtins.str(value)
    elif name in environ:
        # Membership test, not environ.get(name): a variable set to the
        # empty string is falsy, so the previous truthiness check never
        # cleared it.
        del environ[name]
<SYSTEM_TASK:>
Read the raw env value.
<END_TASK>
<USER_TASK:>
Description:
def read(name, default=None, allow_none=False, fallback=None):
    """Read the raw env value.

    Read the raw environment variable or use the default. If the value is
    not found and no default is set throw an exception.

    Args:
        name: The environment variable name
        default: The default value to use if no environment variable is found
        allow_none: If the return value can be `None` (i.e. optional)
        fallback: A fallback env variable name, or list/tuple of names, to
            try when the primary environment variable is unavailable.
    """
    raw_value = environ.get(name)
    if raw_value is None and fallback is not None:
        # Normalise a single fallback name into a list, then take the
        # first fallback variable that is actually set.
        candidates = (
            fallback
            if isinstance(fallback, (builtins.list, builtins.tuple))
            else [fallback]
        )
        for candidate in candidates:
            raw_value = environ.get(candidate)
            if raw_value is not None:
                break
    # Environment values are always strings, so "set" == "is not None"
    # (the empty string is a legitimate value).
    if raw_value is not None:
        return raw_value
    if default is not None or allow_none:
        return default
    raise KeyError('Set the "{0}" environment variable'.format(name))
<SYSTEM_TASK:>
Get a string based environment value or the default.
<END_TASK>
<USER_TASK:>
Description:
def str(name, default=None, allow_none=False, fallback=None):
    """Get a string based environment value or the default.

    The value is stripped of surrounding whitespace.

    Args:
        name: The environment variable name
        default: The default value to use if no environment variable is found
        allow_none: If the return value can be `None` (i.e. optional)
        fallback: Fallback env variable name(s) to try
    """
    raw = read(name, default, allow_none, fallback=fallback)
    if raw is None and allow_none:
        return None
    return builtins.str(raw).strip()
<SYSTEM_TASK:>
Get a boolean based environment value or the default.
<END_TASK>
<USER_TASK:>
Description:
def bool(name, default=None, allow_none=False, fallback=None):
    """Get a boolean based environment value or the default.

    Accepts real booleans, integers (positive means True) and the usual
    truthy/falsy strings understood by `_strtobool`.

    Args:
        name: The environment variable name
        default: The default value to use if no environment variable is found
        allow_none: If the return value can be `None` (i.e. optional)
        fallback: Fallback env variable name(s) to try
    """
    value = read(name, default, allow_none, fallback=fallback)
    if isinstance(value, builtins.bool):
        return value
    if isinstance(value, builtins.int):
        return value > 0
    if value is None and allow_none:
        return None
    return _strtobool(builtins.str(value).lower().strip())
<SYSTEM_TASK:>
Get a string environment value or the default.
<END_TASK>
<USER_TASK:>
Description:
def int(name, default=None, allow_none=False, fallback=None):
    """Get an integer based environment value or the default.

    Args:
        name: The environment variable name
        default: The default value to use if no environment variable is found
        allow_none: If the return value can be `None` (i.e. optional)
        fallback: Fallback env variable name(s) to try
    """
    value = read(name, default, allow_none, fallback=fallback)
    # Strip whitespace before int() conversion for robustness.
    if isinstance(value, builtins.str):
        value = value.strip()
    if value is None and allow_none:
        return None
    else:
        return builtins.int(value)
<SYSTEM_TASK:>
Get a list of strings or the default.
<END_TASK>
<USER_TASK:>
Description:
def list(name, default=None, allow_none=False, fallback=None, separator=','):
    """Get a list of strings or the default.

    The individual list elements are whitespace-stripped.

    Args:
        name: The environment variable name
        default: The default value to use if no environment variable is found
        allow_none: If the return value can be `None` (i.e. optional)
        fallback: Fallback env variable name(s) to try
        separator: The list item separator character or pattern
    """
    value = read(name, default, allow_none, fallback=fallback)
    if isinstance(value, builtins.list):
        return value
    if isinstance(value, builtins.str):
        return _str_to_list(value, separator)
    if value is None and allow_none:
        return None
    # Anything else is wrapped as a single-element list of its string form.
    return [builtins.str(value)]
<SYSTEM_TASK:>
this function adds some configuration for the application
<END_TASK>
<USER_TASK:>
Description:
def includeme(config):
    """this function adds some configuration for the application

    Registers the /references route, ensures an IReferencer utility is
    available, installs the protected-resources view deriver and the
    json_item renderer, then scans for decorated views.
    """
    config.add_route('references', '/references')
    # Make sure a Referencer utility is registered with the registry.
    _add_referencer(config.registry)
    config.add_view_deriver(protected_resources.protected_view)
    config.add_renderer('json_item', json_renderer)
    config.scan()
<SYSTEM_TASK:>
Gets the Referencer from config and adds it to the registry.
<END_TASK>
<USER_TASK:>
Description:
def _add_referencer(registry):
    """
    Gets the Referencer from config and adds it to the registry.

    Idempotent: if an IReferencer utility is already registered it is
    returned as-is; otherwise one is resolved from the
    `urireferencer.referencer` setting and registered.
    """
    existing = registry.queryUtility(IReferencer)
    if existing is not None:
        return existing
    dotted_name = registry.settings['urireferencer.referencer']
    registry_url = registry.settings['urireferencer.registry_url']
    referencer_cls = DottedNameResolver().resolve(dotted_name)
    registry.registerUtility(referencer_cls(registry_url), IReferencer)
    return registry.queryUtility(IReferencer)
<SYSTEM_TASK:>
Get the referencer class
<END_TASK>
<USER_TASK:>
Description:
def get_referencer(registry):
    """
    Get the referencer class

    :rtype: pyramid_urireferencer.referencer.AbstractReferencer
    """
    # The argument may be a config/request object carrying a `.registry`
    # attribute, or the registry itself - unwrap when possible.
    actual = getattr(registry, 'registry', None)
    if actual is None:
        actual = registry
    return actual.queryUtility(IReferencer)
<SYSTEM_TASK:>
Clear the layout off all its components
<END_TASK>
<USER_TASK:>
Description:
def clear_layout(layout: QLayout) -> None:
    """Clear the layout off all its components"""
    if layout is None:
        return
    # Keep taking the first item until the layout is empty.
    while layout.count():
        entry = layout.takeAt(0)
        child = entry.widget()
        if child is None:
            # Not a widget: must be a nested layout - clear it recursively.
            clear_layout(entry.layout())
        else:
            child.deleteLater()
<SYSTEM_TASK:>
Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its
<END_TASK>
<USER_TASK:>
Description:
def _get_hangul_syllable_type(hangul_syllable):
    """
    Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its
    Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard,
    ch. 03, section 3.12, Conjoining Jamo Behavior.
    https://www.unicode.org/versions/latest/ch03.pdf

    :param hangul_syllable: Unicode scalar value representing a Hangul syllable
    :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT")
    :raises ValueError: if the value is not in the Hangul syllable range
    """
    if not _is_hangul_syllable(hangul_syllable):
        raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable)
    # The lookup table is loaded lazily on first use.
    if not _hangul_syllable_types:
        _load_hangul_syllable_types()
    return _hangul_syllable_types[hangul_syllable]
<SYSTEM_TASK:>
Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its
<END_TASK>
<USER_TASK:>
Description:
def _get_jamo_short_name(jamo):
    """
    Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its
    Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard,
    ch. 03, section 3.12, Conjoining Jamo Behavior.
    https://www.unicode.org/versions/latest/ch03.pdf

    :param jamo: Unicode scalar value representing a Jamo
    :return: Returns a string representing its Jamo_Short_Name property
    :raises ValueError: if the value is not in the Jamo range
    """
    if not _is_jamo(jamo):
        raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo)
    # The lookup table is loaded lazily on first use.
    if not _jamo_short_names:
        _load_jamo_short_names()
    return _jamo_short_names[jamo]
<SYSTEM_TASK:>
Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul
<END_TASK>
<USER_TASK:>
Description:
def compose_hangul_syllable(jamo):
    """
    Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul
    syllable. If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised.
    The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo
    Behavior."

    Example: (U+1111, U+1171) -> U+D4CC
             (U+D4CC, U+11B6) -> U+D4DB
             (U+1111, U+1171, U+11B6) -> U+D4DB

    :param jamo: Tuple of list of Jamo to compose
    :return: Composed Hangul syllable
    :raises ValueError: if the sequence is not a valid <L, V, T>, <L, V> or <LV, T> combination
    """
    fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!"
    if len(jamo) == 3:
        # <L, V, T> sequence: full arithmetic composition per Unicode ch. 3.12
        # (the *_BASE / *_COUNT constants are the standard Hangul composition
        # constants defined at module level).
        l_part, v_part, t_part = jamo
        if not (l_part in range(0x1100, 0x1112 + 1) and
                v_part in range(0x1161, 0x1175 + 1) and
                t_part in range(0x11a8, 0x11c2 + 1)):
            raise ValueError(fmt_str_invalid_sequence.format(jamo))
        l_index = l_part - L_BASE
        v_index = v_part - V_BASE
        t_index = t_part - T_BASE
        lv_index = l_index * N_COUNT + v_index * T_COUNT
        return S_BASE + lv_index + t_index
    elif len(jamo) == 2:
        if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1):
            # <L, V> sequence -> LV syllable (no trailing consonant).
            l_part, v_part = jamo
            l_index = l_part - L_BASE
            v_index = v_part - V_BASE
            lv_index = l_index * N_COUNT + v_index * T_COUNT
            return S_BASE + lv_index
        elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1):
            # <LV, T> sequence: add the trailing consonant offset to the LV syllable.
            lv_part, t_part = jamo
            t_index = t_part - T_BASE
            return lv_part + t_index
        else:
            raise ValueError(fmt_str_invalid_sequence.format(jamo))
    else:
        raise ValueError(fmt_str_invalid_sequence.format(jamo))
<SYSTEM_TASK:>
Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as
<END_TASK>
<USER_TASK:>
Description:
def _get_hangul_syllable_name(hangul_syllable):
    """
    Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as
    defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information.

    :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert
    :return: String representing its syllable name as transformed according to naming rule NR1.
    """
    if not _is_hangul_syllable(hangul_syllable):
        raise ValueError("Value passed in does not represent a Hangul syllable!")
    # Fully decompose into Jamo, then concatenate their short names;
    # absent parts (None) are skipped.
    jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True)
    return ''.join(_get_jamo_short_name(j) for j in jamo if j is not None)
<SYSTEM_TASK:>
Return a dict mapping method names to anonymous functions that
<END_TASK>
<USER_TASK:>
Description:
def generate_client_callers(spec, timeout, error_callback, local, app):
    """Return a dict mapping method names to anonymous functions that
    will call the server's endpoint of the corresponding name as
    described in the api defined by the swagger dict and bravado spec"""
    callers_dict = {}
    def mycallback(endpoint):
        # Endpoints without a client handler name are not exposed to callers.
        if not endpoint.handler_client:
            return
        callers_dict[endpoint.handler_client] = _generate_client_caller(spec, endpoint, timeout, error_callback, local, app)
    # Visit every endpoint in the spec, collecting a caller for each.
    spec.call_on_each_endpoint(mycallback)
    return callers_dict
<SYSTEM_TASK:>
Chop a stream of data into MODBUS packets.
<END_TASK>
<USER_TASK:>
Description:
def stream_to_packet(data):
    """
    Chop a stream of data into MODBUS packets.

    :param data: stream of data
    :returns: a tuple of the data that is a packet with the remaining
              data, or ``None``
    """
    # MBAP prefix: transaction id (2) + protocol id (2) + length field (2).
    prefix_len = 6
    if len(data) < prefix_len:
        return None
    # The big-endian length field counts the bytes following the prefix.
    total_len = prefix_len + int.from_bytes(data[4:6], 'big')
    if len(data) < total_len:
        return None
    return (data[:total_len], data[total_len:])
<SYSTEM_TASK:>
Convert a complex object into primitives.
<END_TASK>
<USER_TASK:>
Description:
def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    :param value: the object to convert
    :param convert_instances: recurse into arbitrary objects via __dict__
    :param convert_datetime: render datetimes as strings via timeutils
    :param level: current recursion depth (internal)
    :param max_depth: depth at which recursion gives up and returns '?'
    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    # Recursion guard: give up rather than walk a (possibly cyclic) graph.
    if level > max_depth:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in six.iteritems(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if xmlrpclib and isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            return value.data
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)
<SYSTEM_TASK:>
Creates a final report of all the ConFindr results
<END_TASK>
<USER_TASK:>
Description:
def confindr_reporter(self, analysistype='confindr'):
    """
    Creates a final report of all the ConFindr results

    Writes <analysistype>.csv into self.reportpath with one row per
    sample in self.runmetadata.samples.
    """
    with open(os.path.join(self.reportpath, analysistype + '.csv'), 'w') as report:
        rows = ['Strain,Genus,NumContamSNVs,ContamStatus,PercentContam,PercentContamSTD\n']
        for sample in self.runmetadata.samples:
            rows.append('{str},{genus},{numcontamsnv},{status},{pc},{pcs}\n'.format(
                str=sample.name,
                genus=sample.confindr.genus,
                numcontamsnv=sample.confindr.num_contaminated_snvs,
                status=sample.confindr.contam_status,
                pc=sample.confindr.percent_contam,
                pcs=sample.confindr.percent_contam_std
            ))
        # Write all rows in a single call.
        report.write(''.join(rows))
<SYSTEM_TASK:>
Create final reports collating results from all the individual iterations through the method pipeline
<END_TASK>
<USER_TASK:>
Description:
def methodreporter(self):
    """
    Create final reports collating results from all the individual iterations through the method pipeline
    """
    # Ensure that the analyses are set to complete
    self.analysescomplete = True
    # Reset the report path to original value
    self.reportpath = os.path.join(self.path, 'reports')
    # Clear the runmetadata - it will be populated with all the metadata from completemetadata
    self.runmetadata = MetadataObject()
    self.runmetadata.samples = list()
    # As the samples were entered into self.completemetadata depending on when they passed the quality threshold,
    # this list is not ordered numerically/alphabetically like the original runmetadata. Reset the order.
    # NOTE(review): O(n^2) name matching; kept as-is because it preserves the
    # ordering of self.samples and tolerates duplicate entries.
    for strain in self.samples:
        for sample in self.completemetadata:
            if sample.name == strain:
                # Append the sample to the ordered list of objects
                self.runmetadata.samples.append(sample)
    # Create the reports
    self.reporter()
    self.genusspecific()
    self.sixteensreporter()
    self.gdcsreporter()
    self.confindr_reporter()
<SYSTEM_TASK:>
Run the methods required to create the genesippr report summary image
<END_TASK>
<USER_TASK:>
Description:
def main(self):
    """
    Run the methods required to create the genesippr report summary image
    """
    # Build the underlying dataframe first, then render it to the image.
    self.dataframe_setup()
    self.figure_populate(self.outputfolder,
                         self.image_report,
                         self.header_list,
                         self.samples,
                         'genesippr',
                         'report',
                         fail=self.fail)
<SYSTEM_TASK:>
Tells if there is a remote directory at the given location.
<END_TASK>
<USER_TASK:>
Description:
def dir_exists(location, use_sudo=False):
    """Tells if there is a remote directory at the given location."""
    with settings(hide('running', 'stdout', 'stderr'), warn_only=True):
        runner = sudo if use_sudo else run
        # 'test -d' exits with 0 exactly when the directory exists.
        return runner('test -d %s' % (location)).return_code == 0
<SYSTEM_TASK:>
allow sudo calls through ssh without a tty
<END_TASK>
<USER_TASK:>
Description:
def disable_requiretty_on_sudoers(log=False):
    """ allow sudo calls through ssh without a tty """
    if log:
        bookshelf2.logging_helpers.log_green(
            'disabling requiretty on sudo calls')
    # Comment out any 'Defaults requiretty' line in /etc/sudoers so sudo
    # no longer demands an interactive terminal.
    comment_line('/etc/sudoers',
                 '^Defaults.*requiretty', use_sudo=True)
    return True
<SYSTEM_TASK:>
returns the linux distribution in lower case
<END_TASK>
<USER_TASK:>
Description:
def linux_distribution():
    """ returns the linux distribution in lower case """
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True, capture=True):
        # os_release() reads the remote OS release info; 'ID' is the
        # lowercase distribution identifier (e.g. 'ubuntu', 'centos').
        data = os_release()
        return(data['ID'])
<SYSTEM_TASK:>
manipulates systemd services
<END_TASK>
<USER_TASK:>
Description:
def systemd(service, start=True, enabled=True, unmask=False, restart=False):
    """ manipulates systemd services

    restart wins over start/stop; enable/disable and unmask are applied
    afterwards.
    """
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True, capture=True):
        actions = []
        if restart:
            actions.append('restart')
        else:
            actions.append('start' if start else 'stop')
        actions.append('enable' if enabled else 'disable')
        if unmask:
            actions.append('unmask')
        for action in actions:
            sudo('systemctl %s %s' % (action, service))
<SYSTEM_TASK:>
Updates this task whitelist on the saltant server.
<END_TASK>
<USER_TASK:>
Description:
def put(self):
    """Updates this task whitelist on the saltant server.

    Pushes every locally held attribute of this instance to the server
    via the manager's put endpoint.

    Returns:
        :class:`saltant.models.task_whitelist.TaskWhitelist`:
            A task whitelist model instance representing the task
            whitelist just updated.
    """
    return self.manager.put(
        id=self.id,
        name=self.name,
        description=self.description,
        whitelisted_container_task_types=(
            self.whitelisted_container_task_types
        ),
        whitelisted_executable_task_types=(
            self.whitelisted_executable_task_types
        ),
    )
<SYSTEM_TASK:>
Create a task whitelist.
<END_TASK>
<USER_TASK:>
Description:
def create(
    self,
    name,
    description="",
    whitelisted_container_task_types=None,
    whitelisted_executable_task_types=None,
):
    """Create a task whitelist.

    Args:
        name (str): The name of the task whitelist.
        description (str, optional): A description of the task whitelist.
        whitelisted_container_task_types (list, optional): A list of
            whitelisted container task type IDs.
        whitelisted_executable_task_types (list, optional): A list
            of whitelisted executable task type IDs.

    Returns:
        :class:`saltant.models.task_whitelist.TaskWhitelist`:
            A task whitelist model instance representing the task
            whitelist just created.
    """
    # Omitted whitelists become empty lists.
    container_types = (
        [] if whitelisted_container_task_types is None
        else whitelisted_container_task_types
    )
    executable_types = (
        [] if whitelisted_executable_task_types is None
        else whitelisted_executable_task_types
    )
    # POST the new whitelist to the list endpoint.
    request_url = self._client.base_api_url + self.list_url
    payload = {
        "name": name,
        "description": description,
        "whitelisted_container_task_types": container_types,
        "whitelisted_executable_task_types": executable_types,
    }
    response = self._client.session.post(request_url, data=payload)
    # Validate that the request was successful
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_201_CREATED,
    )
    # Return a model instance representing the task instance
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:>
Partially updates a task whitelist on the saltant server.
<END_TASK>
<USER_TASK:>
Description:
def patch(
    self,
    id,
    name=None,
    description=None,
    whitelisted_container_task_types=None,
    whitelisted_executable_task_types=None,
):
    """Partially updates a task whitelist on the saltant server.

    Only the arguments that are not None are sent in the PATCH body.

    Args:
        id (int): The ID of the task whitelist.
        name (str, optional): The name of the task whitelist.
        description (str, optional): A description of the task whitelist.
        whitelisted_container_task_types (list, optional): A list of
            whitelisted container task type IDs.
        whitelisted_executable_task_types (list, optional): A list
            of whitelisted executable task type IDs.

    Returns:
        :class:`saltant.models.task_whitelist.TaskWhitelist`:
            A task whitelist model instance representing the task
            whitelist just updated.
    """
    request_url = self._client.base_api_url + self.detail_url.format(id=id)
    # Only explicitly provided fields are patched.
    candidates = {
        "name": name,
        "description": description,
        "whitelisted_container_task_types": whitelisted_container_task_types,
        "whitelisted_executable_task_types": whitelisted_executable_task_types,
    }
    data_to_patch = {
        key: value for key, value in candidates.items() if value is not None
    }
    response = self._client.session.patch(request_url, data=data_to_patch)
    # Validate that the request was successful
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_200_OK,
    )
    # Return a model instance representing the task instance
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:>
Enable sending logs to stderr. Useful for shell sessions.
<END_TASK>
<USER_TASK:>
Description:
def enable_logging( level='WARNING' ):
    """Enable sending logs to stderr. Useful for shell sessions.

    level
        Logging threshold, as defined in the logging module of the Python
        standard library. Defaults to 'WARNING'.
    """
    logger = logging.getLogger( 'mrcrowbar' )
    logger.setLevel( level )
    # Stream handler defaults to stderr; gate it at the same threshold.
    handler = logging.StreamHandler()
    handler.setLevel( level )
    handler.setFormatter( logging.Formatter( '[%(levelname)s] %(name)s - %(message)s' ) )
    logger.addHandler( handler )
<SYSTEM_TASK:>
Iterate through every location a substring can be found in a source string.
<END_TASK>
<USER_TASK:>
Description:
def find_all_iter( source, substring, start=None, end=None, overlap=False ):
    """Iterate over every offset at which a substring occurs in a source string.

    source
        The source string to search.
    substring
        The substring to look for.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    overlap
        Whether to return overlapping matches (default: false)
    """
    # Trim the search window; remember the base offset so yielded
    # positions stay relative to the original string.
    haystack = source if end is None else source[:end]
    offset = 0
    if start is not None:
        haystack = haystack[start:]
        offset = start
    # Non-overlapping search skips past each whole match; guard against a
    # zero-length substring so the step is never 0.
    step = 1 if overlap else (len( substring ) or 1)
    cursor = haystack.find( substring )
    while cursor != -1:
        yield offset + cursor
        cursor = haystack.find( substring, cursor + step )
<SYSTEM_TASK:>
Return every location a substring can be found in a source string.
<END_TASK>
<USER_TASK:>
Description:
def find_all( source, substring, start=None, end=None, overlap=False ):
    """Return every location a substring can be found in a source string.

    source
        The source string to search.
    substring
        The substring to look for.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    overlap
        Whether to return overlapping matches (default: false)

    Returns a list of integer offsets.
    """
    # list() materialises the generator directly; the previous identity
    # comprehension added nothing.
    return list( find_all_iter( source, substring, start, end, overlap ) )
<SYSTEM_TASK:>
Return the contents of a byte string as a 256 colour image.
<END_TASK>
<USER_TASK:>
Description:
def pixdump_iter( source, start=None, end=None, length=None, width=64, height=None, palette=None ):
    """Return the contents of a byte string as a 256 colour image.

    source
        The byte string to print.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    length
        Length to read in (optional replacement for end)
    width
        Width of image to render in pixels (default: 64)
    height
        Height of image to render in pixels (default: auto)
    palette
        List of Colours to use (default: test palette)

    Returns an iterator as produced by ansi.format_image_iter.
    """
    assert is_bytes( source )
    if not palette:
        palette = colour.TEST_PALETTE
    start = 0 if (start is None) else start
    # 'end' and 'length' are two ways of expressing the same bound; they
    # are mutually exclusive.
    if (end is not None) and (length is not None):
        raise ValueError( 'Can\'t define both an end and a length!' )
    elif (length is not None):
        end = start+length
    elif (end is not None):
        pass
    else:
        end = len( source )
    # Clamp the requested range to the actual extent of the data.
    start = max( start, 0 )
    end = min( end, len( source ) )
    if len( source ) == 0 or (start == end == 0):
        return iter(())
    if height is None:
        # Derive the height from the data size, rounding up to a whole row.
        height = math.ceil( (end-start)/width )
    def data_fetch( x_pos, y_pos, frame ):
        # Map an output pixel back to a byte offset; pixels past the end of
        # the selected range render as (0, 0, 0, 0).
        index = y_pos*width + x_pos + start
        if index >= end:
            return (0, 0, 0, 0)
        return palette[source[index]]
    return ansi.format_image_iter( data_fetch, width=width, height=height )
<SYSTEM_TASK:>
Print the contents of a byte string as a 256 colour image.
<END_TASK>
<USER_TASK:>
Description:
def pixdump( source, start=None, end=None, length=None, width=64, height=None, palette=None ):
    """Print the contents of a byte string as a 256 colour image.

    source
        The byte string to print.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    length
        Length to read in (optional replacement for end)
    width
        Width of image to render in pixels (default: 64)
    height
        Height of image to render in pixels (default: auto)
    palette
        List of Colours to use (default: test palette)
    """
    # Delegate rendering to pixdump_iter and emit one row per line.
    rows = pixdump_iter( source, start, end, length, width, height, palette )
    for row in rows:
        print( row )
<SYSTEM_TASK:>
Push bits into the target.
<END_TASK>
<USER_TASK:>
Description:
def put_bits( self, value, count ):
        """Push bits into the target.

        value
            Integer containing bits to push, ordered from least-significant bit to
            most-significant bit.
        count
            Number of bits to push to the target.
        """
        for _ in range( count ):
            # bits are retrieved from the source LSB first
            bit = (value & 1)
            value >>= 1
            # however, bits are put into the result based on the rule
            # selected by the writer's bits_reverse/insert_at_msb flags:
            if self.bits_reverse:
                if self.insert_at_msb:
                    # fill the byte downwards from its top remaining slot
                    self.current_bits |= (bit << (self.bits_remaining-1))
                else:
                    # shift existing bits up and append the new bit at the bottom
                    self.current_bits <<= 1
                    self.current_bits |= bit
            else:
                if self.insert_at_msb:
                    # shift existing bits down and place the new bit at bit 7
                    self.current_bits >>= 1
                    self.current_bits |= (bit << 7)
                else:
                    # fill the byte upwards from its lowest remaining slot
                    self.current_bits |= (bit << (8-self.bits_remaining))
            self.bits_remaining -= 1
            # once a full byte has accumulated, flush it to the output and
            # reset the accumulator
            if self.bits_remaining <= 0:
                self.output.append( self.current_bits )
                self.current_bits = 0
                self.bits_remaining = 8
<SYSTEM_TASK:>
Return a byte string containing the target as currently written.
<END_TASK>
<USER_TASK:>
Description:
def get_buffer( self ):
    """Return a byte string containing the target as currently written.

    A partially-filled byte accumulator (fewer than 8 bits pending) is
    appended to the end of the result in its current state.
    """
    data = bytearray( self.output )
    if self.bits_remaining < 8:
        # flush the in-progress byte as-is
        data.append( self.current_bits )
    if self.bytes_reverse:
        return bytes( reversed( data ) )
    return bytes( data )
<SYSTEM_TASK:>
Pluralises the class_name using utterly simple algo and returns as table_name
<END_TASK>
<USER_TASK:>
Description:
def table_name(self):
    """Pluralises the class_name using utterly simple algo and returns as table_name.

    Returns:
        str: the converted (snake_case) and naively pluralised table name.

    Raises:
        ValueError: if ``class_name`` is empty or unset.
    """
    if not self.class_name:
        # Give the caller an actionable message instead of a bare ValueError.
        raise ValueError("class_name must be set before a table name can be derived")
    tbl_name = ModelCompiler.convert_case(self.class_name)
    # Naive English pluralisation: '...y' -> '...ies', '...s' -> '...ses',
    # everything else simply gains an 's'.
    if tbl_name.endswith("y"):
        return "{}ies".format(tbl_name[:-1])
    if tbl_name.endswith("s"):
        return "{}es".format(tbl_name)
    return "{}s".format(tbl_name)
<SYSTEM_TASK:>
All the unique types found in user supplied model
<END_TASK>
<USER_TASK:>
Description:
def types(self):
    """All the unique types found in user supplied model.

    Returns:
        list: the deduplicated column types, mapped through
        ``ModelCompiler.get_column_type``. NOTE: ``set()`` deduplication
        does not guarantee a stable ordering.
    """
    found = []
    for column in self.column_definitions:
        raw_type = column.get('type', None)
        # Skip columns with no declared type; normalise the rest.
        if raw_type:
            found.append(ModelCompiler.get_column_type(raw_type))
    return list(set(found))
<SYSTEM_TASK:>
Returns non-postgres types referenced in user supplied model
<END_TASK>
<USER_TASK:>
Description:
def basic_types(self):
    """Returns non-postgres types referenced in user supplied model.

    Returns:
        list: ``standard_types``, with ``'ForeignKey'`` appended when any
        foreign key definitions are present.
    """
    if not self.foreign_key_definitions:
        return self.standard_types
    # Build a new list instead of appending to self.standard_types in
    # place -- the previous implementation mutated the attribute, so the
    # list grew an extra 'ForeignKey' entry on every call.
    return self.standard_types + ['ForeignKey']
<SYSTEM_TASK:>
Returns the primary keys referenced in user supplied model
<END_TASK>
<USER_TASK:>
Description:
def primary_keys(self):
    """Returns the primary keys referenced in user supplied model.

    Returns:
        list: names of all columns whose 'primary_key' entry is truthy.
    """
    # dict.get covers both the missing-key and falsy-value cases that the
    # previous 'in column.keys()' + conditional-expression dance handled.
    return [
        column['name']
        for column in self.column_definitions
        if column.get('primary_key')
    ]
<SYSTEM_TASK:>
Returns compiled column definitions
<END_TASK>
<USER_TASK:>
Description:
def compiled_columns(self):
        """Returns compiled column definitions.

        Renders each entry of ``column_definitions`` through the
        ``column_definition`` template, joining the results one per line,
        indented with ``self.tab`` to sit inside the generated class body.
        """
        def get_column_args(column):
            # Render every key except 'name'/'type' as a keyword argument
            # for the generated column.
            tmp = []
            for arg_name, arg_val in column.items():
                if arg_name not in ('name', 'type'):
                    if arg_name in ('server_default', 'server_onupdate'):
                        # These values are emitted as quoted string literals.
                        arg_val = '"{}"'.format(arg_val)
                    tmp.append(ALCHEMY_TEMPLATES.column_arg.safe_substitute(arg_name=arg_name,
                                                                            arg_val=arg_val))
            return ", ".join(tmp)
        res = []
        for column in self.column_definitions:
            column_args = get_column_args(column)
            column_type, type_params = ModelCompiler.get_col_type_info(column.get('type'))
            column_name = column.get('name')
            if column_type in MUTABLE_DICT_TYPES:
                # Types listed in MUTABLE_DICT_TYPES are wrapped in the
                # mutable-dict template; the type params are consumed by the
                # wrapper, so they are cleared afterwards.
                column_type = ALCHEMY_TEMPLATES.mutable_dict_type.safe_substitute(type=column_type,
                                                                                  type_params=type_params)
                type_params = ''
            res.append(
                ALCHEMY_TEMPLATES.column_definition.safe_substitute(column_name=column_name,
                                                                    column_type=column_type,
                                                                    column_args=column_args,
                                                                    type_params=type_params))
        join_string = "\n" + self.tab
        return join_string.join(res)
<SYSTEM_TASK:>
Returns compiled foreign key definitions
<END_TASK>
<USER_TASK:>
Description:
def compiled_foreign_keys(self):
        """Returns compiled foreign key definitions.

        Like ``compiled_columns``, but renders through the ``foreign_key``
        template and adds an extra argument built from the column's
        'reference' mapping (keys 'table' and 'column').
        """
        def get_column_args(column):
            # Keyword arguments for the column; 'name'/'type'/'reference'
            # are handled separately.
            tmp = []
            for arg_name, arg_val in column.items():
                if arg_name not in ('name', 'type', 'reference'):
                    if arg_name in ('server_default', 'server_onupdate'):
                        # These values are emitted as quoted string literals.
                        arg_val = '"{}"'.format(arg_val)
                    tmp.append(ALCHEMY_TEMPLATES.column_arg.safe_substitute(arg_name=arg_name,
                                                                            arg_val=arg_val))
            return ", ".join(tmp)
        def get_fkey_args(column):
            # Build the ForeignKey argument from the referenced table/column;
            # the exact rendering is defined by the foreign_key_arg template.
            table = column['reference']['table']
            column = column['reference']['column']
            return ALCHEMY_TEMPLATES.foreign_key_arg.safe_substitute(reference_table=table, reference_column=column)
        res = []
        for column in self.foreign_key_definitions:
            column_args = get_column_args(column)
            column_type, type_params = ModelCompiler.get_col_type_info(column.get('type'))
            column_name = column.get('name')
            reference = get_fkey_args(column)
            if column_type in MUTABLE_DICT_TYPES:
                # Same mutable-dict wrapping as in compiled_columns.
                column_type = ALCHEMY_TEMPLATES.mutable_dict_type.safe_substitute(type=column_type,
                                                                                  type_params=type_params)
                type_params = ''
            res.append(
                ALCHEMY_TEMPLATES.foreign_key.safe_substitute(column_name=column_name,
                                                              column_type=column_type,
                                                              column_args=column_args,
                                                              foreign_key_args=reference,
                                                              type_params=type_params))
        join_string = "\n" + self.tab
        return join_string.join(res)
<SYSTEM_TASK:>
Returns compiled relationship definitions
<END_TASK>
<USER_TASK:>
Description:
def compiled_relationships(self):
        """Returns compiled relationship definitions.

        Renders each entry of ``relationship_definitions`` through the
        ``relationship`` template, one per line, indented with ``self.tab``.
        """
        def get_column_args(column):
            # Keyword arguments for the relationship;
            # 'name'/'type'/'reference'/'class' are handled separately.
            tmp = []
            for arg_name, arg_val in column.items():
                if arg_name not in ('name', 'type', 'reference', 'class'):
                    if arg_name in ('back_populates', ):
                        # back_populates is emitted as a quoted string literal.
                        arg_val = "'{}'".format(arg_val)
                    tmp.append(ALCHEMY_TEMPLATES.column_arg.safe_substitute(arg_name=arg_name,
                                                                            arg_val=arg_val))
            return ", ".join(tmp)
        res = []
        for column in self.relationship_definitions:
            column_args = get_column_args(column)
            column_name = column.get('name')
            cls_name = column.get("class")
            res.append(
                ALCHEMY_TEMPLATES.relationship.safe_substitute(column_name=column_name,
                                                               column_args=column_args,
                                                               class_name=cls_name))
        join_string = "\n" + self.tab
        return join_string.join(res)
<SYSTEM_TASK:>
Returns compiled hash function based on hash of stringified primary_keys.
<END_TASK>
<USER_TASK:>
Description:
def compiled_hash_func(self):
    """Return the compiled ``__hash__`` implementation.

    The hash is derived from the concatenation of the stringified primary
    key values -- simple rather than efficient.
    """
    key_exprs = ["str(self.{})".format(field) for field in self.primary_keys]
    return ALCHEMY_TEMPLATES.hash_function.safe_substitute(
        concated_primary_key_strs="+ ".join(key_exprs))
<SYSTEM_TASK:>
Returns compile ORM class for the user supplied model
<END_TASK>
<USER_TASK:>
Description:
def compiled_model(self):
        """Returns compile ORM class for the user supplied model.

        Stitches every previously compiled fragment (columns, foreign keys,
        relationships, dunder/helper functions and imports) into the final
        ``model`` template and returns the resulting source text.
        """
        return ALCHEMY_TEMPLATES.model.safe_substitute(class_name=self.class_name,
                                                       table_name=self.table_name,
                                                       column_definitions=self.compiled_columns,
                                                       init_function=self.compiled_init_func,
                                                       update_function=self.compiled_update_func,
                                                       hash_function=self.compiled_hash_func,
                                                       eq_function=self.compiled_eq_func,
                                                       neq_function=self.compiled_neq_func,
                                                       str_function=self.compiled_str_func,
                                                       unicode_function=self.compiled_unicode_func,
                                                       repr_function=self.compiled_repr_func,
                                                       types=", ".join(self.basic_types),
                                                       username=self.username,
                                                       foreign_keys=self.compiled_foreign_keys,
                                                       relationships=self.compiled_relationships,
                                                       named_imports=self.compiled_named_imports,
                                                       orm_imports=self.compiled_orm_imports,
                                                       get_proxy_cls_function=self.compiled_proxy_cls_func,
                                                       add_function=ALCHEMY_TEMPLATES.add_function.template,
                                                       delete_function=ALCHEMY_TEMPLATES.delete_function.template,
                                                       to_dict_function=ALCHEMY_TEMPLATES.to_dict_function.template,
                                                       to_proxy_function=ALCHEMY_TEMPLATES.to_proxy_function.template,
                                                       from_proxy_function=ALCHEMY_TEMPLATES.from_proxy_function.template)
<SYSTEM_TASK:>
Get a task queue.
<END_TASK>
<USER_TASK:>
Description:
def get(self, id=None, name=None):
    """Get a task queue.

    Either the id xor the name of the task type must be specified.

    Args:
        id (int, optional): The id of the task type to get.
        name (str, optional): The name of the task type to get.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`:
            A task queue model instance representing the task queue
            requested.

    Raises:
        ValueError: Neither id nor name were set *or* both id and
            name were set.
    """
    # Exactly one of the two lookup keys must be provided.
    if (id is None) == (name is None):
        raise ValueError("Either id or name must be set (but not both!)")
    # ID lookups go straight through the base manager.
    if id is not None:
        return super(TaskQueueManager, self).get(id=id)
    # Otherwise resolve by name via a filtered list; the first match is
    # returned.
    return self.list(filters={"name": name})[0]
<SYSTEM_TASK:>
Create a task queue.
<END_TASK>
<USER_TASK:>
Description:
def create(
    self,
    name,
    description="",
    private=False,
    runs_executable_tasks=True,
    runs_docker_container_tasks=True,
    runs_singularity_container_tasks=True,
    active=True,
    whitelists=None,
):
    """Create a task queue.

    Args:
        name (str): The name of the task queue.
        description (str, optional): A description of the task queue.
        private (bool, optional): A boolean specifying whether the
            queue is exclusive to its creator. Defaults to False.
        runs_executable_tasks (bool, optional): A Boolean specifying
            whether the queue runs executable tasks. Defaults to True.
        runs_docker_container_tasks (bool, optional): A Boolean
            specifying whether the queue runs container tasks that
            run in Docker containers. Defaults to True.
        runs_singularity_container_tasks (bool, optional): A Boolean
            specifying whether the queue runs container tasks that
            run in Singularity containers. Defaults to True.
        active (bool, optional): A boolean specifying whether the
            queue is active. Defaults to True.
        whitelists (list, optional): A list of task whitelist IDs.
            Defaults to None (which gets translated to []).

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`:
            A task queue model instance representing the task queue
            just created.
    """
    # None is used as the default to avoid a shared mutable default list.
    if whitelists is None:
        whitelists = []
    request_url = self._client.base_api_url + self.list_url
    payload = {
        "name": name,
        "description": description,
        "private": private,
        "runs_executable_tasks": runs_executable_tasks,
        "runs_docker_container_tasks": runs_docker_container_tasks,
        "runs_singularity_container_tasks": runs_singularity_container_tasks,
        "active": active,
        "whitelists": whitelists,
    }
    response = self._client.session.post(request_url, data=payload)
    # A 201 is the only acceptable outcome for a create.
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_201_CREATED,
    )
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:>
Partially updates a task queue on the saltant server.
<END_TASK>
<USER_TASK:>
Description:
def patch(
    self,
    id,
    name=None,
    description=None,
    private=None,
    runs_executable_tasks=None,
    runs_docker_container_tasks=None,
    runs_singularity_container_tasks=None,
    active=None,
    whitelists=None,
):
    """Partially update a task queue on the saltant server.

    Only fields the caller explicitly provides (non-None) are included
    in the PATCH request; all other fields are left untouched
    server-side.

    Args:
        id (int): The ID of the task queue.
        name (str, optional): The name of the task queue.
        description (str, optional): The description of the task queue.
        private (bool, optional): Whether the queue can only be used by
            its associated user.
        runs_executable_tasks (bool, optional): Whether the queue runs
            executable tasks.
        runs_docker_container_tasks (bool, optional): Whether the queue
            runs container tasks that run in Docker containers.
        runs_singularity_container_tasks (bool, optional): Whether the
            queue runs container tasks that run in Singularity
            containers.
        active (bool, optional): Whether the queue is active.
        whitelists (list, optional): A list of task whitelist IDs.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`:
            A task queue model instance representing the task queue
            just updated.
    """
    request_url = self._client.base_api_url + self.detail_url.format(id=id)
    # Collect candidate fields, then drop the ones the caller didn't set.
    candidate_fields = {
        "name": name,
        "description": description,
        "private": private,
        "runs_executable_tasks": runs_executable_tasks,
        "runs_docker_container_tasks": runs_docker_container_tasks,
        "runs_singularity_container_tasks": runs_singularity_container_tasks,
        "active": active,
        "whitelists": whitelists,
    }
    data_to_patch = {
        field: value
        for field, value in candidate_fields.items()
        if value is not None
    }
    response = self._client.session.patch(request_url, data=data_to_patch)
    # Anything other than a 200 means the update failed; fail loudly.
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_200_OK,
    )
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:>
Updates a task queue on the saltant server.
<END_TASK>
<USER_TASK:>
Description:
def put(
    self,
    id,
    name,
    description,
    private,
    runs_executable_tasks,
    runs_docker_container_tasks,
    runs_singularity_container_tasks,
    active,
    whitelists,
):
    """Fully update a task queue on the saltant server.

    Unlike :meth:`patch`, every field is required and sent.

    Args:
        id (int): The ID of the task queue.
        name (str): The name of the task queue.
        description (str): The description of the task queue.
        private (bool): Whether the queue can only be used by its
            associated user.
        runs_executable_tasks (bool): Whether the queue runs executable
            tasks.
        runs_docker_container_tasks (bool): Whether the queue runs
            container tasks that run in Docker containers.
        runs_singularity_container_tasks (bool): Whether the queue runs
            container tasks that run in Singularity containers.
        active (bool): Whether the queue is active.
        whitelists (list): A list of task whitelist IDs.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`:
            A task queue model instance representing the task queue
            just updated.
    """
    request_url = self._client.base_api_url + self.detail_url.format(id=id)
    payload = {
        "name": name,
        "description": description,
        "private": private,
        "runs_executable_tasks": runs_executable_tasks,
        "runs_docker_container_tasks": runs_docker_container_tasks,
        "runs_singularity_container_tasks": runs_singularity_container_tasks,
        "active": active,
        "whitelists": whitelists,
    }
    response = self._client.session.put(request_url, data=payload)
    # Anything other than a 200 means the update failed; fail loudly.
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_200_OK,
    )
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:>
Run the required methods in the appropriate order
<END_TASK>
<USER_TASK:>
Description:
def main(self):
        """
        Run the required methods in the appropriate order
        """
        # Set the genus-specific target/mapping files for each sample
        self.targets()
        # Bait the reads against the targets with k=49
        self.bait(k=49)
        # Re-bait with maskmiddle='t' and a smaller k=19 (presumably
        # BBDuk-style middle-base masking -- confirm in reversebait())
        self.reversebait(maskmiddle='t', k=19)
        # Subsample the baited reads for downstream processing
        self.subsample_reads()
<SYSTEM_TASK:>
Using the data from the BLAST analyses, set the targets folder, and create the 'mapping file'. This is the
<END_TASK>
<USER_TASK:>
Description:
def targets(self):
        """
        Using the data from the BLAST analyses, set the targets folder, and create the 'mapping file'. This is the
        genera-specific FASTA file that will be used for all the reference mapping; it replaces the 'bait file' in the
        code
        """
        logging.info('Performing analysis with {} targets folder'.format(self.analysistype))
        for sample in self.runmetadata:
            if sample.general.bestassemblyfile != 'NA':
                # Targets live under <targetpath>/genera/<genus>/ for the
                # genus already assigned to this sample.
                sample[self.analysistype].targetpath = \
                    os.path.join(self.targetpath, 'genera', sample[self.analysistype].genus, '')
                # There is a relatively strict databasing scheme necessary for the custom targets. Eventually,
                # there will be a helper script to combine individual files into a properly formatted combined file
                # NOTE(review): the glob matches '*.fa' while the error message
                # below says '.fasta' -- confirm which extension is expected
                try:
                    sample[self.analysistype].mappingfile = glob('{}*.fa'
                                                                 .format(sample[self.analysistype].targetpath))[0]
                # If the fasta file is missing, raise a custom error
                except IndexError as e:
                    # noinspection PyPropertyAccess
                    e.args = ['Cannot find the combined fasta file in {}. Please note that the file must have a '
                              '.fasta extension'.format(sample[self.analysistype].targetpath)]
                    # Only re-raise when the genus directory actually exists;
                    # otherwise mark the sample as unprocessable instead.
                    if os.path.isdir(sample[self.analysistype].targetpath):
                        raise
                    else:
                        sample.general.bestassemblyfile = 'NA'
<SYSTEM_TASK:>
Subsample 1000 reads from the baited files
<END_TASK>
<USER_TASK:>
Description:
def subsample(self):
        """
        Subsample 1000 reads from the baited files
        """
        # Create the threads for the analysis
        logging.info('Subsampling FASTQ reads')
        for _ in range(self.cpus):
            threads = Thread(target=self.subsamplethreads, args=())
            threads.setDaemon(True)
            threads.start()
        with progressbar(self.runmetadata.samples) as bar:
            for sample in bar:
                if sample.general.bestassemblyfile != 'NA':
                    # Set the name of the subsampled FASTQ file
                    sample[self.analysistype].subsampledfastq = \
                        os.path.splitext(sample[self.analysistype].baitedfastq)[0] + '_subsampled.fastq'
                    # Set the system call
                    # NOTE(review): the attribute is named 'seqtkcall' but the
                    # command run is reformat.sh -- confirm the naming
                    sample[self.analysistype].seqtkcall = 'reformat.sh in={} out={} samplereadstarget=1000'\
                        .format(sample[self.analysistype].baitedfastq,
                                sample[self.analysistype].subsampledfastq)
                    # Add the sample to the queue
                    self.samplequeue.put(sample)
        # Block until every queued sample has been processed by the workers
        self.samplequeue.join()
<SYSTEM_TASK:>
Convert the subsampled reads to FASTA format using reformat.sh
<END_TASK>
<USER_TASK:>
Description:
def fasta(self):
        """
        Convert the subsampled reads to FASTA format using reformat.sh
        """
        logging.info('Converting FASTQ files to FASTA format')
        # Create the threads for the analysis
        for _ in range(self.cpus):
            threads = Thread(target=self.fastathreads, args=())
            threads.setDaemon(True)
            threads.start()
        with progressbar(self.runmetadata.samples) as bar:
            for sample in bar:
                if sample.general.bestassemblyfile != 'NA':
                    # Set the name as the FASTA file - the same as the FASTQ, but with .fa file extension
                    sample[self.analysistype].fasta = \
                        os.path.splitext(sample[self.analysistype].subsampledfastq)[0] + '.fa'
                    # Set the system call
                    sample[self.analysistype].reformatcall = 'reformat.sh in={fastq} out={fasta}'\
                        .format(fastq=sample[self.analysistype].subsampledfastq,
                                fasta=sample[self.analysistype].fasta)
                    # Add the sample to the queue
                    self.fastaqueue.put(sample)
        # Block until every queued sample has been processed by the workers
        self.fastaqueue.join()
<SYSTEM_TASK:>
Run BLAST analyses of the subsampled FASTQ reads against the NCBI 16S reference database
<END_TASK>
<USER_TASK:>
Description:
def blast(self):
        """
        Run BLAST analyses of the subsampled FASTQ reads against the NCBI 16S reference database
        """
        logging.info('BLASTing FASTA files against {} database'.format(self.analysistype))
        for _ in range(self.cpus):
            threads = Thread(target=self.blastthreads, args=())
            threads.setDaemon(True)
            threads.start()
        with progressbar(self.runmetadata.samples) as bar:
            for sample in bar:
                if sample.general.bestassemblyfile != 'NA':
                    # Set the name of the BLAST report
                    sample[self.analysistype].blastreport = os.path.join(
                        sample[self.analysistype].outputdir,
                        '{}_{}_blastresults.csv'.format(sample.name, self.analysistype))
                    # Use the NCBI BLASTn command line wrapper module from BioPython to set the parameters of the search
                    # Custom tabular output (outfmt 6) with extra columns,
                    # including the aligned query/subject sequences
                    blastn = NcbiblastnCommandline(query=sample[self.analysistype].fasta,
                                                   db=os.path.splitext(sample[self.analysistype].baitfile)[0],
                                                   max_target_seqs=1,
                                                   num_threads=self.threads,
                                                   outfmt="'6 qseqid sseqid positive mismatch gaps evalue "
                                                          "bitscore slen length qstart qend qseq sstart send sseq'",
                                                   out=sample[self.analysistype].blastreport)
                    # Add a string of the command to the metadata object
                    sample[self.analysistype].blastcall = str(blastn)
                    # Add the object and the command to the BLAST queue
                    self.blastqueue.put((sample, blastn))
        # Block until every queued search has been run by the workers
        self.blastqueue.join()
<SYSTEM_TASK:>
Parse the blast results, and store necessary data in dictionaries in sample object
<END_TASK>
<USER_TASK:>
Description:
def blastparse(self):
        """
        Parse the blast results, and store necessary data in dictionaries in sample object

        Tallies how often each genus appears as the best hit, then assigns
        the most frequent genus to the sample; every fallback path sets the
        attributes to 'NA'.
        """
        logging.info('Parsing BLAST results')
        # Load the NCBI 16S reference database as a dictionary
        for sample in self.runmetadata.samples:
            if sample.general.bestassemblyfile != 'NA':
                # Load the NCBI 16S reference database as a dictionary
                dbrecords = SeqIO.to_dict(SeqIO.parse(sample[self.analysistype].baitfile, 'fasta'))
                # Allow for no BLAST results
                if os.path.isfile(sample[self.analysistype].blastreport):
                    # Initialise a dictionary to store the number of times a genus is the best hit
                    sample[self.analysistype].frequency = dict()
                    # Open the sequence profile file as a dictionary
                    # NOTE(review): the report handle opened here is never
                    # explicitly closed
                    blastdict = DictReader(open(sample[self.analysistype].blastreport),
                                           fieldnames=self.fieldnames, dialect='excel-tab')
                    recorddict = dict()
                    for record in blastdict:
                        # Create the subject id. It will look like this: gi|1018196593|ref|NR_136472.1|
                        subject = record['subject_id']
                        # Extract the genus name. Use the subject id as a key in the dictionary of the reference db.
                        # It will return the full record e.g. gi|1018196593|ref|NR_136472.1| Escherichia marmotae
                        # strain HT073016 16S ribosomal RNA, partial sequence
                        # This full description can be manipulated to extract the genus e.g. Escherichia
                        genus = dbrecords[subject].description.split('|')[-1].split()[0]
                        # Increment the number of times this genus was found, or initialise the dictionary with this
                        # genus the first time it is seen
                        try:
                            sample[self.analysistype].frequency[genus] += 1
                        except KeyError:
                            sample[self.analysistype].frequency[genus] = 1
                        try:
                            recorddict[dbrecords[subject].description] += 1
                        except KeyError:
                            recorddict[dbrecords[subject].description] = 1
                    # Sort the dictionary based on the number of times a genus is seen
                    sample[self.analysistype].sortedgenera = sorted(sample[self.analysistype].frequency.items(),
                                                                    key=operator.itemgetter(1), reverse=True)
                    try:
                        # Extract the top result, and set it as the genus of the sample
                        sample[self.analysistype].genus = sample[self.analysistype].sortedgenera[0][0]
                        # Previous code relies on having the closest refseq genus, so set this as above
                        # sample.general.closestrefseqgenus = sample[self.analysistype].genus
                    except IndexError:
                        # Populate attributes with 'NA'
                        sample[self.analysistype].sortedgenera = 'NA'
                        sample[self.analysistype].genus = 'NA'
                        # sample.general.closestrefseqgenus = 'NA'
                else:
                    # Populate attributes with 'NA'
                    sample[self.analysistype].sortedgenera = 'NA'
                    sample[self.analysistype].genus = 'NA'
                    # sample.general.closestrefseqgenus = 'NA'
            else:
                # Populate attributes with 'NA'
                sample[self.analysistype].sortedgenera = 'NA'
                sample[self.analysistype].genus = 'NA'
<SYSTEM_TASK:>
Sort an array of strings to groups by patterns
<END_TASK>
<USER_TASK:>
Description:
def sort2groups(array, gpat=['_R1','_R2']):
    """ Sort an array of strings to groups by patterns """
    matchers = [REGroup(pattern) for pattern in gpat]
    unmatched = []
    for entry in array:
        # The first matching pattern claims the entry; the for/else
        # catches the case where no pattern matched at all.
        for matcher in matchers:
            if matcher.match(entry):
                break
        else:
            unmatched.append(entry)
    return [sorted(matcher.list) for matcher in matchers], sorted(unmatched)
<SYSTEM_TASK:>
Sort an array of strings to groups by alphabetically continuous
<END_TASK>
<USER_TASK:>
Description:
def sort_and_distribute(array, splits=2):
    """ Sort an array of strings to groups by alphabetically continuous
    distribution

    array
        The list/tuple of items to distribute.
    splits
        Number of groups to distribute into (default: 2); must be a
        positive integer.

    Returns a list of `splits` lists; sorted items are dealt out
    round-robin, so consecutive items land in consecutive groups.
    """
    if not isinstance(array, (list, tuple)):
        raise TypeError("array must be a list")
    if not isinstance(splits, int):
        raise TypeError("splits must be an integer")
    if splits < 1:
        # The previous implementation looped forever for splits <= 0.
        raise ValueError("splits must be a positive integer")
    groups = [[] for _ in range(splits)]
    # Round-robin deal of the sorted items replaces the old pop-in-a-loop
    # approach; the resulting grouping is identical.
    for index, item in enumerate(sorted(array)):
        groups[index % splits].append(item)
    return groups
<SYSTEM_TASK:>
This function will zip the files created in the runroot directory and
<END_TASK>
<USER_TASK:>
Description:
def file_zipper(root_dir):
    """ This function will zip the files created in the runroot directory and
    subdirectories

    Walks the tree bottom-up, expands .zip archives in place, and replaces
    every remaining regular file larger than 50 bytes (that is not already
    gzipped and not a symlink) with a gzipped copy.
    """
    # FINDING AND ZIPPING UNZIPPED FILES
    for root, dirs, files in os.walk(root_dir, topdown=False):
        if root != "":
            # Normalise the directory path to end with a slash
            if root[-1] != '/': root += '/'
        for current_file in files:
            filepath = "%s/%s"%(root, current_file)
            try:
                file_size = os.path.getsize(filepath)
            except Exception as e:
                # Treat unreadable files as size 0 so they are skipped below
                file_size = 0
                debug.log('Error: file_zipper failed to zip following file '+filepath, e)
            # Excluding small files, gzipped files and links
            if ( file_size > 50
                and current_file[-3:] != ".gz"
                and not os.path.islink(filepath)
                ):
                if current_file[-4:] == ".zip":
                    # Unzip file
                    # NOTE(review): after extraction the code gzips
                    # filepath[:-4], i.e. it assumes the archive contained a
                    # file with the same base name -- confirm
                    ec = Popen('unzip -qq "%s" -d %s > /dev/null 2>&1'%(filepath, root), shell=True).wait()
                    if ec > 0:
                        debug.log('Error: fileZipper failed to unzip following file %s'%filepath)
                        continue
                    else:
                        ec = Popen('rm -f "%s" > /dev/null 2>&1'%(filepath), shell=True).wait()
                        if ec > 0: debug.log('Error: fileZipper failed to delete the original zip file (%s)'%filepath)
                        filepath = filepath[:-4]
                # Saving a gzipped version
                with open_(filepath, 'rb') as f, open_(filepath+".gz", 'wb', 9) as gz:
                    gz.writelines(f)
                # Deleting old (non-zipped) file
                try: os.remove(filepath)
                except OSError as e:
                    debug.log(("WARNING! The file %s could not be "
                               "removed!\n%s")%(current_file, e))
<SYSTEM_TASK:>
This function will unzip all files in the runroot directory and
<END_TASK>
<USER_TASK:>
Description:
def file_unzipper(directory):
    """ This function will unzip all files in the runroot directory and
    subdirectories

    directory
        The root directory whose tree is decompressed in place.
    """
    debug.log("Unzipping directory (%s)..."%directory)
    # FINDING AND UNZIPPING ZIPPED FILES
    for root, dirs, files in os.walk(directory, topdown=False):
        if root != "":
            # Run the decompression tools inside the directory currently
            # being walked. The previous implementation chdir'ed to the
            # top-level `directory` on every iteration, so subdirectories
            # were never actually processed; using Popen's cwd= also avoids
            # mutating the process-wide working directory.
            Popen('gunzip -q -f *.gz > /dev/null 2>&1', shell=True, cwd=root).wait()
            Popen('unzip -qq -o "*.zip" > /dev/null 2>&1', shell=True, cwd=root).wait()
            Popen('rm -f *.zip > /dev/null 2>&1', shell=True, cwd=root).wait()
<SYSTEM_TASK:>
this function will simply move the file from the source path to the dest
<END_TASK>
<USER_TASK:>
Description:
def move_file(src, dst):
    """ this function will simply move the file from the source path to the dest
    path given as input

    Both paths are first stripped of any characters outside the whitelist
    [A-Za-z0-9_/.-*]; src may be a glob pattern matching several files.
    Failures are logged via debug.log rather than raised.
    """
    # Sanity checkpoint
    # Strip disallowed characters, then require at least 5 word characters
    # in each path before proceeding
    src = re.sub('[^\w/\-\.\*]', '', src)
    dst = re.sub('[^\w/\-\.\*]', '', dst)
    if len(re.sub('[\W]', '', src)) < 5 or len(re.sub('[\W]', '', dst)) < 5:
        debug.log("Error: Moving file failed. Provided paths are invalid! src='%s' dst='%s'"%(src, dst))
    else:
        # Check destination
        check = False
        if dst[-1] == '/':
            # Destination written as a directory: it must already exist
            if os.path.exists(dst):
                check = True # Valid Dir
            else:
                debug.log("Error: Moving file failed. Destination directory does not exist (%s)"%(dst)) #DEBUG
        elif os.path.exists(dst):
            if os.path.isdir(dst):
                check = True # Valid Dir
                dst += '/' # Add missing slash
            else:
                debug.log("Error: Moving file failed. %s exists!"%dst)
        elif os.path.exists(os.path.dirname(dst)):
            # Destination file does not exist yet, but its parent dir does
            check = True # Valid file path
        else:
            debug.log("Error: Moving file failed. %s is an invalid distination!"%dst)
        if check:
            # Check source
            files = glob.glob(src)
            if len(files) != 0:
                debug.log("Moving File(s)...", "Move from %s"%src, "to %s"%dst)
                for file_ in files:
                    # Check if file contains invalid symbols:
                    invalid_chars = re.findall('[^\w/\-\.\*]', os.path.basename(file_))
                    if invalid_chars:
                        debug.graceful_exit(("Error: File %s contains invalid "
                                             "characters %s!"
                                             )%(os.path.basename(file_), invalid_chars))
                        continue
                    # Check file exists
                    if os.path.isfile(file_):
                        debug.log("Moving file: %s"%file_)
                        shutil.move(file_, dst)
                    else:
                        debug.log("Error: Moving file failed. %s is not a regular file!"%file_)
            else: debug.log("Error: Moving file failed. No files were found! (%s)"%src)
<SYSTEM_TASK:>
Print list of strings to the predefined logfile if debug is set. and
<END_TASK>
<USER_TASK:>
Description:
def log(self, *lst):
    """Write the given entries to the predefined logfile (honouring the debug
    flag) and remember the joined message in ``caught_error`` when it
    contains an ``Error`` marker.
    """
    self.print2file(self.logfile, self.debug, True, *lst)
    joined = '\n'.join(str(entry) for entry in lst)
    if 'Error' in joined:
        self.caught_error = joined
<SYSTEM_TASK:>
print the message to the predefined log file without newline
<END_TASK>
<USER_TASK:>
Description:
def log_no_newline(self, msg):
    """Write *msg* to the predefined log file, suppressing the trailing newline."""
    target = self.logfile
    self.print2file(target, False, False, msg)
<SYSTEM_TASK:>
This function Tries to update the MSQL database before exiting.
<END_TASK>
<USER_TASK:>
Description:
def graceful_exit(self, msg):
    """Flush any previously caught error to stderr, log *msg*, then exit(1)."""
    # Surface errors stored by earlier log() calls before dying.
    pending = self.caught_error
    if pending:
        self.print2file(self.stderr, False, False, pending)
    # Record the fatal message, then terminate the process.
    self.log(msg)
    sys.exit(1)
<SYSTEM_TASK:>
Matches the string to the stored regular expression, and stores all
<END_TASK>
<USER_TASK:>
Description:
def match(self, s):
    """Run the stored compiled pattern against *s*, store the match object on
    ``self.matches`` and return it (falsy on no match)."""
    result = self.re.search(s)
    self.matches = result
    return result
<SYSTEM_TASK:>
Return the edge characters of this node.
<END_TASK>
<USER_TASK:>
Description:
def edges(self):
    """
    Return a list of the edge characters leading out of this node.
    """
    raw = ctypes.create_string_buffer(MAX_CHARS)
    cgaddag.gdg_edges(self.gdg, self.node, raw)
    return list(raw.value.decode("ascii"))
<SYSTEM_TASK:>
Return the letter set of this node.
<END_TASK>
<USER_TASK:>
Description:
def letter_set(self):
    """
    Return a list of the characters in this node's letter set.
    """
    raw = ctypes.create_string_buffer(MAX_CHARS)
    cgaddag.gdg_letter_set(self.gdg, self.node, raw)
    return list(raw.value.decode("ascii"))
<SYSTEM_TASK:>
Return `True` if this `char` is part of this node's letter set,
<END_TASK>
<USER_TASK:>
Description:
def is_end(self, char):
    """
    Return `True` if `char` (case-insensitive) belongs to this node's
    letter set, `False` otherwise.
    """
    lowered = char.lower()
    result = cgaddag.gdg_is_end(self.gdg, self.node, lowered.encode("ascii"))
    return bool(result)
<SYSTEM_TASK:>
Traverse the GADDAG to the node at the end of the given characters.
<END_TASK>
<USER_TASK:>
Description:
def follow(self, chars):
    """Traverse the GADDAG edge by edge for each character in `chars`.

    Args:
        chars: A string of characters to traverse in the GADDAG.

    Returns:
        The Node reached after following every character.

    Raises:
        KeyError: If a character has no matching edge from the current node.
    """
    current = self.node
    for ch in chars.lower():
        current = cgaddag.gdg_follow_edge(self.gdg, current, ch.encode("ascii"))
        if not current:
            raise KeyError(ch)
    return Node(self.gdg, current)
<SYSTEM_TASK:>
Get the raw docker link value.
<END_TASK>
<USER_TASK:>
Description:
def read(alias_name, allow_none=False):
    """Get the raw docker link value.

    Get the raw environment variable (``<ALIAS>_PORT``) for the docker link.

    Args:
        alias_name: The environment variable name (without the ``_PORT`` suffix)
        allow_none: If the return value can be `None` (i.e. optional)
    """
    # Deprecated API: kept only for backwards compatibility until v1.0.
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    return core.read('{0}_PORT'.format(alias_name), default=None, allow_none=allow_none)
<SYSTEM_TASK:>
Return a boolean if the docker link is set or not and is a valid looking docker link value.
<END_TASK>
<USER_TASK:>
Description:
def isset(alias_name):
    """Return a boolean telling whether the docker link is set and looks like
    a valid docker link value (``proto://host:port``).

    Args:
        alias_name: The link alias name
    """
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    raw_value = read(alias_name, allow_none=True)
    if not raw_value:
        return False
    if re.compile(r'.+://.+:\d+').match(raw_value):
        return True
    warnings.warn('"{0}_PORT={1}" does not look like a docker link.'.format(alias_name, raw_value), stacklevel=2)
    return False
<SYSTEM_TASK:>
Get the protocol from the docker link alias or return the default.
<END_TASK>
<USER_TASK:>
Description:
def protocol(alias_name, default=None, allow_none=False):
    """Get the protocol from the docker link alias or return the default.

    Args:
        alias_name: The docker link alias
        default: The default value if the link isn't available
        allow_none: If the return value can be `None` (i.e. optional)

    Examples:
        Assuming a Docker link was created with ``docker --link postgres:db``
        and the resulting environment variable is ``DB_PORT=tcp://172.17.0.82:5432``.

        >>> envitro.docker.protocol('DB')
        tcp
    """
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    try:
        link_parts = _split_docker_link(alias_name)
    except KeyError as err:
        if default or allow_none:
            return default
        raise err
    return link_parts[0]
<SYSTEM_TASK:>
Get the port from the docker link alias or return the default.
<END_TASK>
<USER_TASK:>
Description:
def port(alias_name, default=None, allow_none=False):
    """Get the port from the docker link alias or return the default.

    Args:
        alias_name: The docker link alias
        default: The default value if the link isn't available
        allow_none: If the return value can be `None` (i.e. optional)

    Examples:
        Assuming a Docker link was created with ``docker --link postgres:db``
        and the resulting environment variable is ``DB_PORT=tcp://172.17.0.82:5432``.

        >>> envitro.docker.port('DB')
        5432
    """
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    try:
        link_parts = _split_docker_link(alias_name)
    except KeyError as err:
        if default or allow_none:
            return default
        raise err
    return int(link_parts[2])
<SYSTEM_TASK:>
Parses the 16S target files to link accession numbers stored in the .fai and metadata files to the genera stored
<END_TASK>
<USER_TASK:>
Description:
def attributer(self):
    """
    Parses the 16S target files to link accession numbers stored in the .fai and metadata files to the genera stored
    in the target file

    For every sample, populates ``classification`` (list of genera hit),
    ``genera`` (accession -> genus), ``multiple`` (True when more than one
    genus was hit), and on single-genus samples ``besthit`` /
    ``closestrefseqgenus``.
    """
    from Bio import SeqIO
    import operator
    for sample in self.runmetadata.samples:
        # Load the records from the target file into a dictionary
        record_dict = SeqIO.to_dict(SeqIO.parse(sample[self.analysistype].baitfile, "fasta"))
        sample[self.analysistype].classification = set()
        sample[self.analysistype].genera = dict()
        # Add all the genera with hits into the set of genera
        for result in sample[self.analysistype].results:
            # NOTE(review): assumes record descriptions end with
            # '|Genus species ...' — the split below relies on that format.
            genus, species = record_dict[result].description.split('|')[-1].split()[:2]
            sample[self.analysistype].classification.add(genus)
            sample[self.analysistype].genera[result] = genus
        # Convert the set to a list for easier JSON serialisation
        sample[self.analysistype].classification = list(sample[self.analysistype].classification)
        # If there is a mixed sample, then further analyses will be complicated
        if len(sample[self.analysistype].classification) > 1:
            # print('multiple: ', sample.name, sample[self.analysistype].classification)
            sample.general.closestrefseqgenus = sample[self.analysistype].classification
            # sample.general.bestassemblyfile = 'NA'
            sample[self.analysistype].multiple = True
        else:
            sample[self.analysistype].multiple = False
            try:
                # Recreate the results dictionary with the percent identity as a float rather than a string
                sample[self.analysistype].intresults = \
                    {key: float(value) for key, value in sample[self.analysistype].results.items()}
                # Set the best hit to be the top entry from the sorted results
                sample[self.analysistype].besthit = sorted(sample[self.analysistype].intresults.items(),
                                                           key=operator.itemgetter(1), reverse=True)[0]
                sample.general.closestrefseqgenus = sample[self.analysistype].classification[0]
            except IndexError:
                # No hits at all: mark the sample as unusable downstream.
                sample.general.bestassemblyfile = 'NA'
<SYSTEM_TASK:>
Take a a Flask app and a swagger file in YAML format describing a REST
<END_TASK>
<USER_TASK:>
Description:
def spawn_server_api(api_name, app, api_spec, error_callback, decorator):
    """Take a Flask app and a swagger file in YAML format describing a REST
    API, and populate the app with routes handling all the paths and methods
    declared in the swagger file.
    Also handle marshaling and unmarshaling between json and object instances
    representing the definitions from the swagger file.
    """
    def mycallback(endpoint):
        # Resolve the dotted-path handler name declared in the spec into a callable.
        handler_func = get_function(endpoint.handler_server)
        # Generate api endpoint around that handler
        handler_wrapper = _generate_handler_wrapper(api_name, api_spec, endpoint, handler_func, error_callback, decorator)
        # Bind handler to the API path
        log.info("Binding %s %s ==> %s" % (endpoint.method, endpoint.path, endpoint.handler_server))
        # Flask needs a unique endpoint name per rule; derive one from method + path.
        endpoint_name = '_'.join([endpoint.method, endpoint.path]).replace('/', '_')
        app.add_url_rule(endpoint.path, endpoint_name, handler_wrapper, methods=[endpoint.method])
    # Invoke the binding callback once per endpoint declared in the spec.
    api_spec.call_on_each_endpoint(mycallback)
    # Add custom error handlers to the app
    add_error_handlers(app)
<SYSTEM_TASK:>
Take a bravado-core model representing an error, and return a Flask Response
<END_TASK>
<USER_TASK:>
Description:
def _responsify(api_spec, error, status):
    """Convert a bravado-core error model into a Flask Response whose JSON
    body is the serialised error and whose HTTP status code is *status*."""
    response = jsonify(api_spec.model_to_json(error))
    response.status_code = status
    return response
<SYSTEM_TASK:>
Returns the ANSI escape sequence to set character formatting.
<END_TASK>
<USER_TASK:>
Description:
def format_escape( foreground=None, background=None, bold=False, faint=False,
        italic=False, underline=False, blink=False, inverted=False ):
    """Returns the ANSI escape sequence to set character formatting.

    foreground
        Foreground colour to use. Accepted types: None, int (xterm
        palette ID), tuple (RGB, RGBA), Colour

    background
        Background colour to use. Accepted types: None, int (xterm
        palette ID), tuple (RGB, RGBA), Colour

    bold
        Enable bold text (default: False)

    faint
        Enable faint text (default: False)

    italic
        Enable italic text (default: False)

    underline
        Enable underlined text (default: False)

    blink
        Enable blinky text (default: False)

    inverted
        Enable inverted text (default: False)
    """
    parts = []
    # Foreground: ints are xterm palette IDs; anything else goes through
    # RGBA normalisation and is skipped when fully transparent.
    if isinstance( foreground, int ):
        parts.append( ANSI_FORMAT_FOREGROUND_XTERM_CMD.format( foreground ) )
    else:
        fg_rgba = colour.normalise_rgba( foreground )
        if fg_rgba[3] != 0:
            parts.append( ANSI_FORMAT_FOREGROUND_CMD.format( *fg_rgba[:3] ) )
    # Background: same convention as foreground.
    if isinstance( background, int ):
        parts.append( ANSI_FORMAT_BACKGROUND_XTERM_CMD.format( background ) )
    else:
        bg_rgba = colour.normalise_rgba( background )
        if bg_rgba[3] != 0:
            parts.append( ANSI_FORMAT_BACKGROUND_CMD.format( *bg_rgba[:3] ) )
    # Text attributes, in fixed order.
    attribute_table = (
        (bold, ANSI_FORMAT_BOLD_CMD),
        (faint, ANSI_FORMAT_FAINT_CMD),
        (italic, ANSI_FORMAT_ITALIC_CMD),
        (underline, ANSI_FORMAT_UNDERLINE_CMD),
        (blink, ANSI_FORMAT_BLINK_CMD),
        (inverted, ANSI_FORMAT_INVERTED_CMD),
    )
    for enabled, command in attribute_table:
        if enabled:
            parts.append( command )
    return ANSI_FORMAT_BASE.format( ';'.join( parts ) )
<SYSTEM_TASK:>
Returns a Unicode string formatted with an ANSI escape sequence.
<END_TASK>
<USER_TASK:>
Description:
def format_string( string, foreground=None, background=None, reset=True, bold=False,
        faint=False, italic=False, underline=False, blink=False, inverted=False ):
    """Returns a Unicode string formatted with an ANSI escape sequence.

    string
        String to format

    foreground
        Foreground colour to use. Accepted types: None, int (xterm
        palette ID), tuple (RGB, RGBA), Colour

    background
        Background colour to use. Accepted types: None, int (xterm
        palette ID), tuple (RGB, RGBA), Colour

    reset
        Reset the formatting at the end (default: True)

    bold
        Enable bold text (default: False)

    faint
        Enable faint text (default: False)

    italic
        Enable italic text (default: False)

    underline
        Enable underlined text (default: False)

    blink
        Enable blinky text (default: False)

    inverted
        Enable inverted text (default: False)
    """
    prefix = format_escape( foreground, background, bold, faint,
                            italic, underline, blink, inverted )
    suffix = ANSI_FORMAT_RESET if reset else ''
    return '{}{}{}'.format( prefix, string, suffix )
<SYSTEM_TASK:>
Return the ANSI escape sequence to render two vertically-stacked pixels as a
<END_TASK>
<USER_TASK:>
Description:
def format_pixels( top, bottom, reset=True, repeat=1 ):
    """Return the ANSI escape sequence to render two vertically-stacked pixels as a
    single monospace character.

    top
        Top colour to use. Accepted types: None, int (xterm
        palette ID), tuple (RGB, RGBA), Colour

    bottom
        Bottom colour to use. Accepted types: None, int (xterm
        palette ID), tuple (RGB, RGBA), Colour

    reset
        Reset the formatting at the end (default: True)

    repeat
        Number of horizontal pixels to render (default: 1)
    """
    def _pixel_source( value ):
        # Ints pass through as xterm palette IDs; everything else is
        # normalised to RGBA and treated as empty when fully transparent.
        if isinstance( value, int ):
            return value
        rgba = colour.normalise_rgba( value )
        if rgba[3] != 0:
            return rgba
        return None

    top_src = _pixel_source( top )
    bottom_src = _pixel_source( bottom )
    # short circuit for empty pixel
    if (top_src is None) and (bottom_src is None):
        return ' '*repeat
    # Pick the glyph: full block when both halves match, lower half-block
    # when only the bottom is set, upper half-block otherwise.
    if top_src == bottom_src:
        string = '█'*repeat
    elif top_src is None:
        string = '▄'*repeat
    else:
        string = '▀'*repeat
    colour_format = []
    # The glyph's foreground paints the bottom half when only the bottom is
    # set (lower half-block), otherwise the top half.
    fg_src = bottom_src if top_src is None else top_src
    if isinstance( fg_src, int ):
        colour_format.append( ANSI_FORMAT_FOREGROUND_XTERM_CMD.format( fg_src ) )
    else:
        colour_format.append( ANSI_FORMAT_FOREGROUND_CMD.format( *fg_src[:3] ) )
    # When both halves are set and differ, the background paints the bottom
    # half. BUGFIX: the type check must inspect bottom_src (the value being
    # formatted), not top_src — the old code emitted an xterm background
    # command with an RGBA tuple whenever top was an int and bottom was not.
    if top_src is not None and bottom_src is not None and top_src != bottom_src:
        if isinstance( bottom_src, int ):
            colour_format.append( ANSI_FORMAT_BACKGROUND_XTERM_CMD.format( bottom_src ) )
        else:
            colour_format.append( ANSI_FORMAT_BACKGROUND_CMD.format( *bottom_src[:3] ) )
    colour_format = ANSI_FORMAT_BASE.format( ';'.join( colour_format ) )
    reset_format = '' if not reset else ANSI_FORMAT_RESET
    return '{}{}{}'.format( colour_format, string, reset_format )
<SYSTEM_TASK:>
Return the ANSI escape sequence to render a bitmap image.
<END_TASK>
<USER_TASK:>
Description:
def format_image_iter( data_fetch, x_start=0, y_start=0, width=32, height=32, frame=0, columns=1, downsample=1 ):
    """Yield ANSI-escaped strings, one per terminal row, rendering a bitmap
    image (two image rows per text row using half-block characters).

    data_fetch
        Function that takes three arguments (x position, y position, and frame) and returns
        a Colour corresponding to the pixel stored there, or Transparent if the requested
        pixel is out of bounds.

    x_start
        Offset from the left of the image data to render from. Defaults to 0.

    y_start
        Offset from the top of the image data to render from. Defaults to 0.

    width
        Width of the image data to render. Defaults to 32.

    height
        Height of the image data to render. Defaults to 32.

    frame
        Single frame number/object, or a list to render in sequence. Defaults to frame 0.

    columns
        Number of frames to render per line (useful for printing tilemaps!). Defaults to 1.

    downsample
        Shrink larger images by printing every nth pixel only. Defaults to 1.
    """
    # Normalise frame into a list: iterables are expanded, scalars wrapped.
    frames = []
    try:
        frame_iter = iter( frame )
        frames = [f for f in frame_iter]
    except TypeError:
        frames = [frame]
    # Frames are laid out in a grid of `columns` frames per row.
    rows = math.ceil( len( frames )/columns )
    for r in range( rows ):
        # Each text row covers two image rows (top/bottom half-pixels).
        for y in range( 0, height, 2*downsample ):
            result = []
            # The last grid row may hold fewer than `columns` frames.
            for c in range( min( (len( frames )-r*columns), columns ) ):
                row = []
                for x in range( 0, width, downsample ):
                    fr = frames[r*columns + c]
                    c1 = data_fetch( x_start+x, y_start+y, fr )
                    c2 = data_fetch( x_start+x, y_start+y+downsample, fr )
                    row.append( (c1, c2) )
                # Run-length encode the row so identical adjacent pixel pairs
                # share a single escape sequence.
                # NOTE(review): prev_pixel is assigned but never read.
                prev_pixel = None
                pointer = 0
                while pointer < len( row ):
                    start = pointer
                    pixel = row[pointer]
                    while pointer < len( row ) and (row[pointer] == pixel):
                        pointer += 1
                    result.append( format_pixels( pixel[0], pixel[1], repeat=pointer-start ) )
            yield ''.join( result )
    return
<SYSTEM_TASK:>
Write a Python object into a byte array, using the field definition.
<END_TASK>
<USER_TASK:>
Description:
def update_buffer_with_value( self, value, buffer, parent=None ):
    """Write a Python object into a byte array, using the field definition.

    value
        Input Python object to process.

    buffer
        Output byte array to encode value into.

    parent
        Parent block object where this Field is defined. Used for e.g.
        evaluating Refs.

    NOTE(review): this base implementation only validates `value` and writes
    nothing into `buffer` — presumably subclasses perform the actual
    encoding; confirm against the class hierarchy.
    """
    assert common.is_bytes( buffer )
    self.validate( value, parent )
    return
<SYSTEM_TASK:>
Return the end offset of the Field's data. Useful for chainloading.
<END_TASK>
<USER_TASK:>
Description:
def get_end_offset( self, value, parent=None, index=None ):
    """Return the offset one past the end of the Field's data (start + size).
    Useful for chainloading.

    value
        Input Python object to process.

    parent
        Parent block object where this Field is defined. Used for e.g.
        evaluating Refs.

    index
        Index of the Python object to measure from. Used if the Field
        takes a list of objects.
    """
    start = self.get_start_offset( value, parent, index )
    size = self.get_size( value, parent, index )
    return start + size
<SYSTEM_TASK:>
Return a thread emiting `state_changed` between each sub-requests.
<END_TASK>
<USER_TASK:>
Description:
def thread_with_callback(on_error, on_done, requete_with_callback):
    """
    Return a thread emiting `state_changed` between each sub-requests.

    :param on_error: callback str -> None
    :param on_done: callback object -> None
    :param requete_with_callback: Job to execute. monitor_callable -> None
    :return: Non started thread
    """
    # Qt-style worker thread: signals are declared as class attributes and
    # connected to the supplied callbacks below.
    class C(THREAD):
        error = SIGNAL(str)
        done = SIGNAL(object)
        state_changed = SIGNAL(int, int)
        def __del__(self):
            # Block until the thread finishes before the object is destroyed.
            self.wait()
        def run(self):
            try:
                # The job reports progress by calling state_changed.emit(..).
                r = requete_with_callback(self.state_changed.emit)
            except (ConnexionError, StructureError) as e:
                self.error.emit(str(e))
            else:
                self.done.emit(r)
    th = C()
    th.error.connect(on_error)
    th.done.connect(on_done)
    # Caller is responsible for starting the returned thread.
    return th
<SYSTEM_TASK:>
adds a new repository file for apt
<END_TASK>
<USER_TASK:>
Description:
def apt_add_repository_from_apt_string(apt_string, apt_file):
    """ adds a new repository file for apt """
    target = '/etc/apt/sources.list.d/%s' % apt_file
    entry = apt_string.lower()
    # Only append and refresh the package index when the entry is missing.
    if not file_contains(target, entry, use_sudo=True):
        file_append(target, entry, use_sudo=True)
        with hide('running', 'stdout'):
            sudo("DEBIAN_FRONTEND=noninteractive apt-get update")
<SYSTEM_TASK:>
returns the current cpu archictecture
<END_TASK>
<USER_TASK:>
Description:
def arch():
    """ returns the current cpu archictecture """
    # NOTE(review): `rpm -E %dist` expands the RPM %dist macro (e.g. '.el7'),
    # which identifies the distribution release tag, not the CPU architecture
    # — confirm whether the docstring or the command is what was intended.
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True, capture=True):
        result = sudo('rpm -E %dist').strip()
        return result
<SYSTEM_TASK:>
Set 'UseDNS no' in openssh config to disable rDNS lookups
<END_TASK>
<USER_TASK:>
Description:
def disable_openssh_rdns(distribution):
    """
    Set 'UseDNS no' in openssh config to disable rDNS lookups

    On each request for a new channel openssh defaults to an
    rDNS lookup on the client IP. This can be slow, if it fails
    for instance, adding 10s of overhead to every request
    for a new channel (not connection). This can add a lot of
    time to a process that opens lots of channels (e.g. running
    several commands via fabric.)

    This function will disable rDNS lookups in the openssh
    config and reload ssh to adjust the running instance.

    :param bytes distribution: the name of the distribution
        running on the node.
    """
    log_green('Disabling openssh reverse dns lookups')
    config_path = '/etc/ssh/sshd_config'
    directive = 'UseDNS no'
    # Only touch the config and bounce the service when the directive
    # is not already present.
    if not file_contains(config_path, directive, use_sudo=True):
        file_append(config_path, directive, use_sudo=True)
        # Ubuntu names the service 'ssh'; other distributions use 'sshd'.
        service_name = 'ssh' if 'ubuntu' in distribution else 'sshd'
        sudo('service {} reload'.format(service_name))
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.