text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clear_layout(layout: QLayout) -> None: """Clear the layout off all its components"""
def clear_layout(layout: QLayout) -> None:
    """Recursively remove every item from *layout*.

    Widgets owned by the layout are scheduled for deletion via
    ``deleteLater``; nested layouts are cleared recursively.
    """
    if layout is None:
        return
    while layout.count():
        entry = layout.takeAt(0)
        child = entry.widget()
        if child is None:
            # Not a widget -- must be a nested layout; recurse into it.
            clear_layout(entry.layout())
        else:
            child.deleteLater()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_hangul_syllable_type(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT") """
def _get_hangul_syllable_type(hangul_syllable):
    """Return the Hangul_Syllable_Type property value ("L", "V", "T",
    "LV" or "LVT") for a scalar value representing a Hangul syllable.

    See the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo
    Behavior. Raises ValueError for non-Hangul-syllable input.
    """
    if _is_hangul_syllable(hangul_syllable):
        # Populate the lookup table lazily on first use.
        if not _hangul_syllable_types:
            _load_hangul_syllable_types()
        return _hangul_syllable_types[hangul_syllable]
    raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """
def _get_jamo_short_name(jamo):
    """Return the Jamo_Short_Name property value for a scalar value
    representing a Jamo.

    See the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo
    Behavior. Raises ValueError for non-Jamo input.
    """
    if _is_jamo(jamo):
        # Populate the lookup table lazily on first use.
        if not _jamo_short_names:
            _load_jamo_short_names()
        return _jamo_short_names[jamo]
    raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compose_hangul_syllable(jamo): """ Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple of list of Jamo to compose :return: Composed Hangul syllable """
def compose_hangul_syllable(jamo):
    """Compose a tuple or list of Jamo scalar values into one Hangul syllable.

    Implements the arithmetic composition described in the Unicode
    Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior".
    Accepts either <L, V, T>, <L, V> or <LV, T> sequences.

    Example:
        (U+1111, U+1171)          -> U+D4CC
        (U+D4CC, U+11B6)          -> U+D4DB
        (U+1111, U+1171, U+11B6)  -> U+D4DB

    :param jamo: Tuple or list of Jamo to compose
    :return: Composed Hangul syllable scalar value
    :raises ValueError: if the sequence is not a valid Jamo sequence
    """
    fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!"
    # L_BASE/V_BASE/T_BASE/S_BASE/N_COUNT/T_COUNT are presumably the
    # module-level composition constants from the Unicode standard
    # (0x1100, 0x1161, 0x11A7, 0xAC00, 588, 28) -- defined elsewhere
    # in this module; confirm there.
    if len(jamo) == 3:
        # <L, V, T>: leading consonant, vowel, trailing consonant.
        l_part, v_part, t_part = jamo
        # Validate each part is inside its Jamo range (L: U+1100..U+1112,
        # V: U+1161..U+1175, T: U+11A8..U+11C2).
        if not (l_part in range(0x1100, 0x1112 + 1)
                and v_part in range(0x1161, 0x1175 + 1)
                and t_part in range(0x11a8, 0x11c2 + 1)):
            raise ValueError(fmt_str_invalid_sequence.format(jamo))
        l_index = l_part - L_BASE
        v_index = v_part - V_BASE
        t_index = t_part - T_BASE
        lv_index = l_index * N_COUNT + v_index * T_COUNT
        return S_BASE + lv_index + t_index
    elif len(jamo) == 2:
        if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1):
            # <L, V>: compose to an LV-type syllable.
            l_part, v_part = jamo
            l_index = l_part - L_BASE
            v_index = v_part - V_BASE
            lv_index = l_index * N_COUNT + v_index * T_COUNT
            return S_BASE + lv_index
        elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1):
            # <LV, T>: an already-composed LV syllable plus a trailing Jamo.
            lv_part, t_part = jamo
            t_index = t_part - T_BASE
            return lv_part + t_index
        else:
            raise ValueError(fmt_str_invalid_sequence.format(jamo))
    else:
        raise ValueError(fmt_str_invalid_sequence.format(jamo))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_hangul_syllable_name(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1. """
def _get_hangul_syllable_name(hangul_syllable):
    """Return the Hangul syllable name per Unicode naming rule NR1.

    Fully decomposes the syllable into Jamo and concatenates their
    short names (see Unicode Standard, ch. 04, section 4.8, Names).

    :param hangul_syllable: scalar value of the Hangul syllable
    :raises ValueError: if the value is not a Hangul syllable
    """
    if not _is_hangul_syllable(hangul_syllable):
        raise ValueError("Value passed in does not represent a Hangul syllable!")
    jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True)
    # A missing trailing consonant decomposes to None -- skip it.
    return ''.join(_get_jamo_short_name(part) for part in jamo if part is not None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_client_callers(spec, timeout, error_callback, local, app): """Return a dict mapping method names to anonymous functions that will call the server's endpoint of the corresponding name as described in the api defined by the swagger dict and bravado spec"""
def generate_client_callers(spec, timeout, error_callback, local, app):
    """Return a dict mapping method names to anonymous functions that
    will call the server's endpoint of the corresponding name, as
    described by the swagger dict and bravado spec."""
    callers = {}

    def register(endpoint):
        # Endpoints without a client handler name are skipped.
        if endpoint.handler_client:
            callers[endpoint.handler_client] = _generate_client_caller(
                spec, endpoint, timeout, error_callback, local, app)

    spec.call_on_each_endpoint(register)
    return callers
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def calculate_new_length(gene_split, gene_results, hit):
    """Calculate the combined subject length of a gene that was split
    across several contigs.

    Seeds a running [start, end] span with the first split's HSP length
    and extends the length whenever a later split reaches further out
    on either side.

    :param gene_split: dict mapping subject headers to lists of split-hit keys
    :param gene_results: dict mapping each split-hit key to its result
        fields ('sbjct_start', 'sbjct_end', 'HSP_length')
    :param hit: hit dict whose 'sbjct_header' selects the split group
    :return: combined length over all splits (0 if the group is empty)
    """
    new_length = 0
    old_start = old_end = 0
    first = True
    for split in gene_split[hit['sbjct_header']]:
        new_start = int(gene_results[split]['sbjct_start'])
        new_end = int(gene_results[split]['sbjct_end'])
        if first:
            # Seed the running span with the first HSP.
            new_length = int(gene_results[split]['HSP_length'])
            old_start = new_start
            old_end = new_end
            first = False
            continue
        # Extend on the left if this split starts earlier ...
        if new_start < old_start:
            new_length += old_start - new_start
            old_start = new_start
        # ... and on the right if it ends later.
        if new_end > old_end:
            new_length += new_end - old_end
            old_end = new_end
    return new_length
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stream_to_packet(data): """ Chop a stream of data into MODBUS packets. :param data: stream of data :returns: a tuple of the data that is a packet with the remaining data, or ``None`` """
def stream_to_packet(data):
    """Chop a stream of data into MODBUS packets.

    :param data: stream of data
    :returns: a tuple of ``(packet, remaining_data)`` once a complete
        packet is buffered, or ``None`` if more data is needed
    """
    # The MBAP header is 6 bytes; bytes 4-5 carry the big-endian length
    # of the rest of the frame.
    header_len = 6
    if len(data) < header_len:
        return None
    (body_len,) = struct.unpack(">H", data[4:6])
    total = body_len + header_len
    if len(data) < total:
        return None
    return data[:total], data[total:]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_primitive(value, convert_instances=False, convert_datetime=True, level=0, max_depth=3): """Convert a complex object into primitives. Handy for JSON serialization. We can optionally handle instances, but since this is a recursive function, we could have cyclical data structures. To handle cyclical data structures we could track the actual objects visited in a set, but not all objects are hashable. Instead we just track the depth of the object inspections and don't go too deep. """
def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical data
    structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.
    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    # Depth guard against cyclical structures: give up with a placeholder.
    if level > max_depth:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        # Recurse with the same options; depth is bumped explicitly below
        # only for the container-like branches that unpack object state.
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in six.iteritems(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if xmlrpclib and isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            return value.data
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def stress(syllabified_simplex_word):
    '''Assign primary and secondary stress to 'syllabified_simplex_word'.'''
    syllables = syllabified_simplex_word.split('.')
    # Primary stress (') always falls on the first syllable.
    stressed = '\'' + syllables[0]  # primary stress
    try:
        # n shifts the even/odd parity when a secondary stress is deferred.
        n = 0
        # Only medial syllables are eligible for secondary stress; the
        # final syllable is appended unstressed at the end.
        medial = syllables[1:-1]
        for i, syll in enumerate(medial):
            if (i + n) % 2 == 0:
                # Even position (after any shifts): no stress mark.
                stressed += '.' + syll
            else:
                try:
                    # NOTE(review): a light syllable followed by a heavy one
                    # appears to defer the secondary stress one position to
                    # the right -- presumably the stress rule of the target
                    # language; confirm against is_light/is_heavy.
                    if is_light(syll) and is_heavy(medial[i + 1]):
                        stressed += '.' + syll
                        n += 1
                        continue
                except IndexError:
                    # Last medial syllable: no following syllable to inspect.
                    pass
                # secondary stress
                stressed += '.`' + syll
    except IndexError:
        pass
    # Append the final syllable unstressed (monosyllables get nothing).
    if len(syllables) > 1:
        stressed += '.' + syllables[-1]
    return stressed
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def confindr_reporter(self, analysistype='confindr'): """ Creates a final report of all the ConFindr results """
def confindr_reporter(self, analysistype='confindr'):
    """Create a final report of all the ConFindr results."""
    header = 'Strain,Genus,NumContamSNVs,ContamStatus,PercentContam,PercentContamSTD\n'
    rows = []
    # One CSV row per sample, pulled from its confindr results object.
    for sample in self.runmetadata.samples:
        rows.append('{str},{genus},{numcontamsnv},{status},{pc},{pcs}\n'.format(
            str=sample.name,
            genus=sample.confindr.genus,
            numcontamsnv=sample.confindr.num_contaminated_snvs,
            status=sample.confindr.contam_status,
            pc=sample.confindr.percent_contam,
            pcs=sample.confindr.percent_contam_std))
    with open(os.path.join(self.reportpath, analysistype + '.csv'), 'w') as report:
        report.write(header + ''.join(rows))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def methodreporter(self): """ Create final reports collating results from all the individual iterations through the method pipeline """
# Ensure that the analyses are set to complete self.analysescomplete = True # Reset the report path to original value self.reportpath = os.path.join(self.path, 'reports') # Clear the runmetadata - it will be populated with all the metadata from completemetadata self.runmetadata = MetadataObject() self.runmetadata.samples = list() # As the samples were entered into self.completemetadata depending on when they passed the quality threshold, # this list is not ordered numerically/alphabetically like the original runmetadata. Reset the order. for strain in self.samples: for sample in self.completemetadata: if sample.name == strain: # Append the sample to the ordered list of objects self.runmetadata.samples.append(sample) # Create the reports self.reporter() self.genusspecific() self.sixteensreporter() self.gdcsreporter() self.confindr_reporter()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(self): """ Run the methods required to create the genesippr report summary image """
def main(self):
    """Run the methods required to create the genesippr report summary image."""
    # Build the dataframe first, then render it to the report figure.
    self.dataframe_setup()
    figure_args = (self.outputfolder, self.image_report, self.header_list,
                   self.samples, 'genesippr', 'report')
    self.figure_populate(*figure_args, fail=self.fail)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dir_exists(location, use_sudo=False): """Tells if there is a remote directory at the given location."""
def dir_exists(location, use_sudo=False):
    """Tells if there is a remote directory at the given location."""
    runner = sudo if use_sudo else run
    with settings(hide('running', 'stdout', 'stderr'), warn_only=True):
        # 'test -d' exits 0 when the directory exists.
        return runner('test -d %s' % (location)).return_code == 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def disable_requiretty_on_sudoers(log=False): """ allow sudo calls through ssh without a tty """
def disable_requiretty_on_sudoers(log=False):
    """ allow sudo calls through ssh without a tty """
    if log:
        bookshelf2.logging_helpers.log_green(
            'disabling requiretty on sudo calls')
    # Comment out any 'Defaults requiretty' line so non-interactive ssh
    # sessions can run sudo.
    comment_line('/etc/sudoers', '^Defaults.*requiretty', use_sudo=True)
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def linux_distribution(): """ returns the linux distribution in lower case """
def linux_distribution():
    """ returns the linux distribution in lower case """
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True, capture=True):
        # os_release() parses /etc/os-release; 'ID' is its lowercase
        # distribution identifier.
        return os_release()['ID']
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def systemd(service, start=True, enabled=True, unmask=False, restart=False): """ manipulates systemd services """
def systemd(service, start=True, enabled=True, unmask=False, restart=False):
    """ manipulates systemd services """
    with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=True, capture=True):
        # restart takes precedence over start/stop.
        if restart:
            sudo('systemctl restart %s' % service)
        else:
            if start:
                sudo('systemctl start %s' % service)
            else:
                sudo('systemctl stop %s' % service)
        # Persistently enable/disable the unit at boot.
        if enabled:
            sudo('systemctl enable %s' % service)
        else:
            sudo('systemctl disable %s' % service)
        if unmask:
            # NOTE(review): unmasking happens *after* the start/restart
            # attempt above; starting a masked unit fails, so callers
            # passing unmask=True may need a second invocation -- confirm
            # whether this ordering is intentional.
            sudo('systemctl unmask %s' % service)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def install_os_updates(distribution, force=False): """ installs OS updates """
def install_os_updates(distribution, force=False):
    """ installs OS updates """
    # RedHat-family hosts update via yum.
    if ('centos' in distribution or
            'rhel' in distribution or
            'redhat' in distribution):
        bookshelf2.logging_helpers.log_green('installing OS updates')
        sudo("yum -y --quiet clean all")
        sudo("yum group mark convert")
        sudo("yum -y --quiet update")
    # Debian-family hosts update via apt-get.
    if ('ubuntu' in distribution or
            'debian' in distribution):
        with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                      warn_only=False, capture=True):
            sudo("DEBIAN_FRONTEND=noninteractive apt-get update")
            if force:
                # NOTE(review): the leading 'sudo' inside a sudo() call is
                # redundant, and '--force-yes' is deprecated on current
                # apt -- confirm target distro versions before changing.
                sudo("sudo DEBIAN_FRONTEND=noninteractive apt-get -y -o "
                     "Dpkg::Options::='--force-confdef' "
                     "-o Dpkg::Options::='--force-confold' upgrade --force-yes")
            else:
                sudo("sudo DEBIAN_FRONTEND=noninteractive apt-get -y -o "
                     "Dpkg::Options::='--force-confdef' -o "
                     "Dpkg::Options::='--force-confold' upgrade")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put(self): """Updates this task whitelist on the saltant server. Returns: :class:`saltant.models.task_whitelist.TaskWhitelist`: A task whitelist model instance representing the task whitelist just updated. """
def put(self):
    """Updates this task whitelist on the saltant server.

    Returns:
        :class:`saltant.models.task_whitelist.TaskWhitelist`:
            A task whitelist model instance representing the task
            whitelist just updated.
    """
    # Push every locally held field back to the server via the manager.
    fields = dict(
        id=self.id,
        name=self.name,
        description=self.description,
        whitelisted_container_task_types=self.whitelisted_container_task_types,
        whitelisted_executable_task_types=self.whitelisted_executable_task_types,
    )
    return self.manager.put(**fields)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create( self, name, description="", whitelisted_container_task_types=None, whitelisted_executable_task_types=None, ): """Create a task whitelist. Args: name (str): The name of the task whitelist. description (str, optional): A description of the task whitelist. whitelisted_container_task_types (list, optional): A list of whitelisted container task type IDs. whitelisted_executable_task_types (list, optional): A list of whitelisted executable task type IDs. Returns: :class:`saltant.models.task_whitelist.TaskWhitelist`: A task whitelist model instance representing the task whitelist just created. """
def create(
    self,
    name,
    description="",
    whitelisted_container_task_types=None,
    whitelisted_executable_task_types=None,
):
    """Create a task whitelist.

    Args:
        name (str): The name of the task whitelist.
        description (str, optional): A description of the task whitelist.
        whitelisted_container_task_types (list, optional): A list of
            whitelisted container task type IDs.
        whitelisted_executable_task_types (list, optional): A list of
            whitelisted executable task type IDs.

    Returns:
        :class:`saltant.models.task_whitelist.TaskWhitelist`:
            A task whitelist model instance representing the task
            whitelist just created.
    """
    # None whitelists become empty lists so the POST payload is well formed.
    container_types = ([] if whitelisted_container_task_types is None
                       else whitelisted_container_task_types)
    executable_types = ([] if whitelisted_executable_task_types is None
                        else whitelisted_executable_task_types)

    request_url = self._client.base_api_url + self.list_url
    payload = {
        "name": name,
        "description": description,
        "whitelisted_container_task_types": container_types,
        "whitelisted_executable_task_types": executable_types,
    }
    response = self._client.session.post(request_url, data=payload)

    # A 201 Created is the only acceptable outcome.
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_201_CREATED,
    )

    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def patch( self, id, name=None, description=None, whitelisted_container_task_types=None, whitelisted_executable_task_types=None, ): """Partially updates a task whitelist on the saltant server. Args: id (int): The ID of the task whitelist. name (str, optional): The name of the task whitelist. description (str, optional): A description of the task whitelist. whitelisted_container_task_types (list, optional): A list of whitelisted container task type IDs. whitelisted_executable_task_types (list, optional): A list of whitelisted executable task type IDs. Returns: :class:`saltant.models.task_whitelist.TaskWhitelist`: A task whitelist model instance representing the task whitelist just updated. """
def patch(
    self,
    id,
    name=None,
    description=None,
    whitelisted_container_task_types=None,
    whitelisted_executable_task_types=None,
):
    """Partially updates a task whitelist on the saltant server.

    Args:
        id (int): The ID of the task whitelist.
        name (str, optional): The name of the task whitelist.
        description (str, optional): A description of the task whitelist.
        whitelisted_container_task_types (list, optional): A list of
            whitelisted container task type IDs.
        whitelisted_executable_task_types (list, optional): A list of
            whitelisted executable task type IDs.

    Returns:
        :class:`saltant.models.task_whitelist.TaskWhitelist`:
            A task whitelist model instance representing the task
            whitelist just updated.
    """
    request_url = self._client.base_api_url + self.detail_url.format(id=id)

    # Only fields explicitly provided (not None) go into the PATCH payload.
    candidates = {
        "name": name,
        "description": description,
        "whitelisted_container_task_types": whitelisted_container_task_types,
        "whitelisted_executable_task_types": whitelisted_executable_task_types,
    }
    data_to_patch = {field: value for field, value in candidates.items()
                     if value is not None}

    response = self._client.session.patch(request_url, data=data_to_patch)

    # A 200 OK is the only acceptable outcome.
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_200_OK,
    )

    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def enable_logging( level='WARNING' ): """Enable sending logs to stderr. Useful for shell sessions. level Logging threshold, as defined in the logging module of the Python standard library. Defaults to 'WARNING'. """
def enable_logging( level='WARNING' ):
    """Enable sending logs to stderr. Useful for shell sessions.

    level
        Logging threshold, as defined in the logging module of the Python
        standard library. Defaults to 'WARNING'.
    """
    handler = logging.StreamHandler()
    handler.setLevel( level )
    handler.setFormatter( logging.Formatter( '[%(levelname)s] %(name)s - %(message)s' ) )
    logger = logging.getLogger( 'mrcrowbar' )
    logger.setLevel( level )
    logger.addHandler( handler )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_all_iter( source, substring, start=None, end=None, overlap=False ): """Iterate through every location a substring can be found in a source string. source The source string to search. start Start offset to read from (default: start) end End offset to stop reading at (default: end) overlap Whether to return overlapping matches (default: false) """
def find_all_iter( source, substring, start=None, end=None, overlap=False ):
    """Iterate through every location a substring can be found in a source string.

    source
        The source string to search.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    overlap
        Whether to return overlapping matches (default: false)
    """
    # Clip the search window; offset converts window positions back to
    # source positions.
    window = source if end is None else source[:end]
    offset = 0
    if start is not None:
        window = window[start:]
        offset = start
    # Overlapping search advances one position per match; otherwise skip
    # the whole match (guarding against an empty substring).
    step = 1 if overlap else (len( substring ) or 1)
    match = window.find( substring )
    while match != -1:
        yield offset + match
        match = window.find( substring, match + step )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_all( source, substring, start=None, end=None, overlap=False ): """Return every location a substring can be found in a source string. source The source string to search. start Start offset to read from (default: start) end End offset to stop reading at (default: end) overlap Whether to return overlapping matches (default: false) """
def find_all( source, substring, start=None, end=None, overlap=False ):
    """Return every location a substring can be found in a source string.

    source
        The source string to search.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    overlap
        Whether to return overlapping matches (default: false)
    """
    # list() materialises the lazy search directly; the identity
    # comprehension in the original added nothing.
    return list( find_all_iter( source, substring, start, end, overlap ) )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pixdump_iter( source, start=None, end=None, length=None, width=64, height=None, palette=None ): """Return the contents of a byte string as a 256 colour image. source The byte string to print. start Start offset to read from (default: start) end End offset to stop reading at (default: end) length Length to read in (optional replacement for end) width Width of image to render in pixels (default: 64) height Height of image to render in pixels (default: auto) palette List of Colours to use (default: test palette) """
def pixdump_iter( source, start=None, end=None, length=None, width=64, height=None, palette=None ):
    """Return the contents of a byte string as a 256 colour image.

    source
        The byte string to print.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    length
        Length to read in (optional replacement for end)
    width
        Width of image to render in pixels (default: 64)
    height
        Height of image to render in pixels (default: auto)
    palette
        List of Colours to use (default: test palette)
    """
    assert is_bytes( source )

    if not palette:
        palette = colour.TEST_PALETTE

    # Resolve the (start, end) window; end and length are mutually exclusive.
    start = 0 if (start is None) else start
    if (end is not None) and (length is not None):
        raise ValueError( 'Can\'t define both an end and a length!' )
    elif (length is not None):
        end = start+length
    elif (end is not None):
        pass
    else:
        end = len( source )

    # Clamp the window to the actual buffer.
    start = max( start, 0 )
    end = min( end, len( source ) )
    if len( source ) == 0 or (start == end == 0):
        # Nothing to render: empty iterator.
        return iter(())

    # Default height: enough rows to cover the selected window.
    if height is None:
        height = math.ceil( (end-start)/width )

    def data_fetch( x_pos, y_pos, frame ):
        # Map pixel coordinates back to a byte offset; out-of-range pixels
        # render as transparent black.
        index = y_pos*width + x_pos + start
        if index >= end:
            return (0, 0, 0, 0)
        return palette[source[index]]

    return ansi.format_image_iter( data_fetch, width=width, height=height )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pixdump( source, start=None, end=None, length=None, width=64, height=None, palette=None ): """Print the contents of a byte string as a 256 colour image. source The byte string to print. start Start offset to read from (default: start) end End offset to stop reading at (default: end) length Length to read in (optional replacement for end) width Width of image to render in pixels (default: 64) height Height of image to render in pixels (default: auto) palette List of Colours to use (default: test palette) """
def pixdump( source, start=None, end=None, length=None, width=64, height=None, palette=None ):
    """Print the contents of a byte string as a 256 colour image.

    source
        The byte string to print.
    start
        Start offset to read from (default: start)
    end
        End offset to stop reading at (default: end)
    length
        Length to read in (optional replacement for end)
    width
        Width of image to render in pixels (default: 64)
    height
        Height of image to render in pixels (default: auto)
    palette
        List of Colours to use (default: test palette)
    """
    # Delegate rendering to the iterator variant and print row by row.
    rows = pixdump_iter( source, start, end, length, width, height, palette )
    for row in rows:
        print( row )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put_bits( self, value, count ): """Push bits into the target. value Integer containing bits to push, ordered from least-significant bit to most-significant bit. count Number of bits to push to the target. """
def put_bits( self, value, count ):
    """Push bits into the target.

    value
        Integer containing bits to push, ordered from least-significant
        bit to most-significant bit.
    count
        Number of bits to push to the target.
    """
    for _ in range( count ):
        # bits are retrieved from the source LSB first
        bit = (value & 1)
        value >>= 1

        # however, bits are put into the result based on the rule
        if self.bits_reverse:
            if self.insert_at_msb:
                # Fill from the MSB downwards.
                self.current_bits |= (bit << (self.bits_remaining-1))
            else:
                # Shift existing bits left and append at the LSB.
                self.current_bits <<= 1
                self.current_bits |= bit
        else:
            if self.insert_at_msb:
                # Shift existing bits right and insert at bit 7.
                self.current_bits >>= 1
                self.current_bits |= (bit << 7)
            else:
                # Fill from the LSB upwards.
                self.current_bits |= (bit << (8-self.bits_remaining))

        self.bits_remaining -= 1
        if self.bits_remaining <= 0:
            # Byte is full: flush to the output and reset the accumulator.
            self.output.append( self.current_bits )
            self.current_bits = 0
            self.bits_remaining = 8
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_buffer( self ): """Return a byte string containing the target as currently written."""
def get_buffer( self ):
    """Return a byte string containing the target as currently written."""
    result = self.output
    # A partially filled byte is flushed onto the end of the output.
    if self.bits_remaining < 8:
        result = bytearray( result )
        result.append( self.current_bits )
    if self.bytes_reverse:
        return bytes( reversed( result ) )
    return bytes( result )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def convert_case(name): """Converts name from CamelCase to snake_case"""
def convert_case(name):
    """Converts name from CamelCase to snake_case"""
    # First pass splits any character from a following capitalised word;
    # second pass splits remaining lower/digit -> upper boundaries.
    partial = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    snake = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partial)
    return snake.lower()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def table_name(self): """Pluralises the class_name using utterly simple algo and returns as table_name"""
def table_name(self):
    """Pluralises the class_name using utterly simple algo and returns as table_name"""
    if not self.class_name:
        raise ValueError
    base = ModelCompiler.convert_case(self.class_name)
    # Naive English pluralisation: y -> ies, s -> ses, otherwise append s.
    if base.endswith("y"):
        return "{}ies".format(base[:-1])
    if base.endswith("s"):
        return "{}es".format(base)
    return "{}s".format(base)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def types(self): """All the unique types found in user supplied model"""
def types(self):
    """All the unique types found in user supplied model"""
    found = []
    for column in self.column_definitions:
        declared = column.get('type', None)
        # Skip columns without a declared type. (Replaces the original
        # conditional-expression-as-statement idiom.)
        if declared:
            found.append(ModelCompiler.get_column_type(declared))
    # Deduplicate; note set() does not preserve declaration order, matching
    # the original behaviour.
    return list(set(found))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def basic_types(self): """Returns non-postgres types referenced in user supplied model """
def basic_types(self):
    """Returns non-postgres types referenced in user supplied model"""
    if not self.foreign_key_definitions:
        return self.standard_types
    # Copy before appending: the original appended to the list object
    # returned by self.standard_types, mutating shared state on every
    # access when foreign keys were present.
    types_with_fk = list(self.standard_types)
    types_with_fk.append('ForeignKey')
    return types_with_fk
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def primary_keys(self): """Returns the primary keys referenced in user supplied model"""
def primary_keys(self):
    """Returns the primary keys referenced in user supplied model"""
    keys = []
    for column in self.column_definitions:
        # Only a truthy primary_key flag marks an actual primary key;
        # .get() covers both the missing-key and falsy-value cases that
        # the original's `in keys()` + conditional-expression handled.
        if column.get('primary_key'):
            keys.append(column['name'])
    return keys
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compiled_columns(self): """Returns compiled column definitions"""
def compiled_columns(self):
    """Returns compiled column definitions"""
    def get_column_args(column):
        # Render every extra key (anything other than name/type) as a
        # keyword argument; server-side defaults are wrapped in quotes.
        tmp = []
        for arg_name, arg_val in column.items():
            if arg_name not in ('name', 'type'):
                if arg_name in ('server_default', 'server_onupdate'):
                    arg_val = '"{}"'.format(arg_val)
                tmp.append(ALCHEMY_TEMPLATES.column_arg.safe_substitute(arg_name=arg_name, arg_val=arg_val))
        return ", ".join(tmp)

    res = []
    for column in self.column_definitions:
        column_args = get_column_args(column)
        column_type, type_params = ModelCompiler.get_col_type_info(column.get('type'))
        column_name = column.get('name')
        if column_type in MUTABLE_DICT_TYPES:
            # Mutable dict columns are wrapped in the MutableDict template,
            # which consumes the type parameters itself.
            column_type = ALCHEMY_TEMPLATES.mutable_dict_type.safe_substitute(type=column_type, type_params=type_params)
            type_params = ''
        res.append(
            ALCHEMY_TEMPLATES.column_definition.safe_substitute(column_name=column_name,
                                                                column_type=column_type,
                                                                column_args=column_args,
                                                                type_params=type_params))
    # Indent each generated line to class-body level.
    join_string = "\n" + self.tab
    return join_string.join(res)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compiled_foreign_keys(self): """Returns compiled foreign key definitions"""
def compiled_foreign_keys(self):
    """Returns compiled foreign key definitions"""
    def get_column_args(column):
        # Same as for plain columns, but 'reference' is handled separately.
        tmp = []
        for arg_name, arg_val in column.items():
            if arg_name not in ('name', 'type', 'reference'):
                if arg_name in ('server_default', 'server_onupdate'):
                    arg_val = '"{}"'.format(arg_val)
                tmp.append(ALCHEMY_TEMPLATES.column_arg.safe_substitute(arg_name=arg_name, arg_val=arg_val))
        return ", ".join(tmp)

    def get_fkey_args(column):
        # Build the ForeignKey('table.column') argument from the
        # column's 'reference' mapping.
        table = column['reference']['table']
        column = column['reference']['column']
        return ALCHEMY_TEMPLATES.foreign_key_arg.safe_substitute(reference_table=table, reference_column=column)

    res = []
    for column in self.foreign_key_definitions:
        column_args = get_column_args(column)
        column_type, type_params = ModelCompiler.get_col_type_info(column.get('type'))
        column_name = column.get('name')
        reference = get_fkey_args(column)
        if column_type in MUTABLE_DICT_TYPES:
            # Mutable dict columns wrap their type parameters in the
            # MutableDict template.
            column_type = ALCHEMY_TEMPLATES.mutable_dict_type.safe_substitute(type=column_type, type_params=type_params)
            type_params = ''
        res.append(
            ALCHEMY_TEMPLATES.foreign_key.safe_substitute(column_name=column_name,
                                                          column_type=column_type,
                                                          column_args=column_args,
                                                          foreign_key_args=reference,
                                                          type_params=type_params))
    # Indent each generated line to class-body level.
    join_string = "\n" + self.tab
    return join_string.join(res)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compiled_relationships(self): """Returns compiled relationship definitions"""
def compiled_relationships(self):
    """Returns compiled relationship definitions"""
    def get_column_args(column):
        # 'name', 'type', 'reference' and 'class' are structural keys;
        # everything else is forwarded as a relationship() argument, with
        # back_populates quoted as a string literal.
        tmp = []
        for arg_name, arg_val in column.items():
            if arg_name not in ('name', 'type', 'reference', 'class'):
                if arg_name in ('back_populates', ):
                    arg_val = "'{}'".format(arg_val)
                tmp.append(ALCHEMY_TEMPLATES.column_arg.safe_substitute(arg_name=arg_name, arg_val=arg_val))
        return ", ".join(tmp)

    res = []
    for column in self.relationship_definitions:
        column_args = get_column_args(column)
        column_name = column.get('name')
        cls_name = column.get("class")
        res.append(
            ALCHEMY_TEMPLATES.relationship.safe_substitute(column_name=column_name,
                                                           column_args=column_args,
                                                           class_name=cls_name))
    # Indent each generated line to class-body level.
    join_string = "\n" + self.tab
    return join_string.join(res)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compiled_init_func(self): """Returns compiled init function"""
def compiled_init_func(self):
    """Returns compiled init function"""
    # Assignments are indented two levels (inside the generated __init__ body)
    separator = "\n" + self.tab + self.tab
    assignments = separator.join(
        ALCHEMY_TEMPLATES.col_assignment.safe_substitute(col_name=name)
        for name in self.columns)
    init_args = ", ".join(
        ALCHEMY_TEMPLATES.func_arg.safe_substitute(arg_name=name)
        for name in self.columns)
    return ALCHEMY_TEMPLATES.init_function.safe_substitute(
        col_assignments=assignments, init_args=init_args)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compiled_update_func(self): """Returns compiled update function"""
def compiled_update_func(self):
    """Returns compiled update function"""
    separator = "\n" + self.tab + self.tab
    # Primary keys are identifiers, not updatable fields, so exclude them
    updatable = [name for name in self.columns if name not in self.primary_keys]
    assignments = separator.join(
        ALCHEMY_TEMPLATES.not_none_col_assignment.safe_substitute(col_name=name)
        for name in updatable)
    update_args = ", ".join(
        ALCHEMY_TEMPLATES.func_arg.safe_substitute(arg_name=name)
        for name in updatable)
    return ALCHEMY_TEMPLATES.update_function.safe_substitute(
        not_none_col_assignments=assignments,
        update_args=update_args,
        class_name=self.class_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compiled_hash_func(self): """Returns compiled hash function based on hash of stringified primary_keys. This isn't the most efficient way"""
def compiled_hash_func(self):
    """Returns compiled hash function based on hash of stringified
    primary keys. This isn't the most efficient way."""
    # NOTE: the join separator is "+ " (no space before '+'), matching the
    # layout the original generator emitted.
    hash_str = "+ ".join(
        "str(self.{})".format(key) for key in self.primary_keys)
    return ALCHEMY_TEMPLATES.hash_function.safe_substitute(
        concated_primary_key_strs=hash_str)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compiled_model(self): """Returns compile ORM class for the user supplied model"""
def compiled_model(self):
    """Returns compiled ORM class for the user supplied model"""
    # Static snippets that are pasted in verbatim from the template library
    verbatim = {
        'add_function': ALCHEMY_TEMPLATES.add_function.template,
        'delete_function': ALCHEMY_TEMPLATES.delete_function.template,
        'to_dict_function': ALCHEMY_TEMPLATES.to_dict_function.template,
        'to_proxy_function': ALCHEMY_TEMPLATES.to_proxy_function.template,
        'from_proxy_function': ALCHEMY_TEMPLATES.from_proxy_function.template,
    }
    return ALCHEMY_TEMPLATES.model.safe_substitute(
        class_name=self.class_name,
        table_name=self.table_name,
        column_definitions=self.compiled_columns,
        init_function=self.compiled_init_func,
        update_function=self.compiled_update_func,
        hash_function=self.compiled_hash_func,
        eq_function=self.compiled_eq_func,
        neq_function=self.compiled_neq_func,
        str_function=self.compiled_str_func,
        unicode_function=self.compiled_unicode_func,
        repr_function=self.compiled_repr_func,
        types=", ".join(self.basic_types),
        username=self.username,
        foreign_keys=self.compiled_foreign_keys,
        relationships=self.compiled_relationships,
        named_imports=self.compiled_named_imports,
        orm_imports=self.compiled_orm_imports,
        get_proxy_cls_function=self.compiled_proxy_cls_func,
        **verbatim)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put(self): """Updates this task queue on the saltant server. Returns: :class:`saltant.models.task_queue.TaskQueue`: A task queue model instance representing the task queue just updated. """
def put(self):
    """Updates this task queue on the saltant server.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`: A task queue
            model instance representing the task queue just updated.
    """
    # Delegate to the manager, forwarding every field of this instance
    fields = {
        "id": self.id,
        "name": self.name,
        "description": self.description,
        "private": self.private,
        "runs_executable_tasks": self.runs_executable_tasks,
        "runs_docker_container_tasks": self.runs_docker_container_tasks,
        "runs_singularity_container_tasks": self.runs_singularity_container_tasks,
        "active": self.active,
        "whitelists": self.whitelists,
    }
    return self.manager.put(**fields)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, id=None, name=None): """Get a task queue. Either the id xor the name of the task type must be specified. Args: id (int, optional): The id of the task type to get. name (str, optional): The name of the task type to get. Returns: :class:`saltant.models.task_queue.TaskQueue`: A task queue model instance representing the task queue requested. Raises: ValueError: Neither id nor name were set *or* both id and name were set. """
def get(self, id=None, name=None):
    """Get a task queue.

    Exactly one of ``id`` or ``name`` must be specified.

    Args:
        id (int, optional): The id of the task queue to get.
        name (str, optional): The name of the task queue to get.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`: A task queue
            model instance representing the task queue requested.

    Raises:
        ValueError: Neither id nor name were set *or* both were set.
    """
    # xor validation: reject "both None" and "both provided"
    if (id is None) == (name is None):
        raise ValueError("Either id or name must be set (but not both!)")
    # Lookup by id goes through the generic parent implementation
    if id is not None:
        return super(TaskQueueManager, self).get(id=id)
    # Otherwise resolve the queue by name
    return self.list(filters={"name": name})[0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create( self, name, description="", private=False, runs_executable_tasks=True, runs_docker_container_tasks=True, runs_singularity_container_tasks=True, active=True, whitelists=None, ): """Create a task queue. Args: name (str): The name of the task queue. description (str, optional): A description of the task queue. private (bool, optional): A boolean specifying whether the queue is exclusive to its creator. Defaults to False. runs_executable_tasks (bool, optional): A Boolean specifying whether the queue runs executable tasks. Defaults to True. runs_docker_container_tasks (bool, optional): A Boolean specifying whether the queue runs container tasks that run in Docker containers. Defaults to True. runs_singularity_container_tasks (bool, optional): A Boolean specifying whether the queue runs container tasks that run in Singularity containers. Defaults to True. active (bool, optional): A boolean specifying whether the queue is active. Default to True. whitelists (list, optional): A list of task whitelist IDs. Defaults to None (which gets translated to []). Returns: :class:`saltant.models.task_queue.TaskQueue`: A task queue model instance representing the task queue just created. """
def create(self, name, description="", private=False,
           runs_executable_tasks=True, runs_docker_container_tasks=True,
           runs_singularity_container_tasks=True, active=True,
           whitelists=None):
    """Create a task queue.

    Args:
        name (str): The name of the task queue.
        description (str, optional): A description of the task queue.
        private (bool, optional): Whether the queue is exclusive to its
            creator. Defaults to False.
        runs_executable_tasks (bool, optional): Whether the queue runs
            executable tasks. Defaults to True.
        runs_docker_container_tasks (bool, optional): Whether the queue
            runs Docker container tasks. Defaults to True.
        runs_singularity_container_tasks (bool, optional): Whether the
            queue runs Singularity container tasks. Defaults to True.
        active (bool, optional): Whether the queue is active.
            Defaults to True.
        whitelists (list, optional): Task whitelist IDs. Defaults to
            None (translated to []).

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`: A task queue
            model instance representing the task queue just created.
    """
    # None is the safe mutable-default placeholder; translate it here
    if whitelists is None:
        whitelists = []
    request_url = self._client.base_api_url + self.list_url
    payload = {
        "name": name,
        "description": description,
        "private": private,
        "runs_executable_tasks": runs_executable_tasks,
        "runs_docker_container_tasks": runs_docker_container_tasks,
        "runs_singularity_container_tasks": runs_singularity_container_tasks,
        "active": active,
        "whitelists": whitelists,
    }
    response = self._client.session.post(request_url, data=payload)
    # A creation must come back as 201
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_201_CREATED,
    )
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def patch( self, id, name=None, description=None, private=None, runs_executable_tasks=None, runs_docker_container_tasks=None, runs_singularity_container_tasks=None, active=None, whitelists=None, ): """Partially updates a task queue on the saltant server. Args: id (int): The ID of the task queue. name (str, optional): The name of the task queue. description (str, optional): The description of the task queue. private (bool, optional): A Booleon signalling whether the queue can only be used by its associated user. runs_executable_tasks (bool, optional): A Boolean specifying whether the queue runs executable tasks. runs_docker_container_tasks (bool, optional): A Boolean specifying whether the queue runs container tasks that run in Docker containers. runs_singularity_container_tasks (bool, optional): A Boolean specifying whether the queue runs container tasks that run in Singularity containers. active (bool, optional): A Booleon signalling whether the queue is active. whitelists (list, optional): A list of task whitelist IDs. Returns: :class:`saltant.models.task_queue.TaskQueue`: A task queue model instance representing the task queue just updated. """
def patch(self, id, name=None, description=None, private=None,
          runs_executable_tasks=None, runs_docker_container_tasks=None,
          runs_singularity_container_tasks=None, active=None,
          whitelists=None):
    """Partially updates a task queue on the saltant server.

    Args:
        id (int): The ID of the task queue.
        name (str, optional): The name of the task queue.
        description (str, optional): The description of the task queue.
        private (bool, optional): Whether the queue can only be used by
            its associated user.
        runs_executable_tasks (bool, optional): Whether the queue runs
            executable tasks.
        runs_docker_container_tasks (bool, optional): Whether the queue
            runs Docker container tasks.
        runs_singularity_container_tasks (bool, optional): Whether the
            queue runs Singularity container tasks.
        active (bool, optional): Whether the queue is active.
        whitelists (list, optional): A list of task whitelist IDs.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`: A task queue
            model instance representing the task queue just updated.
    """
    request_url = self._client.base_api_url + self.detail_url.format(id=id)
    # Only send fields the caller actually provided; None means "leave as is"
    candidate_fields = {
        "name": name,
        "description": description,
        "private": private,
        "runs_executable_tasks": runs_executable_tasks,
        "runs_docker_container_tasks": runs_docker_container_tasks,
        "runs_singularity_container_tasks": runs_singularity_container_tasks,
        "active": active,
        "whitelists": whitelists,
    }
    data_to_patch = {key: value for key, value in candidate_fields.items()
                     if value is not None}
    response = self._client.session.patch(request_url, data=data_to_patch)
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_200_OK,
    )
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put( self, id, name, description, private, runs_executable_tasks, runs_docker_container_tasks, runs_singularity_container_tasks, active, whitelists, ): """Updates a task queue on the saltant server. Args: id (int): The ID of the task queue. name (str): The name of the task queue. description (str): The description of the task queue. private (bool): A Booleon signalling whether the queue can only be used by its associated user. runs_executable_tasks (bool): A Boolean specifying whether the queue runs executable tasks. runs_docker_container_tasks (bool): A Boolean specifying whether the queue runs container tasks that run in Docker containers. runs_singularity_container_tasks (bool): A Boolean specifying whether the queue runs container tasks that run in Singularity containers. active (bool): A Booleon signalling whether the queue is active. whitelists (list): A list of task whitelist IDs. Returns: :class:`saltant.models.task_queue.TaskQueue`: A task queue model instance representing the task queue just updated. """
def put(self, id, name, description, private, runs_executable_tasks,
        runs_docker_container_tasks, runs_singularity_container_tasks,
        active, whitelists):
    """Updates a task queue on the saltant server.

    Args:
        id (int): The ID of the task queue.
        name (str): The name of the task queue.
        description (str): The description of the task queue.
        private (bool): Whether the queue can only be used by its
            associated user.
        runs_executable_tasks (bool): Whether the queue runs executable
            tasks.
        runs_docker_container_tasks (bool): Whether the queue runs
            Docker container tasks.
        runs_singularity_container_tasks (bool): Whether the queue runs
            Singularity container tasks.
        active (bool): Whether the queue is active.
        whitelists (list): A list of task whitelist IDs.

    Returns:
        :class:`saltant.models.task_queue.TaskQueue`: A task queue
            model instance representing the task queue just updated.
    """
    request_url = self._client.base_api_url + self.detail_url.format(id=id)
    payload = {
        "name": name,
        "description": description,
        "private": private,
        "runs_executable_tasks": runs_executable_tasks,
        "runs_docker_container_tasks": runs_docker_container_tasks,
        "runs_singularity_container_tasks": runs_singularity_container_tasks,
        "active": active,
        "whitelists": whitelists,
    }
    response = self._client.session.put(request_url, data=payload)
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_200_OK,
    )
    return self.response_data_to_model_instance(response.json())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(self): """ Run the required methods in the appropriate order """
def main(self):
    """Run the required methods in the appropriate order"""
    # Prepare the genus-specific target/mapping files
    self.targets()
    # Bait reads against the targets (k-mer size 49)
    self.bait(k=49)
    # Re-bait with the baited reads; shorter k-mer, masked middle base
    self.reversebait(maskmiddle='t', k=19)
    # Subsample the baited reads for downstream typing
    self.subsample_reads()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def targets(self): """ Using the data from the BLAST analyses, set the targets folder, and create the 'mapping file'. This is the genera-specific FASTA file that will be used for all the reference mapping; it replaces the 'bait file' in the code """
def targets(self):
    """Using the data from the BLAST analyses, set the targets folder, and
    create the 'mapping file'. This is the genera-specific FASTA file used
    for all the reference mapping; it replaces the 'bait file'."""
    logging.info('Performing analysis with {} targets folder'.format(self.analysistype))
    for sample in self.runmetadata:
        if sample.general.bestassemblyfile == 'NA':
            continue
        sample[self.analysistype].targetpath = os.path.join(
            self.targetpath, 'genera', sample[self.analysistype].genus, '')
        # A relatively strict databasing scheme is required for the custom
        # targets; a helper script will eventually combine individual files
        # into a properly formatted combined file
        try:
            sample[self.analysistype].mappingfile = glob(
                '{}*.fa'.format(sample[self.analysistype].targetpath))[0]
        # If the fasta file is missing, raise a custom error
        except IndexError as e:
            # noinspection PyPropertyAccess
            e.args = ['Cannot find the combined fasta file in {}. Please note that the file must have a '
                      '.fasta extension'.format(sample[self.analysistype].targetpath)]
            # Only re-raise when the target directory exists (i.e. the file
            # is genuinely missing); otherwise mark the sample as unusable
            if os.path.isdir(sample[self.analysistype].targetpath):
                raise
            else:
                sample.general.bestassemblyfile = 'NA'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def subsample(self): """ Subsample 1000 reads from the baited files """
def subsample(self):
    """Subsample 1000 reads from the baited files"""
    logging.info('Subsampling FASTQ reads')
    # Spin up one worker thread per CPU
    for _ in range(self.cpus):
        worker = Thread(target=self.subsamplethreads, args=())
        worker.setDaemon(True)
        worker.start()
    with progressbar(self.runmetadata.samples) as bar:
        for sample in bar:
            if sample.general.bestassemblyfile != 'NA':
                # Name of the subsampled FASTQ file
                sample[self.analysistype].subsampledfastq = \
                    os.path.splitext(sample[self.analysistype].baitedfastq)[0] + '_subsampled.fastq'
                # System call; NOTE(review): the attribute is named
                # 'seqtkcall' but the command run is BBTools reformat.sh
                sample[self.analysistype].seqtkcall = \
                    'reformat.sh in={} out={} samplereadstarget=1000'.format(
                        sample[self.analysistype].baitedfastq,
                        sample[self.analysistype].subsampledfastq)
                # Queue the sample for the workers
                self.samplequeue.put(sample)
    self.samplequeue.join()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fasta(self): """ Convert the subsampled reads to FASTA format using reformat.sh """
def fasta(self):
    """Convert the subsampled reads to FASTA format using reformat.sh"""
    logging.info('Converting FASTQ files to FASTA format')
    # Start the worker threads
    for _ in range(self.cpus):
        worker = Thread(target=self.fastathreads, args=())
        worker.setDaemon(True)
        worker.start()
    with progressbar(self.runmetadata.samples) as bar:
        for sample in bar:
            if sample.general.bestassemblyfile != 'NA':
                # Same base name as the FASTQ, but with a .fa extension
                sample[self.analysistype].fasta = \
                    os.path.splitext(sample[self.analysistype].subsampledfastq)[0] + '.fa'
                # System call for the conversion
                sample[self.analysistype].reformatcall = \
                    'reformat.sh in={fastq} out={fasta}'.format(
                        fastq=sample[self.analysistype].subsampledfastq,
                        fasta=sample[self.analysistype].fasta)
                self.fastaqueue.put(sample)
    self.fastaqueue.join()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def blast(self): """ Run BLAST analyses of the subsampled FASTQ reads against the NCBI 16S reference database """
def blast(self):
    """Run BLAST analyses of the subsampled FASTQ reads against the NCBI 16S
    reference database"""
    logging.info('BLASTing FASTA files against {} database'.format(self.analysistype))
    # Start the worker threads
    for _ in range(self.cpus):
        worker = Thread(target=self.blastthreads, args=())
        worker.setDaemon(True)
        worker.start()
    with progressbar(self.runmetadata.samples) as bar:
        for sample in bar:
            if sample.general.bestassemblyfile != 'NA':
                # Name of the BLAST report
                sample[self.analysistype].blastreport = os.path.join(
                    sample[self.analysistype].outputdir,
                    '{}_{}_blastresults.csv'.format(sample.name, self.analysistype))
                # BioPython's NCBI BLASTn command line wrapper sets the
                # parameters of the search
                blastn = NcbiblastnCommandline(
                    query=sample[self.analysistype].fasta,
                    db=os.path.splitext(sample[self.analysistype].baitfile)[0],
                    max_target_seqs=1,
                    num_threads=self.threads,
                    outfmt="'6 qseqid sseqid positive mismatch gaps evalue "
                           "bitscore slen length qstart qend qseq sstart send sseq'",
                    out=sample[self.analysistype].blastreport)
                # Record the command string on the metadata object
                sample[self.analysistype].blastcall = str(blastn)
                # Queue the object and command for the workers
                self.blastqueue.put((sample, blastn))
    self.blastqueue.join()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def blastparse(self): """ Parse the blast results, and store necessary data in dictionaries in sample object """
def blastparse(self):
    """Parse the BLAST results, and store necessary data in dictionaries in
    the sample object"""
    logging.info('Parsing BLAST results')
    for sample in self.runmetadata.samples:
        if sample.general.bestassemblyfile != 'NA':
            # Load the NCBI 16S reference database as a dictionary keyed on
            # record id
            dbrecords = SeqIO.to_dict(SeqIO.parse(sample[self.analysistype].baitfile, 'fasta'))
            # Allow for no BLAST results
            if os.path.isfile(sample[self.analysistype].blastreport):
                # Number of times each genus is the best hit
                sample[self.analysistype].frequency = dict()
                recorddict = dict()
                # BUGFIX: the report handle was previously opened without
                # ever being closed; use a context manager instead
                with open(sample[self.analysistype].blastreport) as report:
                    blastdict = DictReader(report, fieldnames=self.fieldnames,
                                           dialect='excel-tab')
                    for record in blastdict:
                        # Subject id looks like: gi|1018196593|ref|NR_136472.1|
                        subject = record['subject_id']
                        # The full reference description, e.g.
                        # gi|1018196593|ref|NR_136472.1| Escherichia marmotae
                        # strain HT073016 16S ribosomal RNA, partial sequence
                        # yields the genus, e.g. Escherichia
                        genus = dbrecords[subject].description.split('|')[-1].split()[0]
                        # Increment the per-genus and per-record tallies
                        try:
                            sample[self.analysistype].frequency[genus] += 1
                        except KeyError:
                            sample[self.analysistype].frequency[genus] = 1
                        try:
                            recorddict[dbrecords[subject].description] += 1
                        except KeyError:
                            recorddict[dbrecords[subject].description] = 1
                # Sort genera by how often each was the best hit
                sample[self.analysistype].sortedgenera = sorted(
                    sample[self.analysistype].frequency.items(),
                    key=operator.itemgetter(1), reverse=True)
                try:
                    # The most frequent genus becomes the genus of the sample
                    sample[self.analysistype].genus = sample[self.analysistype].sortedgenera[0][0]
                except IndexError:
                    # No hits at all: populate attributes with 'NA'
                    sample[self.analysistype].sortedgenera = 'NA'
                    sample[self.analysistype].genus = 'NA'
            else:
                # No BLAST report: populate attributes with 'NA'
                sample[self.analysistype].sortedgenera = 'NA'
                sample[self.analysistype].genus = 'NA'
        else:
            # Sample has no usable assembly: populate attributes with 'NA'
            sample[self.analysistype].sortedgenera = 'NA'
            sample[self.analysistype].genus = 'NA'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def load_json(json_object):
    ''' Load json from file or file name '''
    content = None
    if isinstance(json_object, str) and os.path.exists(json_object):
        # The argument is a path to an existing file
        with open_(json_object) as f:
            try:
                content = json.load(f)
            except Exception:
                debug.log("Warning: Content of '%s' file is not json."%f.name)
    elif hasattr(json_object, 'read'):
        # The argument is an already-open file-like object
        try:
            content = json.load(json_object)
        except Exception:
            debug.log("Warning: Content of '%s' file is not json."%json_object.name)
    else:
        debug.log("%s\nWarning: Object type invalid!"%json_object)
    # None when the argument was invalid or its content was not json
    return content
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sort2groups(array, gpat=['_R1','_R2']): """ Sort an array of strings to groups by patterns """
def sort2groups(array, gpat=['_R1','_R2']):
    """Sort an array of strings to groups by patterns"""
    # NOTE: the default gpat list is never mutated here, so the mutable
    # default is harmless
    groups = [REGroup(pattern) for pattern in gpat]
    unmatched = []
    for item in array:
        # First matching group claims the item; for/else catches no-match
        for group in groups:
            if group.match(item):
                break
        else:
            unmatched.append(item)
    return [sorted(group.list) for group in groups], sorted(unmatched)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sort_and_distribute(array, splits=2): """ Sort an array of strings to groups by alphabetically continuous distribution """
def sort_and_distribute(array, splits=2):
    """Sort an array of strings to groups by alphabetically continuous
    distribution (round-robin over the sorted items).

    Args:
        array (list | tuple): Items to distribute.
        splits (int): Number of groups to produce.

    Returns:
        list[list]: ``splits`` groups in round-robin order.

    Raises:
        TypeError: If ``array`` is not a list/tuple or ``splits`` is not
            an int.
    """
    if not isinstance(array, (list, tuple)):
        raise TypeError("array must be a list")
    if not isinstance(splits, int):
        raise TypeError("splits must be an integer")
    # FIX: the original popped from the front of a list (O(n^2)) and kept a
    # dead Python-2 xrange shim; a round-robin assignment over the sorted
    # items produces identical output in O(n log n) and works on both
    # Python 2 and 3.
    groups = [[] for _ in range(splits)]
    for index, item in enumerate(sorted(array)):
        groups[index % splits].append(item)
    return groups
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def file_zipper(root_dir): """ This function will zip the files created in the runroot directory and subdirectories """
def file_zipper(root_dir):
    """Gzip eligible files in root_dir and its subdirectories.

    Files that are already gzipped, tiny (<= 50 bytes) or symlinks are
    skipped; .zip archives are unpacked first and then re-compressed as .gz.
    """
    # FINDING AND ZIPPING UNZIPPED FILES
    for root, dirs, files in os.walk(root_dir, topdown=False):
        if root != "":
            if root[-1] != '/':
                root += '/'
            for current_file in files:
                # BUGFIX: os.path.join instead of "%s/%s" % (root, file),
                # which produced a double slash since root ends with '/'
                filepath = os.path.join(root, current_file)
                try:
                    file_size = os.path.getsize(filepath)
                except Exception as e:
                    file_size = 0
                    debug.log('Error: file_zipper failed to zip following file '+filepath, e)
                # Excluding small files, gzipped files and links
                if (file_size > 50 and current_file[-3:] != ".gz"
                        and not os.path.islink(filepath)):
                    if current_file[-4:] == ".zip":
                        # Unzip file
                        ec = Popen('unzip -qq "%s" -d %s > /dev/null 2>&1'%(filepath, root),
                                   shell=True).wait()
                        if ec > 0:
                            debug.log('Error: fileZipper failed to unzip following file %s'%filepath)
                            continue
                        else:
                            ec = Popen('rm -f "%s" > /dev/null 2>&1'%(filepath),
                                       shell=True).wait()
                            if ec > 0:
                                debug.log('Error: fileZipper failed to delete the original zip file (%s)'%filepath)
                            filepath = filepath[:-4]
                    # Saving a gzipped version
                    with open_(filepath, 'rb') as f, open_(filepath+".gz", 'wb', 9) as gz:
                        gz.writelines(f)
                    # Deleting old (non-zipped) file
                    try:
                        os.remove(filepath)
                    except OSError as e:
                        debug.log(("WARNING! The file %s could not be "
                                   "removed!\n%s")%(current_file, e))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def file_unzipper(directory): """ This function will unzip all files in the runroot directory and subdirectories """
def file_unzipper(directory):
    """Unzip all files in the given directory and its subdirectories."""
    debug.log("Unzipping directory (%s)..."%directory)
    # FINDING AND UNZIPPING ZIPPED FILES
    for root, dirs, files in os.walk(directory, topdown=False):
        if root != "":
            orig_dir = os.getcwd()
            # BUGFIX: chdir into the directory currently being walked.
            # Previously this always chdir'ed into the top-level directory,
            # so files in subdirectories were never unzipped even though
            # the docstring promises recursive behaviour.
            os.chdir(root)
            Popen('gunzip -q -f *.gz > /dev/null 2>&1', shell=True).wait()
            Popen('unzip -qq -o "*.zip" > /dev/null 2>&1', shell=True).wait()
            Popen('rm -f *.zip > /dev/null 2>&1', shell=True).wait()
            os.chdir(orig_dir)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def move_file(src, dst): """ this function will simply move the file from the source path to the dest path given as input """
def move_file(src, dst):
    """Move the file(s) matching *src* to the destination path *dst*."""
    # Sanity checkpoint: strip characters outside word/slash/dash/dot/star
    src = re.sub(r'[^\w/\-\.\*]', '', src)
    dst = re.sub(r'[^\w/\-\.\*]', '', dst)
    if len(re.sub(r'[\W]', '', src)) < 5 or len(re.sub(r'[\W]', '', dst)) < 5:
        debug.log("Error: Moving file failed. Provided paths are invalid! src='%s' dst='%s'"%(src, dst))
        return
    # Check destination
    dst_ok = False
    if dst[-1] == '/':
        if os.path.exists(dst):
            dst_ok = True  # Valid Dir
        else:
            debug.log("Error: Moving file failed. Destination directory does not exist (%s)"%(dst)) #DEBUG
    elif os.path.exists(dst):
        if os.path.isdir(dst):
            dst_ok = True  # Valid Dir
            dst += '/'  # Add missing slash
        else:
            debug.log("Error: Moving file failed. %s exists!"%dst)
    elif os.path.exists(os.path.dirname(dst)):
        dst_ok = True  # Valid file path
    else:
        debug.log("Error: Moving file failed. %s is an invalid distination!"%dst)
    if not dst_ok:
        return
    # Check source
    matches = glob.glob(src)
    if not matches:
        debug.log("Error: Moving file failed. No files were found! (%s)"%src)
        return
    debug.log("Moving File(s)...", "Move from %s"%src, "to %s"%dst)
    for path in matches:
        # Refuse file names containing invalid symbols
        invalid_chars = re.findall(r'[^\w/\-\.\*]', os.path.basename(path))
        if invalid_chars:
            debug.graceful_exit(("Error: File %s contains invalid "
                                 "characters %s!")%(os.path.basename(path), invalid_chars))
            continue
        # Only move regular files
        if os.path.isfile(path):
            debug.log("Moving file: %s"%path)
            shutil.move(path, dst)
        else:
            debug.log("Error: Moving file failed. %s is not a regular file!"%path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log(self, *lst): """ Print list of strings to the predefined logfile if debug is set. and sets the caught_error message if an error is found """
def log(self, *lst):
    """Print a list of strings to the predefined logfile if debug is set,
    and record the message in caught_error when it contains an error."""
    self.print2file(self.logfile, self.debug, True, *lst)
    # Compute the joined message once; store it when it reports an error
    joined = '\n'.join(str(entry) for entry in lst)
    if 'Error' in joined:
        self.caught_error = joined
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log_no_newline(self, msg): """ print the message to the predefined log file without newline """
def log_no_newline(self, msg):
    """Print the message to the predefined log file without a trailing
    newline."""
    # print2file args: (file, debug_flag, append_newline, message)
    self.print2file(self.logfile, False, False, msg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def graceful_exit(self, msg): """ This function Tries to update the MSQL database before exiting. """
def graceful_exit(self, msg):
    """Log the message (flushing any stored error to stderr first) and
    terminate the process with exit status 1."""
    # Flush any previously caught error to stderr
    if self.caught_error:
        self.print2file(self.stderr, False, False, self.caught_error)
    # Log the final message and exit
    self.log(msg)
    sys.exit(1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def match(self, s): """ Matches the string to the stored regular expression, and stores all groups in mathches. Returns False on negative match. """
def match(self, s):
    """Match *s* against the stored regular expression.

    The match object (with all its groups) is stored in ``self.matches``.
    Returns the match object, or None (falsy) on a negative match.
    """
    self.matches = self.re.search(s)
    return self.matches
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def reporter(self): """ Create the MASH report """
def reporter(self):
    """Create the MASH report"""
    logging.info('Creating {} report'.format(self.analysistype))
    make_path(self.reportpath)
    rows = ['Strain,ReferenceGenus,ReferenceFile,ReferenceGenomeMashDistance,Pvalue,NumMatchingHashes\n']
    for sample in self.metadata:
        try:
            rows.append('{},{},{},{},{},{}\n'.format(
                sample.name,
                sample[self.analysistype].closestrefseqgenus,
                sample[self.analysistype].closestrefseq,
                sample[self.analysistype].mashdistance,
                sample[self.analysistype].pvalue,
                sample[self.analysistype].nummatches))
        except AttributeError:
            # Samples without MASH attributes still get a name-only row
            rows.append('{}\n'.format(sample.name))
    # Create the report file
    with open(os.path.join(self.reportpath, 'mash.csv'), 'w') as report:
        report.write(''.join(rows))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def edges(self): """ Return the edge characters of this node. """
def edges(self):
    """Return the edge characters of this node."""
    # The C library fills a fixed-size buffer with the edge characters
    buf = ctypes.create_string_buffer(MAX_CHARS)
    cgaddag.gdg_edges(self.gdg, self.node, buf)
    return list(buf.value.decode("ascii"))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def letter_set(self): """ Return the letter set of this node. """
def letter_set(self):
    """Return the letter set of this node."""
    # The C library fills a fixed-size buffer with the letter-set characters
    buf = ctypes.create_string_buffer(MAX_CHARS)
    cgaddag.gdg_letter_set(self.gdg, self.node, buf)
    return list(buf.value.decode("ascii"))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_end(self, char): """ Return `True` if this `char` is part of this node's letter set, `False` otherwise. """
def is_end(self, char):
    """Return `True` if this `char` is part of this node's letter set,
    `False` otherwise."""
    lowered = char.lower()
    return bool(cgaddag.gdg_is_end(self.gdg, self.node, lowered.encode("ascii")))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def follow(self, chars): """ Traverse the GADDAG to the node at the end of the given characters. Args: chars: An string of characters to traverse in the GADDAG. Returns: The Node which is found by traversing the tree. """
def follow(self, chars):
    """Traverse the GADDAG to the node at the end of the given characters.

    Args:
        chars: A string of characters to traverse in the GADDAG.

    Returns:
        The Node found by traversing the tree.

    Raises:
        KeyError: If a character has no edge from the current node.
    """
    current = self.node
    for char in chars.lower():
        current = cgaddag.gdg_follow_edge(self.gdg, current, char.encode("ascii"))
        # A NULL/falsy result means there is no edge for this character
        if not current:
            raise KeyError(char)
    return Node(self.gdg, current)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read(alias_name, allow_none=False): """Get the raw docker link value. Get the raw environment variable for the docker link Args: alias_name: The environment variable name default: The default value if the link isn't available allow_none: If the return value can be `None` (i.e. optional) """
def read(alias_name, allow_none=False):
    """Get the raw docker link value.

    Reads the raw ``<ALIAS>_PORT`` environment variable for the docker link.

    Args:
        alias_name: The environment variable name.
        allow_none: If the return value can be `None` (i.e. optional).
    """
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    env_var = '{0}_PORT'.format(alias_name)
    return core.read(env_var, default=None, allow_none=allow_none)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def isset(alias_name): """Return a boolean if the docker link is set or not and is a valid looking docker link value. Args: alias_name: The link alias name """
def isset(alias_name):
    """Return whether the docker link is set and looks like a valid docker
    link value.

    Args:
        alias_name: The link alias name.
    """
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    raw_value = read(alias_name, allow_none=True)
    if not raw_value:
        return False
    # A docker link value looks like proto://host:port
    if re.match(r'.+://.+:\d+', raw_value):
        return True
    warnings.warn('"{0}_PORT={1}" does not look like a docker link.'.format(alias_name, raw_value),
                  stacklevel=2)
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def protocol(alias_name, default=None, allow_none=False): """Get the protocol from the docker link alias or return the default. Args: alias_name: The docker link alias default: The default value if the link isn't available allow_none: If the return value can be `None` (i.e. optional) Examples: Assuming a Docker link was created with ``docker --link postgres:db`` and the resulting environment variable is ``DB_PORT=tcp://172.17.0.82:5432``. tcp """
def protocol(alias_name, default=None, allow_none=False):
    """Get the protocol from the docker link alias or return the default.

    Args:
        alias_name: The docker link alias.
        default: The default value if the link isn't available.
        allow_none: If the return value can be `None` (i.e. optional).

    Examples:
        With ``docker --link postgres:db`` and
        ``DB_PORT=tcp://172.17.0.82:5432``, returns ``tcp``.
    """
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    try:
        return _split_docker_link(alias_name)[0]
    except KeyError:
        # BUGFIX: the original tested `if default or allow_none`, which
        # re-raised when a falsy default (e.g. '') was explicitly supplied.
        if default is not None or allow_none:
            return default
        raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def port(alias_name, default=None, allow_none=False): """Get the port from the docker link alias or return the default. Args: alias_name: The docker link alias default: The default value if the link isn't available allow_none: If the return value can be `None` (i.e. optional) Examples: Assuming a Docker link was created with ``docker --link postgres:db`` and the resulting environment variable is ``DB_PORT=tcp://172.17.0.82:5432``. 5432 """
def port(alias_name, default=None, allow_none=False):
    """Get the port from the docker link alias or return the default.

    Args:
        alias_name: The docker link alias.
        default: The default value if the link isn't available.
        allow_none: If the return value can be `None` (i.e. optional).

    Examples:
        With ``docker --link postgres:db`` and
        ``DB_PORT=tcp://172.17.0.82:5432``, returns ``5432``.
    """
    warnings.warn('Will be removed in v1.0', DeprecationWarning, stacklevel=2)
    try:
        return int(_split_docker_link(alias_name)[2])
    except KeyError:
        # BUGFIX: the original tested `if default or allow_none`, which
        # re-raised when a falsy default (e.g. 0) was explicitly supplied.
        if default is not None or allow_none:
            return default
        raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def attributer(self): """ Parses the 16S target files to link accession numbers stored in the .fai and metadata files to the genera stored in the target file """
# Body of SixteenS.attributer: maps 16S BLAST/bait hits back to genera using
# the record descriptions in the bait FASTA, and flags mixed samples.
from Bio import SeqIO
import operator
for sample in self.runmetadata.samples:
    # Load the records from the target file into a dictionary
    record_dict = SeqIO.to_dict(SeqIO.parse(sample[self.analysistype].baitfile, "fasta"))
    sample[self.analysistype].classification = set()
    sample[self.analysistype].genera = dict()
    # Add all the genera with hits into the set of genera
    for result in sample[self.analysistype].results:
        # Record headers end in '|<Genus> <species> ...'; only the first two
        # tokens after the final '|' are used. NOTE(review): ``species`` is
        # unpacked but never read — presumably kept for readability.
        genus, species = record_dict[result].description.split('|')[-1].split()[:2]
        sample[self.analysistype].classification.add(genus)
        sample[self.analysistype].genera[result] = genus
    # Convert the set to a list for easier JSON serialisation
    sample[self.analysistype].classification = list(sample[self.analysistype].classification)
    # If there is a mixed sample, then further analyses will be complicated
    if len(sample[self.analysistype].classification) > 1:
        # print('multiple: ', sample.name, sample[self.analysistype].classification)
        # Mixed sample: store the whole list of candidate genera.
        sample.general.closestrefseqgenus = sample[self.analysistype].classification
        # sample.general.bestassemblyfile = 'NA'
        sample[self.analysistype].multiple = True
    else:
        sample[self.analysistype].multiple = False
        try:
            # Recreate the results dictionary with the percent identity as a float rather than a string
            sample[self.analysistype].intresults = \
                {key: float(value) for key, value in sample[self.analysistype].results.items()}
            # Set the best hit to be the top entry from the sorted results
            sample[self.analysistype].besthit = sorted(sample[self.analysistype].intresults.items(),
                                                       key=operator.itemgetter(1),
                                                       reverse=True)[0]
            sample.general.closestrefseqgenus = sample[self.analysistype].classification[0]
        except IndexError:
            # No hits at all for this sample: mark it as unassemblable.
            sample.general.bestassemblyfile = 'NA'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def spawn_server_api(api_name, app, api_spec, error_callback, decorator): """Take a a Flask app and a swagger file in YAML format describing a REST API, and populate the app with routes handling all the paths and methods declared in the swagger file. Also handle marshaling and unmarshaling between json and object instances representing the definitions from the swagger file. """
def mycallback(endpoint): handler_func = get_function(endpoint.handler_server) # Generate api endpoint around that handler handler_wrapper = _generate_handler_wrapper(api_name, api_spec, endpoint, handler_func, error_callback, decorator) # Bind handler to the API path log.info("Binding %s %s ==> %s" % (endpoint.method, endpoint.path, endpoint.handler_server)) endpoint_name = '_'.join([endpoint.method, endpoint.path]).replace('/', '_') app.add_url_rule(endpoint.path, endpoint_name, handler_wrapper, methods=[endpoint.method]) api_spec.call_on_each_endpoint(mycallback) # Add custom error handlers to the app add_error_handlers(app)
def _responsify(api_spec, error, status):
    """Serialize a bravado-core error model into a Flask Response.

    The model is converted to its JSON form via the api spec, wrapped with
    ``jsonify``, and stamped with the supplied HTTP status code.
    """
    response = jsonify(api_spec.model_to_json(error))
    response.status_code = status
    return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_escape( foreground=None, background=None, bold=False, faint=False, italic=False, underline=False, blink=False, inverted=False ): """Returns the ANSI escape sequence to set character formatting. foreground Foreground colour to use. Accepted types: None, int (xterm palette ID), tuple (RGB, RGBA), Colour background Background colour to use. Accepted types: None, int (xterm palette ID), tuple (RGB, RGBA), Colour bold Enable bold text (default: False) faint Enable faint text (default: False) italic Enable italic text (default: False) underline Enable underlined text (default: False) blink Enable blinky text (default: False) inverted Enable inverted text (default: False) """
# Body of format_escape: build a single ANSI SGR escape sequence from the
# requested colours and text attributes.
# Foreground: ints are treated as xterm-256 palette IDs; anything else is
# normalised to RGBA, and a fully transparent colour emits no command.
fg_format = None
if isinstance( foreground, int ):
    fg_format = ANSI_FORMAT_FOREGROUND_XTERM_CMD.format( foreground )
else:
    fg_rgba = colour.normalise_rgba( foreground )
    if fg_rgba[3] != 0:
        fg_format = ANSI_FORMAT_FOREGROUND_CMD.format( *fg_rgba[:3] )
# Background: same resolution rules as the foreground.
bg_format = None
if isinstance( background, int ):
    bg_format = ANSI_FORMAT_BACKGROUND_XTERM_CMD.format( background )
else:
    bg_rgba = colour.normalise_rgba( background )
    if bg_rgba[3] != 0:
        bg_format = ANSI_FORMAT_BACKGROUND_CMD.format( *bg_rgba[:3] )
# Collect every applicable SGR command, then join them into one sequence.
colour_format = []
if fg_format is not None:
    colour_format.append( fg_format )
if bg_format is not None:
    colour_format.append( bg_format )
if bold:
    colour_format.append( ANSI_FORMAT_BOLD_CMD )
if faint:
    colour_format.append( ANSI_FORMAT_FAINT_CMD )
if italic:
    colour_format.append( ANSI_FORMAT_ITALIC_CMD )
if underline:
    colour_format.append( ANSI_FORMAT_UNDERLINE_CMD )
if blink:
    colour_format.append( ANSI_FORMAT_BLINK_CMD )
if inverted:
    colour_format.append( ANSI_FORMAT_INVERTED_CMD )
# ANSI_FORMAT_BASE wraps the ';'-joined commands into the final escape code.
colour_format = ANSI_FORMAT_BASE.format( ';'.join( colour_format ) )
return colour_format
def format_string( string, foreground=None, background=None, reset=True, bold=False, faint=False, italic=False, underline=False, blink=False, inverted=False ):
    """Return ``string`` wrapped in an ANSI formatting escape sequence.

    string
        String to format

    foreground / background
        Colour to use. Accepted types: None, int (xterm palette ID),
        tuple (RGB, RGBA), Colour

    reset
        Reset the formatting at the end (default: True)

    bold, faint, italic, underline, blink, inverted
        Text attribute toggles (default: False)
    """
    prefix = format_escape(
        foreground, background, bold, faint, italic, underline, blink, inverted
    )
    suffix = ANSI_FORMAT_RESET if reset else ''
    return '{}{}{}'.format( prefix, string, suffix )
def format_pixels( top, bottom, reset=True, repeat=1 ):
    """Return the ANSI escape sequence to render two vertically-stacked pixels
    as a single monospace character.

    top
        Top colour to use. Accepted types: None, int (xterm palette ID),
        tuple (RGB, RGBA), Colour

    bottom
        Bottom colour to use. Accepted types: None, int (xterm palette ID),
        tuple (RGB, RGBA), Colour

    reset
        Reset the formatting at the end (default: True)

    repeat
        Number of horizontal pixels to render (default: 1)
    """
    # Normalise each input: ints are xterm-256 palette IDs and pass through
    # untouched; everything else becomes an RGBA tuple, with full
    # transparency collapsing to None.
    top_src = None
    if isinstance( top, int ):
        top_src = top
    else:
        top_rgba = colour.normalise_rgba( top )
        if top_rgba[3] != 0:
            top_src = top_rgba
    bottom_src = None
    if isinstance( bottom, int ):
        bottom_src = bottom
    else:
        bottom_rgba = colour.normalise_rgba( bottom )
        if bottom_rgba[3] != 0:
            bottom_src = bottom_rgba
    # short circuit for empty pixel
    if (top_src is None) and (bottom_src is None):
        return ' '*repeat
    # Default glyph is the upper-half block: foreground paints the top pixel,
    # background paints the bottom pixel.
    string = '▀'*repeat
    colour_format = []
    if top_src == bottom_src:
        # Identical halves: a full block needs only the foreground colour.
        string = '█'*repeat
    elif (top_src is None) and (bottom_src is not None):
        # Transparent top: draw the lower-half block instead.
        string = '▄'*repeat
    if (top_src is None) and (bottom_src is not None):
        if isinstance( bottom_src, int ):
            colour_format.append( ANSI_FORMAT_FOREGROUND_XTERM_CMD.format( bottom_src ) )
        else:
            colour_format.append( ANSI_FORMAT_FOREGROUND_CMD.format( *bottom_src[:3] ) )
    else:
        if isinstance( top_src, int ):
            colour_format.append( ANSI_FORMAT_FOREGROUND_XTERM_CMD.format( top_src ) )
        else:
            colour_format.append( ANSI_FORMAT_FOREGROUND_CMD.format( *top_src[:3] ) )
    if top_src is not None and bottom_src is not None and top_src != bottom_src:
        # Bug fix: the background command emits bottom_src, so the type check
        # must be on bottom_src. The original tested isinstance(top_src, int)
        # and raised TypeError (slicing an int) whenever top was an RGBA
        # tuple and bottom was a palette index.
        if isinstance( bottom_src, int ):
            colour_format.append( ANSI_FORMAT_BACKGROUND_XTERM_CMD.format( bottom_src ) )
        else:
            colour_format.append( ANSI_FORMAT_BACKGROUND_CMD.format( *bottom_src[:3] ) )
    colour_format = ANSI_FORMAT_BASE.format( ';'.join( colour_format ) )
    reset_format = '' if not reset else ANSI_FORMAT_RESET
    return '{}{}{}'.format( colour_format, string, reset_format )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_image_iter( data_fetch, x_start=0, y_start=0, width=32, height=32, frame=0, columns=1, downsample=1 ): """Return the ANSI escape sequence to render a bitmap image. data_fetch Function that takes three arguments (x position, y position, and frame) and returns a Colour corresponding to the pixel stored there, or Transparent if the requested pixel is out of bounds. x_start Offset from the left of the image data to render from. Defaults to 0. y_start Offset from the top of the image data to render from. Defaults to 0. width Width of the image data to render. Defaults to 32. height Height of the image data to render. Defaults to 32. frame Single frame number/object, or a list to render in sequence. Defaults to frame 0. columns Number of frames to render per line (useful for printing tilemaps!). Defaults to 1. downsample Shrink larger images by printing every nth pixel only. Defaults to 1. """
# Body of format_image_iter: yields one rendered text row (two pixel rows)
# at a time, run-length-encoding identical adjacent pixel pairs.
# EAFP: accept either a single frame or any iterable of frames.
frames = []
try:
    frame_iter = iter( frame )
    frames = [f for f in frame_iter]
except TypeError:
    frames = [frame]
# Frames are laid out in a grid, ``columns`` wide.
rows = math.ceil( len( frames )/columns )
for r in range( rows ):
    # Step two pixel rows at a time: each text row packs two pixels.
    for y in range( 0, height, 2*downsample ):
        result = []
        # The last grid row may hold fewer than ``columns`` frames.
        for c in range( min( (len( frames )-r*columns), columns ) ):
            # Gather (top, bottom) colour pairs for this strip.
            row = []
            for x in range( 0, width, downsample ):
                fr = frames[r*columns + c]
                c1 = data_fetch( x_start+x, y_start+y, fr )
                c2 = data_fetch( x_start+x, y_start+y+downsample, fr )
                row.append( (c1, c2) )
            # NOTE(review): prev_pixel is assigned but never used.
            prev_pixel = None
            pointer = 0
            # Run-length encode: emit one escape per run of identical pairs.
            while pointer < len( row ):
                start = pointer
                pixel = row[pointer]
                while pointer < len( row ) and (row[pointer] == pixel):
                    pointer += 1
                result.append( format_pixels( pixel[0], pixel[1], repeat=pointer-start ) )
        yield ''.join( result )
return
def update_buffer_with_value( self, value, buffer, parent=None ):
    """Write a Python object into a byte array, using the field definition.

    value
        Input Python object to process.

    buffer
        Output byte array to encode value into.

    parent
        Parent block object where this Field is defined. Used for e.g.
        evaluating Refs.
    """
    # Base implementation only enforces preconditions; subclasses perform
    # the actual encoding into the buffer.
    assert common.is_bytes( buffer )
    self.validate( value, parent )
def get_end_offset( self, value, parent=None, index=None ):
    """Return the end offset of the Field's data. Useful for chainloading.

    value
        Input Python object to process.

    parent
        Parent block object where this Field is defined. Used for e.g.
        evaluating Refs.

    index
        Index of the Python object to measure from. Used if the Field
        takes a list of objects.
    """
    # End = start + size, both delegated to the concrete Field.
    start = self.get_start_offset( value, parent, index )
    return start + self.get_size( value, parent, index )
def nonalpha_split(string):
    '''Split 'string' along any punctuation or whitespace.'''
    # The alternation keeps the delimiter runs in the output alongside the
    # alphabetic tokens, so the pieces can be re-joined losslessly.
    pattern = r'[%s]+|[^%s]+' % (A, A)
    return re.findall(pattern, string, flags=FLAGS)
def extract_words(string):
    '''Extract all alphabetic syllabified forms from 'string'.'''
    # A word is alphabetic characters optionally containing internal
    # syllable-boundary dots, but beginning and ending with a letter.
    pattern = r'[%s]+[%s\.]*[%s]+' % (A, A, A)
    return re.findall(pattern, string, flags=FLAGS)
def init_threads(t=None, s=None):
    """Install the module-level THREAD and SIGNAL factories.

    Falls back to the dummyThread / dummySignal implementations when no
    (truthy) replacement is supplied.
    """
    global THREAD, SIGNAL
    THREAD = t if t else dummyThread
    SIGNAL = s if s else dummySignal
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def thread_with_callback(on_error, on_done, requete_with_callback): """ Return a thread emiting `state_changed` between each sub-requests. :param on_error: callback str -> None :param on_done: callback object -> None :param requete_with_callback: Job to execute. monitor_callable -> None :return: Non started thread """
# Body of thread_with_callback: build a THREAD subclass wired with three
# signals and return it (not started). Caller connects/starts it.
class C(THREAD):
    # Emitted with an error message when the job raises a known error.
    error = SIGNAL(str)
    # Emitted with the job's result on success.
    done = SIGNAL(object)
    # Forwarded into the job as its progress callback (current, total).
    state_changed = SIGNAL(int, int)

    def __del__(self):
        # Block on thread completion before the object is reclaimed.
        self.wait()

    def run(self):
        try:
            r = requete_with_callback(self.state_changed.emit)
        except (ConnexionError, StructureError) as e:
            self.error.emit(str(e))
        else:
            self.done.emit(r)

th = C()
th.error.connect(on_error)
th.done.connect(on_done)
return th
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_parser(parser: argparse.ArgumentParser) -> None: """Build a parser for CLI arguments and options."""
# Body of build_parser: register this tool's CLI options on an existing
# argparse parser. Mutates ``parser`` in place; returns None.
parser.add_argument(
    '--delimiter',
    help='a delimiter for the samples (teeth) in the key',
    default=' ',
)
parser.add_argument(
    '--encoding',
    help='the encoding of the population file',
    default='utf-8',
)
# Stored as ``nteeth`` so downstream code can pass it straight to key().
parser.add_argument(
    '--nsamples', '-n',
    help='the number of random samples to take',
    type=int,
    default=6,
    dest='nteeth',
)
# Either one of the named built-in populations or a file path.
parser.add_argument(
    '--population', '-p',
    help='{0}, or a path to a file of line-delimited items'.format(
        ', '.join(POPULATIONS.keys()),
    ),
    default='/usr/share/dict/words',
)
parser.add_argument(
    '--stats',
    help='show statistics for the key',
    default=False,
    action='store_true',
)
parser.add_argument(
    '--version',
    action='version',
    version='%(prog)s {0}'.format(__version__),
)
def default_parser() -> argparse.ArgumentParser:
    """Create a parser for CLI arguments and options."""
    # Defaults-in-help formatter so --help shows each option's default.
    fresh_parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        prog=CONSOLE_SCRIPT,
    )
    build_parser(fresh_parser)
    return fresh_parser
def key(
    seq: Sequence,
    tooth: Callable[[Sequence], str] = (
        lambda seq: str(random.SystemRandom().choice(seq)).strip()
    ),
    nteeth: int = 6,
    delimiter: str = ' ',
) -> str:
    """Join ``nteeth`` samples produced by ``tooth(seq)`` with ``delimiter``.

    The default tooth draws a cryptographically random element from ``seq``
    and strips surrounding whitespace.
    """
    samples = [tooth(seq) for _ in range(nteeth)]
    return delimiter.join(samples)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(argv: Sequence[str] = SYS_ARGV) -> int: """Execute CLI commands."""
# Body of main: parse CLI args, resolve the population, print the key and
# (optionally) its statistics. Returns a process exit code.
args = default_parser().parse_args(argv)
try:
    # EAFP: try the built-in named populations first...
    seq = POPULATIONS[args.population]  # type: Sequence
except KeyError:
    # ...otherwise treat the value as a path to a line-delimited file.
    try:
        with open(args.population, 'r', encoding=args.encoding) as file_:
            seq = list(file_)
    except (OSError, UnicodeError) as ex:
        print(ex, file=sys.stderr)
        return 1
main_key = key(seq=seq, nteeth=args.nteeth, delimiter=args.delimiter)
print(main_key)
if args.stats:
    print('*', len(main_key), 'characters')
    print('*', args.nteeth, 'samples from a population of', len(seq))
    # With no delimiter adjacent samples can merge ambiguously, so the
    # entropy figure becomes an upper bound ('<') rather than an estimate.
    print(
        '* entropy {sign} {nbits} bits'.format(
            sign='~' if args.delimiter else '<',
            nbits=round(math.log(len(seq), 2) * args.nteeth, 2),
        ),
    )
return 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def apt_add_repository_from_apt_string(apt_string, apt_file): """ adds a new repository file for apt """
# Body of apt_add_repository_from_apt_string: idempotently append an apt
# source line to a file under sources.list.d and refresh the package index.
apt_file_path = '/etc/apt/sources.list.d/%s' % apt_file
# NOTE(review): the line is lower-cased before comparison and writing —
# verify this is intentional, as repository URL paths can be case-sensitive.
if not file_contains(apt_file_path, apt_string.lower(), use_sudo=True):
    file_append(apt_file_path, apt_string.lower(), use_sudo=True)
    # Only re-run 'apt-get update' when the source list actually changed.
    with hide('running', 'stdout'):
        sudo("DEBIAN_FRONTEND=noninteractive apt-get update")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def arch(): """ returns the current cpu archictecture """
# Body of arch().
# NOTE(review): despite the docstring ("cpu architecture"), 'rpm -E %dist'
# expands the RPM %dist macro (a distribution tag such as '.el7'), not the
# CPU architecture — confirm which value callers actually expect.
with settings(hide('warnings', 'running', 'stdout', 'stderr'),
              warn_only=True, capture=True):
    result = sudo('rpm -E %dist').strip()
    return result
def disable_openssh_rdns(distribution):
    """Set 'UseDNS no' in openssh config to disable rDNS lookups.

    openssh performs a reverse-DNS lookup on the client IP for every new
    channel; when that lookup is slow or failing it adds seconds of latency
    per channel, which hurts tools (e.g. fabric) that open many channels.
    The directive is added idempotently and sshd is reloaded in place.

    :param bytes distribution: the name of the distribution running on the
        node.
    """
    log_green('Disabling openssh reverse dns lookups')
    config_path = '/etc/ssh/sshd_config'
    directive = 'UseDNS no'
    if not file_contains(config_path, directive, use_sudo=True):
        file_append(config_path, directive, use_sudo=True)
    # Ubuntu names the service 'ssh'; other distributions use 'sshd'.
    service = 'ssh' if 'ubuntu' in distribution else 'sshd'
    sudo('service {} reload'.format(service))
def connect_to_ec2(region, access_key_id, secret_access_key):
    """ returns a connection object to AWS EC2 """
    # Thin wrapper over boto's regional connection factory.
    return boto.ec2.connect_to_region(
        region,
        aws_access_key_id=access_key_id,
        aws_secret_access_key=secret_access_key,
    )
def connect_to_rackspace(region, access_key_id, secret_access_key):
    """ returns a connection object to Rackspace """
    # Configure pyrax's global state, then open a cloudservers client.
    pyrax.set_setting('identity_type', 'rackspace')
    pyrax.set_default_region(region)
    pyrax.set_credentials(access_key_id, secret_access_key)
    return pyrax.connect_to_cloudservers(region=region)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_gce_image(zone, project, instance_name, name, description): """ Shuts down the instance and creates and image from the disk. Assumes that the disk name is the same as the instance_name (this is the default behavior for boot disks on GCE). """
# Body of create_gce_image: stop the instance, then snapshot its boot disk
# into a GCE image and block until the operation completes.
# GCE names the boot disk after the instance by default.
disk_name = instance_name
try:
    down_gce(instance_name=instance_name, project=project, zone=zone)
except HttpError as e:
    # 404 means the instance is already stopped/gone — that is fine here.
    if e.resp.status == 404:
        log_yellow("the instance {} is already down".format(instance_name))
    else:
        raise e
body = {
    "rawDisk": {},
    "name": name,
    "sourceDisk": "projects/{}/zones/{}/disks/{}".format(
        project, zone, disk_name
    ),
    "description": description
}
compute = _get_gce_compute()
# Image creation is asynchronous; wait for the insert operation to finish.
gce_wait_until_done(
    compute.images().insert(project=project, body=body).execute()
)
return name
def create_image(cloud, **kwargs):
    """ proxy call for ec2, rackspace create ami backend functions

    Dispatches to the backend matching ``cloud`` ('ec2', 'rackspace' or
    'gce') and returns whatever that backend returns.

    Raises:
        ValueError: for an unknown cloud name. (The original silently
            returned None, inconsistent with the sibling ``create_server``.)
    """
    if cloud == 'ec2':
        return create_ami(**kwargs)
    if cloud == 'rackspace':
        return create_rackspace_image(**kwargs)
    if cloud == 'gce':
        return create_gce_image(**kwargs)
    raise ValueError("Unknown cloud type: {}".format(cloud))
def create_server(cloud, **kwargs):
    """Create a new instance on the named cloud provider.

    Raises:
        ValueError: if ``cloud`` is not 'ec2', 'rackspace' or 'gce'.
    """
    # Validate up-front so the error path reads as a guard clause.
    if cloud not in ('ec2', 'rackspace', 'gce'):
        raise ValueError("Unknown cloud type: {}".format(cloud))
    if cloud == 'ec2':
        _create_server_ec2(**kwargs)
    elif cloud == 'rackspace':
        _create_server_rackspace(**kwargs)
    else:
        _create_server_gce(**kwargs)
def gce_wait_until_done(operation):
    """ Perform a GCE operation, blocking until the operation completes.

    This function will then poll the operation until it reaches state
    'DONE' or times out, and then returns the final operation resource dict.

    :param operation: A dict representing a pending GCE operation resource.

    :returns dict: A dict representing the concluded GCE operation resource.
    """
    operation_name = operation['name']
    if 'zone' in operation:
        # Zone-scoped operation: project and zone are embedded in the URL,
        # e.g. .../projects/<project>/zones/<zone>.
        zone_url_parts = operation['zone'].split('/')
        project = zone_url_parts[-3]
        zone = zone_url_parts[-1]

        def get_zone_operation():
            return _get_gce_compute().zoneOperations().get(
                project=project,
                zone=zone,
                operation=operation_name
            )
        update = get_zone_operation
    else:
        # Global operation: only the project is needed, parsed from selfLink.
        project = operation['selfLink'].split('/')[-4]

        def get_global_operation():
            return _get_gce_compute().globalOperations().get(
                project=project,
                operation=operation_name
            )
        update = get_global_operation
    done = False
    latest_operation = None
    start = time()
    timeout = 5 * 60  # seconds
    while not done:
        latest_operation = update().execute()
        log_yellow("waiting for operation")
        if (latest_operation['status'] == 'DONE' or
                time() - start > timeout):
            done = True
        else:
            # Bug fix: removed a stray Python 2 ``print "waiting for
            # operation"`` statement here — a SyntaxError under Python 3
            # and a duplicate of the log_yellow call above.
            sleep(10)
    return latest_operation
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def startup_gce_instance(instance_name, project, zone, username, machine_type, image, public_key, disk_name=None): """ For now, jclouds is broken for GCE and we will have static slaves in Jenkins. Use this to boot them. """
# Body of startup_gce_instance: build the instance config, issue the insert
# request, and block until GCE reports the operation finished.
log_green("Started...")
log_yellow("...Creating GCE Jenkins Slave Instance...")
instance_config = get_gce_instance_config(
    instance_name, project, zone, machine_type, image,
    username, public_key, disk_name
)
# instances().insert is asynchronous; it returns a pending operation dict.
operation = _get_gce_compute().instances().insert(
    project=project,
    zone=zone,
    body=instance_config
).execute()
result = gce_wait_until_done(operation)
if not result:
    raise RuntimeError("Creation of VM timed out or returned no result")
log_green("Instance has booted")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_server_ec2(region, access_key_id, secret_access_key, disk_name, disk_size, ami, key_pair, instance_type, username, tags={}, security_groups=None): """ Creates EC2 Instance and saves it state in a local json file """
# Body of _create_server_ec2: boot an EC2 instance, wait for SSH, tag it and
# persist its details to the local state file.
# NOTE(review): the signature (in the record above) uses a mutable default
# ``tags={}`` — shared across calls; prefer ``tags=None``.
conn = connect_to_ec2(region, access_key_id, secret_access_key)
log_green("Started...")
log_yellow("...Creating EC2 instance...")
# we need a larger boot device to store our cached images
ebs_volume = EBSBlockDeviceType()
ebs_volume.size = disk_size
bdm = BlockDeviceMapping()
bdm[disk_name] = ebs_volume
# get an ec2 ami image object with our choosen ami
image = conn.get_all_images(ami)[0]
# start a new instance
reservation = image.run(1, 1,
                        key_name=key_pair,
                        security_groups=security_groups,
                        block_device_map=bdm,
                        instance_type=instance_type)
# and get our instance_id
instance = reservation.instances[0]
# and loop and wait until ssh is available
while instance.state == u'pending':
    log_yellow("Instance state: %s" % instance.state)
    sleep(10)
    instance.update()
log_green("Instance state: %s" % instance.state)
wait_for_ssh(instance.public_dns_name)
# update the EBS volumes to be deleted on instance termination
# NOTE(review): loop variable ``bd`` is unused; only the device name is
# needed to flag delete-on-termination.
for dev, bd in instance.block_device_mapping.items():
    instance.modify_attribute('BlockDeviceMapping',
                              ["%s=%d" % (dev, 1)])
# add a tag to our instance
conn.create_tags([instance.id], tags)
log_green("Public dns: %s" % instance.public_dns_name)
# finally save the details or our new instance into the local state file
save_ec2_state_locally(instance_id=instance.id,
                       region=region,
                       username=username,
                       access_key_id=access_key_id,
                       secret_access_key=secret_access_key)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def enable_marathon_basic_authentication(principal, password): """ configures marathon to start with authentication """
# Body of enable_marathon_basic_authentication: write the mesos credential
# file and rewrite marathon's upstart exec line to enable HTTP basic auth.
upstart_file = '/etc/init/marathon.conf'
# Hide output so the password is not echoed into logs.
with hide('running', 'stdout'):
    sudo('echo -n "{}" > /etc/marathon-mesos.credentials'.format(password))
boot_args = ' '.join(['exec', '/usr/bin/marathon', '--http_credentials',
                      '"{}:{}"'.format(principal, password),
                      '--mesos_authentication_principal', principal,
                      '--mesos_authentication_secret_file',
                      '/etc/marathon-mesos.credentials'])
# check if the init conf file contains the exact user and password
if not file_contains(upstart_file, boot_args, use_sudo=True):
    # Replace the existing exec line, lock down the file, and restart so
    # the new credentials take effect. Idempotent on repeat runs.
    sed(upstart_file, 'exec /usr/bin/marathon.*', boot_args, use_sudo=True)
    file_attribs(upstart_file, mode=700, sudo=True)
    restart_service('marathon')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def enable_mesos_basic_authentication(principal, password): """ enables and adds a new authorized principal """
# Body of enable_mesos_basic_authentication: register the principal/password
# in the mesos secrets file and flip mesos-master into authenticated mode.
# Only restart the master if any of the three settings actually changed.
restart = False
secrets_file = '/etc/mesos/secrets'
secrets_entry = '%s %s' % (principal, password)
if not file_contains(filename=secrets_file, text=secrets_entry,
                     use_sudo=True):
    file_append(filename=secrets_file, text=secrets_entry, use_sudo=True)
    file_attribs(secrets_file, mode=700, sudo=True)
    restart = True
# set new startup parameters for mesos-master
with quiet():
    if secrets_file not in sudo('cat /etc/mesos-master/credentials'):
        sudo('echo %s > /etc/mesos-master/credentials' % secrets_file)
        restart = True
    # mesos-master flag files use a literal '?name' file to pass booleans;
    # the '?' is escaped for the shell.
    if not exists('/etc/mesos-master/\?authenticate', use_sudo=True):
        sudo('touch /etc/mesos-master/\?authenticate')
        file_attribs('/etc/mesos-master/\?authenticate', mode=700, sudo=True)
        restart = True
if restart:
    restart_service('mesos-master')