Columns: text_prompt (string, lengths 157 to 13.1k), code_prompt (string, lengths 7 to 19.8k)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def by_lookup(self, style_key, style_value):
    """Return a processor that extracts the style from `mapping`.

    Parameters
    ----------
    style_key : str
        A style key.
    style_value : dict
        A dictionary with a "lookup" key whose value is a "mapping" style
        value that maps a field value to either a style attribute (str) or a
        boolean flag indicating whether to use the style attribute named by
        `style_key`.

    Returns
    -------
    A function.
    """
style_attr = style_key if self.style_types[style_key] is bool else None mapping = style_value["lookup"] def proc(value, result): try: lookup_value = mapping[value] except (KeyError, TypeError): # ^ TypeError is included in case the user passes non-hashable # values. return result if not lookup_value: return result return self.render(style_attr or lookup_value, result) return proc
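For illustration, a hedged sketch of the kind of "lookup" style value this processor consumes; the field values and color names below are invented, only the structure follows the docstring and code above.

# Hypothetical column style using "lookup" values (illustrative only).
column_style = {
    "color": {"lookup": {"FAILED": "red", "OK": "green"}},
    "bold": {"lookup": {"FAILED": True}},
}
# by_lookup("color", column_style["color"]) returns a proc() that wraps a
# "FAILED" cell in the terminal code for red, while values missing from the
# mapping (or unhashable ones) pass through unchanged. For the boolean
# "bold" key, a truthy lookup value falls back to the "bold" attribute itself.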
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def by_re_lookup(self, style_key, style_value, re_flags=0):
    """Return a processor for a "re_lookup" style value.

    Parameters
    ----------
    style_key : str
        A style key.
    style_value : dict
        A dictionary with a "re_lookup" style value that consists of a
        sequence of items where each item should have the form `(regexp, x)`,
        where regexp is a regular expression to match against the field value
        and x is either a style attribute (str) or a boolean flag indicating
        whether to use the style attribute named by `style_key`.
    re_flags : int
        Passed through as the flags argument to re.compile.

    Returns
    -------
    A function.
    """
style_attr = style_key if self.style_types[style_key] is bool else None regexps = [(re.compile(r, flags=re_flags), v) for r, v in style_value["re_lookup"]] def proc(value, result): if not isinstance(value, six.string_types): return result for r, lookup_value in regexps: if r.search(value): if not lookup_value: return result return self.render(style_attr or lookup_value, result) return result return proc
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def by_interval_lookup(self, style_key, style_value):
    """Return a processor for an "interval" style value.

    Parameters
    ----------
    style_key : str
        A style key.
    style_value : dict
        A dictionary with an "interval" key whose value consists of a
        sequence of tuples where each tuple should have the form
        `(start, end, x)`, where start is the start of the interval
        (inclusive), end is the end of the interval (exclusive), and x is
        either a style attribute (str) or a boolean flag indicating whether
        to use the style attribute named by `style_key`.

    Returns
    -------
    A function.
    """
style_attr = style_key if self.style_types[style_key] is bool else None intervals = style_value["interval"] def proc(value, result): try: value = float(value) except TypeError: return result for start, end, lookup_value in intervals: if start is None: start = float("-inf") if end is None: end = float("inf") if start <= value < end: if not lookup_value: return result return self.render(style_attr or lookup_value, result) return result return proc
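A similarly hedged sketch of an "interval" style value; the thresholds and attribute names are invented, while the tuple layout and the None-as-infinity handling come straight from the code above.

# Hypothetical "interval" style value: (start, end, attribute) tuples with
# start inclusive, end exclusive, and None standing in for +/- infinity.
column_style = {
    "color": {
        "interval": [
            (0, 50, "red"),
            (50, 80, "yellow"),
            (80, None, "green"),
        ]
    }
}
# by_interval_lookup("color", column_style["color"]) returns a proc() that,
# for a value of 83, renders the cell with the "green" attribute.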
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post_from_style(self, column_style): """Yield post-format processors based on `column_style`. Parameters column_style : dict A style where the top-level keys correspond to style attributes such as "bold" or "color". Returns ------- A generator object. """
flanks = Flanks() yield flanks.split_flanks fns = {"simple": self.by_key, "lookup": self.by_lookup, "re_lookup": self.by_re_lookup, "interval": self.by_interval_lookup} for key in self.style_types: if key not in column_style: continue vtype = value_type(column_style[key]) fn = fns[vtype] args = [key, column_style[key]] if vtype == "re_lookup": args.append(sum(getattr(re, f) for f in column_style.get("re_flags", []))) yield fn(*args) yield flanks.join_flanks
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def split_flanks(self, _, result): """Return `result` without flanking whitespace. """
if not result.strip(): self.left, self.right = "", "" return result match = self.flank_re.match(result) assert match, "This regexp should always match" self.left, self.right = match.group(1), match.group(3) return match.group(2)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def render(self, style_attr, value):
    """Prepend the terminal code for `style_attr` to `value`.

    Parameters
    ----------
    style_attr : str
        A style attribute (e.g., "bold" or "blue").
    value : str
        The value to render.

    Returns
    -------
    The code for `style_attr` (e.g., "\x1b[1m" for bold) plus the original
    value.
    """
if not value.strip(): # We've got an empty string. Don't bother adding any # codes. return value return six.text_type(getattr(self.term, style_attr)) + value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post_from_style(self, column_style): """A Terminal-specific reset to StyleProcessors.post_from_style. """
for proc in super(TermProcessors, self).post_from_style(column_style): if proc.__name__ == "join_flanks": # Reset any codes before adding back whitespace. yield self._maybe_reset() yield proc
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_subscribers(self): """Get per-instance subscribers from the signal. """
data = self.signal.instance_subscribers if self.instance not in data: data[self.instance] = MethodAwareWeakList() return data[self.instance]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def connect(self, cback, subscribers=None, instance=None):
    """Add a function or a method as a handler of this signal.

    Any handler added can be a coroutine.

    :param cback: the callback (or *handler*) to be added to the set
    :returns: ``None`` or the value returned by the corresponding wrapper
    """
if subscribers is None: subscribers = self.subscribers # wrapper if self._fconnect is not None: def _connect(cback): self._connect(subscribers, cback) notify = partial(self._notify_one, instance) if instance is not None: result = self._fconnect(instance, cback, subscribers, _connect, notify) else: result = self._fconnect(cback, subscribers, _connect, notify) if inspect.isawaitable(result): result = pull_result(result) else: self._connect(subscribers, cback) result = None return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def disconnect(self, cback, subscribers=None, instance=None):
    """Remove a previously added function or method from the set of the
    signal's handlers.

    :param cback: the callback (or *handler*) to be removed from the set
    :returns: ``None`` or the value returned by the corresponding wrapper
    """
if subscribers is None: subscribers = self.subscribers # wrapper if self._fdisconnect is not None: def _disconnect(cback): self._disconnect(subscribers, cback) notify = partial(self._notify_one, instance) if instance is not None: result = self._fdisconnect(instance, cback, subscribers, _disconnect, notify) else: result = self._fdisconnect(cback, subscribers, _disconnect, notify) if inspect.isawaitable(result): result = pull_result(result) else: self._disconnect(subscribers, cback) result = None return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def ext_publish(self, instance, loop, *args, **kwargs):
    """If 'external_signaller' is defined, calls its publish method to
    notify external event systems.

    This is for internal usage only, but it is documented because it is part
    of the interface with external notification systems.
    """
if self.external_signaller is not None: # Assumes that the loop is managed by the external handler return self.external_signaller.publish_signal(self, instance, loop, args, kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def configure_logging(
    filename=None,
    filemode="a",
    datefmt=FMT_DATE,
    fmt=FMT,
    stdout_fmt=FMT_STDOUT,
    level=logging.DEBUG,
    stdout_level=logging.WARNING,
    initial_file_message="",
    max_size=1048576,
    rotations_number=5,
    remove_handlers=True,
):
    """Configure logging module.

    Args:
        filename (str): Specifies a filename to log to.
        filemode (str): Specifies the mode to open the log file.
            Values: ``'a'``, ``'w'``. *Default:* ``a``.
        datefmt (str): Use the specified date/time format.
        fmt (str): Format string for the file handler.
        stdout_fmt (str): Format string for the stdout handler.
        level (int): Log level for the file handler. Log levels are the same
            as the log levels from the standard :mod:`logging` module.
            *Default:* ``logging.DEBUG``
        stdout_level (int): Log level for the stdout handler. Log levels are
            the same as the log levels from the standard :mod:`logging`
            module. *Default:* ``logging.WARNING``
        initial_file_message (str): First log entry written to the file.
        max_size (int): Maximal size of the logfile. If the size of the file
            exceeds the maximal size it will be rotated.
        rotations_number (int): Number of rotations to save.
        remove_handlers (bool): Remove all existing handlers.
    """
logger = logging.getLogger() logger.level = logging.NOTSET # Remove all handlers if remove_handlers: while len(logger.handlers) > 0: hdlr = logger.handlers[0] hdlr.close() logger.removeHandler(hdlr) # Create stdout handler if stdout_level is not None: stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setLevel(stdout_level) stdout_formatter = logging.Formatter(stdout_fmt, datefmt) # stdoutFormatter.converter = time.gmtime stdout_handler.setFormatter(stdout_formatter) logger.addHandler(stdout_handler) # Create file handler if filename is provided if filename is not None: # Check if filename directory exists and creates it if it doesn't directory = os.path.abspath(os.path.dirname(filename)) if not os.path.isdir(directory): shell.mkdir(directory) # Create file handler file_handler = RotatingFileHandler( filename, filemode, max_size, rotations_number ) file_handler.setLevel(level) file_formatter = logging.Formatter(fmt, datefmt) file_formatter.converter = time.gmtime file_handler.setFormatter(file_formatter) logger.addHandler(file_handler) if initial_file_message: message = " %s " % initial_file_message file_handler.stream.write("\n" + message.center(100, "=") + "\n\n")
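A possible call site, assuming the module-level format defaults (FMT_DATE, FMT, FMT_STDOUT) are left alone; the log path and banner message are placeholders, not taken from the original.

import logging

configure_logging(
    filename="/var/log/myapp/myapp.log",   # rotated at ~1 MiB, 5 backups kept
    level=logging.DEBUG,                   # everything goes to the file
    stdout_level=logging.WARNING,          # only warnings and errors on stdout
    initial_file_message="myapp started",  # written as a centered banner line
)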
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_plan(existing_users=None, proposed_users=None, purge_undefined=None, protected_users=None, allow_non_unique_id=None, manage_home=True, manage_keys=True): """Determine what changes are required. args: existing_users (Users): List of discovered users proposed_users (Users): List of proposed users purge_undefined (bool): Remove discovered users that have not been defined in proposed users list protected_users (list): List of users' names that should not be evaluated as part of the plan creation process allow_non_unique_id (bool): Allow more than one user to have the same uid manage_home (bool): Create/remove users' home directories manage_keys (bool): Add/update/remove users' keys (manage_home must also be true) returns: list: Differences between discovered and proposed users with a list of operations that will achieve the desired state. """
plan = list() proposed_usernames = list() if not purge_undefined: purge_undefined = constants.PURGE_UNDEFINED if not protected_users: protected_users = constants.PROTECTED_USERS if not allow_non_unique_id: allow_non_unique_id = constants.ALLOW_NON_UNIQUE_ID # Create list of modifications to make based on proposed users compared to existing users for proposed_user in proposed_users: proposed_usernames.append(proposed_user.name) user_matching_name = existing_users.describe_users(users_filter=dict(name=proposed_user.name)) user_matching_id = get_user_by_uid(uid=proposed_user.uid, users=existing_users) # If user does not exist if not allow_non_unique_id and user_matching_id and not user_matching_name: plan.append( dict(action='fail', error='uid_clash', proposed_user=proposed_user, state='existing', result=None)) elif not user_matching_name: plan.append( dict(action='add', proposed_user=proposed_user, state='missing', result=None, manage_home=manage_home, manage_keys=manage_keys)) # If they do, then compare else: user_comparison = compare_user(passed_user=proposed_user, user_list=existing_users) if user_comparison.get('result'): plan.append( dict(action='update', proposed_user=proposed_user, state='existing', user_comparison=user_comparison, manage_home=manage_home, manage_keys=manage_keys)) # Application of the proposed user list will not result in deletion of users that need to be removed # If 'PURGE_UNDEFINED' then look for existing users that are not defined in proposed usernames and mark for removal if purge_undefined: for existing_user in existing_users: if existing_user.name not in proposed_usernames: if existing_user.name not in protected_users: plan.append( dict(action='delete', username=existing_user.name, state='existing', manage_home=manage_home, manage_keys=manage_keys)) return plan
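To make the return value easier to picture, here is the shape of the dicts create_plan() appends, reconstructed from the branches above; the concrete values are placeholders.

# 'add':    dict(action='add', proposed_user=<User>, state='missing',
#                result=None, manage_home=..., manage_keys=...)
# 'update': dict(action='update', proposed_user=<User>, state='existing',
#                user_comparison=<comparison dict>, manage_home=..., manage_keys=...)
# 'delete': dict(action='delete', username='<name>', state='existing',
#                manage_home=..., manage_keys=...)
# 'fail':   dict(action='fail', error='uid_clash', proposed_user=<User>,
#                state='existing', result=None)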
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def execute_plan(plan=None): """Create, Modify or Delete, depending on plan item."""
execution_result = list()
for task in plan:
    action = task['action']
    if action == 'delete':
        command = generate_delete_user_command(username=task.get('username'),
                                               manage_home=task['manage_home'])
        command_output = execute_command(command)
        execution_result.append(dict(task=task, command_output=command_output))
        remove_sudoers_entry(username=task.get('username'))
    elif action == 'add':
        command = generate_add_user_command(proposed_user=task.get('proposed_user'),
                                            manage_home=task['manage_home'])
        command_output = execute_command(command)
        if task['proposed_user'].public_keys and task['manage_home'] and task['manage_keys']:
            write_authorized_keys(task['proposed_user'])
        if task['proposed_user'].sudoers_entry:
            write_sudoers_entry(username=task['proposed_user'].name,
                                sudoers_entry=task['proposed_user'].sudoers_entry)
        execution_result.append(dict(task=task, command_output=command_output))
    elif action == 'update':
        result = task['user_comparison'].get('result')
        # Don't modify user if only keys have changed
        action_count = 0
        for k, _ in iteritems(result):
            if '_action' in k:
                action_count += 1
        command_output = None
        if task['manage_home'] and task['manage_keys'] and action_count == 1 and 'public_keys_action' in result:
            write_authorized_keys(task['proposed_user'])
        elif action_count == 1 and 'sudoers_entry_action' in result:
            write_sudoers_entry(username=task['proposed_user'].name,
                                sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry'])
        else:
            command = generate_modify_user_command(task=task)
            command_output = execute_command(command)
            if task['manage_home'] and task['manage_keys'] and result.get('public_keys_action'):
                write_authorized_keys(task['proposed_user'])
            if result.get('sudoers_entry_action'):
                write_sudoers_entry(username=task['proposed_user'].name,
                                    sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry'])
        execution_result.append(dict(task=task, command_output=command_output))
return execution_result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, variable_path: str, default: t.Optional[t.Any] = None, coerce_type: t.Optional[t.Type] = None, coercer: t.Optional[t.Callable] = None, **kwargs): """ Reads a value of ``variable_path`` from environment. If ``coerce_type`` is ``bool`` and no ``coercer`` specified, ``coerces`` forced to be :func:`~django_docker_helpers.utils.coerce_str_to_bool` :param variable_path: a delimiter-separated path to a nested value :param default: default value if there's no object by specified path :param coerce_type: cast a type of a value to a specified one :param coercer: perform a type casting with specified callback :param kwargs: additional arguments inherited parser may need :return: value or default """
var_name = self.get_env_var_name(variable_path) val = self.env.get(var_name, self.sentinel) if val is self.sentinel: return default # coerce to bool with default env coercer if no coercer specified if coerce_type and coerce_type is bool and not coercer: coercer = coerce_str_to_bool return self.coerce(val, coerce_type=coerce_type, coercer=coercer)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unzip(archive, destination, filenames=None): """Unzip a zip archive into destination directory. It unzips either the whole archive or specific file(s) from the archive. Usage: Args: archive (zipfile.ZipFile or str): Zipfile object to extract from or path to the zip archive. destination (str): Path to the output directory. filenames (str or list of str or None): Path(s) to the filename(s) inside the zip archive that you want to extract. """
close = False try: if not isinstance(archive, zipfile.ZipFile): archive = zipfile.ZipFile(archive, "r", allowZip64=True) close = True logger.info("Extracting: %s -> %s" % (archive.filename, destination)) if isinstance(filenames, str): filenames = [filenames] if filenames is None: # extract all filenames = archive.namelist() for filename in filenames: if filename.endswith("/"): # it's a directory shell.mkdir(os.path.join(destination, filename)) else: if not _extract_file(archive, destination, filename): raise Exception() logger.info('Extracting zip archive "%s" succeeded' % archive.filename) return True except Exception: logger.exception("Error while unzipping archive %s" % archive.filename) return False finally: if close: archive.close()
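Two hedged usage sketches for unzip(); the archive and destination paths are placeholders.

# Extract the whole archive:
unzip("backup.zip", "/tmp/restore")

# Extract a single member from an archive that is already open:
import zipfile
with zipfile.ZipFile("backup.zip", "r", allowZip64=True) as zf:
    unzip(zf, "/tmp/restore", filenames="config/settings.ini")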
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def mkzip(archive, items, mode="w", save_full_paths=False):
    """Recursively zip a directory.

    Args:
        archive (zipfile.ZipFile or str): ZipFile object to add to or path to
            the output zip archive.
        items (str or list of str): Single item or list of items (files and
            directories) to be added to the zipfile.
        mode (str): 'w' to create a new archive and write to it, 'a' to
            append to an existing one.
        save_full_paths (bool): Preserve full paths.
    """
close = False try: if not isinstance(archive, zipfile.ZipFile): archive = zipfile.ZipFile(archive, mode, allowZip64=True) close = True logger.info("mkdzip: Creating %s, from: %s", archive.filename, items) if isinstance(items, str): items = [items] for item in items: item = os.path.abspath(item) basename = os.path.basename(item) if os.path.isdir(item): for root, directoires, filenames in os.walk(item): for filename in filenames: path = os.path.join(root, filename) if save_full_paths: archive_path = path.encode("utf-8") else: archive_path = os.path.join( basename, path.replace(item, "").strip("\\/") ).encode("utf-8") archive.write(path, archive_path) elif os.path.isfile(item): if save_full_paths: archive_name = item.encode("utf-8") else: archive_name = basename.encode("utf-8") archive.write(item, archive_name) # , zipfile.ZIP_DEFLATED) return True except Exception as e: logger.error("Error occurred during mkzip: %s" % e) return False finally: if close: archive.close()
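Hedged usage sketches for mkzip(); the file names are placeholders.

# Create a new archive from a directory and a single file:
mkzip("release.zip", ["build/docs", "CHANGELOG.md"])

# Append another file to the same archive, preserving its full path:
mkzip("release.zip", "LICENSE", mode="a", save_full_paths=True)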
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def seven_zip(archive, items, self_extracting=False): """Create a 7z archive."""
if not isinstance(items, (list, tuple)): items = [items] if self_extracting: return er(_get_sz(), "a", "-ssw", "-sfx", archive, *items) else: return er(_get_sz(), "a", "-ssw", archive, *items)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def migrate(*argv) -> bool: """ Runs Django migrate command. :return: always ``True`` """
wf('Applying migrations... ', False) execute_from_command_line(['./manage.py', 'migrate'] + list(argv)) wf('[+]\n') return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def output(self, output, accepts, set_http_code, set_content_type): """ Formats a response from a WSGI app to handle any RDF graphs If a view function returns a single RDF graph, serialize it based on Accept header If it's not an RDF graph, return it without any special handling """
graph = Decorator._get_graph(output) if graph is not None: # decide the format output_mimetype, output_format = self.format_selector.decide(accepts, graph.context_aware) # requested content couldn't find anything if output_mimetype is None: set_http_code("406 Not Acceptable") return ['406 Not Acceptable'.encode('utf-8')] # explicitly mark text mimetypes as utf-8 if 'text' in output_mimetype: output_mimetype = output_mimetype + '; charset=utf-8' # format the new response serialized = graph.serialize(format=output_format) set_content_type(output_mimetype) return [serialized] else: return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decorate(self, app): """ Wraps a WSGI application to return formatted RDF graphs Uses content negotiation to serialize the graph to the client-preferred format Passes other content through unmodified """
from functools import wraps @wraps(app) def decorated(environ, start_response): # capture any start_response from the app app_response = {} app_response['status'] = "200 OK" app_response['headers'] = [] app_response['written'] = BytesIO() def custom_start_response(status, headers, *args, **kwargs): app_response['status'] = status app_response['headers'] = headers app_response['args'] = args app_response['kwargs'] = kwargs return app_response['written'].write returned = app(environ, custom_start_response) # callbacks from the serialization def set_http_code(status): app_response['status'] = str(status) def set_header(header, value): app_response['headers'] = [(h,v) for (h,v) in app_response['headers'] if h.lower() != header.lower()] app_response['headers'].append((header, value)) def set_content_type(content_type): set_header('Content-Type', content_type) # do the serialization accept = environ.get('HTTP_ACCEPT', '') new_return = self.output(returned, accept, set_http_code, set_content_type) # set the Vary header vary_headers = (v for (h,v) in app_response['headers'] if h.lower() == 'vary') vary_elements = list(itertools.chain(*[v.split(',') for v in vary_headers])) vary_elements = list(set([v.strip() for v in vary_elements])) if '*' not in vary_elements and 'accept' not in (v.lower() for v in vary_elements): vary_elements.append('Accept') set_header('Vary', ', '.join(vary_elements)) # pass on the result to the parent WSGI server parent_writer = start_response(app_response['status'], app_response['headers'], *app_response.get('args', []), **app_response.get('kwargs', {})) written = app_response['written'].getvalue() if len(written) > 0: parent_writer(written) return new_return return decorated
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def is_handler(cls, name, value):
    """Detect a handler and return the name of the signal it wants to handle."""
signal_name = False config = None if callable(value) and hasattr(value, SPEC_CONTAINER_MEMBER_NAME): spec = getattr(value, SPEC_CONTAINER_MEMBER_NAME) if spec['kind'] == 'handler': signal_name = spec['name'] config = spec['config'] return signal_name, config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _build_inheritance_chain(cls, bases, *names, merge=False): """For all of the names build a ChainMap containing a map for every base class."""
result = [] for name in names: maps = [] for base in bases: bmap = getattr(base, name, None) if bmap is not None: assert isinstance(bmap, (dict, ChainMap)) if len(bmap): if isinstance(bmap, ChainMap): maps.extend(bmap.maps) else: maps.append(bmap) result.append(ChainMap({}, *maps)) if merge: result = [dict(map) for map in result] if len(names) == 1: return result[0] return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _build_instance_handler_mapping(cls, instance, handle_d): """For every unbound handler, get the bound version."""
res = {} for member_name, sig_name in handle_d.items(): if sig_name in res: sig_handlers = res[sig_name] else: sig_handlers = res[sig_name] = [] sig_handlers.append(getattr(instance, member_name)) return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_local_handlers(cls, signals, handlers, namespace, configs): """For every marked handler, see if there is a suitable signal. If not, raise an error."""
for aname, sig_name in handlers.items(): # WARN: this code doesn't take in account the case where a new # method with the same name of an handler in a base class is # present in this class but it isn't an handler (so the handler # with the same name should be removed from the handlers) if sig_name not in signals: disable_check = configs[aname].get('disable_check', False) if not disable_check: raise SignalError("Cannot find a signal named '%s'" % sig_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_class_handlers(cls, signal_name, instance): """Returns the handlers registered at class level. """
handlers = cls._signal_handlers_sorted[signal_name] return [getattr(instance, hname) for hname in handlers]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _sort_handlers(cls, signals, handlers, configs): """Sort class defined handlers to give precedence to those declared at lower level. ``config`` can contain two keys ``begin`` or ``end`` that will further reposition the handler at the two extremes. """
def macro_precedence_sorter(flags, hname): """The default is to sort 'bottom_up', with lower level getting executed first, but sometimes you need them reversed.""" data = configs[hname] topdown_sort = SignalOptions.SORT_TOPDOWN in flags if topdown_sort: level = levels_count - 1 - data['level'] else: level = data['level'] if 'begin' in data: return (-1, level, hname) elif 'end' in data: return (1, level, hname) else: return (0, level, hname) levels_count = len(handlers.maps) per_signal = defaultdict(list) for level, m in enumerate(reversed(handlers.maps)): for hname, sig_name in m.items(): sig_handlers = per_signal[sig_name] if hname not in sig_handlers: configs[hname]['level'] = level sig_handlers.append(hname) for sig_name, sig_handlers in per_signal.items(): if sig_name in signals: # it may be on a mixin flags = signals[sig_name].flags sig_handlers.sort(key=partial(macro_precedence_sorter, flags)) return per_signal
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def instance_signals_and_handlers(cls, instance): """Calculate per-instance signals and handlers."""
isignals = cls._signals.copy() ihandlers = cls._build_instance_handler_mapping( instance, cls._signal_handlers ) return isignals, ihandlers
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def __insert(self, key, value): ''' Insert a new key to database ''' if key in self: getLogger().warning("Cache entry exists, cannot insert a new entry with key='{key}'".format(key=key)) return False with self.get_conn() as conn: try: c = conn.cursor() c.execute("INSERT INTO cache_entries (key, value) VALUES (?,?)", (key, value)) conn.commit() return True except Exception as e: # NOTE: A cache error can be forgiven, no? getLogger().debug("Cache Error: Cannot insert | Detail = %s" % (e,)) return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def __delete(self, key): ''' Delete file key from database ''' with self.get_conn() as conn: try: c = conn.cursor() c.execute("DELETE FROM cache_entries WHERE key = ?", (key,)) conn.commit() except: getLogger().exception("Cannot delete") return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def summarize(self, rows): """Return summary rows for `rows`. Parameters rows : list of dicts Normalized rows to summarize. Returns ------- A list of summary rows. Each row is a tuple where the first item is the data and the second is a dict of keyword arguments that can be passed to StyleFields.render. """
columns = list(rows[0].keys()) agg_styles = {c: self.style[c]["aggregate"] for c in columns if "aggregate" in self.style[c]} summaries = {} for col, agg_fn in agg_styles.items(): lgr.debug("Summarizing column %r with %r", col, agg_fn) colvals = filter(lambda x: not isinstance(x, Nothing), (row[col] for row in rows)) summaries[col] = agg_fn(list(colvals)) # The rest is just restructuring the summaries into rows that are # compatible with pyout.Content. Most the complexity below comes from # the fact that a summary function is allowed to return either a single # item or a list of items. maxlen = max(len(v) if isinstance(v, list) else 1 for v in summaries.values()) summary_rows = [] for rowidx in range(maxlen): sumrow = {} for column, values in summaries.items(): if isinstance(values, list): if rowidx >= len(values): continue sumrow[column] = values[rowidx] elif rowidx == 0: sumrow[column] = values for column in columns: if column not in sumrow: sumrow[column] = "" summary_rows.append((sumrow, {"style": self.style.get("aggregate_"), "adopt": False})) return summary_rows
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _init(self, style, streamer, processors=None): """Do writer-specific setup. Parameters style : dict Style, as passed to __init__. streamer : interface.Stream A stream interface that takes __init__'s `stream` and `interactive` arguments into account. processors : field.StyleProcessors, optional A writer-specific processors instance. Defaults to field.PlainProcessors(). """
self._stream = streamer if streamer.interactive: if streamer.supports_updates: self.mode = "update" else: self.mode = "incremental" else: self.mode = "final" if style and "width_" not in style and self._stream.width: style["width_"] = self._stream.width self._content = ContentWithSummary( StyleFields(style, processors or PlainProcessors()))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ids(self): """A list of unique IDs used to identify a row. If not explicitly set, it defaults to the first column name. """
if self._ids is None: if self._columns: if isinstance(self._columns, OrderedDict): return [list(self._columns.keys())[0]] return [self._columns[0]] else: return self._ids
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def wait(self): """Wait for asynchronous calls to return. """
if self._pool is None: return self._pool.close() self._pool.join()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _write_lock(self): """Acquire and release the lock around output calls. This should allow multiple threads or processes to write output reliably. Code that modifies the `_content` attribute should also do so within this context. """
if self._lock: lgr.debug("Acquiring write lock") self._lock.acquire() try: yield finally: if self._lock: lgr.debug("Releasing write lock") self._lock.release()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _start_callables(self, row, callables): """Start running `callables` asynchronously. """
id_vals = {c: row[c] for c in self.ids} def callback(tab, cols, result): if isinstance(result, Mapping): pass elif isinstance(result, tuple): result = dict(zip(cols, result)) elif len(cols) == 1: # Don't bother raising an exception if cols != 1 # because it would be lost in the thread. result = {cols[0]: result} result.update(id_vals) tab._write(result) if self._pool is None: self._pool = Pool() if self._lock is None: self._lock = multiprocessing.Lock() for cols, fn in callables: cb_func = partial(callback, self, cols) gen = None if inspect.isgeneratorfunction(fn): gen = fn() elif inspect.isgenerator(fn): gen = fn if gen: def callback_for_each(): for i in gen: cb_func(i) self._pool.apply_async(callback_for_each) else: self._pool.apply_async(fn, callback=cb_func)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def tables(self):
    """Get a listing of all tables.

    - if a schema was specified on connect, return unqualified table names in
      that schema
    - if no schema was specified on connect, return all tables, with schema
      prefixes
    """
if self.schema: return self.tables_in_schema(self.schema) else: tables = [] for schema in self.schemas: tables = tables + [ schema + "." + t for t in self.tables_in_schema(schema) ] return tables
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_query(self, sql, lookup): """ Modify table and field name variables in a sql string with a dict. This seems to be discouraged by psycopg2 docs but it makes small adjustments to large sql strings much easier, making prepped queries much more versatile. USAGE sql = 'SELECT $myInputField FROM $myInputTable' lookup = {'myInputField':'customer_id', 'myInputTable':'customers'} sql = db.build_query(sql, lookup) """
for key, val in six.iteritems(lookup): sql = sql.replace("$" + key, val) return sql
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_table_name(self, table): """Parse schema qualified table name """
if "." in table: schema, table = table.split(".") else: schema = None return (schema, table)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_table(self, table): """Loads a table. Returns None if the table does not already exist in db """
table = self._valid_table_name(table) schema, table = self.parse_table_name(table) if not schema: schema = self.schema tables = self.tables else: tables = self.tables_in_schema(schema) if table in tables: return Table(self, schema, table) else: return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def mogrify(self, sql, params): """Return the query string with parameters added """
conn = self.engine.raw_connection() cursor = conn.cursor() return cursor.mogrify(sql, params)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def execute(self, sql, params=None): """Just a pointer to engine.execute """
# wrap in a transaction to ensure things are committed # https://github.com/smnorris/pgdata/issues/3 with self.engine.begin() as conn: result = conn.execute(sql, params) return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def query_one(self, sql, params=None): """Grab just one record """
r = self.engine.execute(sql, params) return r.fetchone()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_schema(self, schema): """Create specified schema if it does not already exist """
if schema not in self.schemas: sql = "CREATE SCHEMA " + schema self.execute(sql)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def drop_schema(self, schema, cascade=False): """Drop specified schema """
if schema in self.schemas: sql = "DROP SCHEMA " + schema if cascade: sql = sql + " CASCADE" self.execute(sql)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_table(self, table, columns): """Creates a table """
schema, table = self.parse_table_name(table) table = self._valid_table_name(table) if not schema: schema = self.schema if table in self.tables: return Table(self, schema, table) else: return Table(self, schema, table, columns)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ogr2pg( self, in_file, in_layer=None, out_layer=None, schema="public", s_srs=None, t_srs="EPSG:3005", sql=None, dim=2, cmd_only=False, index=True ): """ Load a layer to provided pgdata database connection using OGR2OGR -sql option is like an ESRI where_clause or the ogr2ogr -where option, but to increase flexibility, it is in SQLITE dialect: SELECT * FROM <in_layer> WHERE <sql> """
# if not provided a layer name, use the name of the input file if not in_layer: in_layer = os.path.splitext(os.path.basename(in_file))[0] if not out_layer: out_layer = in_layer.lower() command = [ "ogr2ogr", "-t_srs", t_srs, "-f", "PostgreSQL", "PG:host={h} user={u} dbname={db} password={pwd}".format( h=self.host, u=self.user, db=self.database, pwd=self.password ), "-lco", "OVERWRITE=YES", "-overwrite", "-lco", "SCHEMA={schema}".format(schema=schema), "-lco", "GEOMETRY_NAME=geom", "-dim", "{d}".format(d=dim), "-nlt", "PROMOTE_TO_MULTI", "-nln", out_layer, in_file ] if sql: command.insert( len(command), "-sql" ) command.insert( len(command), "SELECT * FROM {} WHERE {}".format(in_layer, sql) ) command.insert(len(command), "-dialect") command.insert(len(command), "SQLITE") # only add output layer name if sql not included (it gets ignored) if not sql: command.insert( len(command), in_layer ) if s_srs: command.insert(len(command), "-s_srs") command.insert(len(command), s_srs) if not index: command.insert(len(command), "-lco") command.insert(len(command), "SPATIAL_INDEX=NO") if cmd_only: return " ".join(command) else: subprocess.run(command)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def setup_logging(filename, log_dir=None, force_setup=False): ''' Try to load logging configuration from a file. Set level to INFO if failed. ''' if not force_setup and ChirpCLI.SETUP_COMPLETED: logging.debug("Master logging has been setup. This call will be ignored.") return if log_dir and not os.path.exists(log_dir): os.makedirs(log_dir) if os.path.isfile(filename): with open(filename) as config_file: try: config = json.load(config_file) logging.config.dictConfig(config) logging.info("logging was setup using {}".format(filename)) ChirpCLI.SETUP_COMPLETED = True except Exception as e: logging.exception("Could not load logging config") # default logging config logging.basicConfig(level=logging.INFO) else: logging.basicConfig(level=logging.INFO)
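A minimal usage sketch, assuming a JSON logging config file as the implementation above expects; the file and directory names are placeholders, and the call shown assumes the function is reachable as written (in the original it may hang off a CLI helper class).

# Falls back to logging.basicConfig(level=logging.INFO) if the file is
# missing or cannot be parsed.
setup_logging("logging.json", log_dir="logs")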
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def config_logging(args): ''' Override root logger's level ''' if args.quiet: logging.getLogger().setLevel(logging.CRITICAL) elif args.verbose: logging.getLogger().setLevel(logging.DEBUG)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_task(self, task, func=None, **kwargs): ''' Add a task parser ''' if not self.__tasks: raise Exception("Tasks subparsers is disabled") if 'help' not in kwargs: if func.__doc__: kwargs['help'] = func.__doc__ task_parser = self.__tasks.add_parser(task, **kwargs) if self.__add_vq: self.add_vq(task_parser) if func is not None: task_parser.set_defaults(func=func) return task_parser
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_vq(self, parser): ''' Add verbose & quiet options ''' group = parser.add_mutually_exclusive_group() group.add_argument("-v", "--verbose", action="store_true") group.add_argument("-q", "--quiet", action="store_true")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def run(self, func=None): ''' Run the app ''' args = self.parser.parse_args() if self.__add_vq is not None and self.__config_logging: self.__config_logging(args) if self.__show_version_func and args.version and callable(self.__show_version_func): self.__show_version_func(self, args) elif args.func is not None: args.func(self, args) elif func is not None: func(self, args) else: self.parser.print_help()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def header(*msg, level='h1', separator=" ", print_out=print): ''' Print header block in text mode ''' out_string = separator.join(str(x) for x in msg) if level == 'h0': # box_len = 80 if len(msg) < 80 else len(msg) box_len = 80 print_out('+' + '-' * (box_len + 2)) print_out("| %s" % out_string) print_out('+' + '-' * (box_len + 2)) elif level == 'h1': print_out("") print_out(out_string) print_out('-' * 60) elif level == 'h2': print_out('\t%s' % out_string) print_out('\t' + ('-' * 40)) else: print_out('\t\t%s' % out_string) print_out('\t\t' + ('-' * 20))
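A hedged sketch of how the three block levels print; the titles are placeholders.

header("Corpus statistics", level='h0')                 # boxed banner
header("Tokens", "types", level='h1', separator=' / ')  # underlined heading
header("Per-document counts", level='h2')               # indented sub-heading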
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fetch(self, value_obj=None): ''' Fetch the next two values ''' val = None try: val = next(self.__iterable) except StopIteration: return None if value_obj is None: value_obj = Value(value=val) else: value_obj.value = val return value_obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def content(self): ''' Return report content as a string if mode == STRINGIO else an empty string ''' if isinstance(self.__report_file, io.StringIO): return self.__report_file.getvalue() else: return ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def format(self): ''' Format table to print out ''' self.max_lengths = [] for row in self.rows: if len(self.max_lengths) < len(row): self.max_lengths += [0] * (len(row) - len(self.max_lengths)) for idx, val in enumerate(row): len_cell = len(str(val)) if val else 0 if self.max_lengths[idx] < len_cell: self.max_lengths[idx] = len_cell return self.max_lengths
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def replace_name(file_path, new_name): ''' Change the file name in a path but keep the extension ''' if not file_path: raise Exception("File path cannot be empty") elif not new_name: raise Exception("New name cannot be empty") dirname = os.path.dirname(file_path) ext = os.path.splitext(os.path.basename(file_path))[1] return os.path.join(dirname, new_name + ext)
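A small usage sketch for replace_name(); the path is a placeholder, and the call assumes the helper is reachable as written (in the original it may be a static method of a file-helper class).

replace_name("/data/corpus/doc_001.txt", "doc_001_clean")
# -> "/data/corpus/doc_001_clean.txt"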
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_child_folders(path): ''' Get all child folders of a folder ''' path = FileHelper.abspath(path) return [dirname for dirname in os.listdir(path) if os.path.isdir(os.path.join(path, dirname))]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_child_files(path): ''' Get all child files of a folder ''' path = FileHelper.abspath(path) return [filename for filename in os.listdir(path) if os.path.isfile(os.path.join(path, filename))]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def remove_file(filepath): ''' Delete a file ''' try: os.remove(os.path.abspath(os.path.expanduser(filepath))) except OSError as e: if e.errno != errno.ENOENT: raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _ptn2fn(self, pattern): ''' Pattern to filename ''' return [pattern.format(wd=self.working_dir, n=self.__name, mode=self.__mode), pattern.format(wd=self.working_dir, n='{}.{}'.format(self.__name, self.__mode), mode=self.__mode)]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_potential(self, *patterns): ''' Add a potential config file pattern ''' for ptn in patterns: self.__potential.extend(self._ptn2fn(ptn))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def locate_config(self): ''' Locate config file ''' for f in self.__potential: f = FileHelper.abspath(f) if os.path.isfile(f): return f return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def config(self): ''' Read config automatically if required ''' if self.__config is None: config_path = self.locate_config() if config_path: self.__config = self.read_file(config_path) self.__config_path = config_path return self.__config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def read_file(self, file_path): ''' Read a configuration file and return configuration data ''' getLogger().info("Loading app config from {} file: {}".format(self.__mode, file_path)) if self.__mode == AppConfig.JSON: return json.loads(FileHelper.read(file_path), object_pairs_hook=OrderedDict) elif self.__mode == AppConfig.INI: config = configparser.ConfigParser(allow_no_value=True) config.read(file_path) return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def load(self, file_path): ''' Load configuration from a specific file ''' self.clear() self.__config = self.read_file(file_path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def execute_and_report(command, *args, **kwargs):
    """Execute a command with arguments and wait for output.

    If execution was successful, the function will return True; if not, it
    will log the output using standard logging and return False.
    """
logging.info("Execute: %s %s" % (command, " ".join(args))) try: status, out, err = execute(command, *args, **kwargs) if status == 0: logging.info( "%s Finished successfully. Exit Code: 0.", os.path.basename(command), ) return True else: try: logging.error( "%s failed! Exit Code: %s\nOut: %s\nError: %s", os.path.basename(command), status, out, err, ) except Exception as e: # This fails when some non ASCII characters are returned # from the application logging.error( "%s failed [%s]! Exit Code: %s\nOut: %s\nError: %s", e, os.path.basename(command), status, repr(out), repr(err), ) return False except Exception: logging.exception( "%s failed! Exception thrown!", os.path.basename(command) ) return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_authorized_keys(username=None): """Read public keys from specified user's authorized_keys file. args: username (str): username. returns: list: Authorised keys for the specified user. """
authorized_keys_path = '{0}/.ssh/authorized_keys'.format(os.path.expanduser('~{0}'.format(username))) rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH) tmp_authorized_keys_path = '/tmp/authorized_keys_{0}_{1}'.format(username, rnd_chars) authorized_keys = list() copy_result = execute_command( shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), authorized_keys_path, tmp_authorized_keys_path)))) result_message = copy_result[0][1].decode('UTF-8') if 'you must have a tty to run sudo' in result_message: # pragma: no cover raise OSError("/etc/sudoers is blocked sudo. Remove entry: 'Defaults requiretty'.") elif 'No such file or directory' not in result_message: execute_command(shlex.split(str('{0} chmod 755 {1}'.format(sudo_check(), tmp_authorized_keys_path)))) with open(tmp_authorized_keys_path) as keys_file: for key in keys_file: authorized_keys.append(PublicKey(raw=key)) execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_authorized_keys_path)))) return authorized_keys
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write_authorized_keys(user=None): """Write public keys back to authorized_keys file. Create keys directory if it doesn't already exist. args: user (User): Instance of User containing keys. returns: list: Authorised keys for the specified user. """
authorized_keys = list() authorized_keys_dir = '{0}/.ssh'.format(os.path.expanduser('~{0}'.format(user.name))) rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH) authorized_keys_path = '{0}/authorized_keys'.format(authorized_keys_dir) tmp_authorized_keys_path = '/tmp/authorized_keys_{0}_{1}'.format(user.name, rnd_chars) if not os.path.isdir(authorized_keys_dir): execute_command(shlex.split(str('{0} mkdir -p {1}'.format(sudo_check(), authorized_keys_dir)))) for key in user.public_keys: authorized_keys.append('{0}\n'.format(key.raw)) with open(tmp_authorized_keys_path, mode=text_type('w+')) as keys_file: keys_file.writelines(authorized_keys) execute_command( shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), tmp_authorized_keys_path, authorized_keys_path)))) execute_command(shlex.split(str('{0} chown -R {1} {2}'.format(sudo_check(), user.name, authorized_keys_dir)))) execute_command(shlex.split(str('{0} chmod 700 {1}'.format(sudo_check(), authorized_keys_dir)))) execute_command(shlex.split(str('{0} chmod 600 {1}'.format(sudo_check(), authorized_keys_path)))) execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_authorized_keys_path))))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def b64encoded(self): """Return a base64 encoding of the key. returns: str: base64 encoding of the public key """
if self._b64encoded: return text_type(self._b64encoded).strip("\r\n") else: return base64encode(self.raw)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def raw(self): """Return raw key. returns: str: raw key """
if self._raw: return text_type(self._raw).strip("\r\n") else: return text_type(base64decode(self._b64encoded)).strip("\r\n")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, variable_path: str, default: t.Optional[t.Any] = None, coerce_type: t.Optional[t.Type] = None, coercer: t.Optional[t.Callable] = None, **kwargs): """ Reads a value of ``variable_path`` from consul kv storage. :param variable_path: a delimiter-separated path to a nested value :param default: default value if there's no object by specified path :param coerce_type: cast a type of a value to a specified one :param coercer: perform a type casting with specified callback :param kwargs: additional arguments inherited parser may need :return: value or default :raises config.exceptions.KVStorageKeyDoestNotExist: if specified ``endpoint`` does not exists :raises config.exceptions.KVStorageValueIsEmpty: if specified ``endpoint`` does not contain a config """
return self.inner_parser.get( variable_path, default=default, coerce_type=coerce_type, coercer=coercer, **kwargs, )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_default_mimetype(self): """ Returns the default mimetype """
mimetype = self.default_mimetype if mimetype is None: # class inherits from module default mimetype = DEFAULT_MIMETYPE if mimetype is None: # module is set to None? mimetype = 'application/rdf+xml' return mimetype
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_serialize_format(self, mimetype): """ Get the serialization format for the given mimetype """
format = self.formats.get(mimetype, None) if format is None: format = formats.get(mimetype, None) return format
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def wants_rdf(self, accepts): """ Returns whether this client's Accept header indicates that the client wants to receive RDF """
mimetype = mimeparse.best_match(all_mimetypes + self.all_mimetypes + [WILDCARD], accepts) return mimetype and mimetype != WILDCARD
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def send_http(session, method, url, *, retries=1, interval=1, backoff=2, http_status_codes_to_retry=HTTP_STATUS_CODES_TO_RETRY, fn=lambda x:x, **kwargs): """ Sends a HTTP request and implements a retry logic. Arguments: session (obj): A client aiohttp session object method (str): Method to use url (str): URL for the request retries (int): Number of times to retry in case of failure interval (float): Time to wait before retries backoff (int): Multiply interval by this factor after each failure http_status_codes_to_retry (List[int]): List of status codes to retry fn (Callable[[x],x]: Function to call on successful connection """
backoff_interval = interval
raised_exc = None
attempt = 0

if method not in ['get', 'patch', 'post']:
    raise ValueError

if retries == -1:  # -1 means retry indefinitely
    attempt = -1
elif retries == 0:  # Zero means don't retry
    attempt = 1
else:  # any other value means retry N times
    attempt = retries + 1

while attempt != 0:
    if raised_exc:
        logger.error('Caught "%s" url:%s method:%s, remaining tries %s, '
                     'sleeping %.2fsecs', raised_exc, method.upper(), url,
                     attempt, backoff_interval)
        await asyncio.sleep(backoff_interval)
        # bump interval for the next possible attempt
        backoff_interval *= backoff
    # logger.info('sending %s %s with %s', method.upper(), url, kwargs)
    try:
        async with await getattr(session, method)(url, **kwargs) as response:
            if response.status == 200:
                return await fn(response)
            elif response.status in http_status_codes_to_retry:
                logger.error(
                    'Received invalid response code:%s error:%s'
                    ' response:%s url:%s',
                    response.status, '', response.reason, url)
                raise aiohttp.ClientResponseError(
                    code=response.status, message=response.reason,
                    request_info=response.request_info,
                    history=response.history)
            else:
                raise FailedRequest(
                    code=response.status,
                    message='Non-retryable response code',
                    raised='aiohttp.ClientResponseError',
                    url=url)
    except aiohttp.ClientError as exc:
        try:
            code = exc.code
        except AttributeError:
            code = ''
        raised_exc = FailedRequest(
            code=code, message=exc,
            raised='%s.%s' % (exc.__class__.__module__,
                              exc.__class__.__qualname__),
            url=url)
    except asyncio.TimeoutError as exc:
        raised_exc = FailedRequest(
            code='', message='asyncio.TimeoutError',
            raised='%s.%s' % (exc.__class__.__module__,
                              exc.__class__.__qualname__),
            url=url)
    else:
        raised_exc = None
        break

    attempt -= 1

if raised_exc:
    raise raised_exc
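A minimal usage sketch for the retry helper above; it assumes the surrounding module already provides ``send_http``, ``FailedRequest`` and ``logger``, and the URL and handler are illustrative only, not part of the original source.

import asyncio
import aiohttp

async def fetch_json(url):
    # Retry up to 3 times, doubling the wait between attempts, and parse
    # the body as JSON once a 200 response arrives.
    async with aiohttp.ClientSession() as session:
        return await send_http(session, 'get', url,
                               retries=3, interval=1, backoff=2,
                               fn=lambda resp: resp.json())

# asyncio.run(fetch_json('https://example.com/api/health'))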
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_missing_commands(_platform): """Check that the commands needed for managing users can be found, and return any that are missing."""
missing = list()
if _platform in ('Linux', 'OpenBSD'):
    if not LINUX_CMD_USERADD:
        missing.append('useradd')
    if not LINUX_CMD_USERMOD:
        missing.append('usermod')
    if not LINUX_CMD_USERDEL:
        missing.append('userdel')
    if not LINUX_CMD_GROUP_ADD:
        missing.append('groupadd')
    if not LINUX_CMD_GROUP_DEL:
        missing.append('groupdel')
elif _platform == 'FreeBSD':  # pragma: FreeBSD
    # FREEBSD COMMANDS
    if not FREEBSD_CMD_PW:
        missing.append('pw')
if missing:
    print('\nMISSING = {0}'.format(missing))
return missing
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def execute_command(command=None): """Execute a command and return the stdout and stderr."""
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()  # communicate() returns (stdout, stderr)
process.wait()
return (stdout, stderr), process.returncode
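A short usage sketch for the command runner above; the command and the handling of its output are illustrative assumptions, not part of the original source.

# execute_command expects the command as an argv-style list.
(stdout, stderr), returncode = execute_command(['ls', '-l', '/tmp'])
if returncode != 0:
    print('command failed: {0}'.format(stderr.decode('UTF-8')))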
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def base64encode(_input=None): """Return base64 encoded representation of a string."""
if PY2:  # pragma: no cover
    return base64.b64encode(_input)
elif PY3:  # pragma: no cover
    if isinstance(_input, bytes):
        return base64.b64encode(_input).decode('UTF-8')
    elif isinstance(_input, str):
        return base64.b64encode(bytearray(_input, encoding='UTF-8')).decode('UTF-8')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def base64decode(_input=None): """Take a base64 encoded string and return the decoded string."""
missing_padding = 4 - len(_input) % 4
if missing_padding:
    _input += '=' * missing_padding
if PY2:  # pragma: no cover
    return base64.decodestring(_input)
elif PY3:  # pragma: no cover
    if isinstance(_input, bytes):
        return base64.b64decode(_input).decode('UTF-8')
    elif isinstance(_input, str):
        return base64.b64decode(bytearray(_input, encoding='UTF-8')).decode('UTF-8')
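A small round-trip sketch for the two base64 helpers above (Python 3 path); the sample string is an illustrative assumption.

key_text = 'ssh-rsa AAAAB3NzaC1yc2E example@host'
encoded = base64encode(key_text)      # str containing the base64 form
assert base64decode(encoded) == key_text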
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_sudoers(): """ Read the sudoers file. returns: list: non-comment, non-empty entries from the sudoers file. """
sudoers_path = '/etc/sudoers'
rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH)
tmp_sudoers_path = '/tmp/sudoers_{0}'.format(rnd_chars)
sudoers_entries = list()
copy_result = execute_command(
    shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), sudoers_path, tmp_sudoers_path))))
result_message = copy_result[0][1].decode('UTF-8')
if 'No such file or directory' not in result_message:
    execute_command(shlex.split(str('{0} chmod 755 {1}'.format(sudo_check(), tmp_sudoers_path))))
    with open(tmp_sudoers_path) as tmp_sudoers_file:
        for line in tmp_sudoers_file:
            stripped = line.strip().replace(os.linesep, '')
            if stripped and not stripped.startswith('#'):
                sudoers_entries.append(stripped)
    execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_sudoers_path))))
return sudoers_entries
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write_sudoers_entry(username=None, sudoers_entry=None): """Write a sudoers entry for the specified user. args: username (str): username. sudoers_entry (str): sudoers entry to write for the user; any existing entry for the user is replaced, and a falsy value removes it. """
sudoers_path = '/etc/sudoers'
rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH)
tmp_sudoers_path = '/tmp/sudoers_{0}'.format(rnd_chars)
execute_command(
    shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), sudoers_path, tmp_sudoers_path))))
execute_command(
    shlex.split(str('{0} chmod 777 {1}'.format(sudo_check(), tmp_sudoers_path))))
with open(tmp_sudoers_path, mode=text_type('r')) as tmp_sudoers_file:
    sudoers_entries = tmp_sudoers_file.readlines()
sudoers_output = list()
for entry in sudoers_entries:
    if entry and not entry.startswith(username):
        sudoers_output.append(entry)
if sudoers_entry:
    sudoers_output.append('{0} {1}'.format(username, sudoers_entry))
    sudoers_output.append('\n')
with open(tmp_sudoers_path, mode=text_type('w+')) as tmp_sudoers_file:
    tmp_sudoers_file.writelines(sudoers_output)
sudoers_check_result = execute_command(
    shlex.split(str('{0} {1} -cf {2}'.format(sudo_check(), LINUX_CMD_VISUDO, tmp_sudoers_path))))
if sudoers_check_result[1] > 0:
    raise ValueError(sudoers_check_result[0][1])
execute_command(
    shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), tmp_sudoers_path, sudoers_path))))
execute_command(shlex.split(str('{0} chown root:root {1}'.format(sudo_check(), sudoers_path))))
execute_command(shlex.split(str('{0} chmod 440 {1}'.format(sudo_check(), sudoers_path))))
execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_sudoers_path))))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sudoers_entry(username=None, sudoers_entries=None): """ Find the sudoers entry in the sudoers file for the specified user. args: username (str): username. sudoers_entries (list): list of lines from the sudoers file. returns: str: sudoers entry for the specified user. """
for entry in sudoers_entries:
    if entry.startswith(username):
        return entry.replace(username, '').strip()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def docstring(documentation, prepend=False, join=""): r"""Prepend or append a string to the current documentation of the function. This decorator should be robust even if ``func.__doc__`` is None (for example, if -OO was passed to the interpreter). Usage:: @docstring('Appended this line') def func(): "This docstring will have a line below." pass This docstring will have a line below. Appended this line Args: documentation (str): Documentation string that should be added, appended or prepended to the current documentation string. prepend (bool): Prepend the documentation string to the current documentation if ``True`` else append. default=``False`` join (str): String used to separate docstrings. default='\n' """
def decorator(func):
    current = (func.__doc__ if func.__doc__ else "").strip()
    doc = documentation.strip()

    new = "\n".join(
        [doc, join, current] if prepend else [current, join, doc]
    )

    lines = len(new.strip().splitlines())
    if lines == 1:
        # If it's a one liner keep it that way and strip whitespace
        func.__doc__ = new.strip()
    else:
        # Else strip whitespace from the beginning and add a newline
        # at the end
        func.__doc__ = new.strip() + "\n"
    return func

return decorator
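A short usage sketch for the decorator above, using the default ``prepend=False`` and ``join=""``; the function name and docstrings are illustrative.

@docstring("Extra note added by the decorator.")
def greet():
    """Say hello."""
    return "hello"

print(greet.__doc__)
# Say hello.
#
# Extra note added by the decorator.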
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_gunicorn(application: WSGIHandler, gunicorn_module_name: str = 'gunicorn_prod'): """ Runs gunicorn with a specified config. :param application: Django WSGI application :param gunicorn_module_name: gunicorn settings module name :return: ``Application().run()`` """
from gunicorn.app.base import Application

class DjangoApplication(Application):
    def init(self, parser, opts, args):
        cfg = self.get_config_from_module_name(gunicorn_module_name)
        clean_cfg = {}
        for k, v in cfg.items():
            # Ignore unknown names
            if k not in self.cfg.settings:
                continue
            clean_cfg[k.lower()] = v
        return clean_cfg

    def load(self) -> WSGIHandler:
        return application

return DjangoApplication().run()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _colorize_single_line(line, regexp, color_def): """Print single line to console with ability to colorize parts of it."""
match = regexp.match(line)
groupdict = match.groupdict()
groups = match.groups()
if not groupdict:
    # no named groups, just colorize whole line
    color = color_def[0]
    dark = color_def[1]
    cprint("%s\n" % line, color, fg_dark=dark)
else:
    rev_groups = {v: k for k, v in groupdict.items()}
    for part in groups:
        if part in rev_groups and rev_groups[part] in color_def:
            group_name = rev_groups[part]
            cprint(
                part,
                color_def[group_name][0],
                fg_dark=color_def[group_name][1],
            )
        else:
            cprint(part)
    cprint("\n")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def height(self): """Terminal height. """
if self.interactive:
    if self._height is None:
        self._height = self.term.height
    return self._height
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clear_last_lines(self, n): """Clear last N lines of terminal output. """
self.term.stream.write(
    self.term.move_up * n + self.term.clear_eos)
self.term.stream.flush()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def overwrite_line(self, n, text): """Move back N lines and overwrite line with `text`. """
with self._moveback(n): self.term.stream.write(text)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def move_to(self, n): """Move back N lines in terminal. """
self.term.stream.write(self.term.move_up * n)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, variable_path: str, default: t.Optional[t.Any] = None, coerce_type: t.Optional[t.Type] = None, coercer: t.Optional[t.Callable] = None, required: bool = False, **kwargs): """ Tries to read a ``variable_path`` from each of the passed parsers. It stops as soon as a read succeeds and returns the retrieved value. If none of the parsers contain a value for the specified path it returns ``default``. :param variable_path: a path to a variable in the config :param default: a default value if ``variable_path`` is not present anywhere :param coerce_type: cast the result to the specified type :param coercer: perform the type casting with the specified callback :param required: raise ``RequiredValueIsEmpty`` if there is no ``default`` and no value could be read :param kwargs: additional options passed to all parsers :return: **the first successfully read** value from the list of parser instances or ``default`` :raises config.exceptions.RequiredValueIsEmpty: if nothing is read, the ``required`` flag is set, and no ``default`` is specified """
for p in self.parsers:
    try:
        val = p.get(
            variable_path, default=self.sentinel,
            coerce_type=coerce_type, coercer=coercer,
            **kwargs
        )
        if val != self.sentinel:
            self.enqueue(variable_path, p, val)
            return val
    except Exception as e:
        if not self.silent:
            raise
        if self.suppress_logs:
            continue
        self.logger.error('Parser {0} cannot get key `{1}`: {2}'.format(
            p.__class__.__name__,
            variable_path,
            str(e)
        ))

self.enqueue(variable_path, value=default)

if not default and required:
    raise exceptions.RequiredValueIsEmpty(
        'No default provided and no value read for `{0}`'.format(variable_path))

return default
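A sketch of how the fallback chain above might be used; the class names and constructor arguments (``ConfigLoader``, ``EnvironmentParser``, ``YamlParser``) are assumptions for illustration and may differ from the actual library.

# Environment variables are consulted first, then a YAML file.
config = ConfigLoader(parsers=[EnvironmentParser(), YamlParser('config.yml')])
db_port = config.get('db.port', default=5432, coerce_type=int)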
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_config_read_queue(self, use_color: bool = False, max_col_width: int = 50) -> str: """ Prepares a string with pretty printed config read queue. :param use_color: use terminal colors :param max_col_width: limit column width, ``50`` by default :return: """
try:
    from terminaltables import SingleTable
except ImportError:
    import warnings
    warnings.warn('Cannot display config read queue. Install terminaltables first.')
    return ''

col_names_order = ['path', 'value', 'type', 'parser']
pretty_bundles = [[self._colorize(name, name.capitalize(), use_color=use_color)
                   for name in col_names_order]]

for config_read_item in self.config_read_queue:
    pretty_attrs = [
        config_read_item.variable_path,
        config_read_item.value,
        config_read_item.type,
        config_read_item.parser_name
    ]
    pretty_attrs = [self._pformat(pa, max_col_width) for pa in pretty_attrs]

    if config_read_item.is_default:
        pretty_attrs[0] = '*' + pretty_attrs[0]

    if use_color:
        pretty_attrs = [self._colorize(column_name, pretty_attr, use_color=use_color)
                        for column_name, pretty_attr in zip(col_names_order, pretty_attrs)]
    pretty_bundles.append(pretty_attrs)

table = SingleTable(pretty_bundles)
table.title = self._colorize('title', 'CONFIG READ QUEUE', use_color=use_color)
table.justify_columns[0] = 'right'
# table.inner_row_border = True
return str(table.table)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_graph(cls, response): """ Given a Flask response, find the rdflib Graph """
if cls.is_graph(response):  # single graph object
    return response
if hasattr(response, '__getitem__'):  # indexable tuple
    if len(response) > 0 and cls.is_graph(response[0]):  # graph object
        return response[0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def replace_graph(cls, response, serialized): """ Replace the rdflib Graph in a Flask response """
if cls.is_graph(response):  # single graph object
    return serialized
if hasattr(response, '__getitem__'):  # indexable tuple
    if len(response) > 0 and cls.is_graph(response[0]):  # graph object
        return (serialized,) + response[1:]
return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _from_hex_digest(digest): """Convert hex digest to sequence of bytes."""
return "".join( [chr(int(digest[x : x + 2], 16)) for x in range(0, len(digest), 2)] )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def encrypt(data, digest=True): """Perform encryption of provided data."""
alg = get_best_algorithm()
enc = implementations["encryption"][alg](
    data, implementations["get_key"]()
)
return "%s$%s" % (alg, (_to_hex_digest(enc) if digest else enc))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decrypt(data, digest=True): """Decrypt provided data."""
alg, _, data = data.rpartition("$")
if not alg:
    return data

data = _from_hex_digest(data) if digest else data

try:
    return implementations["decryption"][alg](
        data, implementations["get_key"]()
    )
except KeyError:
    raise CryptError("Can not decrypt key for algorithm: %s" % alg)
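A hypothetical round trip for the two crypto helpers above; it assumes the module-level ``implementations`` registry and ``get_best_algorithm()`` are already configured with at least one cipher and a key provider.

token = encrypt('s3cret')      # e.g. 'aes$6a31...' when digest=True
plain = decrypt(token)
assert plain == 's3cret'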
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def gen_vocab(cli, args):
    ''' Generate vocabulary list from a tokenized file '''
    if args.topk and args.topk <= 0:
        topk = None
        cli.logger.warning("Invalid k will be ignored (k should be greater than or equal to 1)")
    else:
        topk = args.topk

    if args.stopwords:
        with open(args.stopwords, 'r') as swfile:
            stopwords = swfile.read().splitlines()
    else:
        stopwords = []

    if os.path.isfile(args.input):
        cli.logger.info("Generating vocabulary list from file {}".format(args.input))
        with codecs.open(args.input, encoding='utf-8') as infile:
            if args.output:
                cli.logger.info("Output: {}".format(args.output))
            rp = TextReport(args.output)
            lines = infile.read().splitlines()
            c = Counter()
            for line in lines:
                words = line.split()
                c.update(w for w in words if w not in stopwords)
            # report vocab
            word_freq = c.most_common(topk)
            words = [k for k, v in word_freq]
            rp.header("Lexicon")
            rp.writeline("\n".join(textwrap.wrap(" ".join(w for w in words), width=70)))
            for k, v in word_freq:
                rp.print("{}: {}".format(k, v))
    else:
        cli.logger.warning("File {} does not exist".format(args.input))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def main():
    ''' ChirpText Tools main function '''
    app = CLIApp(desc='ChirpText Tools', logger=__name__, show_version=show_version)
    # add tasks
    vocab_task = app.add_task('vocab', func=gen_vocab)
    vocab_task.add_argument('input', help='Input file')
    vocab_task.add_argument('--output', help='Output file', default=None)
    vocab_task.add_argument('--stopwords', help='Stop word to ignore', default=None)
    vocab_task.add_argument('-k', '--topk', help='Only select the top k frequent elements', default=None, type=int)
    # run app
    app.run()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_attachment(message, attachment, rfc2231=True):
    '''Attach an attachment to a message as a side effect.

    Arguments:
    message: MIMEMultipart instance.
    attachment: Attachment instance.
    '''
    data = attachment.read()
    part = MIMEBase('application', 'octet-stream')
    part.set_payload(data)
    encoders.encode_base64(part)
    filename = attachment.name if rfc2231 else Header(attachment.name).encode()
    part.add_header('Content-Disposition', 'attachment', filename=filename)
    message.attach(part)
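A short usage sketch for ``add_attachment``; ``FileAttachment`` is a hypothetical helper standing in for any object that exposes the ``.read()`` and ``.name`` attributes the function expects.

from email.mime.multipart import MIMEMultipart

class FileAttachment:
    """Hypothetical stand-in exposing .read() and .name."""
    def __init__(self, path):
        self.name = path
        self._fh = open(path, 'rb')
    def read(self):
        return self._fh.read()

message = MIMEMultipart()
add_attachment(message, FileAttachment('report.pdf'))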