<SYSTEM_TASK:> Returns HTTP headers of specified WebDAV actions. <END_TASK> <USER_TASK:> Description:
def get_headers(self, action, headers_ext=None):
    """Returns HTTP headers of specified WebDAV actions.

    :param action: the identifier of action.
    :param headers_ext: (optional) the additional headers list which should be added
                        to basic HTTP headers for the specified action.
    :return: the dictionary of headers for the specified action.
    """
    if action in Client.http_header:
        try:
            headers = Client.http_header[action].copy()
        except AttributeError:
            headers = Client.http_header[action][:]
    else:
        headers = list()

    if headers_ext:
        headers.extend(headers_ext)
    if self.webdav.token:
        webdav_token = f'Authorization: OAuth {self.webdav.token}'
        headers.append(webdav_token)
    # Split each header string only once, so values containing ':' stay intact.
    return dict([map(lambda s: s.strip(), i.split(':', 1)) for i in headers])
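A minimal sketch of the final header-to-dict conversion, using hypothetical header strings:

headers = ['Accept: */*', 'Depth: 1', 'Authorization: OAuth abc123']
# Split each 'Name: value' string once and strip whitespace on both sides.
print(dict(tuple(map(str.strip, h.split(':', 1))) for h in headers))
# {'Accept': '*/*', 'Depth': '1', 'Authorization': 'OAuth abc123'}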
<SYSTEM_TASK:> Generate request to WebDAV server for specified action and path and execute it. <END_TASK> <USER_TASK:> Description:
def execute_request(self, action, path, data=None, headers_ext=None):
    """Generate request to WebDAV server for specified action and path and execute it.

    :param action: the action for WebDAV server which should be executed.
    :param path: the path to resource for action.
    :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded),
                 bytes, or file-like object to send in the body of the :class:`Request`.
    :param headers_ext: (optional) the additional headers list which should be added
                        to basic HTTP headers for the specified action.
    :return: HTTP response of request.
    """
    response = self.session.request(
        method=Client.requests[action],
        url=self.get_url(path),
        auth=self.webdav.auth,
        headers=self.get_headers(action, headers_ext),
        timeout=self.timeout,
        data=data
    )
    if response.status_code == 507:
        raise NotEnoughSpace()
    if 499 < response.status_code < 600:
        raise ServerException(url=self.get_url(path), code=response.status_code, message=response.content)
    if response.status_code >= 400:
        raise ResponseErrorCode(url=self.get_url(path), code=response.status_code, message=response.content)
    return response
<SYSTEM_TASK:> Validates WebDAV and proxy settings. <END_TASK> <USER_TASK:> Description:
def valid(self):
    """Validates WebDAV and proxy settings.

    :return: True in case settings are valid and False otherwise.
    """
    return bool(self.webdav.valid() and self.proxy.valid())
<SYSTEM_TASK:> Downloads file from WebDAV server and writes it into a buffer. <END_TASK> <USER_TASK:> Description:
def download_from(self, buff, remote_path):
    """Downloads file from WebDAV server and writes it into a buffer.

    :param buff: buffer object for writing of downloaded file content.
    :param remote_path: path to file on WebDAV server.
    """
    urn = Urn(remote_path)
    if self.is_dir(urn.path()):
        raise OptionNotValid(name='remote_path', value=remote_path)

    if not self.check(urn.path()):
        raise RemoteResourceNotFound(urn.path())

    response = self.execute_request(action='download', path=urn.quote())
    buff.write(response.content)
<SYSTEM_TASK:> Downloads a directory and all nested files and directories from remote WebDAV to local. <END_TASK> <USER_TASK:> Description:
def download_directory(self, remote_path, local_path, progress=None):
    """Downloads a directory and all nested files and directories from remote WebDAV to local.

    If anything already exists at the local path, it is deleted and created anew.

    :param remote_path: the path to directory for downloading from WebDAV server.
    :param local_path: the path to local directory for saving downloaded files and directories.
    :param progress: Progress function. Not supported now.
    """
    urn = Urn(remote_path, directory=True)
    if not self.is_dir(urn.path()):
        raise OptionNotValid(name='remote_path', value=remote_path)

    if os.path.exists(local_path):
        shutil.rmtree(local_path)

    os.makedirs(local_path)

    for resource_name in self.list(urn.path()):
        _remote_path = f'{urn.path()}{resource_name}'
        _local_path = os.path.join(local_path, resource_name)
        self.download(local_path=_local_path, remote_path=_remote_path, progress=progress)
<SYSTEM_TASK:> Downloads remote resources from WebDAV server synchronously. <END_TASK> <USER_TASK:> Description: def download_sync(self, remote_path, local_path, callback=None): """Downloads remote resources from WebDAV server synchronously. :param remote_path: the path to remote resource on WebDAV server. Can be file and directory. :param local_path: the path to save resource locally. :param callback: the callback which will be invoked when downloading is complete. """
    self.download(local_path=local_path, remote_path=remote_path)

    if callback:
        callback()
<SYSTEM_TASK:> Downloads remote resources from WebDAV server asynchronously <END_TASK> <USER_TASK:> Description: def download_async(self, remote_path, local_path, callback=None): """Downloads remote resources from WebDAV server asynchronously :param remote_path: the path to remote resource on WebDAV server. Can be file and directory. :param local_path: the path to save resource locally. :param callback: the callback which will be invoked when downloading is complete. """
    target = (lambda: self.download_sync(local_path=local_path, remote_path=remote_path, callback=callback))
    threading.Thread(target=target).start()
<SYSTEM_TASK:> Uploads directory to remote path on WebDAV server. <END_TASK> <USER_TASK:> Description:
def upload_directory(self, remote_path, local_path, progress=None):
    """Uploads directory to remote path on WebDAV server.

    If the directory already exists on the remote server, it is deleted and then
    uploaded again with all nested files and directories.

    :param remote_path: the path to directory for uploading on WebDAV server.
    :param local_path: the path to local directory for uploading.
    :param progress: Progress function. Not supported now.
    """
    urn = Urn(remote_path, directory=True)
    if not urn.is_dir():
        raise OptionNotValid(name='remote_path', value=remote_path)

    # Check existence before the directory test; otherwise a missing local path
    # could never reach the LocalResourceNotFound branch.
    if not os.path.exists(local_path):
        raise LocalResourceNotFound(local_path)

    if not os.path.isdir(local_path):
        raise OptionNotValid(name='local_path', value=local_path)

    if self.check(urn.path()):
        self.clean(urn.path())

    self.mkdir(remote_path)

    for resource_name in listdir(local_path):
        _remote_path = f'{urn.path()}{resource_name}'
        _local_path = os.path.join(local_path, resource_name)
        self.upload(local_path=_local_path, remote_path=_remote_path, progress=progress)
<SYSTEM_TASK:> Uploads resource to remote path on WebDAV server synchronously. <END_TASK> <USER_TASK:> Description:
def upload_sync(self, remote_path, local_path, callback=None):
    """Uploads resource to remote path on WebDAV server synchronously.

    In case the resource is a directory, all nested files and directories are uploaded.

    :param remote_path: the path for uploading resources on WebDAV server. Can be file and directory.
    :param local_path: the path to local resource for uploading.
    :param callback: the callback which will be invoked when uploading is complete.
    """
    self.upload(local_path=local_path, remote_path=remote_path)

    if callback:
        callback()
<SYSTEM_TASK:> Uploads resource to remote path on WebDAV server asynchronously. <END_TASK> <USER_TASK:> Description:
def upload_async(self, remote_path, local_path, callback=None):
    """Uploads resource to remote path on WebDAV server asynchronously.

    In case the resource is a directory, all nested files and directories are uploaded.

    :param remote_path: the path for uploading resources on WebDAV server. Can be file and directory.
    :param local_path: the path to local resource for uploading.
    :param callback: the callback which will be invoked when uploading is complete.
    """
    target = (lambda: self.upload_sync(local_path=local_path, remote_path=remote_path, callback=callback))
    threading.Thread(target=target).start()
<SYSTEM_TASK:> Parses the response content XML from WebDAV server and extracts file and directory names. <END_TASK> <USER_TASK:> Description:
def parse_get_list_response(content):
    """Parses the response content XML from WebDAV server and extracts file and directory names.

    :param content: the XML content of HTTP response from WebDAV server for getting list of files by remote path.
    :return: list of extracted file or directory names.
    """
    try:
        tree = etree.fromstring(content)
        hrefs = [Urn.separate + unquote(urlsplit(href.text).path) for href in tree.findall('.//{DAV:}href')]
        return [Urn(href) for href in hrefs]
    except etree.XMLSyntaxError:
        return list()
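For illustration, a made-up PROPFIND multistatus body and the paths this parser would pull out of it (the Urn wrapping is omitted in this sketch):

from lxml import etree
from urllib.parse import unquote, urlsplit

content = b'<d:multistatus xmlns:d="DAV:">' \
          b'<d:response><d:href>/dir/</d:href></d:response>' \
          b'<d:response><d:href>/dir/file%20name.txt</d:href></d:response>' \
          b'</d:multistatus>'
tree = etree.fromstring(content)
print([unquote(urlsplit(e.text).path) for e in tree.findall('.//{DAV:}href')])
# ['/dir/', '/dir/file name.txt']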
<SYSTEM_TASK:> Creates an XML request body for querying free space on the remote WebDAV server. <END_TASK> <USER_TASK:> Description:
def create_free_space_request_content():
    """Creates an XML request body for querying free space on the remote WebDAV server.

    :return: the XML string of request content.
    """
    root = etree.Element('propfind', xmlns='DAV:')
    prop = etree.SubElement(root, 'prop')
    etree.SubElement(prop, 'quota-available-bytes')
    etree.SubElement(prop, 'quota-used-bytes')
    tree = etree.ElementTree(root)
    return WebDavXmlUtils.etree_to_string(tree)
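Serialized via etree_to_string below, the request body comes out roughly as:

<?xml version='1.0' encoding='UTF-8'?>
<propfind xmlns="DAV:"><prop><quota-available-bytes/><quota-used-bytes/></prop></propfind>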
<SYSTEM_TASK:> Parses the response content XML from WebDAV server and extracts the amount of free space. <END_TASK> <USER_TASK:> Description:
def parse_free_space_response(content, hostname):
    """Parses the response content XML from WebDAV server and extracts the amount of free space.

    :param content: the XML content of HTTP response from WebDAV server for getting free space.
    :param hostname: the server hostname.
    :return: an amount of free space in bytes.
    """
    try:
        tree = etree.fromstring(content)
        node = tree.find('.//{DAV:}quota-available-bytes')
        if node is not None:
            return int(node.text)
        else:
            raise MethodNotSupported(name='free', server=hostname)
    except TypeError:
        raise MethodNotSupported(name='free', server=hostname)
    except etree.XMLSyntaxError:
        return str()
<SYSTEM_TASK:> Creates an XML for requesting a property value of a remote WebDAV resource. <END_TASK> <USER_TASK:> Description:
def create_get_property_request_content(option):
    """Creates an XML for requesting a property value of a remote WebDAV resource.

    :param option: the property attributes as dictionary with following keys:
                   `namespace`: (optional) the namespace of the XML property to get,
                   `name`: the name of the property to get.
    :return: the XML string of request content.
    """
    root = etree.Element('propfind', xmlns='DAV:')
    prop = etree.SubElement(root, 'prop')
    etree.SubElement(prop, option.get('name', ''), xmlns=option.get('namespace', ''))
    tree = etree.ElementTree(root)
    return WebDavXmlUtils.etree_to_string(tree)
<SYSTEM_TASK:> Parses the response content XML from WebDAV server to get a metadata property value for some resource. <END_TASK> <USER_TASK:> Description:
def parse_get_property_response(content, name):
    """Parses the response content XML from WebDAV server to get a metadata property value for some resource.

    :param content: the XML content of response as string.
    :param name: the name of property for finding a value in response.
    :return: the value of property if it has been found or None otherwise.
    """
    tree = etree.fromstring(content)
    # Return None when the property is absent, as documented above.
    nodes = tree.xpath('//*[local-name() = $name]', name=name)
    return nodes[0].text if nodes else None
<SYSTEM_TASK:> Creates an XML for setting property values on a remote WebDAV resource in batch. <END_TASK> <USER_TASK:> Description:
def create_set_property_batch_request_content(options):
    """Creates an XML for setting property values on a remote WebDAV resource in batch.

    :param options: the property attributes as list of dictionaries with following keys:
                    `namespace`: (optional) the namespace of the XML property to set,
                    `name`: the name of the property to set,
                    `value`: (optional) the value of the property to set. Default is an empty string.
    :return: the XML string of request content.
    """
    root_node = etree.Element('propertyupdate', xmlns='DAV:')
    set_node = etree.SubElement(root_node, 'set')
    prop_node = etree.SubElement(set_node, 'prop')
    for option in options:
        opt_node = etree.SubElement(prop_node, option['name'], xmlns=option.get('namespace', ''))
        opt_node.text = option.get('value', '')
    tree = etree.ElementTree(root_node)
    return WebDavXmlUtils.etree_to_string(tree)
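As an example, a batch of two hypothetical properties would produce a proppatch body along these lines:

options = [
    {'name': 'display-name', 'value': 'New name'},
    {'namespace': 'urn:example', 'name': 'color', 'value': 'blue'},
]
# -> <propertyupdate xmlns="DAV:"><set><prop>
#        <display-name>New name</display-name>
#        <color xmlns="urn:example">blue</color>
#    </prop></set></propertyupdate>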
<SYSTEM_TASK:> Creates string from lxml.etree.ElementTree with XML declaration and UTF-8 encoding. <END_TASK> <USER_TASK:> Description: def etree_to_string(tree): """Creates string from lxml.etree.ElementTree with XML declaration and UTF-8 encoding. :param tree: the instance of ElementTree :return: the string of XML. """
    buff = BytesIO()
    tree.write(buff, xml_declaration=True, encoding='UTF-8')
    return buff.getvalue()
<SYSTEM_TASK:> Extracts single response for specified remote resource. <END_TASK> <USER_TASK:> Description: def extract_response_for_path(content, path, hostname): """Extracts single response for specified remote resource. :param content: raw content of response as string. :param path: the path to needed remote resource. :param hostname: the server hostname. :return: XML object of response for the remote resource defined by path. """
    try:
        tree = etree.fromstring(content)
        responses = tree.findall('{DAV:}response')
        n_path = Urn.normalize_path(path)
        for resp in responses:
            href = resp.findtext('{DAV:}href')
            if Urn.compare_path(n_path, href) is True:
                return resp
        raise RemoteResourceNotFound(path)
    except etree.XMLSyntaxError:
        raise MethodNotSupported(name='is_dir', server=hostname)
<SYSTEM_TASK:> Remove temporary stderr and stdout files as well as the daemon socket. <END_TASK> <USER_TASK:> Description: def cleanup(config_dir): """Remove temporary stderr and stdout files as well as the daemon socket."""
    stdout_path = os.path.join(config_dir, 'pueue.stdout')
    stderr_path = os.path.join(config_dir, 'pueue.stderr')

    # `os._exists` is a private helper; use the public `os.path.exists` instead.
    if os.path.exists(stdout_path):
        os.remove(stdout_path)

    if os.path.exists(stderr_path):
        os.remove(stderr_path)

    socket_path = os.path.join(config_dir, 'pueue.sock')
    if os.path.exists(socket_path):
        os.remove(socket_path)
<SYSTEM_TASK:> Get the descriptor output and handle incorrect UTF-8 encoding of subprocess logs. <END_TASK> <USER_TASK:> Description:
def get_descriptor_output(descriptor, key, handler=None):
    """Get the descriptor output and handle incorrect UTF-8 encoding of subprocess logs.

    In case a process produces valid UTF-8 lines as well as invalid ones, we want to
    preserve the valid lines and remove the invalid ones. To do this we read each
    line and check for a UnicodeDecodeError.
    """
    line = 'stub'
    lines = ''
    while line != '':
        try:
            line = descriptor.readline()
            lines += line
        except UnicodeDecodeError:
            error_msg = "Error while decoding output of process {}".format(key)
            if handler:
                handler.logger.error("{} with command {}".format(
                    error_msg, handler.queue[key]['command']))
            lines += error_msg + '\n'
    return lines.replace('\n', '\n ')
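A short usage sketch, assuming a hypothetical log file opened in text mode:

with open('pueue_process_1.stdout', 'r') as descriptor:  # hypothetical path
    print(get_descriptor_output(descriptor, 1))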
<SYSTEM_TASK:> Query conversion server <END_TASK> <USER_TASK:> Description: def request(self, hash_, quickkey, doc_type, page=None, output=None, size_id=None, metadata=None, request_conversion_only=None): """Query conversion server hash_: 4 characters of file hash quickkey: File quickkey doc_type: "i" for image, "d" for documents page: The page to convert. If page is set to 'initial', the first 10 pages of the document will be provided. (document) output: "pdf", "img", or "swf" (document) size_id: 0,1,2 (document) 0-9, a-f, z (image) metadata: Set to 1 to get metadata dict request_conversion_only: Request conversion w/o content """
    if len(hash_) > 4:
        hash_ = hash_[:4]

    query = QueryParams({
        'quickkey': quickkey,
        'doc_type': doc_type,
        'page': page,
        'output': output,
        'size_id': size_id,
        'metadata': metadata,
        'request_conversion_only': request_conversion_only
    })

    url = API_ENDPOINT + '?' + hash_ + '&' + urlencode(query)

    response = self.http.get(url, stream=True)

    if response.status_code == 204:
        raise ConversionServerError("Unable to fulfill request. "
                                    "The document will not be converted.",
                                    response.status_code)

    response.raise_for_status()

    if response.headers['content-type'] == 'application/json':
        return response.json()

    return response
<SYSTEM_TASK:> Call every command that was set for the current active breakpoints. <END_TASK> <USER_TASK:> Description: def bp_commands(self, frame, breakpoint_hits): """Call every command that was set for the current active breakpoints. Returns True if the normal interaction function must be called, False otherwise."""
    # Handle multiple breakpoints on the same line (issue 14789)
    effective_bp_list, temporaries = breakpoint_hits
    silent = True
    doprompt = False
    atleast_one_cmd = False
    for bp in effective_bp_list:
        if bp in self.commands:
            if not atleast_one_cmd:
                atleast_one_cmd = True
                self.setup(frame, None)
                lastcmd_back = self.lastcmd
            for line in self.commands[bp]:
                self.onecmd(line)
            self.lastcmd = lastcmd_back
            if not self.commands_silent[bp]:
                silent = False
            if self.commands_doprompt[bp]:
                doprompt = True

    # Delete the temporary breakpoints.
    tmp_to_delete = ' '.join(str(bp) for bp in temporaries)
    if tmp_to_delete:
        self.do_clear(tmp_to_delete)

    if atleast_one_cmd:
        return doprompt, silent
    return None
<SYSTEM_TASK:> Handle alias expansion and ';;' separator. <END_TASK> <USER_TASK:> Description: def precmd(self, line): """Handle alias expansion and ';;' separator."""
    if not line.strip():
        return line
    args = line.split()
    while args[0] in self.aliases:
        line = self.aliases[args[0]]
        ii = 1
        for tmpArg in args[1:]:
            line = line.replace("%" + str(ii), tmpArg)
            ii += 1
        line = line.replace("%*", ' '.join(args[1:]))
        args = line.split()
    # split into ';;' separated commands
    # unless it's an alias command
    if args[0] != 'alias':
        marker = line.find(';;')
        if marker >= 0:
            # queue up everything after marker
            next = line[marker+2:].lstrip()
            self.cmdqueue.append(next)
            line = line[:marker].rstrip()
    return line
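For instance, with a hypothetical alias pi defined as 'p %1.__dict__', the expansion proceeds like this:

# self.aliases = {'pi': 'p %1.__dict__'}   # hypothetical alias
# precmd('pi self')
#   args = ['pi', 'self']       -> alias matched
#   '%1' replaced by 'self'     -> line = 'p self.__dict__'
#   no further alias, no ';;'   -> returns 'p self.__dict__'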
<SYSTEM_TASK:> Interpret the argument as though it had been typed in response <END_TASK> <USER_TASK:> Description: def onecmd(self, line): """Interpret the argument as though it had been typed in response to the prompt. Checks whether this line is typed at the normal prompt or in a breakpoint command list definition. """
    if not self.commands_defining:
        return cmd.Cmd.onecmd(self, line)
    else:
        return self.handle_command_def(line)
<SYSTEM_TASK:> Handles one command line during command list definition. <END_TASK> <USER_TASK:> Description: def handle_command_def(self, line): """Handles one command line during command list definition."""
    cmd, arg, line = self.parseline(line)
    if not cmd:
        return
    if cmd == 'silent':
        self.commands_silent[self.commands_bnum] = True
        return  # continue to handle other cmd def in the cmd list
    elif cmd == 'end':
        self.cmdqueue = []
        return 1  # end of cmd list
    cmdlist = self.commands[self.commands_bnum]
    if arg:
        cmdlist.append(cmd+' '+arg)
    else:
        cmdlist.append(cmd)
    # Determine if we must stop
    try:
        func = getattr(self, 'do_' + cmd)
    except AttributeError:
        func = self.default
    # one of the resuming commands
    if func.__name__ in self.commands_resuming:
        self.commands_doprompt[self.commands_bnum] = False
        self.cmdqueue = []
        return 1
    return
<SYSTEM_TASK:> Produce a reasonable default. <END_TASK> <USER_TASK:> Description: def defaultFile(self): """Produce a reasonable default."""
    filename = self.curframe.f_code.co_filename
    if filename == '<string>' and self.mainpyfile:
        filename = self.mainpyfile
    return filename
<SYSTEM_TASK:> p expression <END_TASK> <USER_TASK:> Description: def do_p(self, arg): """p expression Print the value of the expression. """
    try:
        self.message(bdb.safe_repr(self._getval(arg)))
    except Exception:
        pass
<SYSTEM_TASK:> pp expression <END_TASK> <USER_TASK:> Description: def do_pp(self, arg): """pp expression Pretty-print the value of the expression. """
    obj = self._getval(arg)
    try:
        repr(obj)
    except Exception:
        self.message(bdb.safe_repr(obj))
    else:
        self.message(pprint.pformat(obj))
<SYSTEM_TASK:> longlist | ll <END_TASK> <USER_TASK:> Description: def do_longlist(self, arg): """longlist | ll List the whole source code for the current function or frame. """
    filename = self.curframe.f_code.co_filename
    breaklist = self.get_file_breaks(filename)
    try:
        lines, lineno = getsourcelines(self.curframe, self.get_locals(self.curframe))
    except IOError as err:
        self.error(err)
        return
    self._print_lines(lines, lineno, breaklist, self.curframe)
<SYSTEM_TASK:> source expression <END_TASK> <USER_TASK:> Description: def do_source(self, arg): """source expression Try to get source code for the given object and display it. """
    try:
        obj = self._getval(arg)
    except Exception:
        return
    try:
        lines, lineno = getsourcelines(obj, self.get_locals(self.curframe))
    except (IOError, TypeError) as err:
        self.error(err)
        return
    self._print_lines(lines, lineno)
<SYSTEM_TASK:> Print a range of lines. <END_TASK> <USER_TASK:> Description: def _print_lines(self, lines, start, breaks=(), frame=None): """Print a range of lines."""
    if frame:
        current_lineno = frame.f_lineno
        exc_lineno = self.tb_lineno.get(frame, -1)
    else:
        current_lineno = exc_lineno = -1
    for lineno, line in enumerate(lines, start):
        s = str(lineno).rjust(3)
        if len(s) < 4:
            s += ' '
        if lineno in breaks:
            s += 'B'
        else:
            s += ' '
        if lineno == current_lineno:
            s += '->'
        elif lineno == exc_lineno:
            s += '>>'
        self.message(s + '\t' + line.rstrip())
<SYSTEM_TASK:> whatis arg <END_TASK> <USER_TASK:> Description: def do_whatis(self, arg): """whatis arg Print the type of the argument. """
    try:
        value = self._getval(arg)
    except Exception:
        # _getval() already printed the error
        return
    code = None
    # Is it a function?
    try:
        code = value.__code__
    except Exception:
        pass
    if code:
        self.message('Function %s' % code.co_name)
        return
    # Is it an instance method?
    try:
        code = value.__func__.__code__
    except Exception:
        pass
    if code:
        self.message('Method %s' % code.co_name)
        return
    # Is it a class?
    if value.__class__ is type:
        self.message('Class %s.%s' % (value.__module__, value.__name__))
        return
    # None of the above...
    self.message(type(value))
<SYSTEM_TASK:> unalias name <END_TASK> <USER_TASK:> Description: def do_unalias(self, arg): """unalias name Delete the specified alias. """
    args = arg.split()
    if len(args) == 0:
        return
    if args[0] in self.aliases:
        del self.aliases[args[0]]
<SYSTEM_TASK:> Seek to position in stream, see file.seek <END_TASK> <USER_TASK:> Description: def seek(self, offset, whence=os.SEEK_SET): """Seek to position in stream, see file.seek"""
    pos = None
    if whence == os.SEEK_SET:
        pos = self.offset + offset
    elif whence == os.SEEK_CUR:
        pos = self.tell() + offset
    elif whence == os.SEEK_END:
        pos = self.offset + self.len + offset
    else:
        raise ValueError("invalid whence {}".format(whence))

    if pos > self.offset + self.len or pos < self.offset:
        raise ValueError("seek position beyond chunk area")

    self.parent_fd.seek(pos, os.SEEK_SET)
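A sketch of the chunk-relative semantics, assuming a hypothetical chunk object starting at parent offset 100 with length 50:

chunk.seek(0)                # parent fd moves to absolute position 100
chunk.seek(10, os.SEEK_CUR)  # 10 bytes past the current position, still inside the chunk
chunk.seek(-1, os.SEEK_END)  # absolute position 149, the chunk's last byte
chunk.seek(60)               # raises ValueError: beyond the chunk area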
<SYSTEM_TASK:> Close file, see file.close <END_TASK> <USER_TASK:> Description: def close(self): """Close file, see file.close"""
    try:
        self.parent_fd.fileno()
    except io.UnsupportedOperation:
        logger.debug("Not closing parent_fd - reusing existing")
    else:
        self.parent_fd.close()
<SYSTEM_TASK:> Perform request to MediaFire API <END_TASK> <USER_TASK:> Description: def request(self, action, params=None, action_token_type=None, upload_info=None, headers=None): """Perform request to MediaFire API action -- "category/name" of method to call params -- dict of parameters or query string action_token_type -- action token to use: None, "upload", "image" upload_info -- in case of upload, dict of "fd" and "filename" headers -- additional headers to send (used for upload) session_token and signature generation/update is handled automatically """
    uri = self._build_uri(action)

    if isinstance(params, six.text_type):
        query = params
    else:
        query = self._build_query(uri, params, action_token_type)

    if headers is None:
        headers = {}

    if upload_info is None:
        # Use request body for query
        data = query
        headers['Content-Type'] = FORM_MIMETYPE
    else:
        # Use query string for query since payload is file
        uri += '?' + query

        if "filename" in upload_info:
            data = MultipartEncoder(
                fields={'file': (
                    upload_info["filename"],
                    upload_info["fd"],
                    UPLOAD_MIMETYPE
                )}
            )
            headers["Content-Type"] = data.content_type
        else:
            data = upload_info["fd"]
            headers["Content-Type"] = UPLOAD_MIMETYPE

    logger.debug("uri=%s query=%s", uri, query if not upload_info else None)

    try:
        # bytes from now on
        url = (API_BASE + uri).encode('utf-8')
        if isinstance(data, six.text_type):
            # request's data is bytes, dict, or filehandle
            data = data.encode('utf-8')

        response = self.http.post(url, data=data, headers=headers, stream=True)
    except RequestException as ex:
        logger.exception("HTTP request failed")
        raise MediaFireConnectionError(
            "RequestException: {}".format(ex))

    return self._process_response(response)
<SYSTEM_TASK:> Set action tokens <END_TASK> <USER_TASK:> Description: def set_action_token(self, type_=None, action_token=None): """Set action tokens type_ -- either "upload" or "image" action_token -- string obtained from user/get_action_token, set None to remove the token """
    if action_token is None:
        del self._action_tokens[type_]
    else:
        self._action_tokens[type_] = action_token
<SYSTEM_TASK:> Create a socket for the daemon, depending on the directory location. <END_TASK> <USER_TASK:> Description:
def create_socket(self):
    """Create a socket for the daemon, depending on the directory location.

    The socket is placed in the daemon's config directory (self.config_dir).

    Returns:
        socket.socket: The daemon socket. Clients connect to this socket.
    """
    socket_path = os.path.join(self.config_dir, 'pueue.sock')

    # Create Socket and exit with 1, if socket can't be created
    try:
        if os.path.exists(socket_path):
            os.remove(socket_path)
        self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(socket_path)
        self.socket.setblocking(0)
        self.socket.listen(0)
        # Set file permissions
        os.chmod(socket_path, stat.S_IRWXU)
    except Exception:
        self.logger.error("Daemon couldn't create socket. Aborting")
        self.logger.exception('Socket creation failed')
        sys.exit(1)
    return self.socket
<SYSTEM_TASK:> Create all directories needed for logs and configs. <END_TASK> <USER_TASK:> Description: def initialize_directories(self, root_dir): """Create all directories needed for logs and configs."""
    if not root_dir:
        root_dir = os.path.expanduser('~')

    # Create config directory, if it doesn't exist
    self.config_dir = os.path.join(root_dir, '.config/pueue')
    if not os.path.exists(self.config_dir):
        os.makedirs(self.config_dir)
<SYSTEM_TASK:> Send an answer to the client. <END_TASK> <USER_TASK:> Description: def respond_client(self, answer, socket): """Send an answer to the client."""
    response = pickle.dumps(answer, -1)
    socket.sendall(response)
    self.read_list.remove(socket)
    socket.close()
<SYSTEM_TASK:> Read a previous configuration file or create a new one with default values. <END_TASK> <USER_TASK:> Description:
def read_config(self):
    """Read a previous configuration file or create a new one with default values."""
    config_file = os.path.join(self.config_dir, 'pueue.ini')
    self.config = configparser.ConfigParser()
    # Try to get configuration file and return it.
    # If this doesn't work, a new default config file will be created.
    if os.path.exists(config_file):
        try:
            self.config.read(config_file)
            return
        except Exception:
            self.logger.error('Error while parsing config file. Deleting old config')
            self.logger.exception('Config parsing failed')

    self.config['default'] = {
        'resumeAfterStart': False,
        'maxProcesses': 1,
        'customShell': 'default',
    }
    self.config['log'] = {
        'logTime': 60*60*24*14,
    }
    self.write_config()
<SYSTEM_TASK:> Write the current configuration to the config file. <END_TASK> <USER_TASK:> Description: def write_config(self): """Write the current configuration to the config file."""
    config_file = os.path.join(self.config_dir, 'pueue.ini')
    with open(config_file, 'w') as file_descriptor:
        self.config.write(file_descriptor)
<SYSTEM_TASK:> Kill current processes and initiate daemon shutdown. <END_TASK> <USER_TASK:> Description: def stop_daemon(self, payload=None): """Kill current processes and initiate daemon shutdown. The daemon will shut down after a last check on all killed processes. """
    kill_signal = signals['9']
    self.process_handler.kill_all(kill_signal, True)
    self.running = False

    return {'message': 'Pueue daemon shutting down',
            'status': 'success'}
<SYSTEM_TASK:> Update the current config depending on the payload and save it. <END_TASK> <USER_TASK:> Description: def set_config(self, payload): """Update the current config depending on the payload and save it."""
    self.config['default'][payload['option']] = str(payload['value'])

    if payload['option'] == 'maxProcesses':
        self.process_handler.set_max(payload['value'])

    if payload['option'] == 'customShell':
        path = payload['value']
        if os.path.isfile(path) and os.access(path, os.X_OK):
            self.process_handler.set_shell(path)
        elif path == 'default':
            self.process_handler.set_shell()
        else:
            return {'message': "File in path doesn't exist or is not executable.",
                    'status': 'error'}

    self.write_config()
    return {'message': 'Configuration successfully updated.',
            'status': 'success'}
<SYSTEM_TASK:> Send something to stdin of a specific process. <END_TASK> <USER_TASK:> Description: def pipe_to_process(self, payload): """Send something to stdin of a specific process."""
    message = payload['input']
    key = payload['key']
    if not self.process_handler.is_running(key):
        return {'message': 'No running process for this key',
                'status': 'error'}

    self.process_handler.send_to_process(message, key)
    return {'message': 'Message sent',
            'status': 'success'}
<SYSTEM_TASK:> Send the daemon status and the current queue for displaying. <END_TASK> <USER_TASK:> Description: def send_status(self, payload): """Send the daemon status and the current queue for displaying."""
    answer = {}
    data = []
    # Get daemon status
    if self.paused:
        answer['status'] = 'paused'
    else:
        answer['status'] = 'running'

    # Add current queue or a message, that queue is empty
    if len(self.queue) > 0:
        data = deepcopy(self.queue.queue)
        # Remove stderr and stdout output for transfer.
        # Some outputs are way too big for the socket buffer
        # and this is not needed by the client.
        for key, item in data.items():
            if 'stderr' in item:
                del item['stderr']
            if 'stdout' in item:
                del item['stdout']
    else:
        data = 'Queue is empty'
    answer['data'] = data

    return answer
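The returned payload then has roughly this shape (illustrative values):

{
    'status': 'running',
    'data': {
        0: {'command': 'ls -al', 'path': '/home/user', 'status': 'done',
            'returncode': 0, 'start': '10:00', 'end': '10:01'},
        1: {'command': 'make', 'path': '/home/user/project', 'status': 'running',
            'returncode': '', 'start': '10:02', 'end': ''},
    },
}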
<SYSTEM_TASK:> Kill all processes, delete the queue and clean everything up. <END_TASK> <USER_TASK:> Description: def reset_everything(self, payload): """Kill all processes, delete the queue and clean everything up."""
    kill_signal = signals['9']
    self.process_handler.kill_all(kill_signal, True)
    self.process_handler.wait_for_finish()
    self.reset = True

    answer = {'message': 'Resetting current queue', 'status': 'success'}
    return answer
<SYSTEM_TASK:> Clear queue from any `done` or `failed` entries. <END_TASK> <USER_TASK:> Description:
def clear(self, payload):
    """Clear queue from any `done` or `failed` entries.

    The log will be rotated once. Otherwise we would lose all logs from
    those finished processes.
    """
    self.logger.rotate(self.queue)
    self.queue.clear()
    self.logger.write(self.queue)

    answer = {'message': 'Finished entries have been removed.', 'status': 'success'}
    return answer
<SYSTEM_TASK:> Edit the command of a specific entry. <END_TASK> <USER_TASK:> Description: def edit_command(self, payload): """Edit the command of a specific entry."""
    key = payload['key']
    command = payload['command']
    if self.queue[key]:
        if self.queue[key]['status'] in ['queued', 'stashed']:
            self.queue[key]['command'] = command
            answer = {'message': 'Command updated', 'status': 'success'}
        else:
            answer = {'message': "Entry is not 'queued' or 'stashed'", 'status': 'error'}
    else:
        answer = {'message': 'No entry with this key', 'status': 'error'}

    return answer
<SYSTEM_TASK:> Stash the specified processes. <END_TASK> <USER_TASK:> Description: def stash(self, payload): """Stash the specified processes."""
    succeeded = []
    failed = []
    for key in payload['keys']:
        if self.queue.get(key) is not None:
            if self.queue[key]['status'] == 'queued':
                self.queue[key]['status'] = 'stashed'
                succeeded.append(str(key))
            else:
                failed.append(str(key))
        else:
            failed.append(str(key))

    message = ''
    if len(succeeded) > 0:
        message += 'Stashed entries: {}.'.format(', '.join(succeeded))
        status = 'success'
    if len(failed) > 0:
        message += '\nNo queued entry for keys: {}'.format(', '.join(failed))
        status = 'error'

    answer = {'message': message.strip(), 'status': status}
    return answer
<SYSTEM_TASK:> Pause the daemon and kill all processes or kill a specific process. <END_TASK> <USER_TASK:> Description: def kill_process(self, payload): """Pause the daemon and kill all processes or kill a specific process."""
    # Kill specific processes, if `keys` is given in the payload
    kill_signal = signals[payload['signal'].lower()]
    kill_shell = payload.get('all', False)
    if payload.get('keys'):
        succeeded = []
        failed = []
        for key in payload.get('keys'):
            success = self.process_handler.kill_process(key, kill_signal, kill_shell)
            if success:
                succeeded.append(str(key))
            else:
                failed.append(str(key))

        message = ''
        if len(succeeded) > 0:
            message += "Signal '{}' sent to processes: {}.".format(payload['signal'], ', '.join(succeeded))
            status = 'success'
        if len(failed) > 0:
            message += '\nNo running process for keys: {}'.format(', '.join(failed))
            status = 'error'
        answer = {'message': message.strip(), 'status': status}

    # Kill all processes and the daemon
    else:
        self.process_handler.kill_all(kill_signal, kill_shell)
        if kill_signal == signal.SIGINT or \
           kill_signal == signal.SIGTERM or \
           kill_signal == signal.SIGKILL:
            self.paused = True

        answer = {'message': 'Signal sent to all processes.', 'status': 'success'}
    return answer
<SYSTEM_TASK:> Remove specified entries from the queue. <END_TASK> <USER_TASK:> Description: def remove(self, payload): """Remove specified entries from the queue."""
    succeeded = []
    failed = []
    for key in payload['keys']:
        running = self.process_handler.is_running(key)
        if not running:
            removed = self.queue.remove(key)
            if removed:
                succeeded.append(str(key))
            else:
                failed.append(str(key))
        else:
            failed.append(str(key))

    message = ''
    if len(succeeded) > 0:
        message += 'Removed entries: {}.'.format(', '.join(succeeded))
        status = 'success'
    if len(failed) > 0:
        message += '\nRunning or non-existing entry for keys: {}'.format(', '.join(failed))
        status = 'error'

    answer = {'message': message.strip(), 'status': status}
    return answer
<SYSTEM_TASK:> Switch the two specified entry positions in the queue. <END_TASK> <USER_TASK:> Description: def switch(self, payload): """Switch the two specified entry positions in the queue."""
    first = payload['first']
    second = payload['second']
    running = self.process_handler.is_running(first) or self.process_handler.is_running(second)
    if running:
        answer = {
            'message': "Can't switch running processes, "
                       "please stop the processes before switching them.",
            'status': 'error'
        }
    else:
        switched = self.queue.switch(first, second)
        if switched:
            answer = {
                'message': 'Entries #{} and #{} switched'.format(first, second),
                'status': 'success'
            }
        else:
            answer = {'message': "One or both entries do not exist or are not queued/stashed.",
                      'status': 'error'}
    return answer
<SYSTEM_TASK:> Restart the specified entries. <END_TASK> <USER_TASK:> Description: def restart(self, payload): """Restart the specified entries."""
    succeeded = []
    failed = []
    for key in payload['keys']:
        restarted = self.queue.restart(key)
        if restarted:
            succeeded.append(str(key))
        else:
            failed.append(str(key))

    message = ''
    if len(succeeded) > 0:
        message += 'Restarted entries: {}.'.format(', '.join(succeeded))
        status = 'success'
    if len(failed) > 0:
        message += '\nNo finished entry for keys: {}'.format(', '.join(failed))
        status = 'error'

    answer = {'message': message.strip(), 'status': status}
    return answer
<SYSTEM_TASK:> Add a new command to the daemon queue. <END_TASK> <USER_TASK:> Description:
def execute_add(args, root_dir=None):
    """Add a new command to the daemon queue.

    Args:
        args['command'] (list(str)): The actual program call. Something like ['ls', '-a'] or ['ls -al'].
        root_dir (string): The path to the root directory the daemon is running in.
    """
    # We accept a list of strings.
    # This is done to create a better commandline experience with argparse.
    command = ' '.join(args['command'])

    # Send new instruction to daemon
    instruction = {
        'command': command,
        'path': os.getcwd()
    }
    print_command_factory('add')(instruction, root_dir)
<SYSTEM_TASK:> Edit an existing queue command in the daemon. <END_TASK> <USER_TASK:> Description:
def execute_edit(args, root_dir=None):
    """Edit an existing queue command in the daemon.

    Args:
        args['key'] (int): The key of the queue entry to be edited.
        root_dir (string): The path to the root directory the daemon is running in.
    """
    # Get editor
    EDITOR = os.environ.get('EDITOR', 'vim')

    # Get command from server
    key = args['key']
    status = command_factory('status')({}, root_dir=root_dir)

    # Check if queue is not empty, the entry exists and is queued or stashed
    if not isinstance(status['data'], str) and key in status['data']:
        if status['data'][key]['status'] in ['queued', 'stashed']:
            command = status['data'][key]['command']
        else:
            print("Entry is not 'queued' or 'stashed'")
            sys.exit(1)
    else:
        print('No entry with this key')
        sys.exit(1)

    with tempfile.NamedTemporaryFile(suffix=".tmp") as tf:
        tf.write(command.encode('utf-8'))
        tf.flush()
        call([EDITOR, tf.name])

        # do the parsing with `tf` using regular File operations.
        # for instance:
        tf.seek(0)
        edited_command = tf.read().decode('utf-8')

    print_command_factory('edit')({
        'key': key,
        'command': edited_command,
    }, root_dir=root_dir)
<SYSTEM_TASK:> A factory which returns functions for direct daemon communication. <END_TASK> <USER_TASK:> Description: def command_factory(command): """A factory which returns functions for direct daemon communication. This factory will create a function which sends a payload to the daemon and returns the unpickled object which is returned by the daemon. Args: command (string): The type of payload this should be. This determines as what kind of instruction this will be interpreted by the daemon. Returns: function: The created function. """
    def communicate(body={}, root_dir=None):
        """Communicate with the daemon.

        This function sends a payload to the daemon and returns the
        unpickled object sent by the daemon.

        Args:
            body (dict): Any other arguments that should be put into the payload.
            root_dir (str): The root directory in which we expect the daemon.
                            We need this to connect to the daemons socket.
        Returns:
            function: The returned payload.
        """
        client = connect_socket(root_dir)
        body['mode'] = command
        # Delete the func entry we use to call the correct function with argparse,
        # as functions can't be pickled and this shouldn't be sent to the daemon.
        if 'func' in body:
            del body['func']
        data_string = pickle.dumps(body, -1)
        client.send(data_string)

        # Receive message, unpickle and return it
        response = receive_data(client)
        return response
    return communicate
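Usage sketch, with a hypothetical root directory:

status = command_factory('status')({}, root_dir='/home/user')
print(status['status'])  # e.g. 'running' or 'paused'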
<SYSTEM_TASK:> Create file descriptors for process output. <END_TASK> <USER_TASK:> Description: def get_descriptor(self, number): """Create file descriptors for process output."""
    # Create stdout file and get file descriptor
    stdout_path = os.path.join(self.config_dir, 'pueue_process_{}.stdout'.format(number))
    if os.path.exists(stdout_path):
        os.remove(stdout_path)
    out_descriptor = open(stdout_path, 'w+')

    # Create stderr file and get file descriptor
    stderr_path = os.path.join(self.config_dir, 'pueue_process_{}.stderr'.format(number))
    if os.path.exists(stderr_path):
        os.remove(stderr_path)
    err_descriptor = open(stderr_path, 'w+')

    self.descriptors[number] = {}
    self.descriptors[number]['stdout'] = out_descriptor
    self.descriptors[number]['stdout_path'] = stdout_path
    self.descriptors[number]['stderr'] = err_descriptor
    self.descriptors[number]['stderr_path'] = stderr_path
    return out_descriptor, err_descriptor
<SYSTEM_TASK:> Close file descriptor and remove underlying files. <END_TASK> <USER_TASK:> Description: def clean_descriptor(self, number): """Close file descriptor and remove underlying files."""
    self.descriptors[number]['stdout'].close()
    self.descriptors[number]['stderr'].close()
    if os.path.exists(self.descriptors[number]['stdout_path']):
        os.remove(self.descriptors[number]['stdout_path'])
    if os.path.exists(self.descriptors[number]['stderr_path']):
        os.remove(self.descriptors[number]['stderr_path'])
<SYSTEM_TASK:> Poll all processes and handle any finished processes. <END_TASK> <USER_TASK:> Description: def check_finished(self): """Poll all processes and handle any finished processes."""
    changed = False
    for key in list(self.processes.keys()):
        # Poll process and check if it finished
        process = self.processes[key]
        process.poll()
        if process.returncode is not None:
            # If a process is terminated by `stop` or `kill`
            # we want to queue it again instead closing it as failed.
            if key not in self.stopping:
                # Get std_out and err_out
                output, error_output = process.communicate()
                descriptor = self.descriptors[key]
                descriptor['stdout'].seek(0)
                descriptor['stderr'].seek(0)
                output = get_descriptor_output(descriptor['stdout'], key, handler=self)
                error_output = get_descriptor_output(descriptor['stderr'], key, handler=self)

                # Mark queue entry as finished and save returncode
                self.queue[key]['returncode'] = process.returncode
                if process.returncode != 0:
                    self.queue[key]['status'] = 'failed'
                else:
                    self.queue[key]['status'] = 'done'

                # Add outputs to queue
                self.queue[key]['stdout'] = output
                self.queue[key]['stderr'] = error_output
                self.queue[key]['end'] = str(datetime.now().strftime("%H:%M"))

                self.queue.write()
                changed = True
            else:
                self.stopping.remove(key)
                if key in self.to_remove:
                    self.to_remove.remove(key)
                    del self.queue[key]
                else:
                    if key in self.to_stash:
                        self.to_stash.remove(key)
                        self.queue[key]['status'] = 'stashed'
                    else:
                        self.queue[key]['status'] = 'queued'
                    self.queue[key]['start'] = ''
                    self.queue[key]['end'] = ''
                self.queue.write()

            self.clean_descriptor(key)
            del self.processes[key]

    # If anything should be logged we return True
    return changed
<SYSTEM_TASK:> Check if we can start a new process. <END_TASK> <USER_TASK:> Description: def check_for_new(self): """Check if we can start a new process."""
    free_slots = self.max_processes - len(self.processes)
    for _ in range(free_slots):
        key = self.queue.next()
        if key is not None:
            self.spawn_new(key)
<SYSTEM_TASK:> Spawn a new task and save it to the queue. <END_TASK> <USER_TASK:> Description: def spawn_new(self, key): """Spawn a new task and save it to the queue."""
    # Check if path exists
    if not os.path.exists(self.queue[key]['path']):
        self.queue[key]['status'] = 'failed'
        error_msg = "The directory for this command doesn't exist anymore: {}".format(self.queue[key]['path'])
        self.logger.error(error_msg)
        self.queue[key]['stdout'] = ''
        self.queue[key]['stderr'] = error_msg

    else:
        # Get file descriptors
        stdout, stderr = self.get_descriptor(key)

        if self.custom_shell != 'default':
            # Create subprocess
            self.processes[key] = subprocess.Popen(
                [
                    self.custom_shell,
                    '-i',
                    '-c',
                    self.queue[key]['command'],
                ],
                stdout=stdout,
                stderr=stderr,
                stdin=subprocess.PIPE,
                universal_newlines=True,
                preexec_fn=os.setsid,
                cwd=self.queue[key]['path']
            )
        else:
            # Create subprocess
            self.processes[key] = subprocess.Popen(
                self.queue[key]['command'],
                shell=True,
                stdout=stdout,
                stderr=stderr,
                stdin=subprocess.PIPE,
                universal_newlines=True,
                preexec_fn=os.setsid,
                cwd=self.queue[key]['path']
            )
        self.queue[key]['status'] = 'running'
        self.queue[key]['start'] = str(datetime.now().strftime("%H:%M"))
        self.queue.write()
<SYSTEM_TASK:> Kill all running processes. <END_TASK> <USER_TASK:> Description: def kill_all(self, kill_signal, kill_shell=False): """Kill all running processes."""
    for key in self.processes.keys():
        self.kill_process(key, kill_signal, kill_shell)
<SYSTEM_TASK:> Create a closure which creates a running daemon. <END_TASK> <USER_TASK:> Description: def daemon_factory(path): """Create a closure which creates a running daemon. We need to create a closure that contains the correct path the daemon should be started with. This is needed as the `Daemonize` library requires a callable function for daemonization and doesn't accept any arguments. This function cleans up sockets and output files in case we encounter any exceptions. """
    def start_daemon():
        root_dir = path
        config_dir = os.path.join(root_dir, '.config/pueue')
        try:
            daemon = Daemon(root_dir=root_dir)
            daemon.main()
        except KeyboardInterrupt:
            print('Keyboard interrupt. Shutting down')
            daemon.stop_daemon()
        except Exception:
            try:
                daemon.stop_daemon()
            except Exception:
                pass
            cleanup(config_dir)
            raise
    return start_daemon
<SYSTEM_TASK:> Register a pdb handler for signal 'signum'. <END_TASK> <USER_TASK:> Description: def register(host=DFLT_ADDRESS[0], port=DFLT_ADDRESS[1], signum=signal.SIGUSR1): """Register a pdb handler for signal 'signum'. The handler sets pdb to listen on the ('host', 'port') internet address and to start a remote debugging session on accepting a socket connection. """
_pdbhandler._register(host, port, signum)
<SYSTEM_TASK:> Return the handler as a named tuple. <END_TASK> <USER_TASK:> Description: def get_handler(): """Return the handler as a named tuple. The named tuple attributes are 'host', 'port', 'signum'. Return None when no handler has been registered. """
    host, port, signum = _pdbhandler._registered()
    if signum:
        return Handler(host if host else DFLT_ADDRESS[0].encode(),
                       port if port else DFLT_ADDRESS[1], signum)
<SYSTEM_TASK:> Return resource by remote path. <END_TASK> <USER_TASK:> Description: def get_resource_by_path(self, path, folder_key=None): """Return resource by remote path. path -- remote path Keyword arguments: folder_key -- what to use as the root folder (None for root) """
logger.debug("resolving %s", path) # remove empty path components path = posixpath.normpath(path) components = [t for t in path.split(posixpath.sep) if t != ''] if not components: # request for root return Folder( self.api.folder_get_info(folder_key)['folder_info'] ) resource = None for component in components: exists = False for item in self._folder_get_content_iter(folder_key): name = item['name'] if 'name' in item else item['filename'] if name == component: exists = True if components[-1] != component: # still have components to go through if 'filename' in item: # found a file, expected a directory raise NotAFolderError(item['filename']) folder_key = item['folderkey'] else: # found the leaf resource = item break if resource is not None: break if not exists: # intermediate component does not exist - bailing out break if resource is None: raise ResourceNotFoundError(path) if "quickkey" in resource: file_info = self.api.file_get_info( resource['quickkey'])['file_info'] result = File(file_info) elif "folderkey" in resource: folder_info = self.api.folder_get_info( resource['folderkey'])['folder_info'] result = Folder(folder_info) return result
<SYSTEM_TASK:> Create folder. <END_TASK> <USER_TASK:> Description: def create_folder(self, uri, recursive=False): """Create folder. uri -- MediaFire URI Keyword arguments: recursive -- set to True to create intermediate folders. """
logger.info("Creating %s", uri) # check that folder exists already try: resource = self.get_resource_by_uri(uri) if isinstance(resource, Folder): return resource else: raise NotAFolderError(uri) except ResourceNotFoundError: pass location = self._parse_uri(uri) folder_name = posixpath.basename(location) parent_uri = 'mf://' + posixpath.dirname(location) try: parent_node = self.get_resource_by_uri(parent_uri) if not isinstance(parent_node, Folder): raise NotAFolderError(parent_uri) parent_key = parent_node['folderkey'] except ResourceNotFoundError: if recursive: result = self.create_folder(parent_uri, recursive=True) parent_key = result['folderkey'] else: raise # We specify exact location, so don't allow duplicates result = self.api.folder_create( folder_name, parent_key=parent_key, action_on_duplicate='skip') logger.info("Created folder '%s' [mf:%s]", result['name'], result['folder_key']) return self.get_resource_by_key(result['folder_key'])
<SYSTEM_TASK:> Delete folder. <END_TASK> <USER_TASK:> Description: def delete_folder(self, uri, purge=False): """Delete folder. uri -- MediaFire folder URI Keyword arguments: purge -- delete the folder without sending it to Trash """
    try:
        resource = self.get_resource_by_uri(uri)
    except ResourceNotFoundError:
        # Nothing to remove
        return None

    if not isinstance(resource, Folder):
        raise ValueError("Folder expected, got {}".format(type(resource)))

    if purge:
        func = self.api.folder_purge
    else:
        func = self.api.folder_delete

    try:
        result = func(resource['folderkey'])
    except MediaFireApiError as err:
        if err.code == 100:
            logger.debug(
                "Delete folder returns error 900 but folder is deleted: "
                "http://forum.mediafiredev.com/showthread.php?129")
            result = {}
        else:
            raise

    return result
<SYSTEM_TASK:> Delete file. <END_TASK> <USER_TASK:> Description: def delete_file(self, uri, purge=False): """Delete file. uri -- MediaFire file URI Keyword arguments: purge -- delete the file without sending it to Trash. """
    try:
        resource = self.get_resource_by_uri(uri)
    except ResourceNotFoundError:
        # Nothing to remove
        return None

    if not isinstance(resource, File):
        raise ValueError("File expected, got {}".format(type(resource)))

    if purge:
        func = self.api.file_purge
    else:
        func = self.api.file_delete

    return func(resource['quickkey'])
<SYSTEM_TASK:> Delete file or folder <END_TASK> <USER_TASK:> Description: def delete_resource(self, uri, purge=False): """Delete file or folder uri -- mediafire URI Keyword arguments: purge -- delete the resource without sending it to Trash. """
    try:
        resource = self.get_resource_by_uri(uri)
    except ResourceNotFoundError:
        # Nothing to remove
        return None

    if isinstance(resource, File):
        result = self.delete_file(uri, purge)
    elif isinstance(resource, Folder):
        result = self.delete_folder(uri, purge)
    else:
        raise ValueError('Unsupported resource: {}'.format(type(resource)))

    return result
<SYSTEM_TASK:> Download file from MediaFire. <END_TASK> <USER_TASK:> Description: def download_file(self, src_uri, target): """Download file from MediaFire. src_uri -- MediaFire file URI to download target -- download path or file-like object in write mode """
    resource = self.get_resource_by_uri(src_uri)
    if not isinstance(resource, File):
        raise MediaFireError("Only files can be downloaded")

    quick_key = resource['quickkey']
    result = self.api.file_get_links(quick_key=quick_key,
                                     link_type='direct_download')
    direct_download = result['links'][0]['direct_download']

    # Force download over HTTPS
    direct_download = direct_download.replace('http:', 'https:')

    name = resource['filename']

    target_is_filehandle = True if hasattr(target, 'write') else False

    if not target_is_filehandle:
        if (os.path.exists(target) and os.path.isdir(target)) or \
                target.endswith("/"):
            target = os.path.join(target, name)

        if not os.path.isdir(os.path.dirname(target)):
            os.makedirs(os.path.dirname(target))

    logger.info("Downloading %s to %s", src_uri, target)

    response = requests.get(direct_download, stream=True)
    try:
        if target_is_filehandle:
            out_fd = target
        else:
            out_fd = open(target, 'wb')

        checksum = hashlib.sha256()
        for chunk in response.iter_content(chunk_size=4096):
            if chunk:
                out_fd.write(chunk)
                checksum.update(chunk)

        checksum_hex = checksum.hexdigest().lower()
        if checksum_hex != resource['hash']:
            raise DownloadError("Hash mismatch ({} != {})".format(
                resource['hash'], checksum_hex))

        logger.info("Download completed successfully")
    finally:
        if not target_is_filehandle:
            out_fd.close()
<SYSTEM_TASK:> Update file metadata. <END_TASK> <USER_TASK:> Description: def update_file_metadata(self, uri, filename=None, description=None, mtime=None, privacy=None): """Update file metadata. uri -- MediaFire file URI Supplying the following keyword arguments would change the metadata on the server side: filename -- rename file description -- set file description string mtime -- set file modification time privacy -- set file privacy - 'private' or 'public' """
    resource = self.get_resource_by_uri(uri)

    if not isinstance(resource, File):
        raise ValueError('Expected File, got {}'.format(type(resource)))

    result = self.api.file_update(resource['quickkey'], filename=filename,
                                  description=description,
                                  mtime=mtime, privacy=privacy)
    return result
<SYSTEM_TASK:> Update folder metadata. <END_TASK> <USER_TASK:> Description:
def update_folder_metadata(self, uri, foldername=None, description=None,
                           mtime=None, privacy=None, privacy_recursive=None):
    """Update folder metadata.

    uri -- MediaFire folder URI

    Supplying the following keyword arguments would change the
    metadata on the server side:

    foldername -- rename folder
    description -- set folder description string
    mtime -- set folder modification time
    privacy -- set folder privacy - 'private' or 'public'
    privacy_recursive -- update folder privacy recursively
    """
    resource = self.get_resource_by_uri(uri)

    if not isinstance(resource, Folder):
        raise ValueError('Expected Folder, got {}'.format(type(resource)))

    result = self.api.folder_update(resource['folderkey'],
                                    foldername=foldername,
                                    description=description,
                                    mtime=mtime,
                                    privacy=privacy,
                                    privacy_recursive=privacy_recursive)
    return result
<SYSTEM_TASK:> Print the current log file. <END_TASK> <USER_TASK:> Description:
def execute_log(args, root_dir):
    """Print the current log file.

    Args:
        args['keys'] (list(int)): If given, we only look at the specified processes.
        root_dir (string): The path to the root directory the daemon is running in.
    """
    # Print the logs of all specified processes
    if args.get('keys'):
        config_dir = os.path.join(root_dir, '.config/pueue')
        queue_path = os.path.join(config_dir, 'queue')
        if os.path.exists(queue_path):
            queue_file = open(queue_path, 'rb')
            try:
                queue = pickle.load(queue_file)
            except Exception:
                print('Queue log file seems to be corrupted. Aborting.')
                return
            queue_file.close()
        else:
            print('There is no queue log file. Aborting.')
            return

        for key in args.get('keys'):
            # Check if there is an entry with this key
            if queue.get(key) and queue[key]['status'] in ['failed', 'done']:
                entry = queue[key]

                print('Log of entry: {}'.format(key))
                print('Returncode: {}'.format(entry['returncode']))
                print('Command: {}'.format(entry['command']))
                print('Path: {}'.format(entry['path']))
                print('Start: {}, End: {} \n'.format(entry['start'], entry['end']))

                # Write STDERR
                if len(entry['stderr']) > 0:
                    print(Color('{autored}Stderr output: {/autored}\n ') + entry['stderr'])

                # Write STDOUT
                if len(entry['stdout']) > 0:
                    print(Color('{autogreen}Stdout output: {/autogreen}\n ') + entry['stdout'])
            else:
                print('No finished process with key {}.'.format(key))

    # Print the log of all processes
    else:
        log_path = os.path.join(root_dir, '.local/share/pueue/queue.log')
        log_file = open(log_path, 'r')
        print(log_file.read())
<SYSTEM_TASK:> Print stderr and stdout of the current running process. <END_TASK> <USER_TASK:> Description:
def execute_show(args, root_dir):
    """Print stderr and stdout of the current running process.

    Args:
        args['key'] (int): If given, show the process with this key instead of the oldest running one.
        args['watch'] (bool): If True, we open a curses session and tail the output live in the console.
        root_dir (string): The path to the root directory the daemon is running in.
    """
    key = None
    if args.get('key'):
        key = args['key']
        status = command_factory('status')({}, root_dir=root_dir)
        if key not in status['data'] or status['data'][key]['status'] != 'running':
            print('No running process with this key, use `log` to show finished processes.')
            return
    # In case no key provided, we take the oldest running process
    else:
        status = command_factory('status')({}, root_dir=root_dir)
        if isinstance(status['data'], str):
            print(status['data'])
            return
        for k in sorted(status['data'].keys()):
            if status['data'][k]['status'] == 'running':
                key = k
                break
        if key is None:
            print('No running process, use `log` to show finished processes.')
            return

    config_dir = os.path.join(root_dir, '.config/pueue')

    # Get the current pueue stdout/stderr files from the config directory
    stdoutFile = os.path.join(config_dir, 'pueue_process_{}.stdout'.format(key))
    stderrFile = os.path.join(config_dir, 'pueue_process_{}.stderr'.format(key))
    stdoutDescriptor = open(stdoutFile, 'r')
    stderrDescriptor = open(stderrFile, 'r')
    running = True

    # Continually print output with curses or just print once
    if args['watch']:
        # Initialize curses
        stdscr = curses.initscr()
        curses.noecho()
        curses.cbreak()
        curses.curs_set(2)
        stdscr.keypad(True)
        stdscr.refresh()

        try:
            # Update output every two seconds
            while running:
                stdscr.clear()
                stdoutDescriptor.seek(0)
                message = stdoutDescriptor.read()
                stdscr.addstr(0, 0, message)
                stdscr.refresh()
                time.sleep(2)
        except Exception:
            # Curses cleanup
            curses.nocbreak()
            stdscr.keypad(False)
            curses.echo()
            curses.endwin()
    else:
        print('Stdout output:\n')
        stdoutDescriptor.seek(0)
        print(get_descriptor_output(stdoutDescriptor, key))
        print('\n\nStderr output:\n')
        stderrDescriptor.seek(0)
        print(get_descriptor_output(stderrDescriptor, key))
<SYSTEM_TASK:> Show a specific indicator by id <END_TASK> <USER_TASK:> Description: def show(self, user, feed, id): """ Show a specific indicator by id :param user: feed username :param feed: feed name :param id: indicator endpoint id [INT] :return: dict Example: ret = Indicator.show('csirtgadgets','port-scanners', '1234') """
uri = '/users/{}/feeds/{}/indicators/{}'.format(user, feed, id) return self.client.get(uri)
<SYSTEM_TASK:> Submit action on the Indicator object <END_TASK> <USER_TASK:> Description: def create(self): """ Submit action on the Indicator object :return: Indicator Object """
uri = '/users/{0}/feeds/{1}/indicators'\ .format(self.user, self.feed) data = { "indicator": json.loads(str(self.indicator)), "comment": self.comment, "content": self.content } if self.attachment: attachment = self._file_to_attachment( self.attachment, filename=self.attachment_name) data['attachment'] = { 'data': attachment['data'], 'filename': attachment['filename'] } if not data['indicator'].get('indicator'): data['indicator']['indicator'] = attachment['sha1'] if not data['indicator'].get('indicator'): raise Exception('Missing indicator') return self.client.post(uri, data)
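create() relies on a private _file_to_attachment helper that is not shown. The surrounding code only tells us the returned dict carries 'data', 'filename' and 'sha1' keys; a plausible sketch, assuming the attachment argument is a path to a local file and the data is base64-encoded (both assumptions, not confirmed by this snippet):

import base64
import hashlib

def _file_to_attachment(self, path, filename=None):
    # Hypothetical reconstruction of the helper used by create() above.
    with open(path, 'rb') as f:
        raw = f.read()
    return {
        'data': base64.b64encode(raw).decode('ascii'),
        'filename': filename or path,
        'sha1': hashlib.sha1(raw).hexdigest(),
    }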
<SYSTEM_TASK:>
Submit action against the IndicatorBulk endpoint
<END_TASK>
<USER_TASK:>
Description:
def create_bulk(self, indicators, user, feed):
    from .constants import API_VERSION
    if API_VERSION == '1':
        print("create_bulk is currently unavailable with APIv1")
        raise SystemExit
    """
    Submit action against the IndicatorBulk endpoint

    :param indicators: list of Indicator Objects
    :param user: feed username
    :param feed: feed name
    :return: list of Indicator Objects submitted

    from csirtgsdk.client import Client
    from csirtgsdk.indicator import Indicator

    remote = 'https://csirtg.io/api'
    token = ''
    verify_ssl = True

    i = {
        'indicator': 'example.com',
        'feed': 'test',
        'user': 'admin',
        'comment': 'this is a test',
    }

    data = []

    cli = Client(remote=remote, token=token, verify_ssl=verify_ssl)

    for x in range(0, 5):
        data.append(
            Indicator(cli, i)
        )

    ret = cli.submit_bulk(data, 'csirtgadgets', 'test-feed')
    """
uri = '/users/{0}/feeds/{1}/indicators_bulk'.format(user, feed) data = { 'indicators': [ { 'indicator': i.args.indicator, 'feed_id': i.args.feed, 'tag_list': i.args.tags, "description": i.args.description, "portlist": i.args.portlist, "protocol": i.args.protocol, 'firsttime': i.args.firsttime, 'lasttime': i.args.lasttime, 'portlist_src': i.args.portlist_src, 'comment': { 'content': i.args.comment }, 'rdata': i.args.rdata, 'rtype': i.args.rtype, 'content': i.args.content, 'provider': i.args.provider, } for i in indicators ] } return self.client.post(uri, data)
<SYSTEM_TASK:> A utility function to turn strings like 'Mod1+Mod4+a' into a pair <END_TASK> <USER_TASK:> Description: def parse_keystring(conn, key_string): """ A utility function to turn strings like 'Mod1+Mod4+a' into a pair corresponding to its modifiers and keycode. :param key_string: String starting with zero or more modifiers followed by exactly one key press. Available modifiers: Control, Mod1, Mod2, Mod3, Mod4, Mod5, Shift, Lock :type key_string: str :return: Tuple of modifier mask and keycode :rtype: (mask, int) """
# FIXME: this code is a temporary hack and requires a better abstraction
from PyQt5.QtGui import QKeySequence
from PyQt5.QtCore import Qt
from .qt_keycodes import KeyTbl, ModsTbl

keysequence = QKeySequence(key_string)
ks = keysequence[0]

# Calculate the modifiers
mods = Qt.NoModifier
qtmods = Qt.NoModifier
modifiers = 0

if (ks & Qt.ShiftModifier == Qt.ShiftModifier):
    mods |= ModsTbl.index(Qt.ShiftModifier)
    qtmods |= Qt.ShiftModifier.real
    modifiers |= getattr(xproto.KeyButMask, "Shift", 0)
if (ks & Qt.AltModifier == Qt.AltModifier):
    mods |= ModsTbl.index(Qt.AltModifier)
    qtmods |= Qt.AltModifier.real
    modifiers |= getattr(xproto.KeyButMask, "Mod1", 0)
if (ks & Qt.ControlModifier == Qt.ControlModifier):
    mods |= ModsTbl.index(Qt.ControlModifier)
    qtmods |= Qt.ControlModifier.real
    modifiers |= getattr(xproto.KeyButMask, "Control", 0)

# Calculate the key
qtkeys = ks ^ qtmods
key = QKeySequence(Qt.Key(qtkeys)).toString().lower()
keycode = lookup_string(conn, key)

return modifiers, keycode

# NOTE: unreachable fallback parser from an earlier implementation,
# kept here for reference only.
modifiers = 0
keycode = None

for part in key_string.split('+'):
    if hasattr(xproto.KeyButMask, part):
        modifiers |= getattr(xproto.KeyButMask, part)
    else:
        if len(part) == 1:
            part = part.lower()
        keycode = lookup_string(conn, part)

return modifiers, keycode
<SYSTEM_TASK:> Finds the keycode associated with a string representation of a keysym. <END_TASK> <USER_TASK:> Description: def lookup_string(conn, kstr): """ Finds the keycode associated with a string representation of a keysym. :param kstr: English representation of a keysym. :return: Keycode, if one exists. :rtype: int """
if kstr in keysyms: return get_keycode(conn, keysyms[kstr]) elif len(kstr) > 1 and kstr.capitalize() in keysyms: return get_keycode(conn, keysyms[kstr.capitalize()]) return None
<SYSTEM_TASK:> Return a keyboard mapping cookie that can be used to fetch the table of <END_TASK> <USER_TASK:> Description: def get_keyboard_mapping(conn): """ Return a keyboard mapping cookie that can be used to fetch the table of keysyms in the current X environment. :rtype: xcb.xproto.GetKeyboardMappingCookie """
mn, mx = get_min_max_keycode(conn) return conn.core.GetKeyboardMapping(mn, mx - mn + 1)
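Several functions in this module call get_min_max_keycode, which is not shown. A likely reconstruction, since the X setup block advertises the valid keycode range; this mirrors the xcb API rather than code taken from this snippet:

def get_min_max_keycode(conn):
    # The setup reply carries the first and last legal keycodes.
    setup = conn.get_setup()
    return setup.min_keycode, setup.max_keycode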
<SYSTEM_TASK:> Return an unchecked keyboard mapping cookie that can be used to fetch the <END_TASK> <USER_TASK:> Description: def get_keyboard_mapping_unchecked(conn): """ Return an unchecked keyboard mapping cookie that can be used to fetch the table of keysyms in the current X environment. :rtype: xcb.xproto.GetKeyboardMappingCookie """
mn, mx = get_min_max_keycode(conn)
return conn.core.GetKeyboardMappingUnchecked(mn, mx - mn + 1)
<SYSTEM_TASK:> Given a keysym, find the keycode mapped to it in the current X environment. <END_TASK> <USER_TASK:> Description: def get_keycode(conn, keysym): """ Given a keysym, find the keycode mapped to it in the current X environment. It is necessary to search the keysym table in order to do this, including all columns. :param keysym: An X keysym. :return: A keycode or None if one could not be found. :rtype: int """
mn, mx = get_min_max_keycode(conn) cols = __kbmap.keysyms_per_keycode for i in range(mn, mx + 1): for j in range(0, cols): ks = get_keysym(conn, i, col=j) if ks == keysym: return i return None
<SYSTEM_TASK:> Ungrabs a key that was grabbed by ``grab_key``. Similarly, it will return <END_TASK> <USER_TASK:> Description: def ungrab_key(conn, wid, modifiers, key): """ Ungrabs a key that was grabbed by ``grab_key``. Similarly, it will return True on success and False on failure. When ungrabbing a key, the parameters to this function should be *precisely* the same as the parameters to ``grab_key``. :param wid: A window identifier. :type wid: int :param modifiers: A modifier mask. :type modifiers: int :param key: A keycode. :type key: int :rtype: bool """
try: for mod in TRIVIAL_MODS: conn.core.UngrabKeyChecked(key, wid, modifiers | mod).check() return True except xproto.BadAccess: return False
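Because the docstring requires the exact parameters of the original grab, a paired usage sketch may help. It assumes the xcffib binding and a grab_key counterpart with this same signature, neither of which is shown in this snippet:

import xcffib  # assumed binding; xpyb works analogously

conn = xcffib.connect()                # assumes a running X server
root = conn.get_setup().roots[0].root

mods, keycode = parse_keystring(conn, 'Control+Shift+q')
# grab_key(conn, root, mods, keycode)  # assumed counterpart, same arguments
# ... later, ungrab with exactly the same values ...
if not ungrab_key(conn, root, mods, keycode):
    print('Ungrab failed; the key may be held by another client.')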
<SYSTEM_TASK:>
Whenever the keyboard mapping is changed, this function needs to be called
<END_TASK>
<USER_TASK:>
Description:
def update_keyboard_mapping(conn, e):
    """
    Whenever the keyboard mapping is changed, this function needs to be called
    to update xpybutil's internal representation of the current keysym table.
    Indeed, xpybutil will do this for you automatically.

    Moreover, if something changes that affects the current keygrabs,
    xpybutil will initiate a regrab with the changed keycode.

    :param e: The MappingNotify event.
    :type e: xcb.xproto.MappingNotifyEvent
    :rtype: void
    """
global __kbmap, __keysmods

newmap = get_keyboard_mapping(conn).reply()

if e is None:
    __kbmap = newmap
    __keysmods = get_keys_to_mods(conn)
    return

if e.request == xproto.Mapping.Keyboard:
    changes = {}
    for kc in range(*get_min_max_keycode(conn)):
        knew = get_keysym(conn, kc, kbmap=newmap)
        oldkc = get_keycode(conn, knew)
        if oldkc != kc:
            changes[oldkc] = kc

    __kbmap = newmap
    __regrab(changes)
elif e.request == xproto.Mapping.Modifier:
    __keysmods = get_keys_to_mods(conn)
<SYSTEM_TASK:> Return a list of Storage objects from the API. <END_TASK> <USER_TASK:> Description: def get_storages(self, storage_type='normal'): """ Return a list of Storage objects from the API. Storage types: public, private, normal, backup, cdrom, template, favorite """
res = self.get_request('/storage/' + storage_type) return Storage._create_storage_objs(res['storages'], cloud_manager=self)
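A short usage sketch; the CloudManager construction is hypothetical, while get_storages and the uuid/title attributes are taken from this module:

manager = CloudManager('api_user', 'password')  # hypothetical constructor
for storage in manager.get_storages(storage_type='template'):
    print(storage.uuid, storage.title)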
<SYSTEM_TASK:> Create a Storage object. Returns an object based on the API's response. <END_TASK> <USER_TASK:> Description: def create_storage(self, size=10, tier='maxiops', title='Storage disk', zone='fi-hel1', backup_rule={}): """ Create a Storage object. Returns an object based on the API's response. """
body = { 'storage': { 'size': size, 'tier': tier, 'title': title, 'zone': zone, 'backup_rule': backup_rule } } res = self.post_request('/storage', body) return Storage(cloud_manager=self, **res['storage'])
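The backup_rule dict is passed through unchanged, so its shape comes from the API rather than from this code. A hedged example, reusing the manager object from the sketch above; the backup_rule field names follow UpCloud's public documentation and are an assumption here:

storage = manager.create_storage(
    size=100,
    tier='maxiops',
    title='database disk',
    zone='fi-hel1',
    # Field names below are assumed from the public API docs,
    # not from this snippet.
    backup_rule={'interval': 'daily', 'time': '0430', 'retention': 7},
)
print(storage.uuid, storage.size)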
<SYSTEM_TASK:> Modify a Storage object. Returns an object based on the API's response. <END_TASK> <USER_TASK:> Description: def modify_storage(self, storage, size, title, backup_rule={}): """ Modify a Storage object. Returns an object based on the API's response. """
res = self._modify_storage(str(storage), size, title, backup_rule) return Storage(cloud_manager=self, **res['storage'])
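modify_storage delegates to a private _modify_storage that is not shown here. A guess at its shape, assuming a put_request helper exists alongside the get_request/post_request helpers used in this class; both the helper name and the HTTP verb are assumptions:

def _modify_storage(self, storage_uuid, size, title, backup_rule=None):
    # Hypothetical reconstruction; only non-empty fields are sent.
    body = {'storage': {}}
    if size:
        body['storage']['size'] = size
    if title:
        body['storage']['title'] = title
    if backup_rule:
        body['storage']['backup_rule'] = backup_rule
    return self.put_request('/storage/' + storage_uuid, body)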
<SYSTEM_TASK:> Attach a Storage object to a Server. Return a list of the server's storages. <END_TASK> <USER_TASK:> Description: def attach_storage(self, server, storage, storage_type, address): """ Attach a Storage object to a Server. Return a list of the server's storages. """
body = {'storage_device': {}} if storage: body['storage_device']['storage'] = str(storage) if storage_type: body['storage_device']['type'] = storage_type if address: body['storage_device']['address'] = address url = '/server/{0}/storage/attach'.format(server) res = self.post_request(url, body) return Storage._create_storage_objs(res['server']['storage_devices'], cloud_manager=self)
<SYSTEM_TASK:>
Detach a Storage object from a Server. Return a list of the server's storages.
<END_TASK>
<USER_TASK:>
Description:
def detach_storage(self, server, address):
    """
    Detach a Storage object from a Server. Return a list of the server's storages.
    """
body = {'storage_device': {'address': address}} url = '/server/{0}/storage/detach'.format(server) res = self.post_request(url, body) return Storage._create_storage_objs(res['server']['storage_devices'], cloud_manager=self)
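Attach and detach are symmetric around the device address, so a paired sketch may help; the UUIDs and the virtio address below are placeholder values, and manager is the hypothetical CloudManager instance from the earlier sketch:

devices = manager.attach_storage(
    server='server-uuid', storage='storage-uuid',
    storage_type='disk', address='virtio:1')
# ... later, detach by exactly the same address ...
devices = manager.detach_storage(server='server-uuid', address='virtio:1')
for device in devices:
    print(device.title)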
<SYSTEM_TASK:> Reset after repopulating from API. <END_TASK> <USER_TASK:> Description: def _reset(self, **kwargs): """ Reset after repopulating from API. """
# there are some inconsistencies in the API regarding these
# note: this could be written in fancier ways, but this way is simpler
if 'uuid' in kwargs:
    self.uuid = kwargs['uuid']
elif 'storage' in kwargs:  # let's never use storage.storage internally
    self.uuid = kwargs['storage']

if 'title' in kwargs:
    self.title = kwargs['title']
elif 'storage_title' in kwargs:
    self.title = kwargs['storage_title']

if 'size' in kwargs:
    self.size = kwargs['size']
elif 'storage_size' in kwargs:
    self.size = kwargs['storage_size']

# send the rest to super._reset
filtered_kwargs = dict(
    (key, val) for key, val in kwargs.items()
    if key not in ['uuid', 'storage', 'title', 'storage_title', 'size', 'storage_size']
)
super(Storage, self)._reset(**filtered_kwargs)
<SYSTEM_TASK:> Prints name, author, size and age <END_TASK> <USER_TASK:> Description: def lookup(self): """ Prints name, author, size and age """
print "%s by %s, size: %s, uploaded %s ago" % (self.name, self.author, self.size, self.age)
<SYSTEM_TASK:> Build and return url. Also update max_page. <END_TASK> <USER_TASK:> Description: def build(self, update=True): """ Build and return url. Also update max_page. URL structure for user torrent lists differs from other result lists as the page number is part of the query string and not the URL path """
query_str = "?page={}".format(self.page) if self.order: query_str += "".join(("&field=", self.order[0], "&sorder=",self.order[1])) ret = "".join((self.base, self.user, "/uploads/", query_str)) if update: self.max_page = self._get_max_page(ret) return ret
<SYSTEM_TASK:> Parse url and yield namedtuple Torrent for every torrent on page <END_TASK> <USER_TASK:> Description: def _items(self): """ Parse url and yield namedtuple Torrent for every torrent on page """
torrents = map(self._get_torrent, self._get_rows()) for t in torrents: yield t
<SYSTEM_TASK:> Parse row into namedtuple <END_TASK> <USER_TASK:> Description: def _get_torrent(self, row): """ Parse row into namedtuple """
td = row("td") name = td("a.cellMainLink").text() name = name.replace(" . ", ".").replace(" .", ".") author = td("a.plain").text() verified_author = True if td(".lightgrey>.ka-verify") else False category = td("span").find("strong").find("a").eq(0).text() verified_torrent = True if td(".icon16>.ka-green") else False comments = td(".iaconbox>.icommentjs>.iconvalue").text() torrent_link = "http://" + BASE.domain if td("a.cellMainLink").attr("href") is not None: torrent_link += td("a.cellMainLink").attr("href") magnet_link = td("a[data-nop]").eq(1).attr("href") download_link = td("a[data-download]").attr("href") td_centers = row("td.center") size = td_centers.eq(0).text() files = td_centers.eq(1).text() age = " ".join(td_centers.eq(2).text().split()) seed = td_centers.eq(3).text() leech = td_centers.eq(4).text() return Torrent(name, author, verified_author, category, size, files, age, seed, leech, verified_torrent, comments, torrent_link, magnet_link, download_link)
<SYSTEM_TASK:> Yield torrents in range from page_from to page_to <END_TASK> <USER_TASK:> Description: def pages(self, page_from, page_to): """ Yield torrents in range from page_from to page_to """
if not all([page_from < self.url.max_page, page_from > 0,
            page_to <= self.url.max_page, page_to > page_from]):
    raise IndexError("Invalid page numbers")

size = (page_to + 1) - page_from
threads = []
ret = []
page_list = range(page_from, page_to + 1)
locks = [threading.Lock() for i in range(size)]
for lock in locks[1:]:
    lock.acquire()

def t_function(pos):
    """ Thread function that fetches one page of torrents """
    res = self.page(page_list[pos]).list()
    locks[pos].acquire()
    ret.extend(res)
    if pos != size - 1:
        locks[pos + 1].release()

threads = [threading.Thread(target=t_function, args=(i,))
           for i in range(size)]
for thread in threads:
    thread.start()
for thread in threads:
    thread.join()

for torrent in ret:
    yield torrent
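A hedged usage sketch; search stands for whatever object in this module exposes pages(), page() and list(), since its construction is not shown here:

for torrent in search.pages(1, 3):
    print(torrent.name, torrent.size, torrent.seed)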
<SYSTEM_TASK:> Yield torrents in range from current page to last page <END_TASK> <USER_TASK:> Description: def all(self): """ Yield torrents in range from current page to last page """
return self.pages(self.url.page, self.url.max_page)
<SYSTEM_TASK:>
Set the sort field and sort order given by the arguments
<END_TASK>
<USER_TASK:>
Description:
def order(self, field, order=None):
    """
    Set the sort field and sort order given by the arguments, then return self
    """
if not order: order = ORDER.DESC self.url.order = (field, order) self.url.set_page(1) return self
<SYSTEM_TASK:> Change category of current search and return self <END_TASK> <USER_TASK:> Description: def category(self, category): """ Change category of current search and return self """
self.url.category = category self.url.set_page(1) return self
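Because category() and order() both return self, calls chain naturally. A hedged sketch: CATEGORY.MOVIES is an assumed constant modeled on the ORDER constants referenced above, and search is the same assumed object as in the earlier sketch:

results = search.category(CATEGORY.MOVIES).order('seeders', ORDER.DESC)
for torrent in results.page(1).list():
    print(torrent.name, torrent.seed, torrent.leech)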