<SYSTEM_TASK:> Quick wrapper for using the Watcher. <END_TASK> <USER_TASK:> Description: def watch(logger_name, level=DEBUG, out=stdout): """ Quick wrapper for using the Watcher. :param logger_name: name of logger to watch :param level: minimum log level to show (default DEBUG) :param out: where to send output (default stdout) :return: Watcher instance """
watcher = Watcher(logger_name) watcher.watch(level, out) return watcher
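A quick usage sketch, assuming watch() and the Watcher class are importable from this module; the logger name below is only illustrative.

from logging import DEBUG
from sys import stdout

# Illustrative: stream DEBUG-level records from the "neobolt" logger to stdout.
# The logger name is a placeholder; use whichever logger you want to observe.
watcher = watch("neobolt", level=DEBUG, out=stdout)
# ... code that logs through the "neobolt" logger now echoes to stdout ...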
<SYSTEM_TASK:> Obtain the default user agent string sent to the server after <END_TASK> <USER_TASK:> Description: def get_user_agent(): """ Obtain the default user agent string sent to the server after a successful handshake. """
from sys import platform, version_info template = "neobolt/{} Python/{}.{}.{}-{}-{} ({})" fields = (version,) + tuple(version_info) + (platform,) return template.format(*fields)
<SYSTEM_TASK:> Import the best available module, <END_TASK> <USER_TASK:> Description: def import_best(c_module, py_module): """ Import the best available module, with C preferred to pure Python. """
from importlib import import_module from os import getenv pure_python = getenv("PURE_PYTHON", "") if pure_python: return import_module(py_module) else: try: return import_module(c_module) except ImportError: return import_module(py_module)
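A hedged usage sketch; the module names are placeholders rather than anything the library necessarily ships.

# Prefer a compiled extension module, falling back to the pure-Python one.
# "mypkg._packer" and "mypkg.packer" are hypothetical module names.
packer = import_best("mypkg._packer", "mypkg.packer")

# Setting PURE_PYTHON=1 in the environment forces the pure-Python fallback,
# which can help when debugging C-extension issues.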
<SYSTEM_TASK:> Return the URL used out-of-band to grant access to your application. <END_TASK> <USER_TASK:> Description: def authorize_url(self, duration, scopes, state, implicit=False): """Return the URL used out-of-band to grant access to your application. :param duration: Either ``permanent`` or ``temporary``. ``temporary`` authorizations generate access tokens that last only 1 hour. ``permanent`` authorizations additionally generate a refresh token that can be indefinitely used to generate new hour-long access tokens. Only ``temporary`` can be specified if ``implicit`` is set to ``True``. :param scopes: A list of OAuth scopes to request authorization for. :param state: A string that will be reflected in the callback to ``redirect_uri``. This value should be temporarily unique to the client for whom the URL was generated. :param implicit: (optional) Use the implicit grant flow (default: False). This flow is only available for UntrustedAuthenticators. """
if self.redirect_uri is None: raise InvalidInvocation("redirect URI not provided") if implicit and not isinstance(self, UntrustedAuthenticator): raise InvalidInvocation( "Only UntrustedAuthenticator instances can " "use the implicit grant flow." ) if implicit and duration != "temporary": raise InvalidInvocation( "The implicit grant flow only supports " "temporary access tokens." ) params = { "client_id": self.client_id, "duration": duration, "redirect_uri": self.redirect_uri, "response_type": "token" if implicit else "code", "scope": " ".join(scopes), "state": state, } url = self._requestor.reddit_url + const.AUTHORIZATION_PATH request = Request("GET", url, params=params) return request.prepare().url
<SYSTEM_TASK:> Ask Reddit to revoke the provided token. <END_TASK> <USER_TASK:> Description: def revoke_token(self, token, token_type=None): """Ask Reddit to revoke the provided token. :param token: The access or refresh token to revoke. :param token_type: (Optional) When provided, hint to Reddit what the token type is for a possible efficiency gain. The value can be either ``access_token`` or ``refresh_token``. """
data = {"token": token} if token_type is not None: data["token_type_hint"] = token_type url = self._requestor.reddit_url + const.REVOKE_TOKEN_PATH self._post(url, success_status=codes["no_content"], **data)
<SYSTEM_TASK:> Obtain and set authorization tokens based on ``code``. <END_TASK> <USER_TASK:> Description: def authorize(self, code): """Obtain and set authorization tokens based on ``code``. :param code: The code obtained by an out-of-band authorization request to Reddit. """
if self._authenticator.redirect_uri is None: raise InvalidInvocation("redirect URI not provided") self._request_token( code=code, grant_type="authorization_code", redirect_uri=self._authenticator.redirect_uri, )
<SYSTEM_TASK:> Obtain a new access token from the refresh_token. <END_TASK> <USER_TASK:> Description: def refresh(self): """Obtain a new access token from the refresh_token."""
if self.refresh_token is None: raise InvalidInvocation("refresh token not provided") self._request_token( grant_type="refresh_token", refresh_token=self.refresh_token )
<SYSTEM_TASK:> Obtain a new personal-use script type access token. <END_TASK> <USER_TASK:> Description: def refresh(self): """Obtain a new personal-use script type access token."""
self._request_token( grant_type="password", username=self._username, password=self._password, )
<SYSTEM_TASK:> Issue the HTTP request capturing any errors that may occur. <END_TASK> <USER_TASK:> Description: def request(self, *args, **kwargs): """Issue the HTTP request capturing any errors that may occur."""
try: return self._http.request(*args, timeout=TIMEOUT, **kwargs) except Exception as exc: raise RequestException(exc, args, kwargs)
<SYSTEM_TASK:> Return the Hangul character for the given jamo characters. <END_TASK> <USER_TASK:> Description: def _jamo_to_hangul_char(lead, vowel, tail=0): """Return the Hangul character for the given jamo characters. """
lead = ord(lead) - _JAMO_LEAD_OFFSET vowel = ord(vowel) - _JAMO_VOWEL_OFFSET tail = ord(tail) - _JAMO_TAIL_OFFSET if tail else 0 return chr(tail + (vowel - 1) * 28 + (lead - 1) * 588 + _JAMO_OFFSET)
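The arithmetic mirrors the standard Unicode Hangul composition: with 21 vowels and 28 tail slots, each lead index spans 21 × 28 = 588 code points. A worked check, assuming _JAMO_OFFSET is the usual syllable base U+AC00:

# Worked example of the composition formula (assumes the module's offsets
# follow the Unicode algorithm, with syllable base U+AC00).
lead, vowel, tail = 1, 1, 0                      # first lead (ㄱ), first vowel (ㅏ), no tail
code = tail + (vowel - 1) * 28 + (lead - 1) * 588 + 0xAC00
assert chr(code) == "가"                          # U+AC00, the first Hangul syllable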
<SYSTEM_TASK:> Test if a single character is a jamo character. <END_TASK> <USER_TASK:> Description: def is_jamo(character): """Test if a single character is a jamo character. Valid jamo includes all modern and archaic jamo, as well as all HCJ. Non-assigned code points are invalid. """
code = ord(character) return 0x1100 <= code <= 0x11FF or\ 0xA960 <= code <= 0xA97C or\ 0xD7B0 <= code <= 0xD7C6 or 0xD7CB <= code <= 0xD7FB or\ is_hcj(character)
<SYSTEM_TASK:> Test if a single character is a compound, i.e., a consonant <END_TASK> <USER_TASK:> Description: def is_jamo_compound(character): """Test if a single character is a compound, i.e., a consonant cluster, double consonant, or diphthong. """
if len(character) != 1: return False # Consider instead: # raise TypeError('is_jamo_compound() expected a single character') if is_jamo(character): return character in JAMO_COMPOUNDS return False
<SYSTEM_TASK:> Determine if a jamo character is a lead, vowel, or tail. <END_TASK> <USER_TASK:> Description: def get_jamo_class(jamo): """Determine if a jamo character is a lead, vowel, or tail. Integers and U+11xx characters are valid arguments. HCJ consonants are not valid here. get_jamo_class should return the class ["lead" | "vowel" | "tail"] of a given character or integer. Note: jamo class directly corresponds to the Unicode 7.0 specification, thus includes filler characters as having a class. """
# TODO: Perhaps raise a separate error for U+3xxx jamo. if jamo in JAMO_LEADS or jamo == chr(0x115F): return "lead" if jamo in JAMO_VOWELS or jamo == chr(0x1160) or\ 0x314F <= ord(jamo) <= 0x3163: return "vowel" if jamo in JAMO_TAILS: return "tail" else: raise InvalidJamoError("Invalid or classless jamo argument.", jamo)
<SYSTEM_TASK:> Convert a string of Hangul to jamo. <END_TASK> <USER_TASK:> Description: def hangul_to_jamo(hangul_string): """Convert a string of Hangul to jamo. Arguments may be iterables of characters. hangul_to_jamo should split every Hangul character into U+11xx jamo characters for any given string. Non-hangul characters are not changed. hangul_to_jamo is the generator version of h2j, the string version. """
return (_ for _ in chain.from_iterable(_hangul_char_to_jamo(_) for _ in hangul_string))
<SYSTEM_TASK:> Return the Hangul character for the given jamo input. <END_TASK> <USER_TASK:> Description: def jamo_to_hangul(lead, vowel, tail=''): """Return the Hangul character for the given jamo input. Integers corresponding to U+11xx jamo codepoints, U+11xx jamo characters, or HCJ are valid inputs. Outputs a one-character Hangul string. This function is identical to j2h. """
# Internally, we convert everything to a jamo char, # then pass it to _jamo_to_hangul_char lead = hcj_to_jamo(lead, "lead") vowel = hcj_to_jamo(vowel, "vowel") if not tail or ord(tail) == 0: tail = None elif is_hcj(tail): tail = hcj_to_jamo(tail, "tail") if (is_jamo(lead) and get_jamo_class(lead) == "lead") and\ (is_jamo(vowel) and get_jamo_class(vowel) == "vowel") and\ ((not tail) or (is_jamo(tail) and get_jamo_class(tail) == "tail")): result = _jamo_to_hangul_char(lead, vowel, tail) if is_hangul_char(result): return result raise InvalidJamoError("Could not synthesize characters to Hangul.", '\x00')
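An illustrative call, assuming the HCJ-accepting path works as documented:

# HCJ (compatibility jamo) input composed into a single syllable.
# Expected value is per the docstring; treat it as an illustrative check.
assert jamo_to_hangul("ㅎ", "ㅏ", "ㄴ") == "한"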
<SYSTEM_TASK:> Return the compound jamo for the given jamo input. <END_TASK> <USER_TASK:> Description: def compose_jamo(*parts): """Return the compound jamo for the given jamo input. Integers corresponding to U+11xx jamo codepoints, U+11xx jamo characters, or HCJ are valid inputs. Outputs a one-character jamo string. """
# Internally, we convert everything to a jamo char, # then pass it to _jamo_to_hangul_char # NOTE: Relies on hcj_to_jamo not strictly requiring "position" arg. for p in parts: if not (type(p) == str and len(p) == 1 and 2 <= len(parts) <= 3): raise TypeError("compose_jamo() expected 2-3 single characters " + "but received " + str(parts), '\x00') hcparts = [j2hcj(_) for _ in parts] hcparts = tuple(hcparts) if hcparts in _COMPONENTS_REVERSE_LOOKUP: return _COMPONENTS_REVERSE_LOOKUP[hcparts] raise InvalidJamoError( "Could not synthesize characters to compound: " + ", ".join( str(_) + "(U+" + str(hex(ord(_)))[2:] + ")" for _ in parts), '\x00')
<SYSTEM_TASK:> Convert jamo characters in a string into hcj as much as possible. <END_TASK> <USER_TASK:> Description: def synth_hangul(string): """Convert jamo characters in a string into hcj as much as possible."""
raise NotImplementedError return ''.join([''.join(''.join(jamo_to_hcj(_)) for _ in string)])
<SYSTEM_TASK:> Return an exception instance that maps to the OAuth Error. <END_TASK> <USER_TASK:> Description: def authorization_error_class(response): """Return an exception instance that maps to the OAuth Error. :param response: The HTTP response containing a www-authenticate error. """
message = response.headers.get("www-authenticate") if message: error = message.replace('"', "").rsplit("=", 1)[1] else: error = response.status_code return _auth_error_mapping[error](response)
<SYSTEM_TASK:> Return the latest of two bookmarks by looking for the maximum <END_TASK> <USER_TASK:> Description: def _last_bookmark(b0, b1): """ Return the latest of two bookmarks by looking for the maximum integer value following the last colon in the bookmark string. """
n = [None, None] _, _, n[0] = b0.rpartition(":") _, _, n[1] = b1.rpartition(":") for i in range(2): try: n[i] = int(n[i]) except ValueError: raise ValueError("Invalid bookmark: {}".format(b0)) return b0 if n[0] > n[1] else b1
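A small sketch of the comparison; the bookmark strings are purely illustrative, and whatever follows the last colon must parse as an integer for this helper to succeed:

# Illustrative only: real bookmark formats may differ.
b0 = "bookmark:100"
b1 = "bookmark:99"
assert _last_bookmark(b0, b1) == b0   # 100 > 99, so b0 is the later bookmark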
<SYSTEM_TASK:> Connect and perform a handshake and return a valid Connection object, assuming <END_TASK> <USER_TASK:> Description: def connect(address, **config): """ Connect and perform a handshake and return a valid Connection object, assuming a protocol version can be agreed. """
ssl_context = make_ssl_context(**config) last_error = None # Establish a connection to the host and port specified # Catches refused connections see: # https://docs.python.org/2/library/errno.html log_debug("[#0000] C: <RESOLVE> %s", address) resolver = Resolver(custom_resolver=config.get("resolver")) resolver.addresses.append(address) resolver.custom_resolve() resolver.dns_resolve() for resolved_address in resolver.addresses: try: s = _connect(resolved_address, **config) s, der_encoded_server_certificate = _secure(s, address[0], ssl_context) connection = _handshake(s, resolved_address, der_encoded_server_certificate, **config) except Exception as error: last_error = error else: return connection if last_error is None: raise ServiceUnavailable("Failed to resolve addresses for %s" % address) else: raise last_error
<SYSTEM_TASK:> Add a message to the outgoing queue. <END_TASK> <USER_TASK:> Description: def _append(self, signature, fields=(), response=None): """ Add a message to the outgoing queue. :arg signature: the signature of the message :arg fields: the fields of the message as a tuple :arg response: a response object to handle callbacks """
self.packer.pack_struct(signature, fields) self.output_buffer.chunk() self.output_buffer.chunk() self.responses.append(response)
<SYSTEM_TASK:> Add a RESET message to the outgoing queue, send <END_TASK> <USER_TASK:> Description: def reset(self): """ Add a RESET message to the outgoing queue, send it and consume all remaining messages. """
def fail(metadata): raise ProtocolError("RESET failed %r" % metadata) log_debug("[#%04X] C: RESET", self.local_port) self._append(b"\x0F", response=Response(self, on_failure=fail)) self.sync()
<SYSTEM_TASK:> Send all queued messages to the server. <END_TASK> <USER_TASK:> Description: def _send(self): """ Send all queued messages to the server. """
data = self.output_buffer.view() if not data: return if self.closed(): raise self.Error("Failed to write to closed connection {!r}".format(self.server.address)) if self.defunct(): raise self.Error("Failed to write to defunct connection {!r}".format(self.server.address)) self.socket.sendall(data) self.output_buffer.clear()
<SYSTEM_TASK:> Receive at least one message from the server, if available. <END_TASK> <USER_TASK:> Description: def _fetch(self): """ Receive at least one message from the server, if available. :return: 2-tuple of number of detail messages and number of summary messages fetched """
if self.closed(): raise self.Error("Failed to read from closed connection {!r}".format(self.server.address)) if self.defunct(): raise self.Error("Failed to read from defunct connection {!r}".format(self.server.address)) if not self.responses: return 0, 0 self._receive() details, summary_signature, summary_metadata = self._unpack() if details: log_debug("[#%04X] S: RECORD * %d", self.local_port, len(details)) # TODO self.responses[0].on_records(details) if summary_signature is None: return len(details), 0 response = self.responses.popleft() response.complete = True if summary_signature == b"\x70": log_debug("[#%04X] S: SUCCESS %r", self.local_port, summary_metadata) response.on_success(summary_metadata or {}) elif summary_signature == b"\x7E": self._last_run_statement = None log_debug("[#%04X] S: IGNORED", self.local_port) response.on_ignored(summary_metadata or {}) elif summary_signature == b"\x7F": self._last_run_statement = None log_debug("[#%04X] S: FAILURE %r", self.local_port, summary_metadata) response.on_failure(summary_metadata or {}) else: self._last_run_statement = None raise ProtocolError("Unexpected response message with signature %02X" % summary_signature) return len(details), 1
<SYSTEM_TASK:> Send and fetch all outstanding messages. <END_TASK> <USER_TASK:> Description: def sync(self): """ Send and fetch all outstanding messages. :return: 2-tuple of number of detail messages and number of summary messages fetched """
self.send() detail_count = summary_count = 0 while self.responses: response = self.responses[0] while not response.complete: detail_delta, summary_delta = self.fetch() detail_count += detail_delta summary_count += summary_delta return detail_count, summary_count
<SYSTEM_TASK:> Acquire a connection to a given address from the pool. <END_TASK> <USER_TASK:> Description: def acquire_direct(self, address): """ Acquire a connection to a given address from the pool. The address supplied should always be an IP address, not a host name. This method is thread safe. """
if self.closed(): raise ServiceUnavailable("Connection pool closed") with self.lock: try: connections = self.connections[address] except KeyError: connections = self.connections[address] = deque() connection_acquisition_start_timestamp = perf_counter() while True: # try to find a free connection in pool for connection in list(connections): if connection.closed() or connection.defunct() or connection.timedout(): connections.remove(connection) continue if not connection.in_use: connection.in_use = True return connection # all connections in pool are in-use infinite_connection_pool = (self._max_connection_pool_size < 0 or self._max_connection_pool_size == float("inf")) can_create_new_connection = infinite_connection_pool or len(connections) < self._max_connection_pool_size if can_create_new_connection: try: connection = self.connector(address) except ServiceUnavailable: self.remove(address) raise else: connection.pool = self connection.in_use = True connections.append(connection) return connection # failed to obtain a connection from pool because the pool is full and no free connection in the pool span_timeout = self._connection_acquisition_timeout - (perf_counter() - connection_acquisition_start_timestamp) if span_timeout > 0: self.cond.wait(span_timeout) # if timed out, then we throw error. This time computation is needed, as with python 2.7, we cannot # tell if the condition is notified or timed out when we come to this line if self._connection_acquisition_timeout <= (perf_counter() - connection_acquisition_start_timestamp): raise ClientError("Failed to obtain a connection from pool within {!r}s".format( self._connection_acquisition_timeout)) else: raise ClientError("Failed to obtain a connection from pool within {!r}s".format(self._connection_acquisition_timeout))
<SYSTEM_TASK:> Release a connection back into the pool. <END_TASK> <USER_TASK:> Description: def release(self, connection): """ Release a connection back into the pool. This method is thread safe. """
with self.lock: connection.in_use = False self.cond.notify_all()
<SYSTEM_TASK:> Count the number of connections currently in use to a given <END_TASK> <USER_TASK:> Description: def in_use_connection_count(self, address): """ Count the number of connections currently in use to a given address. """
try: connections = self.connections[address] except KeyError: return 0 else: return sum(1 if connection.in_use else 0 for connection in connections)
<SYSTEM_TASK:> Deactivate an address from the connection pool, if present, closing <END_TASK> <USER_TASK:> Description: def deactivate(self, address): """ Deactivate an address from the connection pool, if present, closing all idle connection to that address """
with self.lock: try: connections = self.connections[address] except KeyError: # already removed from the connection pool return for conn in list(connections): if not conn.in_use: connections.remove(conn) try: conn.close() except IOError: pass if not connections: self.remove(address)
<SYSTEM_TASK:> Remove an address from the connection pool, if present, closing <END_TASK> <USER_TASK:> Description: def remove(self, address): """ Remove an address from the connection pool, if present, closing all connections to that address. """
with self.lock: for connection in self.connections.pop(address, ()): try: connection.close() except IOError: pass
<SYSTEM_TASK:> Close all connections and empty the pool. <END_TASK> <USER_TASK:> Description: def close(self): """ Close all connections and empty the pool. This method is thread safe. """
if self._closed: return try: with self.lock: if not self._closed: self._closed = True for address in list(self.connections): self.remove(address) except TypeError as e: pass
<SYSTEM_TASK:> Called when one or more RECORD messages have been received. <END_TASK> <USER_TASK:> Description: def on_records(self, records): """ Called when one or more RECORD messages have been received. """
handler = self.handlers.get("on_records") if callable(handler): handler(records)
<SYSTEM_TASK:> Called when a SUCCESS message has been received. <END_TASK> <USER_TASK:> Description: def on_success(self, metadata): """ Called when a SUCCESS message has been received. """
handler = self.handlers.get("on_success") if callable(handler): handler(metadata) handler = self.handlers.get("on_summary") if callable(handler): handler()
<SYSTEM_TASK:> Called when a FAILURE message has been received. <END_TASK> <USER_TASK:> Description: def on_failure(self, metadata): """ Called when a FAILURE message has been received. """
self.connection.reset() handler = self.handlers.get("on_failure") if callable(handler): handler(metadata) handler = self.handlers.get("on_summary") if callable(handler): handler() raise CypherError.hydrate(**metadata)
<SYSTEM_TASK:> Called when an IGNORED message has been received. <END_TASK> <USER_TASK:> Description: def on_ignored(self, metadata=None): """ Called when an IGNORED message has been received. """
handler = self.handlers.get("on_ignored") if callable(handler): handler(metadata) handler = self.handlers.get("on_summary") if callable(handler): handler()
<SYSTEM_TASK:> A replacement for the property decorator that will only compute the <END_TASK> <USER_TASK:> Description: def cached_property(prop): """ A replacement for the property decorator that will only compute the attribute's value on the first call and serve a cached copy from then on. """
def cache_wrapper(self): if not hasattr(self, "_cache"): self._cache = {} if prop.__name__ not in self._cache: return_value = prop(self) if isgenerator(return_value): return_value = tuple(return_value) self._cache[prop.__name__] = return_value return self._cache[prop.__name__] return property(cache_wrapper)
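A minimal usage sketch (the class and attribute names are made up):

class Report:                                  # hypothetical example class
    @cached_property
    def rows(self):
        print("computing...")                  # runs only on the first access
        return (n * n for n in range(3))       # generators are materialised to a tuple

r = Report()
r.rows    # prints "computing...", caches (0, 1, 4) under r._cache["rows"]
r.rows    # served from the cache; the property body does not run again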
<SYSTEM_TASK:> Get a table of values with the given OID prefix. <END_TASK> <USER_TASK:> Description: def table(self, oid, columns=None, column_value_mapping=None, non_repeaters=0, max_repetitions=20, fetch_all_columns=True): """ Get a table of values with the given OID prefix. """
snmpsecurity = self._get_snmp_security() base_oid = oid.strip(".") if not fetch_all_columns and not columns: raise ValueError("please use the columns argument to " "indicate which columns to fetch") if fetch_all_columns: columns_to_fetch = [""] else: columns_to_fetch = ["." + str(col_id) for col_id in columns.keys()] full_obj_table = [] for col in columns_to_fetch: try: engine_error, pdu_error, pdu_error_index, obj_table = self._cmdgen.bulkCmd( snmpsecurity, cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout, retries=self.retries), non_repeaters, max_repetitions, oid + col, ) except Exception as e: raise SNMPError(e) if engine_error: raise SNMPError(engine_error) if pdu_error: raise SNMPError(pdu_error.prettyPrint()) # remove any trailing rows from the next subtree try: while not str(obj_table[-1][0][0].getOid()).lstrip(".").startswith( base_oid + col + "." ): obj_table.pop() except IndexError: pass # append this column to full result full_obj_table += obj_table t = Table(columns=columns, column_value_mapping=column_value_mapping) for row in full_obj_table: for name, value in row: oid = str(name.getOid()).strip(".") value = _convert_value_to_native(value) column, row_id = oid[len(base_oid) + 1:].split(".", 1) t._add_value(int(column), row_id, value) return t
<SYSTEM_TASK:> Load parser for command line arguments. <END_TASK> <USER_TASK:> Description: def get_parser(): """Load parser for command line arguments. It parses argv/input into the args variable. """
desc = Colors.LIGHTBLUE + textwrap.dedent( '''\ Welcome to _ _ _ __ _ _ _| |_ ___ _ __ _ _ | |_ ___ _ __ _ __ ___ _ __ | |_ / _` | | | | __/ _ \ | '_ \| | | | | __/ _ \| '__| '__/ _ \ '_ \| __| | (_| | |_| | || (_) | | |_) | |_| | | || (_) | | | | | __/ | | | |_ \__,_|\__,_|\__\___/____| .__/ \__, |___\__\___/|_| |_| \___|_| |_|\__| |_____|_| |___/_____| ------------------------------------ auto_py_torrent is an automated tool for download files by obtaining torrents or magnets that are in different provided pages that the user can choose. Its goal is to make it easier for users to find the files they want and download them instantly. An auto_py_torrent command is provided in which the user can currently choose between two modes, best_rated and list mode, then it selects one of the torrent tracking pages for multimedia content and finally enter the text of what you want to download. ------------------------------------ ''') + Colors.ENDC usage_info = Colors.LGREEN + textwrap.dedent( '''\ Use "%(prog)s --help" for more information. Examples: use "%(prog)s MODE SELECTED_PAGE STRING_TO_SEARCH # generic. use "%(prog)s 0 0 "The simpsons" # best rated. use "%(prog)s 1 0 "The simpsons" # list rated. Mode options: 0: best_rated. # Download the most rated file. 1: list. # Get a list, and select one of them. Page list options: 0: torrent project. 1: the pirate bay. 2: 1337x. 3: eztv. 4: limetorrents. 5: isohunt. ''') + Colors.ENDC epi = Colors.LIGHTPURPLE + textwrap.dedent( '''\ -> Thanks for using auto_py_torrent! ''') + Colors.ENDC # Parent and only parser. parser = argparse.ArgumentParser( add_help=True, formatter_class=argparse.RawTextHelpFormatter, usage=usage_info, description=desc, epilog=epi) parser.add_argument('mode', action='store', choices=range(len(MODES)), type=int, help='Select mode of file download.\n' ' e.g: 0(rated) or 1(list).') parser.add_argument('torr_page', action='store', choices=range(len(TORRENTS)), type=int, help='Select tracking page to download from.\n' ' e.g: 0 to .. ' + str(len(TORRENTS)-1) + '.') parser.add_argument('str_search', action='store', type=str, help='Input torrent string to search.\n' ' e.g: "String search"') return(parser)
<SYSTEM_TASK:> Insert args values into instance variables. <END_TASK> <USER_TASK:> Description: def insert(args): """Insert args values into instance variables."""
string_search = args.str_search mode_search = MODES[args.mode] page = list(TORRENTS[args.torr_page].keys())[0] key_search = TORRENTS[args.torr_page][page]['key_search'] torrent_page = TORRENTS[args.torr_page][page]['page'] domain = TORRENTS[args.torr_page][page]['domain'] return([args, string_search, mode_search, page, key_search, torrent_page, domain])
<SYSTEM_TASK:> Search and download torrents until the user chooses to quit. <END_TASK> <USER_TASK:> Description: def run_it(): """Search and download torrents until the user chooses to quit."""
initialize() parser = get_parser() args = None first_parse = True while(True): if first_parse is True: first_parse = False args = parser.parse_args() else: print(textwrap.dedent( '''\ Search again like in the beginning. -- You can either choose best rated or list mode. -- This time, you can insert the search string without double quotes. Remember the list mode options! 0: torrent project. 1: the pirate bay. 2: 1337x. 3: eztv. 4: limetorrents. 5: isohunt. ''')) print('Or.. if you want to exit just write "' + Colors.LRED + 'Q' + Colors.ENDC + '" or "' + Colors.LRED + 'q' + Colors.ENDC + '".') input_parse = input('>> ').replace("'", "").replace('"', '') if input_parse in ['Q', 'q']: sys.exit(1) args = parser.parse_args(input_parse.split(' ', 2)) if args.str_search.strip() == "": print('Please insert an appropriate non-empty string.') else: auto = AutoPy(*insert(args)) auto.get_content() auto.select_torrent() auto.download_torrent()
<SYSTEM_TASK:> Open magnet according to os. <END_TASK> <USER_TASK:> Description: def open_magnet(self): """Open magnet according to os."""
if sys.platform.startswith('linux'): subprocess.Popen(['xdg-open', self.magnet], stdout=subprocess.PIPE, stderr=subprocess.PIPE) elif sys.platform.startswith('win32'): os.startfile(self.magnet) elif sys.platform.startswith('cygwin'): os.startfile(self.magnet) elif sys.platform.startswith('darwin'): subprocess.Popen(['open', self.magnet], stdout=subprocess.PIPE, stderr=subprocess.PIPE) else: subprocess.Popen(['xdg-open', self.magnet], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
<SYSTEM_TASK:> Get magnet from torrent page. The URL already includes the domain. <END_TASK> <USER_TASK:> Description: def get_magnet(self, url): """Get magnet from torrent page. The URL already includes the domain."""
content_most_rated = requests.get(url) rated_soup = BeautifulSoup(content_most_rated.content, 'lxml') if self.page == 'torrent_project': self.magnet = rated_soup.find( 'a', href=True, text=re.compile('Download'))['href'] elif self.page == 'the_pirate_bay': self.magnet = rated_soup.find( 'a', href=True, text=re.compile('Get this torrent'))['href'] elif self.page == '1337x': div1337 = rated_soup.find( 'div', {'class': 'torrent-category-detail'}) self.magnet = div1337.find('a', href=re.compile('magnet'))['href'] elif self.page == 'isohunt': self.magnet = rated_soup.find( 'a', href=re.compile('magnet'))['href'] else: print('Wrong page to get magnet!') sys.exit(1)
<SYSTEM_TASK:> Download torrent. <END_TASK> <USER_TASK:> Description: def download_torrent(self): """Download torrent. In best-rated mode, download the single best-rated torrent found. Otherwise: get the magnet and download it. """
try: if self.back_to_menu is True: return if self.found_torrents is False: print('Nothing found.') return if self.mode_search == 'best_rated': print('Downloading..') self.open_magnet() elif self.mode_search == 'list': if self.selected is not None: # t_p, pirate and 1337x got magnet inside, else direct. if self.page in ['eztv', 'limetorrents']: self.magnet = self.hrefs[int(self.selected)] print('Downloading..') self.open_magnet() elif self.page in ['the_pirate_bay', 'torrent_project', '1337x', 'isohunt']: url = self.hrefs[int(self.selected)] self.get_magnet(url) print('Downloading..') self.open_magnet() else: print('Bad selected page.') else: print('Nothing selected.') sys.exit(1) except Exception: print(traceback.format_exc()) sys.exit(0)
<SYSTEM_TASK:> Handle user's input in list mode. <END_TASK> <USER_TASK:> Description: def handle_select(self): """Handle user's input in list mode."""
self.selected = input('>> ') if self.selected in ['Q', 'q']: sys.exit(1) elif self.selected in ['B', 'b']: self.back_to_menu = True return True elif is_num(self.selected): if 0 <= int(self.selected) <= len(self.hrefs) - 1: self.back_to_menu = False return True else: print(Colors.FAIL + 'Wrong index. ' + 'Please select an appropriate one or other option.' + Colors.ENDC) return False else: print(Colors.FAIL + 'Invalid input. ' + 'Please select an appropriate one or other option.' + Colors.ENDC) return False
<SYSTEM_TASK:> Select torrent. <END_TASK> <USER_TASK:> Description: def select_torrent(self): """Select torrent. First check whether the expected element/info is present in content_page. The user either wants the best-rated torrent or to pick one from a list. If the user wants best rated: directly obtain the magnet/torrent. Else: build a table with all the data and let the user select the torrent. """
try: self.found_torrents = not bool(self.key_search in self.content_page.text) if not self.found_torrents: print('No torrents found.') sys.exit(1) self.soupify() if self.mode_search == 'list': self.build_table() if len(self.hrefs) == 1: print('Press "0" to download it.') elif len(self.hrefs) >= 2: print('\nSelect one of the following torrents. ' + 'Enter a number between: 0 and ' + str(len(self.hrefs) - 1)) print('If you want to exit write "' + Colors.LRED + 'Q' + Colors.ENDC + '" or "' + Colors.LRED + 'q' + Colors.ENDC + '".') print('If you want to go back to menu and search again write "' + Colors.LGREEN + 'B' + Colors.ENDC + '" or "' + Colors.LGREEN + 'b' + Colors.ENDC + '".') while not(self.picked_choice): self.picked_choice = self.handle_select() except Exception: print('ERROR select_torrent: ') logging.error(traceback.format_exc()) sys.exit(0)
<SYSTEM_TASK:> Build an appropriately encoded URL. <END_TASK> <USER_TASK:> Description: def build_url(self): """Build an appropriately encoded URL. This mirrors the way a search is performed on the page itself. """
url = requests.utils.requote_uri( self.torrent_page + self.string_search) if self.page == '1337x': return(url + '/1/') elif self.page == 'limetorrents': return(url + '/') else: return(url)
<SYSTEM_TASK:> Get content of the page through url. <END_TASK> <USER_TASK:> Description: def get_content(self): """Get content of the page through url."""
url = self.build_url() try: self.content_page = requests.get(url) if not(self.content_page.status_code == requests.codes.ok): self.content_page.raise_for_status() except requests.exceptions.RequestException as ex: logging.info('A requests exception has occurred: ' + str(ex)) logging.error(traceback.format_exc()) sys.exit(0)
<SYSTEM_TASK:> Reclaim buffer space before the origin. <END_TASK> <USER_TASK:> Description: def _recycle(self): """ Reclaim buffer space before the origin. Note: modifies buffer size """
origin = self._origin if origin == 0: return False available = self._extent - origin self._data[:available] = self._data[origin:self._extent] self._extent = available self._origin = 0 #log_debug("Recycled %d bytes" % origin) return True
<SYSTEM_TASK:> Construct a frame around the first complete message in the buffer. <END_TASK> <USER_TASK:> Description: def frame_message(self): """ Construct a frame around the first complete message in the buffer. """
if self._frame is not None: self.discard_message() panes = [] p = origin = self._origin extent = self._extent while p < extent: available = extent - p if available < 2: break chunk_size, = struct_unpack(">H", self._view[p:(p + 2)]) p += 2 if chunk_size == 0: self._limit = p self._frame = MessageFrame(memoryview(self._view[origin:self._limit]), panes) return True q = p + chunk_size panes.append((p - origin, q - origin)) p = q return False
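A sketch of the chunked wire format this framer expects, under the assumption that it follows the usual Bolt convention of a 2-byte big-endian chunk size with a zero-size chunk terminating each message:

from struct import pack

payload = b"\xb1\x0f\xa0"                        # illustrative message bytes
framed = pack(">H", len(payload)) + payload + pack(">H", 0)
# Given a buffer starting with `framed`, frame_message() would record a single
# pane covering bytes [2:5] and set the limit just past the zero chunk.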
<SYSTEM_TASK:> Rate limit the call to request_function. <END_TASK> <USER_TASK:> Description: def call(self, request_function, set_header_callback, *args, **kwargs): """Rate limit the call to request_function. :param request_function: A function call that returns an HTTP response object. :param set_header_callback: A callback function used to set the request headers. This callback is called after any necessary sleep time occurs. :param *args: The positional arguments to ``request_function``. :param **kwargs: The keyword arguments to ``request_function``. """
self.delay() kwargs["headers"] = set_header_callback() response = request_function(*args, **kwargs) self.update(response.headers) return response
<SYSTEM_TASK:> Sleep for an amount of time to remain under the rate limit. <END_TASK> <USER_TASK:> Description: def delay(self): """Sleep for an amount of time to remain under the rate limit."""
if self.next_request_timestamp is None: return sleep_seconds = self.next_request_timestamp - time.time() if sleep_seconds <= 0: return message = "Sleeping: {:0.2f} seconds prior to" " call".format( sleep_seconds ) log.debug(message) time.sleep(sleep_seconds)
<SYSTEM_TASK:> Update the state of the rate limiter based on the response headers. <END_TASK> <USER_TASK:> Description: def update(self, response_headers): """Update the state of the rate limiter based on the response headers. This method should only be called following a HTTP request to reddit. Response headers that do not contain x-ratelimit fields will be treated as a single request. This behavior is to error on the safe-side as such responses should trigger exceptions that indicate invalid behavior. """
if "x-ratelimit-remaining" not in response_headers: if self.remaining is not None: self.remaining -= 1 self.used += 1 return now = time.time() prev_remaining = self.remaining seconds_to_reset = int(response_headers["x-ratelimit-reset"]) self.remaining = float(response_headers["x-ratelimit-remaining"]) self.used = int(response_headers["x-ratelimit-used"]) self.reset_timestamp = now + seconds_to_reset if self.remaining <= 0: self.next_request_timestamp = self.reset_timestamp return if prev_remaining is not None and prev_remaining > self.remaining: estimated_clients = prev_remaining - self.remaining else: estimated_clients = 1.0 self.next_request_timestamp = min( self.reset_timestamp, now + (estimated_clients * seconds_to_reset / self.remaining), )
<SYSTEM_TASK:> If a custom resolver is defined, perform custom resolution on <END_TASK> <USER_TASK:> Description: def custom_resolve(self): """ If a custom resolver is defined, perform custom resolution on the contained addresses. :return: """
if not callable(self.custom_resolver): return new_addresses = [] for address in self.addresses: for new_address in self.custom_resolver(address): new_addresses.append(new_address) self.addresses = new_addresses
<SYSTEM_TASK:> Perform DNS resolution on the contained addresses. <END_TASK> <USER_TASK:> Description: def dns_resolve(self): """ Perform DNS resolution on the contained addresses. :return: """
new_addresses = [] for address in self.addresses: try: info = getaddrinfo(address[0], address[1], 0, SOCK_STREAM, IPPROTO_TCP) except gaierror: raise AddressError("Cannot resolve address {!r}".format(address)) else: for _, _, _, _, address in info: if len(address) == 4 and address[3] != 0: # skip any IPv6 addresses with a non-zero scope id # as these appear to cause problems on some platforms continue new_addresses.append(address) self.addresses = new_addresses
<SYSTEM_TASK:> Returns the first matching line in a list of lines. <END_TASK> <USER_TASK:> Description: def matching_line(lines, keyword): """ Returns the first matching line in a list of lines. @see match() """
for line in lines: matching = match(line,keyword) if matching != None: return matching return None
<SYSTEM_TASK:> Return the json content from the resource at ``path``. <END_TASK> <USER_TASK:> Description: def request( self, method, path, data=None, files=None, json=None, params=None ): """Return the json content from the resource at ``path``. :param method: The request verb. E.g., get, post, put. :param path: The path of the request. This path will be combined with the ``oauth_url`` of the Requestor. :param data: Dictionary, bytes, or file-like object to send in the body of the request. :param files: Dictionary, mapping ``filename`` to file-like object. :param json: Object to be serialized to JSON in the body of the request. :param params: The query parameters to send with the request. Automatically refreshes the access token if it becomes invalid and a refresh token is available. Raises InvalidInvocation in such a case if a refresh token is not available. """
params = deepcopy(params) or {} params["raw_json"] = 1 if isinstance(data, dict): data = deepcopy(data) data["api_type"] = "json" data = sorted(data.items()) url = urljoin(self._requestor.oauth_url, path) return self._request_with_retries( data=data, files=files, json=json, method=method, params=params, url=url, )
<SYSTEM_TASK:> Perform a request, or return a cached response if available. <END_TASK> <USER_TASK:> Description: def request(self, method, url, params=None, **kwargs): """Perform a request, or return a cached response if available."""
params_key = tuple(params.items()) if params else () if method.upper() == "GET": if (url, params_key) in self.get_cache: print("Returning cached response for:", method, url, params) return self.get_cache[(url, params_key)] result = super().request(method, url, params, **kwargs) if method.upper() == "GET": self.get_cache[(url, params_key)] = result print("Adding entry to the cache:", method, url, params) return result
<SYSTEM_TASK:> Parse the records returned from a getServers call and <END_TASK> <USER_TASK:> Description: def parse_routing_info(cls, records): """ Parse the records returned from a getServers call and return a new RoutingTable instance. """
if len(records) != 1: raise RoutingProtocolError("Expected exactly one record") record = records[0] routers = [] readers = [] writers = [] try: servers = record["servers"] for server in servers: role = server["role"] addresses = [] for address in server["addresses"]: addresses.append(SocketAddress.parse(address, DEFAULT_PORT)) if role == "ROUTE": routers.extend(addresses) elif role == "READ": readers.extend(addresses) elif role == "WRITE": writers.extend(addresses) ttl = record["ttl"] except (KeyError, TypeError): raise RoutingProtocolError("Cannot parse routing info") else: return cls(routers, readers, writers, ttl)
<SYSTEM_TASK:> Indicator for whether routing information is still usable. <END_TASK> <USER_TASK:> Description: def is_fresh(self, access_mode): """ Indicator for whether routing information is still usable. """
log_debug("[#0000] C: <ROUTING> Checking table freshness for %r", access_mode) expired = self.last_updated_time + self.ttl <= self.timer() has_server_for_mode = bool(access_mode == READ_ACCESS and self.readers) or bool(access_mode == WRITE_ACCESS and self.writers) log_debug("[#0000] C: <ROUTING> Table expired=%r", expired) log_debug("[#0000] C: <ROUTING> Table routers=%r", self.routers) log_debug("[#0000] C: <ROUTING> Table has_server_for_mode=%r", has_server_for_mode) return not expired and self.routers and has_server_for_mode
<SYSTEM_TASK:> Update the current routing table with new routing information <END_TASK> <USER_TASK:> Description: def update(self, new_routing_table): """ Update the current routing table with new routing information from a replacement table. """
self.routers.replace(new_routing_table.routers) self.readers.replace(new_routing_table.readers) self.writers.replace(new_routing_table.writers) self.last_updated_time = self.timer() self.ttl = new_routing_table.ttl log_debug("[#0000] S: <ROUTING> table=%r", self)
<SYSTEM_TASK:> Fetch raw routing info from a given router address. <END_TASK> <USER_TASK:> Description: def fetch_routing_info(self, address): """ Fetch raw routing info from a given router address. :param address: router address :return: list of routing records or None if no connection could be established :raise ServiceUnavailable: if the server does not support routing or if routing support is broken """
metadata = {} records = [] def fail(md): if md.get("code") == "Neo.ClientError.Procedure.ProcedureNotFound": raise RoutingProtocolError("Server {!r} does not support routing".format(address)) else: raise RoutingProtocolError("Routing support broken on server {!r}".format(address)) try: with self.acquire_direct(address) as cx: _, _, server_version = (cx.server.agent or "").partition("/") # TODO 2.0: remove old routing procedure if server_version and Version.parse(server_version) >= Version((3, 2)): log_debug("[#%04X] C: <ROUTING> query=%r", cx.local_port, self.routing_context or {}) cx.run("CALL dbms.cluster.routing.getRoutingTable({context})", {"context": self.routing_context}, on_success=metadata.update, on_failure=fail) else: log_debug("[#%04X] C: <ROUTING> query={}", cx.local_port) cx.run("CALL dbms.cluster.routing.getServers", {}, on_success=metadata.update, on_failure=fail) cx.pull_all(on_success=metadata.update, on_records=records.extend) cx.sync() routing_info = [dict(zip(metadata.get("fields", ()), values)) for values in records] log_debug("[#%04X] S: <ROUTING> info=%r", cx.local_port, routing_info) return routing_info except RoutingProtocolError as error: raise ServiceUnavailable(*error.args) except ServiceUnavailable: self.deactivate(address) return None
<SYSTEM_TASK:> Fetch a routing table from a given router address. <END_TASK> <USER_TASK:> Description: def fetch_routing_table(self, address): """ Fetch a routing table from a given router address. :param address: router address :return: a new RoutingTable instance or None if the given router is currently unable to provide routing information :raise ServiceUnavailable: if no writers are available :raise ProtocolError: if the routing information received is unusable """
new_routing_info = self.fetch_routing_info(address) if new_routing_info is None: return None # Parse routing info and count the number of each type of server new_routing_table = RoutingTable.parse_routing_info(new_routing_info) num_routers = len(new_routing_table.routers) num_readers = len(new_routing_table.readers) num_writers = len(new_routing_table.writers) # No writers are available. This likely indicates a temporary state, # such as leader switching, so we should not signal an error. # When no writers available, then we flag we are reading in absence of writer self.missing_writer = (num_writers == 0) # No routers if num_routers == 0: raise RoutingProtocolError("No routing servers returned from server %r" % (address,)) # No readers if num_readers == 0: raise RoutingProtocolError("No read servers returned from server %r" % (address,)) # At least one of each is fine, so return this table return new_routing_table
<SYSTEM_TASK:> Try to update routing tables with the given routers. <END_TASK> <USER_TASK:> Description: def update_routing_table_from(self, *routers): """ Try to update routing tables with the given routers. :return: True if the routing table is successfully updated, otherwise False """
for router in routers: new_routing_table = self.fetch_routing_table(router) if new_routing_table is not None: self.routing_table.update(new_routing_table) return True return False
<SYSTEM_TASK:> Update the routing table from the first router able to provide <END_TASK> <USER_TASK:> Description: def update_routing_table(self): """ Update the routing table from the first router able to provide valid routing information. """
# copied because it can be modified existing_routers = list(self.routing_table.routers) has_tried_initial_routers = False if self.missing_writer: has_tried_initial_routers = True if self.update_routing_table_from(self.initial_address): return if self.update_routing_table_from(*existing_routers): return if not has_tried_initial_routers and self.initial_address not in existing_routers: if self.update_routing_table_from(self.initial_address): return # None of the routers have been successful, so just fail raise ServiceUnavailable("Unable to retrieve routing information")
<SYSTEM_TASK:> Update the routing table if stale. <END_TASK> <USER_TASK:> Description: def ensure_routing_table_is_fresh(self, access_mode): """ Update the routing table if stale. This method performs two freshness checks, before and after acquiring the refresh lock. If the routing table is already fresh on entry, the method exits immediately; otherwise, the refresh lock is acquired and the second freshness check that follows determines whether an update is still required. This method is thread-safe. :return: `True` if an update was required, `False` otherwise. """
if self.routing_table.is_fresh(access_mode): return False with self.refresh_lock: if self.routing_table.is_fresh(access_mode): if access_mode == READ_ACCESS: # if reader is fresh but writers is not fresh, then we are reading in absence of writer self.missing_writer = not self.routing_table.is_fresh(WRITE_ACCESS) return False self.update_routing_table() self.update_connection_pool() return True
<SYSTEM_TASK:> Deactivate an address from the connection pool, <END_TASK> <USER_TASK:> Description: def deactivate(self, address): """ Deactivate an address from the connection pool, if present, remove from the routing table and also closing all idle connections to that address. """
log_debug("[#0000] C: <ROUTING> Deactivating address %r", address) # We use `discard` instead of `remove` here since the former # will not fail if the address has already been removed. self.routing_table.routers.discard(address) self.routing_table.readers.discard(address) self.routing_table.writers.discard(address) log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table) super(RoutingConnectionPool, self).deactivate(address)
<SYSTEM_TASK:> Remove a writer address from the routing table, if present. <END_TASK> <USER_TASK:> Description: def remove_writer(self, address): """ Remove a writer address from the routing table, if present. """
log_debug("[#0000] C: <ROUTING> Removing writer %r", address) self.routing_table.writers.discard(address) log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
<SYSTEM_TASK:> Handle any cleanup or similar activity related to an error <END_TASK> <USER_TASK:> Description: def handle(self, error, connection): """ Handle any cleanup or similar activity related to an error occurring on a pooled connection. """
error_class = error.__class__ if error_class in (ConnectionExpired, ServiceUnavailable, DatabaseUnavailableError): self.deactivate(connection.address) elif error_class in (NotALeaderError, ForbiddenOnReadOnlyDatabaseError): self.remove_writer(connection.address)
<SYSTEM_TASK:> Dynamically create a Point subclass. <END_TASK> <USER_TASK:> Description: def point_type(name, fields, srid_map): """ Dynamically create a Point subclass. """
def srid(self): try: return srid_map[len(self)] except KeyError: return None attributes = {"srid": property(srid)} for index, subclass_field in enumerate(fields): def accessor(self, i=index, f=subclass_field): try: return self[i] except IndexError: raise AttributeError(f) for field_alias in {subclass_field, "xyz"[index]}: attributes[field_alias] = property(accessor) cls = type(name, (Point,), attributes) with __srid_table_lock: for dim, srid in srid_map.items(): __srid_table[srid] = (cls, dim) return cls
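A hypothetical usage sketch; the field names and SRID numbers below are placeholders, and it assumes the Point base class accepts an iterable of coordinates:

# Build a point class whose .srid depends on how many coordinates it holds.
CartesianExample = point_type("CartesianExample", ["x", "y", "z"], {2: 7203, 3: 9157})

p = CartesianExample((1.0, 2.0))
p.x, p.y       # -> (1.0, 2.0) via the generated accessors (aliases of p[0], p[1])
p.srid         # -> 7203 for a 2-tuple under this example mapping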
<SYSTEM_TASK:> Read a directory containing json files for Kibana panels, <END_TASK> <USER_TASK:> Description: def main(): """Read a directory containing json files for Kibana panels, beautify them and replace the size value in aggregations as specified through the corresponding params. """
args = parse_args() configure_logging(args.debug) src_path = args.src_path dest_path = args.dest_path old_str1 = '\\"size\\":' + args.old_size old_str2 = '\\"size\\": ' + args.old_size new_str = '\\"size\\":' + args.new_size logging.info('Input path: %s', src_path) logging.info('Output path: %s', dest_path) logging.info('old str: %s', old_str1) logging.info('old str: %s', old_str2) logging.info('new str: %s', new_str) if os.path.abspath(src_path) == os.path.abspath(dest_path): logging.error('source and destination directories must be different') sys.exit(1) # Iterate over input files json_files = [f for f in os.listdir(src_path) if f.endswith('.json')] for filename in json_files: in_file_path = os.path.join(src_path, filename) out_file_path = os.path.join(dest_path, filename) logging.info('INPUT FILE: %s',in_file_path) logging.info('OUTPUT FILE: %s',out_file_path) # First beautify input pretty = utils.beautify(filename=in_file_path) # Iterate the beautified json string line by line pretty_replaced = utils.replace(pretty, old_str1, new_str) pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str) with open(out_file_path, 'w') as output_file: output_file.write(pretty_replaced) logging.info('This is the end.')
<SYSTEM_TASK:> Show changes graphically in memory consumption <END_TASK> <USER_TASK:> Description: def graph_format(new_mem, old_mem, is_firstiteration=True): """Show changes graphically in memory consumption"""
if is_firstiteration: output = " n/a " elif new_mem - old_mem > 50000000: output = " +++++" elif new_mem - old_mem > 20000000: output = " ++++ " elif new_mem - old_mem > 5000000: output = " +++ " elif new_mem - old_mem > 1000000: output = " ++ " elif new_mem - old_mem > 50000: output = " + " elif old_mem - new_mem > 10000000: output = "--- " elif old_mem - new_mem > 2000000: output = " -- " elif old_mem - new_mem > 100000: output = " - " else: output = " " return output
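A quick check of the bucketing, sketched with memory values in bytes; the expected outputs are shown in comments:

# A 6 MB increase falls into the "> 5,000,000" bucket.
print(graph_format(106_000_000, 100_000_000, is_firstiteration=False))   # " +++ "
# On the very first iteration there is no previous value to compare against.
print(graph_format(106_000_000, 0))                                      # " n/a "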
<SYSTEM_TASK:> Check if a proper Python version is used. <END_TASK> <USER_TASK:> Description: def check_py_version(): """Check if a proper Python version is used."""
try: if sys.version_info >= (2, 7): return except: pass print(" ") print(" ERROR - memtop needs Python version at least 2.7") print(("Chances are that you can install a newer version from your " "repositories, or you may even have a newer version " "installed already.")) print("(one way to find out which versions are installed is to try " "the following: 'which python2.7' , 'which python3' and so on...)") print(" ") sys.exit(-1)
<SYSTEM_TASK:> Prompt a single character. <END_TASK> <USER_TASK:> Description: def character(prompt=None, empty=False): """Prompt a single character. Parameters ---------- prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. Returns ------- str or None A str if the user entered a single-character, non-empty string. None if the user pressed only Enter and ``empty`` was True. """
s = _prompt_input(prompt) if empty and not s: return None elif len(s) == 1: return s else: return character(prompt=prompt, empty=empty)
<SYSTEM_TASK:> Prompt an email address. <END_TASK> <USER_TASK:> Description: def email(prompt=None, empty=False, mode="simple"): """Prompt an email address. This check is based on a simple regular expression and does not verify whether an email actually exists. Parameters ---------- prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. mode : {'simple'}, optional 'simple' will use a simple regular expression. No other mode is implemented yet. Returns ------- str or None A str if the user entered a likely email address. None if the user pressed only Enter and ``empty`` was True. """
if mode == "simple": s = _prompt_input(prompt) if empty and not s: return None else: if RE_EMAIL_SIMPLE.match(s): return s else: return email(prompt=prompt, empty=empty, mode=mode) else: raise ValueError
<SYSTEM_TASK:> Prompt an integer. <END_TASK> <USER_TASK:> Description: def integer(prompt=None, empty=False): """Prompt an integer. Parameters ---------- prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. Returns ------- int or None An int if the user entered a valid integer. None if the user pressed only Enter and ``empty`` was True. """
s = _prompt_input(prompt) if empty and not s: return None else: try: return int(s) except ValueError: return integer(prompt=prompt, empty=empty)
<SYSTEM_TASK:> Prompt a real number. <END_TASK> <USER_TASK:> Description: def real(prompt=None, empty=False): """Prompt a real number. Parameters ---------- prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. Returns ------- float or None A float if the user entered a valid real number. None if the user pressed only Enter and ``empty`` was True. """
s = _prompt_input(prompt) if empty and not s: return None else: try: return float(s) except ValueError: return real(prompt=prompt, empty=empty)
<SYSTEM_TASK:> Prompt a string that matches a regular expression. <END_TASK> <USER_TASK:> Description: def regex(pattern, prompt=None, empty=False, flags=0): """Prompt a string that matches a regular expression. Parameters ---------- pattern : str A regular expression that must be matched. prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. flags : int, optional Flags that will be passed to ``re.match``. Returns ------- Match or None A match object if the user entered a matching string. None if the user pressed only Enter and ``empty`` was True. See Also -------- re.match """
s = _prompt_input(prompt) if empty and not s: return None else: m = re.match(pattern, s, flags=flags) if m: return m else: return regex(pattern, prompt=prompt, empty=empty, flags=flags)
<SYSTEM_TASK:> Prompt a string without echoing. <END_TASK> <USER_TASK:> Description: def secret(prompt=None, empty=False): """Prompt a string without echoing. Parameters ---------- prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. Returns ------- str or None A str if the user entered a non-empty string. None if the user pressed only Enter and ``empty`` was True. Raises ------ getpass.GetPassWarning If echo free input is unavailable. See Also -------- getpass.getpass """
if prompt is None: prompt = PROMPT s = getpass.getpass(prompt=prompt) if empty and not s: return None else: if s: return s else: return secret(prompt=prompt, empty=empty)
<SYSTEM_TASK:> Prompt a string. <END_TASK> <USER_TASK:> Description: def string(prompt=None, empty=False): """Prompt a string. Parameters ---------- prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. Returns ------- str or None A str if the user entered a non-empty string. None if the user pressed only Enter and ``empty`` was True. """
s = _prompt_input(prompt) if empty and not s: return None else: if s: return s else: return string(prompt=prompt, empty=empty)
<SYSTEM_TASK:> Return the value from the cache for this query. <END_TASK> <USER_TASK:> Description: def get_value(self, merge=True, createfunc=None, expiration_time=None, ignore_expiration=False): """ Return the value from the cache for this query. """
cache, cache_key = self._get_cache_plus_key() # ignore_expiration means, if the value is in the cache # but is expired, return it anyway. This doesn't make sense # with createfunc, which says, if the value is expired, generate # a new value. assert not ignore_expiration or not createfunc, \ "Can't ignore expiration and also provide createfunc" if ignore_expiration or not createfunc: cached_value = cache.get(cache_key, expiration_time=expiration_time, ignore_expiration=ignore_expiration) else: cached_value = cache.get(cache_key) if not cached_value: cached_value = createfunc() cache.set(cache_key, cached_value, timeout=expiration_time) if cached_value and merge: cached_value = self.merge_result(cached_value, load=False) return cached_value
<SYSTEM_TASK:> Set the value in the cache for this query. <END_TASK> <USER_TASK:> Description: def set_value(self, value): """Set the value in the cache for this query."""
cache, cache_key = self._get_cache_plus_key()
cache.set(cache_key, value)
<SYSTEM_TASK:> Given a Query, create a cache key. <END_TASK> <USER_TASK:> Description: def key_from_query(self, qualifier=None): """ Given a Query, create a cache key. There are many approaches to this; here we use the simplest, which is to create an md5 hash of the text of the SQL statement, combined with stringified versions of all the bound parameters within it. There's a bit of a performance hit with compiling out "query.statement" here; other approaches include setting up an explicit cache key with a particular Query, then combining that with the bound parameter values. """
stmt = self.with_labels().statement
compiled = stmt.compile()
params = compiled.params
values = [str(compiled)]
for k in sorted(params):
    values.append(repr(params[k]))
key = u" ".join(values)
return md5(key.encode('utf8')).hexdigest()
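The same key can be reproduced by hand when debugging unexpected cache misses; this sketch only restates what the method above does and assumes `query` is such a Query instance:

from hashlib import md5

def manual_key(query):
    # md5 of the compiled SQL text plus the repr of each bound parameter, sorted by name
    compiled = query.with_labels().statement.compile()
    parts = [str(compiled)] + [repr(compiled.params[k]) for k in sorted(compiled.params)]
    return md5(u" ".join(parts).encode('utf8')).hexdigest()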
<SYSTEM_TASK:> Process a Query that is used within a lazy loader. <END_TASK> <USER_TASK:> Description: def process_query_conditionally(self, query): """ Process a Query that is used within a lazy loader. (the process_query_conditionally() method is a SQLAlchemy hook invoked only within lazyload.) """
if query._current_path:
    mapper, prop = query._current_path[-2:]
    for cls in mapper.class_.__mro__:
        k = (cls, prop.key)
        relationship_option = self._relationship_options.get(k)
        if relationship_option:
            query._cache = relationship_option
            break
<SYSTEM_TASK:> Fit the smoother <END_TASK> <USER_TASK:> Description: def fit(self, t, y, dy=1, presorted=False): """Fit the smoother Parameters ---------- t : array_like time locations of the points to smooth y : array_like y locations of the points to smooth dy : array_like or float (default = 1) Errors in the y values presorted : bool (default = False) If True, then t is assumed to be sorted. Returns ------- self : Smoother instance """
self.t, self.y, self.dy = self._validate_inputs(t, y, dy, presorted)
self._fit(self.t, self.y, self.dy)
return self
<SYSTEM_TASK:> Predict the smoothed function value at time t <END_TASK> <USER_TASK:> Description: def predict(self, t): """Predict the smoothed function value at time t Parameters ---------- t : array_like Times at which to predict the result Returns ------- y : ndarray Smoothed values at time t """
t = np.asarray(t)
return self._predict(np.ravel(t)).reshape(t.shape)
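A fit/predict sketch; `SomeSmoother` is a hypothetical concrete subclass that implements `_fit` and `_predict`, and the data are synthetic:

import numpy as np

t = np.linspace(0, 10, 100)
y = np.sin(t) + 0.1 * np.random.randn(100)
model = SomeSmoother()  # hypothetical concrete Smoother subclass
# fit() returns self, so fitting and prediction can be chained
y_smooth = model.fit(t, y, dy=0.1).predict(np.linspace(0, 10, 500))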
<SYSTEM_TASK:> Return the residuals of the cross-validation for the fit data <END_TASK> <USER_TASK:> Description: def cv_residuals(self, cv=True): """Return the residuals of the cross-validation for the fit data"""
vals = self.cv_values(cv)
return (self.y - vals) / self.dy
<SYSTEM_TASK:> Return the mean absolute cross-validation residual for the input data <END_TASK> <USER_TASK:> Description: def cv_error(self, cv=True, skip_endpoints=True): """Return the mean absolute cross-validation residual for the input data"""
resids = self.cv_residuals(cv)
if skip_endpoints:
    resids = resids[1:-1]
return np.mean(abs(resids))
<SYSTEM_TASK:> Reconnect to the remote server. <END_TASK> <USER_TASK:> Description: def reconnect(self): """Reconnect to the remote server."""
with self.lock:
    if self.use_ssl:
        self.client = http.client.HTTPSConnection(self.host, self.port, context=self.ssl_context)
    else:
        self.client = http.client.HTTPConnection(self.host, self.port)
<SYSTEM_TASK:> Issue a call to the remote end point to execute the specified <END_TASK> <USER_TASK:> Description: def call(self, method, *args, **kwargs): """ Issue a call to the remote end point to execute the specified procedure. :param str method: The name of the remote procedure to execute. :return: The return value from the remote function. """
if kwargs:
    options = self.encode(dict(args=args, kwargs=kwargs))
else:
    options = self.encode(args)

headers = {}
if self.headers:
    headers.update(self.headers)
headers['Content-Type'] = self.serializer.content_type
headers['Content-Length'] = str(len(options))
headers['Connection'] = 'close'
if self.username is not None and self.password is not None:
    headers['Authorization'] = 'Basic ' + base64.b64encode((self.username + ':' + self.password).encode('UTF-8')).decode('UTF-8')

method = os.path.join(self.uri_base, method)
self.logger.debug('calling RPC method: ' + method[1:])
try:
    with self.lock:
        self.client.request('RPC', method, options, headers)
        resp = self.client.getresponse()
except http.client.ImproperConnectionState:
    raise RPCConnectionError('improper connection state')
if resp.status != 200:
    raise RPCError(resp.reason, resp.status)

resp_data = resp.read()
resp_data = self.decode(resp_data)
if not ('exception_occurred' in resp_data and 'result' in resp_data):
    raise RPCError('missing response information', resp.status)
if resp_data['exception_occurred']:
    raise RPCError('remote method incurred an exception', resp.status, remote_exception=resp_data['exception'])
return resp_data['result']
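A client-side usage sketch; the method names and arguments are hypothetical and assume `client` is an already-connected RPC client instance:

# positional and keyword arguments are both forwarded to the remote procedure
total = client.call('math/add', 1, 2)
user = client.call('users/get', user_id=42)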
<SYSTEM_TASK:> Call a remote method and update the local cache with the result <END_TASK> <USER_TASK:> Description: def cache_call_refresh(self, method, *options): """ Call a remote method and update the local cache with the result if it already existed. :param str method: The name of the remote procedure to execute. :return: The return value from the remote function. """
options_hash = self.encode(options)
if isinstance(options_hash, str):
    options_hash = options_hash.encode('UTF-8')
if len(options_hash) > 20:
    # hash the serialized options so long argument lists still get a fixed-size key
    options_hash = hashlib.new('sha1', options_hash).digest()
options_hash = sqlite3.Binary(options_hash)

with self.cache_lock:
    cursor = self.cache_db.cursor()
    cursor.execute('DELETE FROM cache WHERE method = ? AND options_hash = ?', (method, options_hash))
return_value = self.call(method, *options)
store_return_value = sqlite3.Binary(self.encode(return_value))
with self.cache_lock:
    cursor = self.cache_db.cursor()
    cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)', (method, options_hash, store_return_value))
    self.cache_db.commit()
return return_value
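Usage mirrors call(); the difference is that any previously cached value for the same method and arguments is discarded and replaced (the method name here is hypothetical):

# force a fresh fetch and re-prime the local SQLite cache
latest = client.cache_call_refresh('users/get', 42)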
<SYSTEM_TASK:> Purge the local store of all cached function information. <END_TASK> <USER_TASK:> Description: def cache_clear(self): """Purge the local store of all cached function information."""
with self.cache_lock:
    cursor = self.cache_db.cursor()
    cursor.execute('DELETE FROM cache')
    self.cache_db.commit()
self.logger.info('the RPC cache has been purged')
return
<SYSTEM_TASK:> Respond to the client by serving a file, either directly or as <END_TASK> <USER_TASK:> Description: def respond_file(self, file_path, attachment=False, query=None): """ Respond to the client by serving a file, either directly or as an attachment. :param str file_path: The path to the file to serve; this does not need to be in the web root. :param bool attachment: Whether to serve the file as a download by setting the Content-Disposition header. """
del query
file_path = os.path.abspath(file_path)
try:
    file_obj = open(file_path, 'rb')
except IOError:
    self.respond_not_found()
    return
self.send_response(200)
self.send_header('Content-Type', self.guess_mime_type(file_path))
fs = os.fstat(file_obj.fileno())
self.send_header('Content-Length', str(fs[6]))
if attachment:
    file_name = os.path.basename(file_path)
    self.send_header('Content-Disposition', 'attachment; filename=' + file_name)
self.send_header('Last-Modified', self.date_time_string(fs.st_mtime))
self.end_headers()
shutil.copyfileobj(file_obj, self.wfile)
file_obj.close()
return
<SYSTEM_TASK:> Respond to the client with an HTML page listing the contents of <END_TASK> <USER_TASK:> Description: def respond_list_directory(self, dir_path, query=None): """ Respond to the client with an HTML page listing the contents of the specified directory. :param str dir_path: The path of the directory to list the contents of. """
del query
try:
    dir_contents = os.listdir(dir_path)
except os.error:
    self.respond_not_found()
    return
if os.path.normpath(dir_path) != self.__config['serve_files_root']:
    dir_contents.append('..')
dir_contents.sort(key=lambda a: a.lower())
displaypath = html.escape(urllib.parse.unquote(self.path), quote=True)

f = io.BytesIO()
encoding = sys.getfilesystemencoding()
f.write(b'<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n')
f.write(b'<html>\n<title>Directory listing for ' + displaypath.encode(encoding) + b'</title>\n')
f.write(b'<body>\n<h2>Directory listing for ' + displaypath.encode(encoding) + b'</h2>\n')
f.write(b'<hr>\n<ul>\n')
for name in dir_contents:
    fullname = os.path.join(dir_path, name)
    displayname = linkname = name
    # append / for directories or @ for symbolic links
    if os.path.isdir(fullname):
        displayname = name + "/"
        linkname = name + "/"
    if os.path.islink(fullname):
        # note: a link to a directory displays with @ and links with /
        displayname = name + "@"
    f.write(('<li><a href="' + urllib.parse.quote(linkname) + '">' + html.escape(displayname, quote=True) + '</a>\n').encode(encoding))
f.write(b'</ul>\n<hr>\n</body>\n</html>\n')
length = f.tell()
f.seek(0)

self.send_response(200)
self.send_header('Content-Type', 'text/html; charset=' + encoding)
self.send_header('Content-Length', length)
self.end_headers()
shutil.copyfileobj(f, self.wfile)
f.close()
return
<SYSTEM_TASK:> Respond to the client with a 301 message and redirect them with <END_TASK> <USER_TASK:> Description: def respond_redirect(self, location='/'): """ Respond to the client with a 301 message and redirect them with a Location header. :param str location: The new location to redirect the client to. """
self.send_response(301)
self.send_header('Content-Length', 0)
self.send_header('Location', location)
self.end_headers()
return
<SYSTEM_TASK:> Handle an internal server error, logging a traceback if executed <END_TASK> <USER_TASK:> Description: def respond_server_error(self, status=None, status_line=None, message=None): """ Handle an internal server error, logging a traceback if executed within an exception handler. :param int status: The status code to respond to the client with. :param str status_line: The status message to respond to the client with. :param str message: The body of the response that is sent to the client. """
(ex_type, ex_value, ex_traceback) = sys.exc_info()
if ex_type:
    (ex_file_name, ex_line, _, _) = traceback.extract_tb(ex_traceback)[-1]
    line_info = "{0}:{1}".format(ex_file_name, ex_line)
    log_msg = "encountered {0} in {1}".format(repr(ex_value), line_info)
    self.server.logger.error(log_msg, exc_info=True)
status = (status or 500)
status_line = (status_line or http.client.responses.get(status, 'Internal Server Error')).strip()
self.send_response(status, status_line)
message = (message or status_line)
if isinstance(message, (str, bytes)):
    self.send_header('Content-Length', len(message))
    self.end_headers()
    if isinstance(message, str):
        self.wfile.write(message.encode(sys.getdefaultencoding()))
    else:
        self.wfile.write(message)
elif hasattr(message, 'fileno'):
    fs = os.fstat(message.fileno())
    self.send_header('Content-Length', fs[6])
    self.end_headers()
    shutil.copyfileobj(message, self.wfile)
else:
    self.end_headers()
return
<SYSTEM_TASK:> Respond to the client that the request is unauthorized. <END_TASK> <USER_TASK:> Description: def respond_unauthorized(self, request_authentication=False): """ Respond to the client that the request is unauthorized. :param bool request_authentication: Whether to request basic authentication information by sending a WWW-Authenticate header. """
headers = {}
if request_authentication:
    headers['WWW-Authenticate'] = 'Basic realm="' + self.__config['server_version'] + '"'
self.send_response_full(b'Unauthorized', status=401, headers=headers)
return
<SYSTEM_TASK:> Dispatch functions based on the established handler_map. It is <END_TASK> <USER_TASK:> Description: def dispatch_handler(self, query=None): """ Dispatch functions based on the established handler_map. It is generally not necessary to override this function and doing so will prevent any handlers from being executed. This function is executed automatically when GET, HEAD, or POST requests are received. :param dict query: Parsed query parameters from the corresponding request. """
query = (query or {})
# normalize the path
# abandon query parameters
self.path = self.path.split('?', 1)[0]
self.path = self.path.split('#', 1)[0]
original_path = urllib.parse.unquote(self.path)
self.path = posixpath.normpath(original_path)
words = self.path.split('/')
words = filter(None, words)
tmp_path = ''
for word in words:
    _, word = os.path.splitdrive(word)
    _, word = os.path.split(word)
    if word in (os.curdir, os.pardir):
        continue
    tmp_path = os.path.join(tmp_path, word)
self.path = tmp_path

if self.path == 'robots.txt' and self.__config['serve_robots_txt']:
    self.send_response_full(self.__config['robots_txt'])
    return

self.cookies = http.cookies.SimpleCookie(self.headers.get('cookie', ''))
handler, is_method = self.__get_handler(is_rpc=False)
if handler is not None:
    try:
        handler(*((query,) if is_method else (self, query)))
    except Exception:
        self.respond_server_error()
    return

if not self.__config['serve_files']:
    self.respond_not_found()
    return

file_path = self.__config['serve_files_root']
file_path = os.path.join(file_path, tmp_path)
if os.path.isfile(file_path) and os.access(file_path, os.R_OK):
    self.respond_file(file_path, query=query)
    return
elif os.path.isdir(file_path) and os.access(file_path, os.R_OK):
    if not original_path.endswith('/'):
        # redirect browser, doing what apache does
        destination = self.path + '/'
        if self.command == 'GET' and self.query_data:
            destination += '?' + urllib.parse.urlencode(self.query_data, True)
        self.respond_redirect(destination)
        return
    for index in ['index.html', 'index.htm']:
        index = os.path.join(file_path, index)
        if os.path.isfile(index) and os.access(index, os.R_OK):
            self.respond_file(index, query=query)
            return
    if self.__config['serve_files_list_directories']:
        self.respond_list_directory(file_path, query=query)
        return
self.respond_not_found()
return
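The word-by-word normalization above is what keeps requests confined to the web root; this standalone sketch reproduces the same steps on a hostile path so the effect can be seen in isolation:

import os
import posixpath

def collapse(path):
    # drop drive prefixes, path separators, and '.'/'..' components, exactly as dispatch_handler does
    parts = filter(None, posixpath.normpath(path).split('/'))
    out = ''
    for word in parts:
        _, word = os.path.splitdrive(word)
        _, word = os.path.split(word)
        if word in (os.curdir, os.pardir):
            continue
        out = os.path.join(out, word)
    return out

print(collapse('/../../etc/passwd'))  # -> 'etc/passwd', resolved relative to the web root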
<SYSTEM_TASK:> Guess an appropriate MIME type based on the extension of the <END_TASK> <USER_TASK:> Description: def guess_mime_type(self, path): """ Guess an appropriate MIME type based on the extension of the provided path. :param str path: The path of the file to analyze. :return: The guessed MIME type, or the default if none is found. :rtype: str """
_, ext = posixpath.splitext(path)
if ext in self.extensions_map:
    return self.extensions_map[ext]
ext = ext.lower()
return self.extensions_map[ext if ext in self.extensions_map else '']
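Because unknown extensions fall back to the empty-string key, additional types can be registered on the handler's extensions_map; a sketch assuming `RequestHandler` is a subclass that exposes this method:

RequestHandler.extensions_map.update({
    '': 'application/octet-stream',  # default for unknown extensions
    '.json': 'application/json',
    '.svg': 'image/svg+xml',
})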
<SYSTEM_TASK:> Check for the presence of a basic auth Authorization header and <END_TASK> <USER_TASK:> Description: def check_authorization(self): """ Check for the presence of a basic auth Authorization header and whether the credentials contained within it are valid. :return: Whether or not the credentials are valid. :rtype: bool """
try:
    store = self.__config.get('basic_auth')
    if store is None:
        return True
    auth_info = self.headers.get('Authorization')
    if not auth_info:
        return False
    auth_info = auth_info.split()
    if len(auth_info) != 2 or auth_info[0] != 'Basic':
        return False
    auth_info = base64.b64decode(auth_info[1]).decode(sys.getdefaultencoding())
    username = auth_info.split(':')[0]
    password = ':'.join(auth_info.split(':')[1:])
    password_bytes = password.encode(sys.getdefaultencoding())
    if hasattr(self, 'custom_authentication'):
        if self.custom_authentication(username, password):
            self.basic_auth_user = username
            return True
        return False
    if username not in store:
        self.server.logger.warning('received invalid username: ' + username)
        return False
    password_data = store[username]
    if password_data['type'] == 'plain':
        if password == password_data['value']:
            self.basic_auth_user = username
            return True
    elif hashlib.new(password_data['type'], password_bytes).digest() == password_data['value']:
        self.basic_auth_user = username
        return True
    self.server.logger.warning('received invalid password from user: ' + username)
except Exception:
    pass
return False
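As the lookup logic above implies, the basic_auth store maps each username to a dict with a 'type' and a 'value' entry, where hashed entries hold the raw digest bytes; a sketch of what such a store could look like (the usernames and passwords are illustrative):

import hashlib

basic_auth = {
    'alice': {'type': 'plain', 'value': 'correct horse battery staple'},
    'bob': {'type': 'sha1', 'value': hashlib.sha1(b'hunter2').digest()},
}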
<SYSTEM_TASK:> Check for a cookie value by name. <END_TASK> <USER_TASK:> Description: def cookie_get(self, name): """ Check for a cookie value by name. :param str name: Name of the cookie value to retrieve. :return: Returns the cookie value if it's set or None if it's not found. """
if not hasattr(self, 'cookies'):
    return None
if self.cookies.get(name):
    return self.cookies.get(name).value
return None