Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
with_metaclass
(meta, *bases)
Create a base class with a metaclass.
Create a base class with a metaclass.
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Returns a temporary class that, when subclassed, instantiates the real
    metaclass ``meta`` with the requested ``bases`` — the standard trick for
    writing metaclass declarations that work on both Python 2 and 3.
    """
    # A throwaway metaclass is used for exactly one level of class creation:
    # its __new__ discards itself and builds the final class with the real
    # metaclass and the originally requested bases.
    class _ProxyMeta(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(_ProxyMeta, 'temporary_class', (), {})
[ "def", "with_metaclass", "(", "meta", ",", "*", "bases", ")", ":", "# This requires a bit of explanation: the basic idea is to make a dummy", "# metaclass for one level of class instantiation that replaces itself with", "# the actual metaclass.", "class", "metaclass", "(", "meta", ")", ":", "def", "__new__", "(", "cls", ",", "name", ",", "this_bases", ",", "d", ")", ":", "return", "meta", "(", "name", ",", "bases", ",", "d", ")", "return", "type", ".", "__new__", "(", "metaclass", ",", "'temporary_class'", ",", "(", ")", ",", "{", "}", ")" ]
[ 19, 0 ]
[ 29, 61 ]
python
en
['en', 'error', 'th']
False
MavWebRTCSignalServer.recv_msg_ping
(self, ws, raddr)
Wait for a message forever, and send a regular ping to prevent bad routers from closing the connection.
Wait for a message forever, and send a regular ping to prevent bad routers from closing the connection.
async def recv_msg_ping(self, ws, raddr):
    '''
    Wait for a message forever, and send a regular ping to prevent bad routers
    from closing the connection.
    '''
    # Keep polling until recv() yields a real message; every
    # keepalive_timeout seconds without one, ping the peer to keep
    # intermediate routers from dropping the idle connection.
    result = None
    while result is None:
        pending = asyncio.wait_for(ws.recv(), self.keepalive_timeout)
        try:
            result = await pending
        except asyncio.TimeoutError:
            # self.logger.debug('Sending keepalive ping to {!r} in recv'.format(raddr))
            await ws.ping()
    return result
[ "async", "def", "recv_msg_ping", "(", "self", ",", "ws", ",", "raddr", ")", ":", "msg", "=", "None", "while", "msg", "is", "None", ":", "try", ":", "msg", "=", "await", "asyncio", ".", "wait_for", "(", "ws", ".", "recv", "(", ")", ",", "self", ".", "keepalive_timeout", ")", "except", "asyncio", ".", "TimeoutError", ":", "# self.logger.debug('Sending keepalive ping to {!r} in recv'.format(raddr))", "await", "ws", ".", "ping", "(", ")", "return", "msg" ]
[ 139, 4 ]
[ 151, 18 ]
python
en
['en', 'error', 'th']
False
MavWebRTCSignalServer.disconnect
(self, ws, peer_id)
Remove @peer_id from the list of sessions and close our connection to it. This informs the peer that the session and all calls have ended, and it must reconnect.
Remove
async def disconnect(self, ws, peer_id):
    '''
    Remove @peer_id from the list of sessions and close our connection to it.
    This informs the peer that the session and all calls have ended, and it
    must reconnect.
    '''
    # Forget the session if one exists; pop with a default avoids a
    # separate membership check.
    self.sessions.pop(peer_id, None)
    # Close connection
    if ws and ws.open:
        # Don't care about errors
        asyncio.ensure_future(ws.close(reason='hangup'))
[ "async", "def", "disconnect", "(", "self", ",", "ws", ",", "peer_id", ")", ":", "if", "peer_id", "in", "self", ".", "sessions", ":", "del", "self", ".", "sessions", "[", "peer_id", "]", "# Close connection", "if", "ws", "and", "ws", ".", "open", ":", "# Don't care about errors", "asyncio", ".", "ensure_future", "(", "ws", ".", "close", "(", "reason", "=", "'hangup'", ")", ")" ]
[ 153, 4 ]
[ 164, 60 ]
python
en
['en', 'error', 'th']
False
MavWebRTCSignalServer.hello_peer
(self, ws)
Exchange hello, register peer
Exchange hello, register peer
async def hello_peer(self, ws):
    '''
    Exchange hello, register peer
    '''
    addr = ws.remote_address
    first_msg = await ws.recv()
    # Expected wire format: "HELLO <uid>".
    verb, uid = first_msg.split(maxsplit=1)
    if verb != 'HELLO':
        await ws.close(code=1002, reason='invalid protocol')
        raise Exception("Invalid hello from {!r}".format(addr))
    # uid must be non-empty, not already registered, and whitespace-free.
    uid_ok = bool(uid) and uid not in self.peers and uid.split() == [uid]
    if not uid_ok:
        await ws.close(code=1002, reason='invalid peer uid')
        raise Exception("Invalid uid {!r} from {!r}".format(uid, addr))
    # Send back a HELLO
    await ws.send('HELLO')
    return uid
[ "async", "def", "hello_peer", "(", "self", ",", "ws", ")", ":", "raddr", "=", "ws", ".", "remote_address", "hello", "=", "await", "ws", ".", "recv", "(", ")", "hello", ",", "uid", "=", "hello", ".", "split", "(", "maxsplit", "=", "1", ")", "if", "hello", "!=", "'HELLO'", ":", "await", "ws", ".", "close", "(", "code", "=", "1002", ",", "reason", "=", "'invalid protocol'", ")", "raise", "Exception", "(", "\"Invalid hello from {!r}\"", ".", "format", "(", "raddr", ")", ")", "if", "not", "uid", "or", "uid", "in", "self", ".", "peers", "or", "uid", ".", "split", "(", ")", "!=", "[", "uid", "]", ":", "# no whitespace", "await", "ws", ".", "close", "(", "code", "=", "1002", ",", "reason", "=", "'invalid peer uid'", ")", "raise", "Exception", "(", "\"Invalid uid {!r} from {!r}\"", ".", "format", "(", "uid", ",", "raddr", ")", ")", "# Send back a HELLO", "await", "ws", ".", "send", "(", "'HELLO'", ")", "return", "uid" ]
[ 301, 4 ]
[ 316, 18 ]
python
en
['en', 'error', 'th']
False
MavWebRTCSignalServer.handler
(self, ws, path)
All incoming messages are handled here. @path is unused.
All incoming messages are handled here.
async def handler(self, ws, path):
    '''
    All incoming messages are handled here. @path is unused.
    '''
    addr = ws.remote_address
    self.logger.info("Connected to {!r}".format(addr))
    peer_id = await self.hello_peer(ws)
    try:
        await self.connection_handler(ws, peer_id)
    except websockets.ConnectionClosed:
        self.logger.info("Connection to peer {!r} closed, exiting handler".format(addr))
    finally:
        # Always deregister the peer, however the connection ended.
        await self.remove_peer(peer_id)
[ "async", "def", "handler", "(", "self", ",", "ws", ",", "path", ")", ":", "raddr", "=", "ws", ".", "remote_address", "self", ".", "logger", ".", "info", "(", "\"Connected to {!r}\"", ".", "format", "(", "raddr", ")", ")", "peer_id", "=", "await", "self", ".", "hello_peer", "(", "ws", ")", "try", ":", "await", "self", ".", "connection_handler", "(", "ws", ",", "peer_id", ")", "except", "websockets", ".", "ConnectionClosed", ":", "self", ".", "logger", ".", "info", "(", "\"Connection to peer {!r} closed, exiting handler\"", ".", "format", "(", "raddr", ")", ")", "finally", ":", "await", "self", ".", "remove_peer", "(", "peer_id", ")" ]
[ 318, 4 ]
[ 330, 43 ]
python
en
['en', 'error', 'th']
False
Contacts.populate_database
(self)
Creates a sample database to work with
Creates a sample database to work with
def populate_database(self):
    """Create a sample database to work with.

    Drops any existing table named ``self.table`` and recreates it with
    seven TEXT columns (Name, Zip, City, Street, Phone, Mobile, Email),
    then inserts four hard-coded sample contacts. The connection is used
    as a context manager so the whole operation commits atomically (or
    rolls back on error).
    """
    addresses = (
        ("Max Demian", "60329", "Frankfurt", "Gutleutstraße 214",
         "069 741 382", "0176 241 332", "[email protected]"),
        ("Mathias Hoover", "54340", "Schleich", "Güntzelstr. 12",
         "06507 58 02 52", "317 461 1232", "[email protected]"),
        ("Dirk Vogt", "92303", "Neumarkt", "Luebecker Tordamm 1",
         "09181 75 77 80", "7024380985", "[email protected]"),
        ("Barbara Baier", "15230", "Frankfurt (Oder)", "Kirchstr. 36",
         "05704 27 12 54", "1-323-472-0089", "[email protected]")
    )
    with self.db:
        cur = self.db.cursor()
        # NOTE: table names cannot be bound parameters in SQLite, so
        # .format() is unavoidable here; self.table is an internal,
        # trusted value, not user input. Row values below ARE bound.
        cur.execute("DROP TABLE IF EXISTS {}".format(self.table))
        # Implicit string concatenation instead of backslash line
        # continuation, which previously embedded runs of indentation
        # whitespace inside the SQL text.
        cur.execute(
            "CREATE TABLE {}(Name TEXT, Zip TEXT, City TEXT, "
            "Street TEXT, Phone TEXT, Mobile TEXT, "
            "Email TEXT)".format(self.table))
        cur.executemany(
            "INSERT INTO {} VALUES(?, ?, ?, ?, ?, ?, ?)".format(self.table),
            addresses)
[ "def", "populate_database", "(", "self", ")", ":", "addresses", "=", "(", "(", "\"Max Demian\"", ",", "\"60329\"", ",", "\"Frankfurt\"", ",", "\"Gutleutstraße 214\",", "", "\"069 741 382\"", ",", "\"0176 241 332\"", ",", "\"[email protected]\"", ")", ",", "(", "\"Mathias Hoover\"", ",", "\"54340\"", ",", "\"Schleich\"", ",", "\"Güntzelstr. 12\",", "", "\"06507 58 02 52\"", ",", "\"317 461 1232\"", ",", "\"[email protected]\"", ")", ",", "(", "\"Dirk Vogt\"", ",", "\"92303\"", ",", "\"Neumarkt\"", ",", "\"Luebecker Tordamm 1\"", ",", "\"09181 75 77 80\"", ",", "\"7024380985\"", ",", "\"[email protected]\"", ")", ",", "(", "\"Barbara Baier\"", ",", "\"15230\"", ",", "\"Frankfurt (Oder)\"", ",", "\"Kirchstr. 36\"", ",", "\"05704 27 12 54\"", ",", "\"1-323-472-0089\"", ",", "\"[email protected]\"", ")", ")", "with", "self", ".", "db", ":", "cur", "=", "self", ".", "db", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "\"DROP TABLE IF EXISTS {}\"", ".", "format", "(", "self", ".", "table", ")", ")", "cur", ".", "execute", "(", "\"CREATE TABLE {}(Name TEXT, Zip Text, City Text,\\\n Street Text, Phone Text, Mobile Text,\\\n Email TEXT)\"", ".", "format", "(", "self", ".", "table", ")", ")", "cur", ".", "executemany", "(", "\"INSERT INTO {} VALUES(?, ?, ?, ?, ?, ?, ?\\\n )\"", ".", "format", "(", "self", ".", "table", ")", ",", "addresses", ")" ]
[ 18, 4 ]
[ 38, 66 ]
python
en
['en', 'en', 'en']
True
Contacts.insert
(self, name="", zipcode="", city="", street="", phone="", mobile="", email="", table=None)
Insert a new entry into the database in the selected table
Insert a new entry into the database in the selected table
def insert(self, name="", zipcode="", city="", street="", phone="",
           mobile="", email="", table=None):
    """Insert a new entry into the database in the selected table.

    All fields default to empty strings; ``table`` defaults to the
    currently selected ``self.table``. The connection is used as a
    context manager so the insert is committed on success.
    """
    if table is None:
        table = self.table
    with self.db:
        cur = self.db.cursor()
        # Values are bound (?) to avoid SQL injection through contact
        # fields; the table name itself cannot be a bound parameter.
        # The previous version's backslash continuation embedded stray
        # indentation whitespace inside the SQL string.
        cur.execute(
            "INSERT INTO {} VALUES(?, ?, ?, ?, ?, ?, ?)".format(table),
            (name, zipcode, city, street, phone, mobile, email))
[ "def", "insert", "(", "self", ",", "name", "=", "\"\"", ",", "zipcode", "=", "\"\"", ",", "city", "=", "\"\"", ",", "street", "=", "\"\"", ",", "phone", "=", "\"\"", ",", "mobile", "=", "\"\"", ",", "email", "=", "\"\"", ",", "table", "=", "None", ")", ":", "if", "table", "is", "None", ":", "table", "=", "self", ".", "table", "with", "self", ".", "db", ":", "cur", "=", "self", ".", "db", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "\"INSERT INTO {} VALUES(?, ?, ?, ?, ?, ?, ?)\\\n \"", ".", "format", "(", "table", ")", ",", "(", "name", ",", "zipcode", ",", "city", ",", "street", ",", "phone", ",", "mobile", ",", "email", ")", ")" ]
[ 40, 4 ]
[ 49, 69 ]
python
en
['en', 'en', 'en']
True
Contacts.show_all
(self, table=None)
Print out all contacts of the currently selected table
Print out all contacts of the currently selected table
def show_all(self, table=None):
    """Print out all contacts of the currently selected table.

    :param table: optional table name; defaults to ``self.table``.

    Prints two lines per contact ("Name, Zip City, Street" then
    "Email, Phone, mobile: Mobile").
    NOTE: uses Python 2 print statements and ``unicode`` — this module
    is Python 2 only.
    """
    if table is None:
        table = self.table
    # FIXME: SQLite wants to use ASCII here so we abide, for now.
    with self.db:
        cur = self.db.cursor()
        # Temporarily return rows as byte strings so format/print do not
        # choke on non-ASCII contact data under Python 2.
        self.db.text_factory = str
        cur.execute("SELECT * FROM {}".format(table))
        for i in cur.fetchall():
            # Column order: 0=Name, 1=Zip, 2=City, 3=Street, 4=Phone,
            # 5=Mobile, 6=Email (matches populate_database's schema).
            print "{}, {} {}, {}".format(i[0], i[1], i[2], i[3])
            print "{}, {}, mobile: {}\n".format(i[6], i[4], i[5])
        # Restore the lenient UTF-8 text factory afterwards.
        self.db.text_factory = lambda x: unicode(x, 'utf-8', 'ignore')
[ "def", "show_all", "(", "self", ",", "table", "=", "None", ")", ":", "if", "table", "is", "None", ":", "table", "=", "self", ".", "table", "# FIXME: SQLite wants to use ASCII here so we abide, for now.", "with", "self", ".", "db", ":", "cur", "=", "self", ".", "db", ".", "cursor", "(", ")", "self", ".", "db", ".", "text_factory", "=", "str", "cur", ".", "execute", "(", "\"SELECT * FROM {}\"", ".", "format", "(", "table", ")", ")", "for", "i", "in", "cur", ".", "fetchall", "(", ")", ":", "print", "\"{}, {} {}, {}\"", ".", "format", "(", "i", "[", "0", "]", ",", "i", "[", "1", "]", ",", "i", "[", "2", "]", ",", "i", "[", "3", "]", ")", "print", "\"{}, {}, mobile: {}\\n\"", ".", "format", "(", "i", "[", "6", "]", ",", "i", "[", "4", "]", ",", "i", "[", "5", "]", ")", "self", ".", "db", ".", "text_factory", "=", "lambda", "x", ":", "unicode", "(", "x", ",", "'utf-8'", ",", "'ignore'", ")" ]
[ 51, 4 ]
[ 63, 74 ]
python
en
['en', 'en', 'en']
True
BaseEngine.__init__
(self, params)
Initialize the template engine. `params` is a dict of configuration settings.
Initialize the template engine.
def __init__(self, params):
    """
    Initialize the template engine.

    `params` is a dict of configuration settings.
    """
    # Copy so we never mutate the caller's dict while popping keys.
    config = params.copy()
    self.name = config.pop('NAME')
    self.dirs = list(config.pop('DIRS'))
    self.app_dirs = config.pop('APP_DIRS')
    # Anything left over is a configuration key we don't recognise.
    if config:
        raise ImproperlyConfigured(
            "Unknown parameters: {}".format(", ".join(config)))
[ "def", "__init__", "(", "self", ",", "params", ")", ":", "params", "=", "params", ".", "copy", "(", ")", "self", ".", "name", "=", "params", ".", "pop", "(", "'NAME'", ")", "self", ".", "dirs", "=", "list", "(", "params", ".", "pop", "(", "'DIRS'", ")", ")", "self", ".", "app_dirs", "=", "params", ".", "pop", "(", "'APP_DIRS'", ")", "if", "params", ":", "raise", "ImproperlyConfigured", "(", "\"Unknown parameters: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "params", ")", ")", ")" ]
[ 13, 4 ]
[ 25, 67 ]
python
en
['en', 'error', 'th']
False
BaseEngine.from_string
(self, template_code)
Create and return a template for the given source code. This method is optional.
Create and return a template for the given source code.
def from_string(self, template_code):
    """
    Create and return a template for the given source code.

    This method is optional.
    """
    # Engines that can build templates from raw strings override this;
    # the base class only reports the missing capability.
    message = (
        "subclasses of BaseEngine should provide "
        "a from_string() method"
    )
    raise NotImplementedError(message)
[ "def", "from_string", "(", "self", ",", "template_code", ")", ":", "raise", "NotImplementedError", "(", "\"subclasses of BaseEngine should provide \"", "\"a from_string() method\"", ")" ]
[ 33, 4 ]
[ 41, 37 ]
python
en
['en', 'error', 'th']
False
BaseEngine.get_template
(self, template_name)
Load and return a template for the given name. Raise TemplateDoesNotExist if no such template exists.
Load and return a template for the given name.
def get_template(self, template_name):
    """
    Load and return a template for the given name.

    Raise TemplateDoesNotExist if no such template exists.
    """
    # Mandatory for every concrete engine, hence "must" in the message.
    message = (
        "subclasses of BaseEngine must provide "
        "a get_template() method"
    )
    raise NotImplementedError(message)
[ "def", "get_template", "(", "self", ",", "template_name", ")", ":", "raise", "NotImplementedError", "(", "\"subclasses of BaseEngine must provide \"", "\"a get_template() method\"", ")" ]
[ 43, 4 ]
[ 51, 38 ]
python
en
['en', 'error', 'th']
False
BaseEngine.template_dirs
(self)
Return a list of directories to search for templates.
Return a list of directories to search for templates.
def template_dirs(self):
    """
    Return a list of directories to search for templates.
    """
    # Immutable return value because it's cached and shared by callers.
    configured = tuple(self.dirs)
    if not self.app_dirs:
        return configured
    # Append per-app template directories when APP_DIRS is enabled.
    return configured + get_app_template_dirs(self.app_dirname)
[ "def", "template_dirs", "(", "self", ")", ":", "# Immutable return value because it's cached and shared by callers.", "template_dirs", "=", "tuple", "(", "self", ".", "dirs", ")", "if", "self", ".", "app_dirs", ":", "template_dirs", "+=", "get_app_template_dirs", "(", "self", ".", "app_dirname", ")", "return", "template_dirs" ]
[ 57, 4 ]
[ 65, 28 ]
python
en
['en', 'error', 'th']
False
BaseEngine.iter_template_filenames
(self, template_name)
Iterate over candidate files for template_name. Ignore files that don't lie inside configured template dirs to avoid directory traversal attacks.
Iterate over candidate files for template_name.
def iter_template_filenames(self, template_name):
    """
    Iterate over candidate files for template_name.

    Ignore files that don't lie inside configured template dirs to avoid
    directory traversal attacks.
    """
    for directory in self.template_dirs:
        try:
            yield safe_join(directory, template_name)
        except SuspiciousFileOperation:
            # The joined path was located outside of this template_dir
            # (it might be inside another one, so this isn't fatal).
            continue
[ "def", "iter_template_filenames", "(", "self", ",", "template_name", ")", ":", "for", "template_dir", "in", "self", ".", "template_dirs", ":", "try", ":", "yield", "safe_join", "(", "template_dir", ",", "template_name", ")", "except", "SuspiciousFileOperation", ":", "# The joined path was located outside of this template_dir", "# (it might be inside another one, so this isn't fatal).", "pass" ]
[ 67, 4 ]
[ 80, 20 ]
python
en
['en', 'error', 'th']
False
_constant_time_compare
(val1, val2)
Return ``True`` if the two strings are equal, ``False`` otherwise. The time taken is independent of the number of characters that match. Do not use this function for anything else than comparision with known length targets. This is should be implemented in C in order to get it completely right. This is an alias of :func:`hmac.compare_digest` on Python>=2.7,3.3.
Return ``True`` if the two strings are equal, ``False`` otherwise.
def _constant_time_compare(val1, val2):
    """Return ``True`` if the two strings are equal, ``False`` otherwise.

    The time taken is independent of the number of characters that match.
    Do not use this function for anything other than comparison with
    known-length targets.

    This should be implemented in C in order to get it completely right.

    This is an alias of :func:`hmac.compare_digest` on Python>=2.7,3.3.
    """
    # When lengths differ, iterate over val2 anyway so the duration
    # depends only on val2's length, not on where a mismatch occurs;
    # result starts at 1 so the comparison is already doomed to fail.
    len_eq = len(val1) == len(val2)
    if len_eq:
        result = 0
        left = val1
    else:
        result = 1
        left = val2
    # XOR accumulates any differing bit; |= keeps the loop branch-free
    # so every byte is inspected regardless of earlier mismatches.
    for x, y in izip(bytearray(left), bytearray(val2)):
        result |= x ^ y
    return result == 0
[ "def", "_constant_time_compare", "(", "val1", ",", "val2", ")", ":", "len_eq", "=", "len", "(", "val1", ")", "==", "len", "(", "val2", ")", "if", "len_eq", ":", "result", "=", "0", "left", "=", "val1", "else", ":", "result", "=", "1", "left", "=", "val2", "for", "x", ",", "y", "in", "izip", "(", "bytearray", "(", "left", ")", ",", "bytearray", "(", "val2", ")", ")", ":", "result", "|=", "x", "^", "y", "return", "result", "==", "0" ]
[ 18, 0 ]
[ 40, 22 ]
python
en
['en', 'en', 'en']
True
process
()
Process messages with information about S3 objects
Process messages with information about S3 objects
def process():
    """Process messages with information about S3 objects"""
    payload = request.get_json().get('inputMessage', '')
    # add other processing as needed
    # for example, add event to PubSub topic or
    # download object using presigned URL, save in Cloud Storage, invoke ML APIs
    return jsonify({'In app code for endpoint, received message': payload})
[ "def", "process", "(", ")", ":", "message", "=", "request", ".", "get_json", "(", ")", ".", "get", "(", "'inputMessage'", ",", "''", ")", "# add other processing as needed", "# for example, add event to PubSub topic or ", "# download object using presigned URL, save in Cloud Storage, invoke ML APIs", "return", "jsonify", "(", "{", "'In app code for endpoint, received message'", ":", "message", "}", ")" ]
[ 39, 0 ]
[ 45, 75 ]
python
en
['en', 'en', 'en']
True
unexpected_error
(e)
Handle exceptions by returning swagger-compliant json.
Handle exceptions by returning swagger-compliant json.
def unexpected_error(e):
    """Handle exceptions by returning swagger-compliant json.

    Logs the full traceback of the active exception and returns a JSON
    response carrying the 500 status code and the exception text.
    """
    # Fixed typo in the log message: "occured" -> "occurred".
    logging.exception('An error occurred while processing the request.')
    response = jsonify({
        'code': http_client.INTERNAL_SERVER_ERROR,
        'message': 'Exception: {}'.format(e)})
    response.status_code = http_client.INTERNAL_SERVER_ERROR
    return response
[ "def", "unexpected_error", "(", "e", ")", ":", "logging", ".", "exception", "(", "'An error occured while processing the request.'", ")", "response", "=", "jsonify", "(", "{", "'code'", ":", "http_client", ".", "INTERNAL_SERVER_ERROR", ",", "'message'", ":", "'Exception: {}'", ".", "format", "(", "e", ")", "}", ")", "response", ".", "status_code", "=", "http_client", ".", "INTERNAL_SERVER_ERROR", "return", "response" ]
[ 49, 0 ]
[ 56, 19 ]
python
en
['en', 'en', 'en']
True
hello_monkey
()
Respond and greet the caller by name.
Respond and greet the caller by name.
def hello_monkey():
    """Respond and greet the caller by name."""
    # Try adding your own number to this list!
    callers = {
        "+14158675308": "Curious George",
        "+12349013030": "Boots",
        "+12348134522": "Virgil",
    }
    from_number = request.values.get('From', None)
    # Unknown (or missing) numbers fall back to the generic greeting.
    name = callers.get(from_number, "Monkey")
    resp = VoiceResponse()
    resp.say("Hello {}".format(name))
    resp.sms("{}, thanks for the call!".format(name))
    return str(resp)
[ "def", "hello_monkey", "(", ")", ":", "# Try adding your own number to this list!", "callers", "=", "{", "\"+14158675308\"", ":", "\"Curious George\"", ",", "\"+12349013030\"", ":", "\"Boots\"", ",", "\"+12348134522\"", ":", "\"Virgil\"", ",", "}", "from_number", "=", "request", ".", "values", ".", "get", "(", "'From'", ",", "None", ")", "name", "=", "callers", "[", "from_number", "]", "if", "from_number", "in", "callers", "else", "\"Monkey\"", "resp", "=", "VoiceResponse", "(", ")", "resp", ".", "say", "(", "\"Hello {}\"", ".", "format", "(", "name", ")", ")", "resp", ".", "sms", "(", "\"{}, thanks for the call!\"", ".", "format", "(", "name", ")", ")", "return", "str", "(", "resp", ")" ]
[ 8, 0 ]
[ 23, 20 ]
python
en
['en', 'en', 'en']
True
pbkdf2_hex
( data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None )
Like :func:`pbkdf2_bin`, but returns a hex-encoded string. .. versionadded:: 0.9 :param data: the data to derive. :param salt: the salt for the derivation. :param iterations: the number of iterations. :param keylen: the length of the resulting key. If not provided, the digest size will be used. :param hashfunc: the hash function to use. This can either be the string name of a known hash function, or a function from the hashlib module. Defaults to sha256.
Like :func:`pbkdf2_bin`, but returns a hex-encoded string.
def pbkdf2_hex(
    data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None
):
    """Like :func:`pbkdf2_bin`, but returns a hex-encoded string.

    .. versionadded:: 0.9

    :param data: the data to derive.
    :param salt: the salt for the derivation.
    :param iterations: the number of iterations.
    :param keylen: the length of the resulting key.  If not provided,
                   the digest size will be used.
    :param hashfunc: the hash function to use.  This can either be the
                     string name of a known hash function, or a function
                     from the hashlib module.  Defaults to sha256.
    """
    # Derive the raw key bytes, then hex-encode and coerce to the
    # native string type of the running Python version.
    raw = pbkdf2_bin(data, salt, iterations, keylen, hashfunc)
    return to_native(codecs.encode(raw, "hex_codec"))
[ "def", "pbkdf2_hex", "(", "data", ",", "salt", ",", "iterations", "=", "DEFAULT_PBKDF2_ITERATIONS", ",", "keylen", "=", "None", ",", "hashfunc", "=", "None", ")", ":", "rv", "=", "pbkdf2_bin", "(", "data", ",", "salt", ",", "iterations", ",", "keylen", ",", "hashfunc", ")", "return", "to_native", "(", "codecs", ".", "encode", "(", "rv", ",", "\"hex_codec\"", ")", ")" ]
[ 36, 0 ]
[ 53, 52 ]
python
en
['en', 'hmn', 'en']
True
pbkdf2_bin
( data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None )
Returns a binary digest for the PBKDF2 hash algorithm of `data` with the given `salt`. It iterates `iterations` times and produces a key of `keylen` bytes. By default, SHA-256 is used as hash function; a different hashlib `hashfunc` can be provided. .. versionadded:: 0.9 :param data: the data to derive. :param salt: the salt for the derivation. :param iterations: the number of iterations. :param keylen: the length of the resulting key. If not provided the digest size will be used. :param hashfunc: the hash function to use. This can either be the string name of a known hash function or a function from the hashlib module. Defaults to sha256.
Returns a binary digest for the PBKDF2 hash algorithm of `data` with the given `salt`. It iterates `iterations` times and produces a key of `keylen` bytes. By default, SHA-256 is used as hash function; a different hashlib `hashfunc` can be provided.
def pbkdf2_bin(
    data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None
):
    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
    with the given `salt`. It iterates `iterations` times and produces a
    key of `keylen` bytes. By default, SHA-256 is used as hash function;
    a different hashlib `hashfunc` can be provided.

    .. versionadded:: 0.9

    :param data: the data to derive.
    :param salt: the salt for the derivation.
    :param iterations: the number of iterations.
    :param keylen: the length of the resulting key.  If not provided
                   the digest size will be used.
    :param hashfunc: the hash function to use.  This can either be the
                     string name of a known hash function or a function
                     from the hashlib module.  Defaults to sha256.
    """
    if not hashfunc:
        hashfunc = "sha256"
    data = to_bytes(data)
    salt = to_bytes(salt)
    # hashlib.pbkdf2_hmac takes the digest *name*; when given a callable
    # (e.g. hashlib.sha512), instantiate it once and read its .name.
    if callable(hashfunc):
        hash_name = getattr(hashfunc(), "name", None)
    else:
        hash_name = hashfunc
    return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen)
[ "def", "pbkdf2_bin", "(", "data", ",", "salt", ",", "iterations", "=", "DEFAULT_PBKDF2_ITERATIONS", ",", "keylen", "=", "None", ",", "hashfunc", "=", "None", ")", ":", "if", "not", "hashfunc", ":", "hashfunc", "=", "\"sha256\"", "data", "=", "to_bytes", "(", "data", ")", "salt", "=", "to_bytes", "(", "salt", ")", "if", "callable", "(", "hashfunc", ")", ":", "_test_hash", "=", "hashfunc", "(", ")", "hash_name", "=", "getattr", "(", "_test_hash", ",", "\"name\"", ",", "None", ")", "else", ":", "hash_name", "=", "hashfunc", "return", "hashlib", ".", "pbkdf2_hmac", "(", "hash_name", ",", "data", ",", "salt", ",", "iterations", ",", "keylen", ")" ]
[ 56, 0 ]
[ 86, 73 ]
python
en
['en', 'en', 'en']
True
safe_str_cmp
(a, b)
This function compares strings in somewhat constant time. This requires that the length of at least one string is known in advance. Returns `True` if the two strings are equal, or `False` if they are not. .. versionadded:: 0.7
This function compares strings in somewhat constant time. This requires that the length of at least one string is known in advance.
def safe_str_cmp(a, b):
    """This function compares strings in somewhat constant time.  This
    requires that the length of at least one string is known in advance.

    Returns `True` if the two strings are equal, or `False` if they are not.

    .. versionadded:: 0.7
    """
    # Normalise both operands to bytes so the XOR loop below behaves the
    # same for text and binary input.
    if isinstance(a, text_type):
        a = a.encode("utf-8")
    if isinstance(b, text_type):
        b = b.encode("utf-8")
    # Prefer the C implementation when available (hmac.compare_digest on
    # supported Python versions) — see module setup for the alias.
    if _builtin_safe_str_cmp is not None:
        return _builtin_safe_str_cmp(a, b)
    if len(a) != len(b):
        return False
    rv = 0
    # |= accumulates any differing bit so every byte pair is inspected,
    # regardless of where the first mismatch occurs.
    if PY2:
        for x, y in izip(a, b):
            rv |= ord(x) ^ ord(y)
    else:
        for x, y in izip(a, b):
            rv |= x ^ y
    return rv == 0
[ "def", "safe_str_cmp", "(", "a", ",", "b", ")", ":", "if", "isinstance", "(", "a", ",", "text_type", ")", ":", "a", "=", "a", ".", "encode", "(", "\"utf-8\"", ")", "if", "isinstance", "(", "b", ",", "text_type", ")", ":", "b", "=", "b", ".", "encode", "(", "\"utf-8\"", ")", "if", "_builtin_safe_str_cmp", "is", "not", "None", ":", "return", "_builtin_safe_str_cmp", "(", "a", ",", "b", ")", "if", "len", "(", "a", ")", "!=", "len", "(", "b", ")", ":", "return", "False", "rv", "=", "0", "if", "PY2", ":", "for", "x", ",", "y", "in", "izip", "(", "a", ",", "b", ")", ":", "rv", "|=", "ord", "(", "x", ")", "^", "ord", "(", "y", ")", "else", ":", "for", "x", ",", "y", "in", "izip", "(", "a", ",", "b", ")", ":", "rv", "|=", "x", "^", "y", "return", "rv", "==", "0" ]
[ 89, 0 ]
[ 116, 18 ]
python
en
['en', 'en', 'en']
True
gen_salt
(length)
Generate a random string of SALT_CHARS with specified ``length``.
Generate a random string of SALT_CHARS with specified ``length``.
def gen_salt(length):
    """Generate a random string of SALT_CHARS with specified ``length``."""
    if length <= 0:
        raise ValueError("Salt length must be positive")
    # Draw each character independently from the system RNG.
    chars = [_sys_rng.choice(SALT_CHARS) for _ in range_type(length)]
    return "".join(chars)
[ "def", "gen_salt", "(", "length", ")", ":", "if", "length", "<=", "0", ":", "raise", "ValueError", "(", "\"Salt length must be positive\"", ")", "return", "\"\"", ".", "join", "(", "_sys_rng", ".", "choice", "(", "SALT_CHARS", ")", "for", "_", "in", "range_type", "(", "length", ")", ")" ]
[ 119, 0 ]
[ 123, 75 ]
python
en
['en', 'en', 'en']
True
_hash_internal
(method, salt, password)
Internal password hash helper. Supports plaintext without salt, unsalted and salted passwords. In case salted passwords are used hmac is used.
Internal password hash helper. Supports plaintext without salt, unsalted and salted passwords. In case salted passwords are used hmac is used.
def _hash_internal(method, salt, password):
    """Internal password hash helper.  Supports plaintext without salt,
    unsalted and salted passwords.  In case salted passwords are used
    hmac is used.

    Returns a ``(hash, actual_method)`` tuple where ``actual_method``
    records the resolved method (including the PBKDF2 iteration count)
    so it can be embedded in the stored hash string.
    """
    if method == "plain":
        # Plaintext storage: return the password untouched.
        return password, method

    if isinstance(password, text_type):
        password = password.encode("utf-8")

    if method.startswith("pbkdf2:"):
        # Method string looks like "pbkdf2:<digest>[:<iterations>]".
        args = method[7:].split(":")
        if len(args) not in (1, 2):
            raise ValueError("Invalid number of arguments for PBKDF2")
        method = args.pop(0)
        # Missing or zero iteration count falls back to the default.
        iterations = args and int(args[0] or 0) or DEFAULT_PBKDF2_ITERATIONS
        is_pbkdf2 = True
        actual_method = "pbkdf2:%s:%d" % (method, iterations)
    else:
        is_pbkdf2 = False
        actual_method = method

    if is_pbkdf2:
        if not salt:
            raise ValueError("Salt is required for PBKDF2")
        rv = pbkdf2_hex(password, salt, iterations, hashfunc=method)
    elif salt:
        # Salted non-PBKDF2 hash: the salt keys an HMAC over the password.
        if isinstance(salt, text_type):
            salt = salt.encode("utf-8")
        mac = _create_mac(salt, password, method)
        rv = mac.hexdigest()
    else:
        # Unsalted: plain hashlib digest of the password.
        rv = hashlib.new(method, password).hexdigest()
    return rv, actual_method
[ "def", "_hash_internal", "(", "method", ",", "salt", ",", "password", ")", ":", "if", "method", "==", "\"plain\"", ":", "return", "password", ",", "method", "if", "isinstance", "(", "password", ",", "text_type", ")", ":", "password", "=", "password", ".", "encode", "(", "\"utf-8\"", ")", "if", "method", ".", "startswith", "(", "\"pbkdf2:\"", ")", ":", "args", "=", "method", "[", "7", ":", "]", ".", "split", "(", "\":\"", ")", "if", "len", "(", "args", ")", "not", "in", "(", "1", ",", "2", ")", ":", "raise", "ValueError", "(", "\"Invalid number of arguments for PBKDF2\"", ")", "method", "=", "args", ".", "pop", "(", "0", ")", "iterations", "=", "args", "and", "int", "(", "args", "[", "0", "]", "or", "0", ")", "or", "DEFAULT_PBKDF2_ITERATIONS", "is_pbkdf2", "=", "True", "actual_method", "=", "\"pbkdf2:%s:%d\"", "%", "(", "method", ",", "iterations", ")", "else", ":", "is_pbkdf2", "=", "False", "actual_method", "=", "method", "if", "is_pbkdf2", ":", "if", "not", "salt", ":", "raise", "ValueError", "(", "\"Salt is required for PBKDF2\"", ")", "rv", "=", "pbkdf2_hex", "(", "password", ",", "salt", ",", "iterations", ",", "hashfunc", "=", "method", ")", "elif", "salt", ":", "if", "isinstance", "(", "salt", ",", "text_type", ")", ":", "salt", "=", "salt", ".", "encode", "(", "\"utf-8\"", ")", "mac", "=", "_create_mac", "(", "salt", ",", "password", ",", "method", ")", "rv", "=", "mac", ".", "hexdigest", "(", ")", "else", ":", "rv", "=", "hashlib", ".", "new", "(", "method", ",", "password", ")", ".", "hexdigest", "(", ")", "return", "rv", ",", "actual_method" ]
[ 126, 0 ]
[ 160, 28 ]
python
en
['en', 'lb', 'en']
True
generate_password_hash
(password, method="pbkdf2:sha256", salt_length=8)
Hash a password with the given method and salt with a string of the given length. The format of the string returned includes the method that was used so that :func:`check_password_hash` can check the hash. The format for the hashed string looks like this:: method$salt$hash This method can **not** generate unsalted passwords but it is possible to set param method='plain' in order to enforce plaintext passwords. If a salt is used, hmac is used internally to salt the password. If PBKDF2 is wanted it can be enabled by setting the method to ``pbkdf2:method:iterations`` where iterations is optional:: pbkdf2:sha256:80000$salt$hash pbkdf2:sha256$salt$hash :param password: the password to hash. :param method: the hash method to use (one that hashlib supports). Can optionally be in the format ``pbkdf2:<method>[:iterations]`` to enable PBKDF2. :param salt_length: the length of the salt in letters.
Hash a password with the given method and salt with a string of the given length. The format of the string returned includes the method that was used so that :func:`check_password_hash` can check the hash.
def generate_password_hash(password, method="pbkdf2:sha256", salt_length=8): """Hash a password with the given method and salt with a string of the given length. The format of the string returned includes the method that was used so that :func:`check_password_hash` can check the hash. The format for the hashed string looks like this:: method$salt$hash This method can **not** generate unsalted passwords but it is possible to set param method='plain' in order to enforce plaintext passwords. If a salt is used, hmac is used internally to salt the password. If PBKDF2 is wanted it can be enabled by setting the method to ``pbkdf2:method:iterations`` where iterations is optional:: pbkdf2:sha256:80000$salt$hash pbkdf2:sha256$salt$hash :param password: the password to hash. :param method: the hash method to use (one that hashlib supports). Can optionally be in the format ``pbkdf2:<method>[:iterations]`` to enable PBKDF2. :param salt_length: the length of the salt in letters. """ salt = gen_salt(salt_length) if method != "plain" else "" h, actual_method = _hash_internal(method, salt, password) return "%s$%s$%s" % (actual_method, salt, h)
[ "def", "generate_password_hash", "(", "password", ",", "method", "=", "\"pbkdf2:sha256\"", ",", "salt_length", "=", "8", ")", ":", "salt", "=", "gen_salt", "(", "salt_length", ")", "if", "method", "!=", "\"plain\"", "else", "\"\"", "h", ",", "actual_method", "=", "_hash_internal", "(", "method", ",", "salt", ",", "password", ")", "return", "\"%s$%s$%s\"", "%", "(", "actual_method", ",", "salt", ",", "h", ")" ]
[ 176, 0 ]
[ 203, 48 ]
python
en
['en', 'en', 'en']
True
check_password_hash
(pwhash, password)
check a password against a given salted and hashed password value. In order to support unsalted legacy passwords this method supports plain text passwords, md5 and sha1 hashes (both salted and unsalted). Returns `True` if the password matched, `False` otherwise. :param pwhash: a hashed string like returned by :func:`generate_password_hash`. :param password: the plaintext password to compare against the hash.
check a password against a given salted and hashed password value. In order to support unsalted legacy passwords this method supports plain text passwords, md5 and sha1 hashes (both salted and unsalted).
def check_password_hash(pwhash, password): """check a password against a given salted and hashed password value. In order to support unsalted legacy passwords this method supports plain text passwords, md5 and sha1 hashes (both salted and unsalted). Returns `True` if the password matched, `False` otherwise. :param pwhash: a hashed string like returned by :func:`generate_password_hash`. :param password: the plaintext password to compare against the hash. """ if pwhash.count("$") < 2: return False method, salt, hashval = pwhash.split("$", 2) return safe_str_cmp(_hash_internal(method, salt, password)[0], hashval)
[ "def", "check_password_hash", "(", "pwhash", ",", "password", ")", ":", "if", "pwhash", ".", "count", "(", "\"$\"", ")", "<", "2", ":", "return", "False", "method", ",", "salt", ",", "hashval", "=", "pwhash", ".", "split", "(", "\"$\"", ",", "2", ")", "return", "safe_str_cmp", "(", "_hash_internal", "(", "method", ",", "salt", ",", "password", ")", "[", "0", "]", ",", "hashval", ")" ]
[ 206, 0 ]
[ 220, 75 ]
python
en
['en', 'en', 'en']
True
safe_join
(directory, *pathnames)
Safely join zero or more untrusted path components to a base directory to avoid escaping the base directory. :param directory: The trusted base directory. :param pathnames: The untrusted path components relative to the base directory. :return: A safe path, otherwise ``None``.
Safely join zero or more untrusted path components to a base directory to avoid escaping the base directory.
def safe_join(directory, *pathnames): """Safely join zero or more untrusted path components to a base directory to avoid escaping the base directory. :param directory: The trusted base directory. :param pathnames: The untrusted path components relative to the base directory. :return: A safe path, otherwise ``None``. """ parts = [directory] for filename in pathnames: if filename != "": filename = posixpath.normpath(filename) if ( any(sep in filename for sep in _os_alt_seps) or os.path.isabs(filename) or filename == ".." or filename.startswith("../") ): return None parts.append(filename) return posixpath.join(*parts)
[ "def", "safe_join", "(", "directory", ",", "*", "pathnames", ")", ":", "parts", "=", "[", "directory", "]", "for", "filename", "in", "pathnames", ":", "if", "filename", "!=", "\"\"", ":", "filename", "=", "posixpath", ".", "normpath", "(", "filename", ")", "if", "(", "any", "(", "sep", "in", "filename", "for", "sep", "in", "_os_alt_seps", ")", "or", "os", ".", "path", ".", "isabs", "(", "filename", ")", "or", "filename", "==", "\"..\"", "or", "filename", ".", "startswith", "(", "\"../\"", ")", ")", ":", "return", "None", "parts", ".", "append", "(", "filename", ")", "return", "posixpath", ".", "join", "(", "*", "parts", ")" ]
[ 223, 0 ]
[ 248, 33 ]
python
en
['en', 'en', 'en']
True
StatisticalAnalysis.__init__
(self, sample1, sample2, method='ttest')
Class for statical analyse. This class compares 2 list of sample and generate a statical analyse :param sample1: List of results of a recommender 1 in K folds (list with len K) :type sample1: list :param sample2: List of results of a recommender 2 in K folds (list with len K) :type sample2: list :param method: :type method: str, default 'ttest'
Class for statical analyse. This class compares 2 list of sample and generate a statical analyse
def __init__(self, sample1, sample2, method='ttest'): """ Class for statical analyse. This class compares 2 list of sample and generate a statical analyse :param sample1: List of results of a recommender 1 in K folds (list with len K) :type sample1: list :param sample2: List of results of a recommender 2 in K folds (list with len K) :type sample2: list :param method: :type method: str, default 'ttest' """ self.sample1 = np.array(sample1) self.sample2 = np.array(sample2) self.method = method
[ "def", "__init__", "(", "self", ",", "sample1", ",", "sample2", ",", "method", "=", "'ttest'", ")", ":", "self", ".", "sample1", "=", "np", ".", "array", "(", "sample1", ")", "self", ".", "sample2", "=", "np", ".", "array", "(", "sample2", ")", "self", ".", "method", "=", "method" ]
[ 18, 4 ]
[ 34, 28 ]
python
en
['en', 'error', 'th']
False
StatisticalAnalysis.general_analysis
(self)
Analyzing the difference Instead you might compute the difference and apply some common measure like the sum of absolute differences (SAD), the sum of squared differences (SSD) or the correlation coefficient:
Analyzing the difference
def general_analysis(self): """ Analyzing the difference Instead you might compute the difference and apply some common measure like the sum of absolute differences (SAD), the sum of squared differences (SSD) or the correlation coefficient: """ print("=== Information About Samples ===") print("Standard Deviation Sample1: " + str(np.std(self.sample1))) print("Standard Deviation Sample2: " + str(np.std(self.sample2)) + "\n") print("=== Analyzing the Difference Between Samples ===") print("SAD:" + str(np.sum(np.abs(self.sample1 - self.sample2)))) print("SSD:" + str(np.sum(np.square(self.sample1 - self.sample2)))) print("Correlation:" + str(np.corrcoef(np.array((self.sample1, self.sample2)))[0, 1]) + "\n")
[ "def", "general_analysis", "(", "self", ")", ":", "print", "(", "\"=== Information About Samples ===\"", ")", "print", "(", "\"Standard Deviation Sample1: \"", "+", "str", "(", "np", ".", "std", "(", "self", ".", "sample1", ")", ")", ")", "print", "(", "\"Standard Deviation Sample2: \"", "+", "str", "(", "np", ".", "std", "(", "self", ".", "sample2", ")", ")", "+", "\"\\n\"", ")", "print", "(", "\"=== Analyzing the Difference Between Samples ===\"", ")", "print", "(", "\"SAD:\"", "+", "str", "(", "np", ".", "sum", "(", "np", ".", "abs", "(", "self", ".", "sample1", "-", "self", ".", "sample2", ")", ")", ")", ")", "print", "(", "\"SSD:\"", "+", "str", "(", "np", ".", "sum", "(", "np", ".", "square", "(", "self", ".", "sample1", "-", "self", ".", "sample2", ")", ")", ")", ")", "print", "(", "\"Correlation:\"", "+", "str", "(", "np", ".", "corrcoef", "(", "np", ".", "array", "(", "(", "self", ".", "sample1", ",", "self", ".", "sample2", ")", ")", ")", "[", "0", ",", "1", "]", ")", "+", "\"\\n\"", ")" ]
[ 36, 4 ]
[ 50, 101 ]
python
en
['en', 'error', 'th']
False
StatisticalAnalysis.ttest
(self)
T-student Calculates the T-test for the means of TWO INDEPENDENT samples of scores. This is a two-sided test for the null hypothesis that 2 independent samples have identical average (expected) values This test assumes that the populations have identical variances.
T-student Calculates the T-test for the means of TWO INDEPENDENT samples of scores. This is a two-sided test for the null hypothesis that 2 independent samples have identical average (expected) values This test assumes that the populations have identical variances.
def ttest(self): """ T-student Calculates the T-test for the means of TWO INDEPENDENT samples of scores. This is a two-sided test for the null hypothesis that 2 independent samples have identical average (expected) values This test assumes that the populations have identical variances. """ t, p = ttest_ind(self.sample1, self.sample2) print("=== T- Student Analysis ===") print("The calculated t-statistic: " + str(t)) print("The two-tailed p-value: " + str(p) + "\n")
[ "def", "ttest", "(", "self", ")", ":", "t", ",", "p", "=", "ttest_ind", "(", "self", ".", "sample1", ",", "self", ".", "sample2", ")", "print", "(", "\"=== T- Student Analysis ===\"", ")", "print", "(", "\"The calculated t-statistic: \"", "+", "str", "(", "t", ")", ")", "print", "(", "\"The two-tailed p-value: \"", "+", "str", "(", "p", ")", "+", "\"\\n\"", ")" ]
[ 52, 4 ]
[ 67, 57 ]
python
en
['en', 'error', 'th']
False
StatisticalAnalysis.wilcoxon
(self)
Wilcoxon The Wilcoxon signed-rank test tests the null hypothesis that two related paired samples come from the same distribution. In particular, it tests whether the distribution of the differences x - y is symmetric about zero. It is a non-parametric version of the paired T-test.
Wilcoxon The Wilcoxon signed-rank test tests the null hypothesis that two related paired samples come from the same distribution. In particular, it tests whether the distribution of the differences x - y is symmetric about zero. It is a non-parametric version of the paired T-test.
def wilcoxon(self): """ Wilcoxon The Wilcoxon signed-rank test tests the null hypothesis that two related paired samples come from the same distribution. In particular, it tests whether the distribution of the differences x - y is symmetric about zero. It is a non-parametric version of the paired T-test. """ t, p = ranksums(self.sample1, self.sample2) print("=== Wilcoxon Analysis ===") print("The calculated t-statistic: " + str(t)) print("The two-tailed p-value: " + str(p) + "\n")
[ "def", "wilcoxon", "(", "self", ")", ":", "t", ",", "p", "=", "ranksums", "(", "self", ".", "sample1", ",", "self", ".", "sample2", ")", "print", "(", "\"=== Wilcoxon Analysis ===\"", ")", "print", "(", "\"The calculated t-statistic: \"", "+", "str", "(", "t", ")", ")", "print", "(", "\"The two-tailed p-value: \"", "+", "str", "(", "p", ")", "+", "\"\\n\"", ")" ]
[ 69, 4 ]
[ 81, 57 ]
python
en
['en', 'error', 'th']
False
Engine.get_default
()
Return the first DjangoTemplates backend that's configured, or raise ImproperlyConfigured if none are configured. This is required for preserving historical APIs that rely on a globally available, implicitly configured engine such as: >>> from django.template import Context, Template >>> template = Template("Hello {{ name }}!") >>> context = Context({'name': "world"}) >>> template.render(context) 'Hello world!'
Return the first DjangoTemplates backend that's configured, or raise ImproperlyConfigured if none are configured.
def get_default(): """ Return the first DjangoTemplates backend that's configured, or raise ImproperlyConfigured if none are configured. This is required for preserving historical APIs that rely on a globally available, implicitly configured engine such as: >>> from django.template import Context, Template >>> template = Template("Hello {{ name }}!") >>> context = Context({'name': "world"}) >>> template.render(context) 'Hello world!' """ # Since Engine is imported in django.template and since # DjangoTemplates is a wrapper around this Engine class, # local imports are required to avoid import loops. from django.template import engines from django.template.backends.django import DjangoTemplates for engine in engines.all(): if isinstance(engine, DjangoTemplates): return engine.engine raise ImproperlyConfigured('No DjangoTemplates backend is configured.')
[ "def", "get_default", "(", ")", ":", "# Since Engine is imported in django.template and since", "# DjangoTemplates is a wrapper around this Engine class,", "# local imports are required to avoid import loops.", "from", "django", ".", "template", "import", "engines", "from", "django", ".", "template", ".", "backends", ".", "django", "import", "DjangoTemplates", "for", "engine", "in", "engines", ".", "all", "(", ")", ":", "if", "isinstance", "(", "engine", ",", "DjangoTemplates", ")", ":", "return", "engine", ".", "engine", "raise", "ImproperlyConfigured", "(", "'No DjangoTemplates backend is configured.'", ")" ]
[ 56, 4 ]
[ 78, 79 ]
python
en
['en', 'error', 'th']
False
Engine.from_string
(self, template_code)
Return a compiled Template object for the given template code, handling template inheritance recursively.
Return a compiled Template object for the given template code, handling template inheritance recursively.
def from_string(self, template_code): """ Return a compiled Template object for the given template code, handling template inheritance recursively. """ return Template(template_code, engine=self)
[ "def", "from_string", "(", "self", ",", "template_code", ")", ":", "return", "Template", "(", "template_code", ",", "engine", "=", "self", ")" ]
[ 130, 4 ]
[ 135, 51 ]
python
en
['en', 'error', 'th']
False
Engine.get_template
(self, template_name)
Return a compiled Template object for the given template name, handling template inheritance recursively.
Return a compiled Template object for the given template name, handling template inheritance recursively.
def get_template(self, template_name): """ Return a compiled Template object for the given template name, handling template inheritance recursively. """ template, origin = self.find_template(template_name) if not hasattr(template, 'render'): # template needs to be compiled template = Template(template, origin, template_name, engine=self) return template
[ "def", "get_template", "(", "self", ",", "template_name", ")", ":", "template", ",", "origin", "=", "self", ".", "find_template", "(", "template_name", ")", "if", "not", "hasattr", "(", "template", ",", "'render'", ")", ":", "# template needs to be compiled", "template", "=", "Template", "(", "template", ",", "origin", ",", "template_name", ",", "engine", "=", "self", ")", "return", "template" ]
[ 137, 4 ]
[ 146, 23 ]
python
en
['en', 'error', 'th']
False
Engine.render_to_string
(self, template_name, context=None)
Render the template specified by template_name with the given context. For use in Django's test suite.
Render the template specified by template_name with the given context. For use in Django's test suite.
def render_to_string(self, template_name, context=None): """ Render the template specified by template_name with the given context. For use in Django's test suite. """ if isinstance(template_name, (list, tuple)): t = self.select_template(template_name) else: t = self.get_template(template_name) # Django < 1.8 accepted a Context in `context` even though that's # unintended. Preserve this ability but don't rewrap `context`. if isinstance(context, Context): return t.render(context) else: return t.render(Context(context, autoescape=self.autoescape))
[ "def", "render_to_string", "(", "self", ",", "template_name", ",", "context", "=", "None", ")", ":", "if", "isinstance", "(", "template_name", ",", "(", "list", ",", "tuple", ")", ")", ":", "t", "=", "self", ".", "select_template", "(", "template_name", ")", "else", ":", "t", "=", "self", ".", "get_template", "(", "template_name", ")", "# Django < 1.8 accepted a Context in `context` even though that's", "# unintended. Preserve this ability but don't rewrap `context`.", "if", "isinstance", "(", "context", ",", "Context", ")", ":", "return", "t", ".", "render", "(", "context", ")", "else", ":", "return", "t", ".", "render", "(", "Context", "(", "context", ",", "autoescape", "=", "self", ".", "autoescape", ")", ")" ]
[ 148, 4 ]
[ 162, 73 ]
python
en
['en', 'error', 'th']
False
Engine.select_template
(self, template_name_list)
Given a list of template names, return the first that can be loaded.
Given a list of template names, return the first that can be loaded.
def select_template(self, template_name_list): """ Given a list of template names, return the first that can be loaded. """ if not template_name_list: raise TemplateDoesNotExist("No template names provided") not_found = [] for template_name in template_name_list: try: return self.get_template(template_name) except TemplateDoesNotExist as exc: if exc.args[0] not in not_found: not_found.append(exc.args[0]) continue # If we get here, none of the templates could be loaded raise TemplateDoesNotExist(', '.join(not_found))
[ "def", "select_template", "(", "self", ",", "template_name_list", ")", ":", "if", "not", "template_name_list", ":", "raise", "TemplateDoesNotExist", "(", "\"No template names provided\"", ")", "not_found", "=", "[", "]", "for", "template_name", "in", "template_name_list", ":", "try", ":", "return", "self", ".", "get_template", "(", "template_name", ")", "except", "TemplateDoesNotExist", "as", "exc", ":", "if", "exc", ".", "args", "[", "0", "]", "not", "in", "not_found", ":", "not_found", ".", "append", "(", "exc", ".", "args", "[", "0", "]", ")", "continue", "# If we get here, none of the templates could be loaded", "raise", "TemplateDoesNotExist", "(", "', '", ".", "join", "(", "not_found", ")", ")" ]
[ 164, 4 ]
[ 179, 56 ]
python
en
['en', 'error', 'th']
False
print_error
(input_str)
Print given text in red color for Error text :param input_str:
Print given text in red color for Error text :param input_str:
def print_error(input_str): ''' Print given text in red color for Error text :param input_str: ''' print("\033[1;31;40m" + input_str + "\033[0m")
[ "def", "print_error", "(", "input_str", ")", ":", "print", "(", "\"\\033[1;31;40m\"", "+", "input_str", "+", "\"\\033[0m\"", ")" ]
[ 51, 0 ]
[ 56, 50 ]
python
en
['en', 'error', 'th']
False
print_ok
(input_str)
Print given text in green color for Ok text :param input_str:
Print given text in green color for Ok text :param input_str:
def print_ok(input_str): ''' Print given text in green color for Ok text :param input_str: ''' print("\033[1;32;40m" + input_str + "\033[0m")
[ "def", "print_ok", "(", "input_str", ")", ":", "print", "(", "\"\\033[1;32;40m\"", "+", "input_str", "+", "\"\\033[0m\"", ")" ]
[ 59, 0 ]
[ 64, 50 ]
python
en
['en', 'error', 'th']
False
print_warning
(input_str)
Print given text in yellow color for warning text :param input_str:
Print given text in yellow color for warning text :param input_str:
def print_warning(input_str): ''' Print given text in yellow color for warning text :param input_str: ''' print("\033[1;33;40m" + input_str + "\033[0m")
[ "def", "print_warning", "(", "input_str", ")", ":", "print", "(", "\"\\033[1;33;40m\"", "+", "input_str", "+", "\"\\033[0m\"", ")" ]
[ 67, 0 ]
[ 72, 50 ]
python
en
['en', 'error', 'th']
False
print_notice
(input_str)
Print given text in white background :param input_str:
Print given text in white background :param input_str:
def print_notice(input_str): ''' Print given text in white background :param input_str: ''' print("\033[0;30;47m" + input_str + "\033[0m")
[ "def", "print_notice", "(", "input_str", ")", ":", "print", "(", "\"\\033[0;30;47m\"", "+", "input_str", "+", "\"\\033[0m\"", ")" ]
[ 75, 0 ]
[ 80, 50 ]
python
en
['en', 'error', 'th']
False
print_command_response
(input_str)
Print given text in green color for Ok text :param input_str:
Print given text in green color for Ok text :param input_str:
def print_command_response(input_str): ''' Print given text in green color for Ok text :param input_str: ''' print("\033[1;34;40m" + input_str + "\033[0m")
[ "def", "print_command_response", "(", "input_str", ")", ":", "print", "(", "\"\\033[1;34;40m\"", "+", "input_str", "+", "\"\\033[0m\"", ")" ]
[ 83, 0 ]
[ 88, 50 ]
python
en
['en', 'error', 'th']
False
download_omsagent
()
Download omsagent this downloaded file would be installed :return: True if downloaded successfully
Download omsagent this downloaded file would be installed :return: True if downloaded successfully
def download_omsagent(): ''' Download omsagent this downloaded file would be installed :return: True if downloaded successfully ''' print("Trying to download the omsagent.") print_notice("wget " + oms_agent_url) download_command = subprocess.Popen(["wget", oms_agent_url], stdout=subprocess.PIPE) o, e = download_command.communicate() time.sleep(3) if e is not None: handle_error(e, error_response_str="Error: could not download omsagent.") return False print_ok("Downloaded omsagent successfully.") return True
[ "def", "download_omsagent", "(", ")", ":", "print", "(", "\"Trying to download the omsagent.\"", ")", "print_notice", "(", "\"wget \"", "+", "oms_agent_url", ")", "download_command", "=", "subprocess", ".", "Popen", "(", "[", "\"wget\"", ",", "oms_agent_url", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "o", ",", "e", "=", "download_command", ".", "communicate", "(", ")", "time", ".", "sleep", "(", "3", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Error: could not download omsagent.\"", ")", "return", "False", "print_ok", "(", "\"Downloaded omsagent successfully.\"", ")", "return", "True" ]
[ 91, 0 ]
[ 105, 15 ]
python
en
['en', 'error', 'th']
False
install_omsagent
(workspace_id, primary_key, oms_agent_install_url)
Installing the downloaded omsagent :param workspace_id: :param primary_key: :return:
Installing the downloaded omsagent :param workspace_id: :param primary_key: :return:
def install_omsagent(workspace_id, primary_key, oms_agent_install_url): ''' Installing the downloaded omsagent :param workspace_id: :param primary_key: :return: ''' print("Installing omsagent") command_tokens = ["sh", omsagent_file_name, "-w", workspace_id, "-s", primary_key, "-d", oms_agent_install_url] print_notice(" ".join(command_tokens)) install_omsagent_command = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) o, e = install_omsagent_command.communicate() time.sleep(3) if e is not None: handle_error(e, error_response_str="Error: could not install omsagent.") return False print_ok("Installed omsagent successfully.") return True
[ "def", "install_omsagent", "(", "workspace_id", ",", "primary_key", ",", "oms_agent_install_url", ")", ":", "print", "(", "\"Installing omsagent\"", ")", "command_tokens", "=", "[", "\"sh\"", ",", "omsagent_file_name", ",", "\"-w\"", ",", "workspace_id", ",", "\"-s\"", ",", "primary_key", ",", "\"-d\"", ",", "oms_agent_install_url", "]", "print_notice", "(", "\" \"", ".", "join", "(", "command_tokens", ")", ")", "install_omsagent_command", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "o", ",", "e", "=", "install_omsagent_command", ".", "communicate", "(", ")", "time", ".", "sleep", "(", "3", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Error: could not install omsagent.\"", ")", "return", "False", "print_ok", "(", "\"Installed omsagent successfully.\"", ")", "return", "True" ]
[ 115, 0 ]
[ 132, 15 ]
python
en
['en', 'error', 'th']
False
process_check
(process_name)
function who check using the ps -ef command if the 'process_name' is running :param process_name: :return: True if the process is running else False
function who check using the ps -ef command if the 'process_name' is running :param process_name: :return: True if the process is running else False
def process_check(process_name): ''' function who check using the ps -ef command if the 'process_name' is running :param process_name: :return: True if the process is running else False ''' p1 = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE) p2 = subprocess.Popen(["grep", "-i", process_name], stdin=p1.stdout, stdout=subprocess.PIPE) o, e = p2.communicate() tokens = o.decode(encoding='UTF-8').split('\n') tokens.remove('') return len(tokens)
[ "def", "process_check", "(", "process_name", ")", ":", "p1", "=", "subprocess", ".", "Popen", "(", "[", "\"ps\"", ",", "\"-ef\"", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "p2", "=", "subprocess", ".", "Popen", "(", "[", "\"grep\"", ",", "\"-i\"", ",", "process_name", "]", ",", "stdin", "=", "p1", ".", "stdout", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "o", ",", "e", "=", "p2", ".", "communicate", "(", ")", "tokens", "=", "o", ".", "decode", "(", "encoding", "=", "'UTF-8'", ")", ".", "split", "(", "'\\n'", ")", "tokens", ".", "remove", "(", "''", ")", "return", "len", "(", "tokens", ")" ]
[ 135, 0 ]
[ 146, 22 ]
python
en
['en', 'error', 'th']
False
create_daemon_forwarding_configuration
(omsagent_incoming_port, daemon_configuration_path, daemon_name)
Create the daemon configuration to forward messages over TCP to the oms agent :param omsagent_incoming_port: port for communication between the omsagent the the daemon :param daemon_configuration_path: path of the configuration file :param daemon_name: name of the daemon :return:
Create the daemon configuration to forward messages over TCP to the oms agent :param omsagent_incoming_port: port for communication between the omsagent the the daemon :param daemon_configuration_path: path of the configuration file :param daemon_name: name of the daemon :return:
def create_daemon_forwarding_configuration(omsagent_incoming_port, daemon_configuration_path, daemon_name): ''' Create the daemon configuration to forward messages over TCP to the oms agent :param omsagent_incoming_port: port for communication between the omsagent the the daemon :param daemon_configuration_path: path of the configuration file :param daemon_name: name of the daemon :return: ''' print("Creating " + daemon_name + " daemon configuration.") print("Configuration is changed to forward daemon incoming syslog messages into the omsagent.") print("Every command containing \'CEF\' string will be forwarded.") print("Path:") print_notice(daemon_configuration_path) file_content = get_daemon_configuration_content(daemon_name, omsagent_incoming_port) append_content_to_file(file_content, daemon_configuration_path, overide=True) print_ok("Configuration for " + daemon_name + " daemon was changed successfully.") return True
[ "def", "create_daemon_forwarding_configuration", "(", "omsagent_incoming_port", ",", "daemon_configuration_path", ",", "daemon_name", ")", ":", "print", "(", "\"Creating \"", "+", "daemon_name", "+", "\" daemon configuration.\"", ")", "print", "(", "\"Configuration is changed to forward daemon incoming syslog messages into the omsagent.\"", ")", "print", "(", "\"Every command containing \\'CEF\\' string will be forwarded.\"", ")", "print", "(", "\"Path:\"", ")", "print_notice", "(", "daemon_configuration_path", ")", "file_content", "=", "get_daemon_configuration_content", "(", "daemon_name", ",", "omsagent_incoming_port", ")", "append_content_to_file", "(", "file_content", ",", "daemon_configuration_path", ",", "overide", "=", "True", ")", "print_ok", "(", "\"Configuration for \"", "+", "daemon_name", "+", "\" daemon was changed successfully.\"", ")", "return", "True" ]
[ 149, 0 ]
[ 166, 15 ]
python
en
['en', 'error', 'th']
False
set_omsagent_configuration
(workspace_id, omsagent_incoming_port)
Download the omsagent configuration and then change the omsagent incoming port if required and change the protocol if required :param workspace_id: :param omsagent_incoming_port: :param tcp: :param udp: :return:
Download the omsagent configuration and then change the omsagent incoming port if required and change the protocol if required :param workspace_id: :param omsagent_incoming_port: :param tcp: :param udp: :return:
def set_omsagent_configuration(workspace_id, omsagent_incoming_port): ''' Download the omsagent configuration and then change the omsagent incoming port if required and change the protocol if required :param workspace_id: :param omsagent_incoming_port: :param tcp: :param udp: :return: ''' configuration_path = "/etc/opt/microsoft/omsagent/" + workspace_id + "/conf/omsagent.d/security_events.conf" print("Creating omsagent configuration to listen to syslog daemon forwarding port - " + omsagent_incoming_port) print("Configuration location is - " + configuration_path) command_tokens = ["sudo", "wget", "-O", configuration_path, oms_agent_configuration_url] print("Download configuration into the correct directory") print_notice(" ".join(command_tokens)) time.sleep(3) set_omsagent_configuration_command = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) o, e = set_omsagent_configuration_command.communicate() if e is not None: handle_error(e, error_response_str="Error: could not download omsagent configuration.") return False print_ok("Configuration for omsagent downloaded successfully.") print("Trying to changed omsagent configuration") if omsagent_incoming_port is not omsagent_default_incoming_port: if change_omsagent_configuration_port(omsagent_incoming_port=omsagent_incoming_port, configuration_path=configuration_path): print_ok("Incoming port for omsagent was changed to " + omsagent_incoming_port) else: print_error("Could not change omsagent incoming port") if change_omsagent_protocol(configuration_path=configuration_path): print_ok("Finished changing omsagent configuration") return True else: print_error("Could not change the omsagent configuration") return False
[ "def", "set_omsagent_configuration", "(", "workspace_id", ",", "omsagent_incoming_port", ")", ":", "configuration_path", "=", "\"/etc/opt/microsoft/omsagent/\"", "+", "workspace_id", "+", "\"/conf/omsagent.d/security_events.conf\"", "print", "(", "\"Creating omsagent configuration to listen to syslog daemon forwarding port - \"", "+", "omsagent_incoming_port", ")", "print", "(", "\"Configuration location is - \"", "+", "configuration_path", ")", "command_tokens", "=", "[", "\"sudo\"", ",", "\"wget\"", ",", "\"-O\"", ",", "configuration_path", ",", "oms_agent_configuration_url", "]", "print", "(", "\"Download configuration into the correct directory\"", ")", "print_notice", "(", "\" \"", ".", "join", "(", "command_tokens", ")", ")", "time", ".", "sleep", "(", "3", ")", "set_omsagent_configuration_command", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "o", ",", "e", "=", "set_omsagent_configuration_command", ".", "communicate", "(", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Error: could not download omsagent configuration.\"", ")", "return", "False", "print_ok", "(", "\"Configuration for omsagent downloaded successfully.\"", ")", "print", "(", "\"Trying to changed omsagent configuration\"", ")", "if", "omsagent_incoming_port", "is", "not", "omsagent_default_incoming_port", ":", "if", "change_omsagent_configuration_port", "(", "omsagent_incoming_port", "=", "omsagent_incoming_port", ",", "configuration_path", "=", "configuration_path", ")", ":", "print_ok", "(", "\"Incoming port for omsagent was changed to \"", "+", "omsagent_incoming_port", ")", "else", ":", "print_error", "(", "\"Could not change omsagent incoming port\"", ")", "if", "change_omsagent_protocol", "(", "configuration_path", "=", "configuration_path", ")", ":", "print_ok", "(", "\"Finished changing omsagent configuration\"", ")", "return", "True", "else", ":", "print_error", "(", 
"\"Could not change the omsagent configuration\"", ")", "return", "False" ]
[ 169, 0 ]
[ 203, 20 ]
python
en
['en', 'error', 'th']
False
set_rsyslog_configuration
()
Set the configuration for rsyslog we support from version 7 and above :return:
Set the configuration for rsyslog we support from version 7 and above :return:
def set_rsyslog_configuration(): ''' Set the configuration for rsyslog we support from version 7 and above :return: ''' if is_rsyslog_new_configuration(): set_rsyslog_new_configuration() else: set_rsyslog_old_configuration()
[ "def", "set_rsyslog_configuration", "(", ")", ":", "if", "is_rsyslog_new_configuration", "(", ")", ":", "set_rsyslog_new_configuration", "(", ")", "else", ":", "set_rsyslog_old_configuration", "(", ")" ]
[ 263, 0 ]
[ 272, 39 ]
python
en
['en', 'error', 'th']
False
change_omsagent_protocol
(configuration_path)
Changing the omsagent protocol, since the protocol type is set on the omsagent configuration file :param configuration_path:
Changing the omsagent protocol, since the protocol type is set on the omsagent configuration file :param configuration_path:
def change_omsagent_protocol(configuration_path): ''' Changing the omsagent protocol, since the protocol type is set on the omsagent configuration file :param configuration_path: ''' try: # if opening this file failed the installation of the oms-agent has failed fin = open(configuration_path, "rt") with open("tmp.txt", "wt") as fout: for line in fin: if "protocol_type" in line and "udp" in line: fout.write(line.replace("udp", "tcp")) print_notice("Changing protocol type from udp to tcp in "+configuration_path) print("Line changed: " + line) else: fout.write(line) except IOError: print_error("Oms-agent installation has failed please remove oms-agent and try again.") return False command_tokens = ["sudo", "mv", "tmp.txt", configuration_path] write_new_content = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) time.sleep(3) o, e = write_new_content.communicate() if e is not None: handle_error(e, error_response_str="Error: could not change omsagent configuration port in ." + configuration_path) return False print_ok("Omsagent configuration was changed to fit required protocol - " + configuration_path) return True
[ "def", "change_omsagent_protocol", "(", "configuration_path", ")", ":", "try", ":", "# if opening this file failed the installation of the oms-agent has failed", "fin", "=", "open", "(", "configuration_path", ",", "\"rt\"", ")", "with", "open", "(", "\"tmp.txt\"", ",", "\"wt\"", ")", "as", "fout", ":", "for", "line", "in", "fin", ":", "if", "\"protocol_type\"", "in", "line", "and", "\"udp\"", "in", "line", ":", "fout", ".", "write", "(", "line", ".", "replace", "(", "\"udp\"", ",", "\"tcp\"", ")", ")", "print_notice", "(", "\"Changing protocol type from udp to tcp in \"", "+", "configuration_path", ")", "print", "(", "\"Line changed: \"", "+", "line", ")", "else", ":", "fout", ".", "write", "(", "line", ")", "except", "IOError", ":", "print_error", "(", "\"Oms-agent installation has failed please remove oms-agent and try again.\"", ")", "return", "False", "command_tokens", "=", "[", "\"sudo\"", ",", "\"mv\"", ",", "\"tmp.txt\"", ",", "configuration_path", "]", "write_new_content", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "time", ".", "sleep", "(", "3", ")", "o", ",", "e", "=", "write_new_content", ".", "communicate", "(", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Error: could not change omsagent configuration port in .\"", "+", "configuration_path", ")", "return", "False", "print_ok", "(", "\"Omsagent configuration was changed to fit required protocol - \"", "+", "configuration_path", ")", "return", "True" ]
[ 275, 0 ]
[ 303, 15 ]
python
en
['en', 'error', 'th']
False
change_omsagent_configuration_port
(omsagent_incoming_port, configuration_path)
Changing the omsagent configuration port if required :param omsagent_incoming_port: :param configuration_path:
Changing the omsagent configuration port if required :param omsagent_incoming_port: :param configuration_path:
def change_omsagent_configuration_port(omsagent_incoming_port, configuration_path): ''' Changing the omsagent configuration port if required :param omsagent_incoming_port: :param configuration_path: ''' with open(configuration_path, "rt") as fin: with open("tmp.txt", "wt") as fout: for line in fin: fout.write(line.replace(omsagent_default_incoming_port, omsagent_incoming_port)) command_tokens = ["sudo", "mv", "tmp.txt", configuration_path] write_new_content = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) time.sleep(3) o, e = write_new_content.communicate() if e is not None: handle_error(e, error_response_str="Error: could not change omsagent configuration port in ." + configuration_path) return False print_ok("Omsagent incoming port was changed in configuration - " + configuration_path) return True
[ "def", "change_omsagent_configuration_port", "(", "omsagent_incoming_port", ",", "configuration_path", ")", ":", "with", "open", "(", "configuration_path", ",", "\"rt\"", ")", "as", "fin", ":", "with", "open", "(", "\"tmp.txt\"", ",", "\"wt\"", ")", "as", "fout", ":", "for", "line", "in", "fin", ":", "fout", ".", "write", "(", "line", ".", "replace", "(", "omsagent_default_incoming_port", ",", "omsagent_incoming_port", ")", ")", "command_tokens", "=", "[", "\"sudo\"", ",", "\"mv\"", ",", "\"tmp.txt\"", ",", "configuration_path", "]", "write_new_content", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "time", ".", "sleep", "(", "3", ")", "o", ",", "e", "=", "write_new_content", ".", "communicate", "(", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Error: could not change omsagent configuration port in .\"", "+", "configuration_path", ")", "return", "False", "print_ok", "(", "\"Omsagent incoming port was changed in configuration - \"", "+", "configuration_path", ")", "return", "True" ]
[ 306, 0 ]
[ 324, 15 ]
python
en
['en', 'error', 'th']
False
restart_rsyslog
()
Restart the Rsyslog daemon
Restart the Rsyslog daemon
def restart_rsyslog(): ''' Restart the Rsyslog daemon ''' print("Restarting rsyslog daemon.") command_tokens = ["sudo", "service", "rsyslog", "restart"] print_notice(" ".join(command_tokens)) restart_rsyslog_command = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) time.sleep(3) o, e = restart_rsyslog_command.communicate() if e is not None: handle_error(e, error_response_str="Could not restart rsyslog daemon") return False print_ok("Rsyslog daemon restarted successfully") return True
[ "def", "restart_rsyslog", "(", ")", ":", "print", "(", "\"Restarting rsyslog daemon.\"", ")", "command_tokens", "=", "[", "\"sudo\"", ",", "\"service\"", ",", "\"rsyslog\"", ",", "\"restart\"", "]", "print_notice", "(", "\" \"", ".", "join", "(", "command_tokens", ")", ")", "restart_rsyslog_command", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "time", ".", "sleep", "(", "3", ")", "o", ",", "e", "=", "restart_rsyslog_command", ".", "communicate", "(", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Could not restart rsyslog daemon\"", ")", "return", "False", "print_ok", "(", "\"Rsyslog daemon restarted successfully\"", ")", "return", "True" ]
[ 327, 0 ]
[ 341, 15 ]
python
en
['en', 'error', 'th']
False
restart_syslog_ng
()
Restart the syslog-ng daemon
Restart the syslog-ng daemon
def restart_syslog_ng(): ''' Restart the syslog-ng daemon ''' print("Restarting syslog-ng daemon.") command_tokens = ["sudo", "service", "syslog-ng", "restart"] print_notice(" ".join(command_tokens)) restart_rsyslog_command = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) time.sleep(3) o, e = restart_rsyslog_command.communicate() if e is not None: handle_error(e, error_response_str="Could not restart syslog-ng daemon") return False print_ok("Syslog-ng daemon restarted successfully") return True
[ "def", "restart_syslog_ng", "(", ")", ":", "print", "(", "\"Restarting syslog-ng daemon.\"", ")", "command_tokens", "=", "[", "\"sudo\"", ",", "\"service\"", ",", "\"syslog-ng\"", ",", "\"restart\"", "]", "print_notice", "(", "\" \"", ".", "join", "(", "command_tokens", ")", ")", "restart_rsyslog_command", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "time", ".", "sleep", "(", "3", ")", "o", ",", "e", "=", "restart_rsyslog_command", ".", "communicate", "(", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Could not restart syslog-ng daemon\"", ")", "return", "False", "print_ok", "(", "\"Syslog-ng daemon restarted successfully\"", ")", "return", "True" ]
[ 344, 0 ]
[ 358, 15 ]
python
en
['en', 'error', 'th']
False
restart_omsagent
(workspace_id)
Restart the omsagent :param workspace_id:
Restart the omsagent :param workspace_id:
def restart_omsagent(workspace_id): ''' Restart the omsagent :param workspace_id: ''' print("Trying to restart omsagent") command_tokens = ["sudo", "/opt/microsoft/omsagent/bin/service_control", "restart", workspace_id] print_notice(" ".join(command_tokens)) restart_omsagent_command = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) time.sleep(3) o, e = restart_omsagent_command.communicate() if e is not None: handle_error(e, error_response_str="Error: could not restart omsagent") return False print_ok("Omsagent restarted successfully") return True
[ "def", "restart_omsagent", "(", "workspace_id", ")", ":", "print", "(", "\"Trying to restart omsagent\"", ")", "command_tokens", "=", "[", "\"sudo\"", ",", "\"/opt/microsoft/omsagent/bin/service_control\"", ",", "\"restart\"", ",", "workspace_id", "]", "print_notice", "(", "\" \"", ".", "join", "(", "command_tokens", ")", ")", "restart_omsagent_command", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "time", ".", "sleep", "(", "3", ")", "o", ",", "e", "=", "restart_omsagent_command", ".", "communicate", "(", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Error: could not restart omsagent\"", ")", "return", "False", "print_ok", "(", "\"Omsagent restarted successfully\"", ")", "return", "True" ]
[ 361, 0 ]
[ 376, 15 ]
python
en
['en', 'error', 'th']
False
get_daemon_configuration_content
(daemon_name, omsagent_incoming_port)
Return the correct configuration according to the daemon name :param daemon_name: :param omsagent_incoming_port: :return:
Return the correct configuration according to the daemon name :param daemon_name: :param omsagent_incoming_port: :return:
def get_daemon_configuration_content(daemon_name, omsagent_incoming_port): ''' Return the correct configuration according to the daemon name :param daemon_name: :param omsagent_incoming_port: :return: ''' if daemon_name is rsyslog_daemon_name: return get_rsyslog_daemon_configuration_content(omsagent_incoming_port) elif daemon_name is syslog_ng_daemon_name: return get_syslog_ng_damon_configuration_content(omsagent_incoming_port) else: print_error("Could not create daemon configuration.") return False
[ "def", "get_daemon_configuration_content", "(", "daemon_name", ",", "omsagent_incoming_port", ")", ":", "if", "daemon_name", "is", "rsyslog_daemon_name", ":", "return", "get_rsyslog_daemon_configuration_content", "(", "omsagent_incoming_port", ")", "elif", "daemon_name", "is", "syslog_ng_daemon_name", ":", "return", "get_syslog_ng_damon_configuration_content", "(", "omsagent_incoming_port", ")", "else", ":", "print_error", "(", "\"Could not create daemon configuration.\"", ")", "return", "False" ]
[ 379, 0 ]
[ 392, 20 ]
python
en
['en', 'error', 'th']
False
get_rsyslog_daemon_configuration_content
(omsagent_incoming_port)
Rsyslog accept every message containing CEF or ASA(for Cisco ASA
Rsyslog accept every message containing CEF or ASA(for Cisco ASA
def get_rsyslog_daemon_configuration_content(omsagent_incoming_port): '''Rsyslog accept every message containing CEF or ASA(for Cisco ASA''' rsyslog_daemon_configuration_content = ":rawmsg, regex, \"CEF\"|\"ASA\" \n*.* @@127.0.0.1:"+ omsagent_incoming_port print("Rsyslog daemon configuration content:") content = rsyslog_daemon_configuration_content print_command_response(content) return content
[ "def", "get_rsyslog_daemon_configuration_content", "(", "omsagent_incoming_port", ")", ":", "rsyslog_daemon_configuration_content", "=", "\":rawmsg, regex, \\\"CEF\\\"|\\\"ASA\\\" \\n*.* @@127.0.0.1:\"", "+", "omsagent_incoming_port", "print", "(", "\"Rsyslog daemon configuration content:\"", ")", "content", "=", "rsyslog_daemon_configuration_content", "print_command_response", "(", "content", ")", "return", "content" ]
[ 395, 0 ]
[ 401, 18 ]
python
en
['en', 'gl', 'en']
True
is_rsyslog
()
Returns True if the daemon is 'Rsyslog'
Returns True if the daemon is 'Rsyslog'
def is_rsyslog(): ''' Returns True if the daemon is 'Rsyslog' ''' # Meaning ps -ef | grep "daemon name" has returned more then the grep result return process_check(rsyslog_daemon_name) > 1
[ "def", "is_rsyslog", "(", ")", ":", "# Meaning ps -ef | grep \"daemon name\" has returned more then the grep result", "return", "process_check", "(", "rsyslog_daemon_name", ")", ">", "1" ]
[ 415, 0 ]
[ 420, 49 ]
python
en
['en', 'error', 'th']
False
is_syslog_ng
()
Returns True if the daemon is 'Syslogng'
Returns True if the daemon is 'Syslogng'
def is_syslog_ng(): ''' Returns True if the daemon is 'Syslogng' ''' # Meaning ps -ef | grep "daemon name" has returned more then the grep result return process_check(syslog_ng_daemon_name) > 1
[ "def", "is_syslog_ng", "(", ")", ":", "# Meaning ps -ef | grep \"daemon name\" has returned more then the grep result", "return", "process_check", "(", "syslog_ng_daemon_name", ")", ">", "1" ]
[ 423, 0 ]
[ 428, 51 ]
python
en
['en', 'error', 'th']
False
set_syslog_ng_configuration
()
syslog ng have a default configuration which enables the incoming ports and define the source pipe to the daemon this will verify it is configured correctly :return:
syslog ng have a default configuration which enables the incoming ports and define the source pipe to the daemon this will verify it is configured correctly :return:
def set_syslog_ng_configuration(): ''' syslog ng have a default configuration which enables the incoming ports and define the source pipe to the daemon this will verify it is configured correctly :return: ''' comment_line = False snet_found = False with open(syslog_ng_conf_path, "rt") as fin: with open("tmp.txt", "wt") as fout: for line in fin: # fount snet if "s_net" in line and not "#": snet_found = True # found source that is not s_net - should remove it elif "source" in line and "#" not in line and "s_net" not in line and "log" not in line: comment_line = True # if starting a new definition stop commenting elif comment_line is True and "#" not in line and ("source" in line or "destination" in line or "filter" in line or "log" in line): # stop commenting out comment_line = False # write line correctly fout.write(line if not comment_line else ("#" + line)) command_tokens = ["sudo", "mv", "tmp.txt", syslog_ng_conf_path] write_new_content = subprocess.Popen(command_tokens, stdout=subprocess.PIPE) time.sleep(3) o, e = write_new_content.communicate() if e is not None: handle_error(e, error_response_str="Error: could not change Rsyslog.conf configuration in -" + syslog_ng_conf_path) return False if not snet_found: append_content_to_file(line=syslog_ng_source_content, file_path=syslog_ng_conf_path) print_ok("Rsyslog.conf configuration was changed to fit required protocol - " + syslog_ng_conf_path) return True
[ "def", "set_syslog_ng_configuration", "(", ")", ":", "comment_line", "=", "False", "snet_found", "=", "False", "with", "open", "(", "syslog_ng_conf_path", ",", "\"rt\"", ")", "as", "fin", ":", "with", "open", "(", "\"tmp.txt\"", ",", "\"wt\"", ")", "as", "fout", ":", "for", "line", "in", "fin", ":", "# fount snet", "if", "\"s_net\"", "in", "line", "and", "not", "\"#\"", ":", "snet_found", "=", "True", "# found source that is not s_net - should remove it", "elif", "\"source\"", "in", "line", "and", "\"#\"", "not", "in", "line", "and", "\"s_net\"", "not", "in", "line", "and", "\"log\"", "not", "in", "line", ":", "comment_line", "=", "True", "# if starting a new definition stop commenting", "elif", "comment_line", "is", "True", "and", "\"#\"", "not", "in", "line", "and", "(", "\"source\"", "in", "line", "or", "\"destination\"", "in", "line", "or", "\"filter\"", "in", "line", "or", "\"log\"", "in", "line", ")", ":", "# stop commenting out", "comment_line", "=", "False", "# write line correctly", "fout", ".", "write", "(", "line", "if", "not", "comment_line", "else", "(", "\"#\"", "+", "line", ")", ")", "command_tokens", "=", "[", "\"sudo\"", ",", "\"mv\"", ",", "\"tmp.txt\"", ",", "syslog_ng_conf_path", "]", "write_new_content", "=", "subprocess", ".", "Popen", "(", "command_tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "time", ".", "sleep", "(", "3", ")", "o", ",", "e", "=", "write_new_content", ".", "communicate", "(", ")", "if", "e", "is", "not", "None", ":", "handle_error", "(", "e", ",", "error_response_str", "=", "\"Error: could not change Rsyslog.conf configuration in -\"", "+", "syslog_ng_conf_path", ")", "return", "False", "if", "not", "snet_found", ":", "append_content_to_file", "(", "line", "=", "syslog_ng_source_content", ",", "file_path", "=", "syslog_ng_conf_path", ")", "print_ok", "(", "\"Rsyslog.conf configuration was changed to fit required protocol - \"", "+", "syslog_ng_conf_path", ")", "return", "True" ]
[ 431, 0 ]
[ 464, 15 ]
python
en
['en', 'error', 'th']
False
async_unsafe
(message)
Decorator to mark functions as async-unsafe. Someone trying to access the function while in an async context will get an error message.
Decorator to mark functions as async-unsafe. Someone trying to access the function while in an async context will get an error message.
def async_unsafe(message): """ Decorator to mark functions as async-unsafe. Someone trying to access the function while in an async context will get an error message. """ def decorator(func): @functools.wraps(func) def inner(*args, **kwargs): if not os.environ.get('DJANGO_ALLOW_ASYNC_UNSAFE'): # Detect a running event loop in this thread. try: event_loop = asyncio.get_event_loop() except RuntimeError: pass else: if event_loop.is_running(): raise SynchronousOnlyOperation(message) # Pass onwards. return func(*args, **kwargs) return inner # If the message is actually a function, then be a no-arguments decorator. if callable(message): func = message message = 'You cannot call this from an async context - use a thread or sync_to_async.' return decorator(func) else: return decorator
[ "def", "async_unsafe", "(", "message", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "os", ".", "environ", ".", "get", "(", "'DJANGO_ALLOW_ASYNC_UNSAFE'", ")", ":", "# Detect a running event loop in this thread.", "try", ":", "event_loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "except", "RuntimeError", ":", "pass", "else", ":", "if", "event_loop", ".", "is_running", "(", ")", ":", "raise", "SynchronousOnlyOperation", "(", "message", ")", "# Pass onwards.", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "inner", "# If the message is actually a function, then be a no-arguments decorator.", "if", "callable", "(", "message", ")", ":", "func", "=", "message", "message", "=", "'You cannot call this from an async context - use a thread or sync_to_async.'", "return", "decorator", "(", "func", ")", "else", ":", "return", "decorator" ]
[ 7, 0 ]
[ 33, 24 ]
python
en
['en', 'error', 'th']
False
_get_prepared_distribution
( req, # type: InstallRequirement req_tracker, # type: RequirementTracker finder, # type: PackageFinder build_isolation, # type: bool )
Prepare a distribution for installation.
Prepare a distribution for installation.
def _get_prepared_distribution( req, # type: InstallRequirement req_tracker, # type: RequirementTracker finder, # type: PackageFinder build_isolation, # type: bool ): # type: (...) -> Distribution """Prepare a distribution for installation.""" abstract_dist = make_distribution_for_install_requirement(req) with req_tracker.track(req): abstract_dist.prepare_distribution_metadata(finder, build_isolation) return abstract_dist.get_pkg_resources_distribution()
[ "def", "_get_prepared_distribution", "(", "req", ",", "# type: InstallRequirement", "req_tracker", ",", "# type: RequirementTracker", "finder", ",", "# type: PackageFinder", "build_isolation", ",", "# type: bool", ")", ":", "# type: (...) -> Distribution", "abstract_dist", "=", "make_distribution_for_install_requirement", "(", "req", ")", "with", "req_tracker", ".", "track", "(", "req", ")", ":", "abstract_dist", ".", "prepare_distribution_metadata", "(", "finder", ",", "build_isolation", ")", "return", "abstract_dist", ".", "get_pkg_resources_distribution", "(", ")" ]
[ 49, 0 ]
[ 60, 57 ]
python
it
['it', 'it', 'en']
True
_copy2_ignoring_special_files
(src, dest)
Copying special files is not supported, but as a convenience to users we skip errors copying them. This supports tools that may create e.g. socket files in the project source directory.
Copying special files is not supported, but as a convenience to users we skip errors copying them. This supports tools that may create e.g. socket files in the project source directory.
def _copy2_ignoring_special_files(src, dest): # type: (str, str) -> None """Copying special files is not supported, but as a convenience to users we skip errors copying them. This supports tools that may create e.g. socket files in the project source directory. """ try: copy2_fixed(src, dest) except shutil.SpecialFileError as e: # SpecialFileError may be raised due to either the source or # destination. If the destination was the cause then we would actually # care, but since the destination directory is deleted prior to # copy we ignore all of them assuming it is caused by the source. logger.warning( "Ignoring special file error '%s' encountered copying %s to %s.", str(e), src, dest, )
[ "def", "_copy2_ignoring_special_files", "(", "src", ",", "dest", ")", ":", "# type: (str, str) -> None", "try", ":", "copy2_fixed", "(", "src", ",", "dest", ")", "except", "shutil", ".", "SpecialFileError", "as", "e", ":", "# SpecialFileError may be raised due to either the source or", "# destination. If the destination was the cause then we would actually", "# care, but since the destination directory is deleted prior to", "# copy we ignore all of them assuming it is caused by the source.", "logger", ".", "warning", "(", "\"Ignoring special file error '%s' encountered copying %s to %s.\"", ",", "str", "(", "e", ")", ",", "src", ",", "dest", ",", ")" ]
[ 108, 0 ]
[ 126, 9 ]
python
en
['en', 'en', 'en']
True
get_file_url
( link, # type: Link download_dir=None, # type: Optional[str] hashes=None # type: Optional[Hashes] )
Get file and optionally check its hash.
Get file and optionally check its hash.
def get_file_url( link, # type: Link download_dir=None, # type: Optional[str] hashes=None # type: Optional[Hashes] ): # type: (...) -> File """Get file and optionally check its hash. """ # If a download dir is specified, is the file already there and valid? already_downloaded_path = None if download_dir: already_downloaded_path = _check_download_dir( link, download_dir, hashes ) if already_downloaded_path: from_path = already_downloaded_path else: from_path = link.file_path # If --require-hashes is off, `hashes` is either empty, the # link's embedded hash, or MissingHashes; it is required to # match. If --require-hashes is on, we are satisfied by any # hash in `hashes` matching: a URL-based or an option-based # one; no internet-sourced hash will be in `hashes`. if hashes: hashes.check_against_path(from_path) return File(from_path, None)
[ "def", "get_file_url", "(", "link", ",", "# type: Link", "download_dir", "=", "None", ",", "# type: Optional[str]", "hashes", "=", "None", "# type: Optional[Hashes]", ")", ":", "# type: (...) -> File", "# If a download dir is specified, is the file already there and valid?", "already_downloaded_path", "=", "None", "if", "download_dir", ":", "already_downloaded_path", "=", "_check_download_dir", "(", "link", ",", "download_dir", ",", "hashes", ")", "if", "already_downloaded_path", ":", "from_path", "=", "already_downloaded_path", "else", ":", "from_path", "=", "link", ".", "file_path", "# If --require-hashes is off, `hashes` is either empty, the", "# link's embedded hash, or MissingHashes; it is required to", "# match. If --require-hashes is on, we are satisfied by any", "# hash in `hashes` matching: a URL-based or an option-based", "# one; no internet-sourced hash will be in `hashes`.", "if", "hashes", ":", "hashes", ".", "check_against_path", "(", "from_path", ")", "return", "File", "(", "from_path", ",", "None", ")" ]
[ 160, 0 ]
[ 187, 32 ]
python
en
['en', 'en', 'en']
True
unpack_url
( link, # type: Link location, # type: str download, # type: Downloader download_dir=None, # type: Optional[str] hashes=None, # type: Optional[Hashes] )
Unpack link into location, downloading if required. :param hashes: A Hashes object, one of whose embedded hashes must match, or HashMismatch will be raised. If the Hashes is empty, no matches are required, and unhashable types of requirements (like VCS ones, which would ordinarily raise HashUnsupported) are allowed.
Unpack link into location, downloading if required.
def unpack_url( link, # type: Link location, # type: str download, # type: Downloader download_dir=None, # type: Optional[str] hashes=None, # type: Optional[Hashes] ): # type: (...) -> Optional[File] """Unpack link into location, downloading if required. :param hashes: A Hashes object, one of whose embedded hashes must match, or HashMismatch will be raised. If the Hashes is empty, no matches are required, and unhashable types of requirements (like VCS ones, which would ordinarily raise HashUnsupported) are allowed. """ # non-editable vcs urls if link.is_vcs: unpack_vcs_link(link, location) return None # Once out-of-tree-builds are no longer supported, could potentially # replace the below condition with `assert not link.is_existing_dir` # - unpack_url does not need to be called for in-tree-builds. # # As further cleanup, _copy_source_tree and accompanying tests can # be removed. if link.is_existing_dir(): deprecated( "A future pip version will change local packages to be built " "in-place without first copying to a temporary directory. " "We recommend you use --use-feature=in-tree-build to test " "your packages with this new behavior before it becomes the " "default.\n", replacement=None, gone_in="21.3", issue=7555 ) if os.path.isdir(location): rmtree(location) _copy_source_tree(link.file_path, location) return None # file urls if link.is_file: file = get_file_url(link, download_dir, hashes=hashes) # http urls else: file = get_http_url( link, download, download_dir, hashes=hashes, ) # unpack the archive to the build dir location. even when only downloading # archives, they have to be unpacked to parse dependencies, except wheels if not link.is_wheel: unpack_file(file.path, location, file.content_type) return file
[ "def", "unpack_url", "(", "link", ",", "# type: Link", "location", ",", "# type: str", "download", ",", "# type: Downloader", "download_dir", "=", "None", ",", "# type: Optional[str]", "hashes", "=", "None", ",", "# type: Optional[Hashes]", ")", ":", "# type: (...) -> Optional[File]", "# non-editable vcs urls", "if", "link", ".", "is_vcs", ":", "unpack_vcs_link", "(", "link", ",", "location", ")", "return", "None", "# Once out-of-tree-builds are no longer supported, could potentially", "# replace the below condition with `assert not link.is_existing_dir`", "# - unpack_url does not need to be called for in-tree-builds.", "#", "# As further cleanup, _copy_source_tree and accompanying tests can", "# be removed.", "if", "link", ".", "is_existing_dir", "(", ")", ":", "deprecated", "(", "\"A future pip version will change local packages to be built \"", "\"in-place without first copying to a temporary directory. \"", "\"We recommend you use --use-feature=in-tree-build to test \"", "\"your packages with this new behavior before it becomes the \"", "\"default.\\n\"", ",", "replacement", "=", "None", ",", "gone_in", "=", "\"21.3\"", ",", "issue", "=", "7555", ")", "if", "os", ".", "path", ".", "isdir", "(", "location", ")", ":", "rmtree", "(", "location", ")", "_copy_source_tree", "(", "link", ".", "file_path", ",", "location", ")", "return", "None", "# file urls", "if", "link", ".", "is_file", ":", "file", "=", "get_file_url", "(", "link", ",", "download_dir", ",", "hashes", "=", "hashes", ")", "# http urls", "else", ":", "file", "=", "get_http_url", "(", "link", ",", "download", ",", "download_dir", ",", "hashes", "=", "hashes", ",", ")", "# unpack the archive to the build dir location. even when only downloading", "# archives, they have to be unpacked to parse dependencies, except wheels", "if", "not", "link", ".", "is_wheel", ":", "unpack_file", "(", "file", ".", "path", ",", "location", ",", "file", ".", "content_type", ")", "return", "file" ]
[ 190, 0 ]
[ 250, 15 ]
python
en
['it', 'en', 'en']
True
_check_download_dir
(link, download_dir, hashes)
Check download_dir for previously downloaded file with correct hash If a correct file is found return its path else None
Check download_dir for previously downloaded file with correct hash If a correct file is found return its path else None
def _check_download_dir(link, download_dir, hashes): # type: (Link, str, Optional[Hashes]) -> Optional[str] """ Check download_dir for previously downloaded file with correct hash If a correct file is found return its path else None """ download_path = os.path.join(download_dir, link.filename) if not os.path.exists(download_path): return None # If already downloaded, does its hash match? logger.info('File was already downloaded %s', download_path) if hashes: try: hashes.check_against_path(download_path) except HashMismatch: logger.warning( 'Previously-downloaded file %s has bad hash. ' 'Re-downloading.', download_path ) os.unlink(download_path) return None return download_path
[ "def", "_check_download_dir", "(", "link", ",", "download_dir", ",", "hashes", ")", ":", "# type: (Link, str, Optional[Hashes]) -> Optional[str]", "download_path", "=", "os", ".", "path", ".", "join", "(", "download_dir", ",", "link", ".", "filename", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "download_path", ")", ":", "return", "None", "# If already downloaded, does its hash match?", "logger", ".", "info", "(", "'File was already downloaded %s'", ",", "download_path", ")", "if", "hashes", ":", "try", ":", "hashes", ".", "check_against_path", "(", "download_path", ")", "except", "HashMismatch", ":", "logger", ".", "warning", "(", "'Previously-downloaded file %s has bad hash. '", "'Re-downloading.'", ",", "download_path", ")", "os", ".", "unlink", "(", "download_path", ")", "return", "None", "return", "download_path" ]
[ 253, 0 ]
[ 276, 24 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer._log_preparing_link
(self, req)
Provide context for the requirement being prepared.
Provide context for the requirement being prepared.
def _log_preparing_link(self, req): # type: (InstallRequirement) -> None """Provide context for the requirement being prepared.""" if req.link.is_file and not req.original_link_is_in_wheel_cache: message = "Processing %s" information = str(display_path(req.link.file_path)) else: message = "Collecting %s" information = str(req.req or req) if (message, information) != self._previous_requirement_header: self._previous_requirement_header = (message, information) logger.info(message, information) if req.original_link_is_in_wheel_cache: with indent_log(): logger.info("Using cached %s", req.link.filename)
[ "def", "_log_preparing_link", "(", "self", ",", "req", ")", ":", "# type: (InstallRequirement) -> None", "if", "req", ".", "link", ".", "is_file", "and", "not", "req", ".", "original_link_is_in_wheel_cache", ":", "message", "=", "\"Processing %s\"", "information", "=", "str", "(", "display_path", "(", "req", ".", "link", ".", "file_path", ")", ")", "else", ":", "message", "=", "\"Collecting %s\"", "information", "=", "str", "(", "req", ".", "req", "or", "req", ")", "if", "(", "message", ",", "information", ")", "!=", "self", ".", "_previous_requirement_header", ":", "self", ".", "_previous_requirement_header", "=", "(", "message", ",", "information", ")", "logger", ".", "info", "(", "message", ",", "information", ")", "if", "req", ".", "original_link_is_in_wheel_cache", ":", "with", "indent_log", "(", ")", ":", "logger", ".", "info", "(", "\"Using cached %s\"", ",", "req", ".", "link", ".", "filename", ")" ]
[ 334, 4 ]
[ 350, 65 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer._ensure_link_req_src_dir
(self, req, parallel_builds)
Ensure source_dir of a linked InstallRequirement.
Ensure source_dir of a linked InstallRequirement.
def _ensure_link_req_src_dir(self, req, parallel_builds): # type: (InstallRequirement, bool) -> None """Ensure source_dir of a linked InstallRequirement.""" # Since source_dir is only set for editable requirements. if req.link.is_wheel: # We don't need to unpack wheels, so no need for a source # directory. return assert req.source_dir is None if req.link.is_existing_dir() and self.in_tree_build: # build local directories in-tree req.source_dir = req.link.file_path return # We always delete unpacked sdists after pip runs. req.ensure_has_source_dir( self.build_dir, autodelete=True, parallel_builds=parallel_builds, ) # If a checkout exists, it's unwise to keep going. version # inconsistencies are logged later, but do not fail the # installation. # FIXME: this won't upgrade when there's an existing # package unpacked in `req.source_dir` if is_installable_dir(req.source_dir): raise PreviousBuildDirError( "pip can't proceed with requirements '{}' due to a" "pre-existing build directory ({}). This is likely " "due to a previous installation that failed . pip is " "being responsible and not assuming it can delete this. " "Please delete it and try again.".format(req, req.source_dir) )
[ "def", "_ensure_link_req_src_dir", "(", "self", ",", "req", ",", "parallel_builds", ")", ":", "# type: (InstallRequirement, bool) -> None", "# Since source_dir is only set for editable requirements.", "if", "req", ".", "link", ".", "is_wheel", ":", "# We don't need to unpack wheels, so no need for a source", "# directory.", "return", "assert", "req", ".", "source_dir", "is", "None", "if", "req", ".", "link", ".", "is_existing_dir", "(", ")", "and", "self", ".", "in_tree_build", ":", "# build local directories in-tree", "req", ".", "source_dir", "=", "req", ".", "link", ".", "file_path", "return", "# We always delete unpacked sdists after pip runs.", "req", ".", "ensure_has_source_dir", "(", "self", ".", "build_dir", ",", "autodelete", "=", "True", ",", "parallel_builds", "=", "parallel_builds", ",", ")", "# If a checkout exists, it's unwise to keep going. version", "# inconsistencies are logged later, but do not fail the", "# installation.", "# FIXME: this won't upgrade when there's an existing", "# package unpacked in `req.source_dir`", "if", "is_installable_dir", "(", "req", ".", "source_dir", ")", ":", "raise", "PreviousBuildDirError", "(", "\"pip can't proceed with requirements '{}' due to a\"", "\"pre-existing build directory ({}). This is likely \"", "\"due to a previous installation that failed . pip is \"", "\"being responsible and not assuming it can delete this. \"", "\"Please delete it and try again.\"", ".", "format", "(", "req", ",", "req", ".", "source_dir", ")", ")" ]
[ 352, 4 ]
[ 385, 13 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer._fetch_metadata_using_lazy_wheel
(self, link)
Fetch metadata using lazy wheel, if possible.
Fetch metadata using lazy wheel, if possible.
def _fetch_metadata_using_lazy_wheel(self, link): # type: (Link) -> Optional[Distribution] """Fetch metadata using lazy wheel, if possible.""" if not self.use_lazy_wheel: return None if self.require_hashes: logger.debug('Lazy wheel is not used as hash checking is required') return None if link.is_file or not link.is_wheel: logger.debug( 'Lazy wheel is not used as ' '%r does not points to a remote wheel', link, ) return None wheel = Wheel(link.filename) name = canonicalize_name(wheel.name) logger.info( 'Obtaining dependency information from %s %s', name, wheel.version, ) url = link.url.split('#', 1)[0] try: return dist_from_wheel_url(name, url, self._session) except HTTPRangeRequestUnsupported: logger.debug('%s does not support range requests', url) return None
[ "def", "_fetch_metadata_using_lazy_wheel", "(", "self", ",", "link", ")", ":", "# type: (Link) -> Optional[Distribution]", "if", "not", "self", ".", "use_lazy_wheel", ":", "return", "None", "if", "self", ".", "require_hashes", ":", "logger", ".", "debug", "(", "'Lazy wheel is not used as hash checking is required'", ")", "return", "None", "if", "link", ".", "is_file", "or", "not", "link", ".", "is_wheel", ":", "logger", ".", "debug", "(", "'Lazy wheel is not used as '", "'%r does not points to a remote wheel'", ",", "link", ",", ")", "return", "None", "wheel", "=", "Wheel", "(", "link", ".", "filename", ")", "name", "=", "canonicalize_name", "(", "wheel", ".", "name", ")", "logger", ".", "info", "(", "'Obtaining dependency information from %s %s'", ",", "name", ",", "wheel", ".", "version", ",", ")", "url", "=", "link", ".", "url", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", "try", ":", "return", "dist_from_wheel_url", "(", "name", ",", "url", ",", "self", ".", "_session", ")", "except", "HTTPRangeRequestUnsupported", ":", "logger", ".", "debug", "(", "'%s does not support range requests'", ",", "url", ")", "return", "None" ]
[ 420, 4 ]
[ 447, 23 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer._complete_partial_requirements
( self, partially_downloaded_reqs, # type: Iterable[InstallRequirement] parallel_builds=False, # type: bool )
Download any requirements which were only fetched by metadata.
Download any requirements which were only fetched by metadata.
def _complete_partial_requirements( self, partially_downloaded_reqs, # type: Iterable[InstallRequirement] parallel_builds=False, # type: bool ): # type: (...) -> None """Download any requirements which were only fetched by metadata.""" # Download to a temporary directory. These will be copied over as # needed for downstream 'download', 'wheel', and 'install' commands. temp_dir = TempDirectory(kind="unpack", globally_managed=True).path # Map each link to the requirement that owns it. This allows us to set # `req.local_file_path` on the appropriate requirement after passing # all the links at once into BatchDownloader. links_to_fully_download = {} # type: Dict[Link, InstallRequirement] for req in partially_downloaded_reqs: assert req.link links_to_fully_download[req.link] = req batch_download = self._batch_download( links_to_fully_download.keys(), temp_dir, ) for link, (filepath, _) in batch_download: logger.debug("Downloading link %s to %s", link, filepath) req = links_to_fully_download[link] req.local_file_path = filepath # This step is necessary to ensure all lazy wheels are processed # successfully by the 'download', 'wheel', and 'install' commands. for req in partially_downloaded_reqs: self._prepare_linked_requirement(req, parallel_builds)
[ "def", "_complete_partial_requirements", "(", "self", ",", "partially_downloaded_reqs", ",", "# type: Iterable[InstallRequirement]", "parallel_builds", "=", "False", ",", "# type: bool", ")", ":", "# type: (...) -> None", "# Download to a temporary directory. These will be copied over as", "# needed for downstream 'download', 'wheel', and 'install' commands.", "temp_dir", "=", "TempDirectory", "(", "kind", "=", "\"unpack\"", ",", "globally_managed", "=", "True", ")", ".", "path", "# Map each link to the requirement that owns it. This allows us to set", "# `req.local_file_path` on the appropriate requirement after passing", "# all the links at once into BatchDownloader.", "links_to_fully_download", "=", "{", "}", "# type: Dict[Link, InstallRequirement]", "for", "req", "in", "partially_downloaded_reqs", ":", "assert", "req", ".", "link", "links_to_fully_download", "[", "req", ".", "link", "]", "=", "req", "batch_download", "=", "self", ".", "_batch_download", "(", "links_to_fully_download", ".", "keys", "(", ")", ",", "temp_dir", ",", ")", "for", "link", ",", "(", "filepath", ",", "_", ")", "in", "batch_download", ":", "logger", ".", "debug", "(", "\"Downloading link %s to %s\"", ",", "link", ",", "filepath", ")", "req", "=", "links_to_fully_download", "[", "link", "]", "req", ".", "local_file_path", "=", "filepath", "# This step is necessary to ensure all lazy wheels are processed", "# successfully by the 'download', 'wheel', and 'install' commands.", "for", "req", "in", "partially_downloaded_reqs", ":", "self", ".", "_prepare_linked_requirement", "(", "req", ",", "parallel_builds", ")" ]
[ 449, 4 ]
[ 480, 66 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer.prepare_linked_requirement
(self, req, parallel_builds=False)
Prepare a requirement to be obtained from req.link.
Prepare a requirement to be obtained from req.link.
def prepare_linked_requirement(self, req, parallel_builds=False): # type: (InstallRequirement, bool) -> Distribution """Prepare a requirement to be obtained from req.link.""" assert req.link link = req.link self._log_preparing_link(req) with indent_log(): # Check if the relevant file is already available # in the download directory file_path = None if self.download_dir is not None and link.is_wheel: hashes = self._get_linked_req_hashes(req) file_path = _check_download_dir(req.link, self.download_dir, hashes) if file_path is not None: # The file is already available, so mark it as downloaded self._downloaded[req.link.url] = file_path, None else: # The file is not available, attempt to fetch only metadata wheel_dist = self._fetch_metadata_using_lazy_wheel(link) if wheel_dist is not None: req.needs_more_preparation = True return wheel_dist # None of the optimizations worked, fully prepare the requirement return self._prepare_linked_requirement(req, parallel_builds)
[ "def", "prepare_linked_requirement", "(", "self", ",", "req", ",", "parallel_builds", "=", "False", ")", ":", "# type: (InstallRequirement, bool) -> Distribution", "assert", "req", ".", "link", "link", "=", "req", ".", "link", "self", ".", "_log_preparing_link", "(", "req", ")", "with", "indent_log", "(", ")", ":", "# Check if the relevant file is already available", "# in the download directory", "file_path", "=", "None", "if", "self", ".", "download_dir", "is", "not", "None", "and", "link", ".", "is_wheel", ":", "hashes", "=", "self", ".", "_get_linked_req_hashes", "(", "req", ")", "file_path", "=", "_check_download_dir", "(", "req", ".", "link", ",", "self", ".", "download_dir", ",", "hashes", ")", "if", "file_path", "is", "not", "None", ":", "# The file is already available, so mark it as downloaded", "self", ".", "_downloaded", "[", "req", ".", "link", ".", "url", "]", "=", "file_path", ",", "None", "else", ":", "# The file is not available, attempt to fetch only metadata", "wheel_dist", "=", "self", ".", "_fetch_metadata_using_lazy_wheel", "(", "link", ")", "if", "wheel_dist", "is", "not", "None", ":", "req", ".", "needs_more_preparation", "=", "True", "return", "wheel_dist", "# None of the optimizations worked, fully prepare the requirement", "return", "self", ".", "_prepare_linked_requirement", "(", "req", ",", "parallel_builds", ")" ]
[ 482, 4 ]
[ 507, 73 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer.prepare_linked_requirements_more
(self, reqs, parallel_builds=False)
Prepare linked requirements more, if needed.
Prepare linked requirements more, if needed.
def prepare_linked_requirements_more(self, reqs, parallel_builds=False): # type: (Iterable[InstallRequirement], bool) -> None """Prepare linked requirements more, if needed.""" reqs = [req for req in reqs if req.needs_more_preparation] for req in reqs: # Determine if any of these requirements were already downloaded. if self.download_dir is not None and req.link.is_wheel: hashes = self._get_linked_req_hashes(req) file_path = _check_download_dir(req.link, self.download_dir, hashes) if file_path is not None: self._downloaded[req.link.url] = file_path, None req.needs_more_preparation = False # Prepare requirements we found were already downloaded for some # reason. The other downloads will be completed separately. partially_downloaded_reqs = [] # type: List[InstallRequirement] for req in reqs: if req.needs_more_preparation: partially_downloaded_reqs.append(req) else: self._prepare_linked_requirement(req, parallel_builds) # TODO: separate this part out from RequirementPreparer when the v1 # resolver can be removed! self._complete_partial_requirements( partially_downloaded_reqs, parallel_builds=parallel_builds, )
[ "def", "prepare_linked_requirements_more", "(", "self", ",", "reqs", ",", "parallel_builds", "=", "False", ")", ":", "# type: (Iterable[InstallRequirement], bool) -> None", "reqs", "=", "[", "req", "for", "req", "in", "reqs", "if", "req", ".", "needs_more_preparation", "]", "for", "req", "in", "reqs", ":", "# Determine if any of these requirements were already downloaded.", "if", "self", ".", "download_dir", "is", "not", "None", "and", "req", ".", "link", ".", "is_wheel", ":", "hashes", "=", "self", ".", "_get_linked_req_hashes", "(", "req", ")", "file_path", "=", "_check_download_dir", "(", "req", ".", "link", ",", "self", ".", "download_dir", ",", "hashes", ")", "if", "file_path", "is", "not", "None", ":", "self", ".", "_downloaded", "[", "req", ".", "link", ".", "url", "]", "=", "file_path", ",", "None", "req", ".", "needs_more_preparation", "=", "False", "# Prepare requirements we found were already downloaded for some", "# reason. The other downloads will be completed separately.", "partially_downloaded_reqs", "=", "[", "]", "# type: List[InstallRequirement]", "for", "req", "in", "reqs", ":", "if", "req", ".", "needs_more_preparation", ":", "partially_downloaded_reqs", ".", "append", "(", "req", ")", "else", ":", "self", ".", "_prepare_linked_requirement", "(", "req", ",", "parallel_builds", ")", "# TODO: separate this part out from RequirementPreparer when the v1", "# resolver can be removed!", "self", ".", "_complete_partial_requirements", "(", "partially_downloaded_reqs", ",", "parallel_builds", "=", "parallel_builds", ",", ")" ]
[ 509, 4 ]
[ 535, 9 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer.prepare_editable_requirement
( self, req, # type: InstallRequirement )
Prepare an editable requirement
Prepare an editable requirement
def prepare_editable_requirement( self, req, # type: InstallRequirement ): # type: (...) -> Distribution """Prepare an editable requirement """ assert req.editable, "cannot prepare a non-editable req as editable" logger.info('Obtaining %s', req) with indent_log(): if self.require_hashes: raise InstallationError( 'The editable requirement {} cannot be installed when ' 'requiring hashes, because there is no single file to ' 'hash.'.format(req) ) req.ensure_has_source_dir(self.src_dir) req.update_editable() dist = _get_prepared_distribution( req, self.req_tracker, self.finder, self.build_isolation, ) req.check_if_exists(self.use_user_site) return dist
[ "def", "prepare_editable_requirement", "(", "self", ",", "req", ",", "# type: InstallRequirement", ")", ":", "# type: (...) -> Distribution", "assert", "req", ".", "editable", ",", "\"cannot prepare a non-editable req as editable\"", "logger", ".", "info", "(", "'Obtaining %s'", ",", "req", ")", "with", "indent_log", "(", ")", ":", "if", "self", ".", "require_hashes", ":", "raise", "InstallationError", "(", "'The editable requirement {} cannot be installed when '", "'requiring hashes, because there is no single file to '", "'hash.'", ".", "format", "(", "req", ")", ")", "req", ".", "ensure_has_source_dir", "(", "self", ".", "src_dir", ")", "req", ".", "update_editable", "(", ")", "dist", "=", "_get_prepared_distribution", "(", "req", ",", "self", ".", "req_tracker", ",", "self", ".", "finder", ",", "self", ".", "build_isolation", ",", ")", "req", ".", "check_if_exists", "(", "self", ".", "use_user_site", ")", "return", "dist" ]
[ 600, 4 ]
[ 627, 19 ]
python
en
['en', 'en', 'en']
True
RequirementPreparer.prepare_installed_requirement
( self, req, # type: InstallRequirement skip_reason # type: str )
Prepare an already-installed requirement
Prepare an already-installed requirement
def prepare_installed_requirement( self, req, # type: InstallRequirement skip_reason # type: str ): # type: (...) -> Distribution """Prepare an already-installed requirement """ assert req.satisfied_by, "req should have been satisfied but isn't" assert skip_reason is not None, ( "did not get skip reason skipped but req.satisfied_by " "is set to {}".format(req.satisfied_by) ) logger.info( 'Requirement %s: %s (%s)', skip_reason, req, req.satisfied_by.version ) with indent_log(): if self.require_hashes: logger.debug( 'Since it is already installed, we are trusting this ' 'package without checking its hash. To ensure a ' 'completely repeatable environment, install into an ' 'empty virtualenv.' ) return InstalledDistribution(req).get_pkg_resources_distribution()
[ "def", "prepare_installed_requirement", "(", "self", ",", "req", ",", "# type: InstallRequirement", "skip_reason", "# type: str", ")", ":", "# type: (...) -> Distribution", "assert", "req", ".", "satisfied_by", ",", "\"req should have been satisfied but isn't\"", "assert", "skip_reason", "is", "not", "None", ",", "(", "\"did not get skip reason skipped but req.satisfied_by \"", "\"is set to {}\"", ".", "format", "(", "req", ".", "satisfied_by", ")", ")", "logger", ".", "info", "(", "'Requirement %s: %s (%s)'", ",", "skip_reason", ",", "req", ",", "req", ".", "satisfied_by", ".", "version", ")", "with", "indent_log", "(", ")", ":", "if", "self", ".", "require_hashes", ":", "logger", ".", "debug", "(", "'Since it is already installed, we are trusting this '", "'package without checking its hash. To ensure a '", "'completely repeatable environment, install into an '", "'empty virtualenv.'", ")", "return", "InstalledDistribution", "(", "req", ")", ".", "get_pkg_resources_distribution", "(", ")" ]
[ 629, 4 ]
[ 654, 78 ]
python
en
['en', 'en', 'en']
True
get_template
(template_name, using=None)
Load and return a template for the given name. Raise TemplateDoesNotExist if no such template exists.
Load and return a template for the given name.
def get_template(template_name, using=None): """ Load and return a template for the given name. Raise TemplateDoesNotExist if no such template exists. """ chain = [] engines = _engine_list(using) for engine in engines: try: return engine.get_template(template_name) except TemplateDoesNotExist as e: chain.append(e) raise TemplateDoesNotExist(template_name, chain=chain)
[ "def", "get_template", "(", "template_name", ",", "using", "=", "None", ")", ":", "chain", "=", "[", "]", "engines", "=", "_engine_list", "(", "using", ")", "for", "engine", "in", "engines", ":", "try", ":", "return", "engine", ".", "get_template", "(", "template_name", ")", "except", "TemplateDoesNotExist", "as", "e", ":", "chain", ".", "append", "(", "e", ")", "raise", "TemplateDoesNotExist", "(", "template_name", ",", "chain", "=", "chain", ")" ]
[ 4, 0 ]
[ 18, 58 ]
python
en
['en', 'error', 'th']
False
select_template
(template_name_list, using=None)
Load and return a template for one of the given names. Try names in order and return the first template found. Raise TemplateDoesNotExist if no such template exists.
Load and return a template for one of the given names.
def select_template(template_name_list, using=None): """ Load and return a template for one of the given names. Try names in order and return the first template found. Raise TemplateDoesNotExist if no such template exists. """ if isinstance(template_name_list, str): raise TypeError( 'select_template() takes an iterable of template names but got a ' 'string: %r. Use get_template() if you want to load a single ' 'template by name.' % template_name_list ) chain = [] engines = _engine_list(using) for template_name in template_name_list: for engine in engines: try: return engine.get_template(template_name) except TemplateDoesNotExist as e: chain.append(e) if template_name_list: raise TemplateDoesNotExist(', '.join(template_name_list), chain=chain) else: raise TemplateDoesNotExist("No template names provided")
[ "def", "select_template", "(", "template_name_list", ",", "using", "=", "None", ")", ":", "if", "isinstance", "(", "template_name_list", ",", "str", ")", ":", "raise", "TypeError", "(", "'select_template() takes an iterable of template names but got a '", "'string: %r. Use get_template() if you want to load a single '", "'template by name.'", "%", "template_name_list", ")", "chain", "=", "[", "]", "engines", "=", "_engine_list", "(", "using", ")", "for", "template_name", "in", "template_name_list", ":", "for", "engine", "in", "engines", ":", "try", ":", "return", "engine", ".", "get_template", "(", "template_name", ")", "except", "TemplateDoesNotExist", "as", "e", ":", "chain", ".", "append", "(", "e", ")", "if", "template_name_list", ":", "raise", "TemplateDoesNotExist", "(", "', '", ".", "join", "(", "template_name_list", ")", ",", "chain", "=", "chain", ")", "else", ":", "raise", "TemplateDoesNotExist", "(", "\"No template names provided\"", ")" ]
[ 21, 0 ]
[ 48, 64 ]
python
en
['en', 'error', 'th']
False
render_to_string
(template_name, context=None, request=None, using=None)
Load a template and render it with a context. Return a string. template_name may be a string or a list of strings.
Load a template and render it with a context. Return a string.
def render_to_string(template_name, context=None, request=None, using=None): """ Load a template and render it with a context. Return a string. template_name may be a string or a list of strings. """ if isinstance(template_name, (list, tuple)): template = select_template(template_name, using=using) else: template = get_template(template_name, using=using) return template.render(context, request)
[ "def", "render_to_string", "(", "template_name", ",", "context", "=", "None", ",", "request", "=", "None", ",", "using", "=", "None", ")", ":", "if", "isinstance", "(", "template_name", ",", "(", "list", ",", "tuple", ")", ")", ":", "template", "=", "select_template", "(", "template_name", ",", "using", "=", "using", ")", "else", ":", "template", "=", "get_template", "(", "template_name", ",", "using", "=", "using", ")", "return", "template", ".", "render", "(", "context", ",", "request", ")" ]
[ 51, 0 ]
[ 61, 44 ]
python
en
['en', 'error', 'th']
False
dist_from_wheel_url
(name: str, url: str, session: PipSession)
Return a pkg_resources.Distribution from the given wheel URL. This uses HTTP range requests to only fetch the potion of the wheel containing metadata, just enough for the object to be constructed. If such requests are not supported, HTTPRangeRequestUnsupported is raised.
Return a pkg_resources.Distribution from the given wheel URL.
def dist_from_wheel_url(name: str, url: str, session: PipSession) -> Distribution: """Return a pkg_resources.Distribution from the given wheel URL. This uses HTTP range requests to only fetch the potion of the wheel containing metadata, just enough for the object to be constructed. If such requests are not supported, HTTPRangeRequestUnsupported is raised. """ with LazyZipOverHTTP(url, session) as wheel: # For read-only ZIP files, ZipFile only needs methods read, # seek, seekable and tell, not the whole IO protocol. zip_file = ZipFile(wheel) # type: ignore # After context manager exit, wheel.name # is an invalid file by intention. return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name)
[ "def", "dist_from_wheel_url", "(", "name", ":", "str", ",", "url", ":", "str", ",", "session", ":", "PipSession", ")", "->", "Distribution", ":", "with", "LazyZipOverHTTP", "(", "url", ",", "session", ")", "as", "wheel", ":", "# For read-only ZIP files, ZipFile only needs methods read,", "# seek, seekable and tell, not the whole IO protocol.", "zip_file", "=", "ZipFile", "(", "wheel", ")", "# type: ignore", "# After context manager exit, wheel.name", "# is an invalid file by intention.", "return", "pkg_resources_distribution_for_wheel", "(", "zip_file", ",", "name", ",", "wheel", ".", "name", ")" ]
[ 22, 0 ]
[ 36, 79 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.mode
(self)
Opening mode, which is always rb.
Opening mode, which is always rb.
def mode(self) -> str: """Opening mode, which is always rb.""" return "rb"
[ "def", "mode", "(", "self", ")", "->", "str", ":", "return", "\"rb\"" ]
[ 65, 4 ]
[ 67, 19 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.name
(self)
Path to the underlying file.
Path to the underlying file.
def name(self) -> str: """Path to the underlying file.""" return self._file.name
[ "def", "name", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_file", ".", "name" ]
[ 70, 4 ]
[ 72, 30 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.seekable
(self)
Return whether random access is supported, which is True.
Return whether random access is supported, which is True.
def seekable(self) -> bool: """Return whether random access is supported, which is True.""" return True
[ "def", "seekable", "(", "self", ")", "->", "bool", ":", "return", "True" ]
[ 74, 4 ]
[ 76, 19 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.close
(self)
Close the file.
Close the file.
def close(self) -> None: """Close the file.""" self._file.close()
[ "def", "close", "(", "self", ")", "->", "None", ":", "self", ".", "_file", ".", "close", "(", ")" ]
[ 78, 4 ]
[ 80, 26 ]
python
en
['en', 'it', 'en']
True
LazyZipOverHTTP.closed
(self)
Whether the file is closed.
Whether the file is closed.
def closed(self) -> bool: """Whether the file is closed.""" return self._file.closed
[ "def", "closed", "(", "self", ")", "->", "bool", ":", "return", "self", ".", "_file", ".", "closed" ]
[ 83, 4 ]
[ 85, 32 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.read
(self, size: int = -1)
Read up to size bytes from the object and return them. As a convenience, if size is unspecified or -1, all bytes until EOF are returned. Fewer than size bytes may be returned if EOF is reached.
Read up to size bytes from the object and return them.
def read(self, size: int = -1) -> bytes: """Read up to size bytes from the object and return them. As a convenience, if size is unspecified or -1, all bytes until EOF are returned. Fewer than size bytes may be returned if EOF is reached. """ download_size = max(size, self._chunk_size) start, length = self.tell(), self._length stop = length if size < 0 else min(start + download_size, length) start = max(0, stop - download_size) self._download(start, stop - 1) return self._file.read(size)
[ "def", "read", "(", "self", ",", "size", ":", "int", "=", "-", "1", ")", "->", "bytes", ":", "download_size", "=", "max", "(", "size", ",", "self", ".", "_chunk_size", ")", "start", ",", "length", "=", "self", ".", "tell", "(", ")", ",", "self", ".", "_length", "stop", "=", "length", "if", "size", "<", "0", "else", "min", "(", "start", "+", "download_size", ",", "length", ")", "start", "=", "max", "(", "0", ",", "stop", "-", "download_size", ")", "self", ".", "_download", "(", "start", ",", "stop", "-", "1", ")", "return", "self", ".", "_file", ".", "read", "(", "size", ")" ]
[ 87, 4 ]
[ 99, 36 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.readable
(self)
Return whether the file is readable, which is True.
Return whether the file is readable, which is True.
def readable(self) -> bool: """Return whether the file is readable, which is True.""" return True
[ "def", "readable", "(", "self", ")", "->", "bool", ":", "return", "True" ]
[ 101, 4 ]
[ 103, 19 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.seek
(self, offset: int, whence: int = 0)
Change stream position and return the new absolute position. Seek to offset relative position indicated by whence: * 0: Start of stream (the default). pos should be >= 0; * 1: Current position - pos may be negative; * 2: End of stream - pos usually negative.
Change stream position and return the new absolute position.
def seek(self, offset: int, whence: int = 0) -> int: """Change stream position and return the new absolute position. Seek to offset relative position indicated by whence: * 0: Start of stream (the default). pos should be >= 0; * 1: Current position - pos may be negative; * 2: End of stream - pos usually negative. """ return self._file.seek(offset, whence)
[ "def", "seek", "(", "self", ",", "offset", ":", "int", ",", "whence", ":", "int", "=", "0", ")", "->", "int", ":", "return", "self", ".", "_file", ".", "seek", "(", "offset", ",", "whence", ")" ]
[ 105, 4 ]
[ 113, 46 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.tell
(self)
Return the current position.
Return the current position.
def tell(self) -> int: """Return the current position.""" return self._file.tell()
[ "def", "tell", "(", "self", ")", "->", "int", ":", "return", "self", ".", "_file", ".", "tell", "(", ")" ]
[ 115, 4 ]
[ 117, 32 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.truncate
(self, size: Optional[int] = None)
Resize the stream to the given size in bytes. If size is unspecified resize to the current position. The current stream position isn't changed. Return the new file size.
Resize the stream to the given size in bytes.
def truncate(self, size: Optional[int] = None) -> int: """Resize the stream to the given size in bytes. If size is unspecified resize to the current position. The current stream position isn't changed. Return the new file size. """ return self._file.truncate(size)
[ "def", "truncate", "(", "self", ",", "size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "int", ":", "return", "self", ".", "_file", ".", "truncate", "(", "size", ")" ]
[ 119, 4 ]
[ 127, 40 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP.writable
(self)
Return False.
Return False.
def writable(self) -> bool: """Return False.""" return False
[ "def", "writable", "(", "self", ")", "->", "bool", ":", "return", "False" ]
[ 129, 4 ]
[ 131, 20 ]
python
en
['en', 'ms', 'en']
False
LazyZipOverHTTP._stay
(self)
Return a context manager keeping the position. At the end of the block, seek back to original position.
Return a context manager keeping the position.
def _stay(self) -> Iterator[None]: """Return a context manager keeping the position. At the end of the block, seek back to original position. """ pos = self.tell() try: yield finally: self.seek(pos)
[ "def", "_stay", "(", "self", ")", "->", "Iterator", "[", "None", "]", ":", "pos", "=", "self", ".", "tell", "(", ")", "try", ":", "yield", "finally", ":", "self", ".", "seek", "(", "pos", ")" ]
[ 141, 4 ]
[ 150, 26 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP._check_zip
(self)
Check and download until the file is a valid ZIP.
Check and download until the file is a valid ZIP.
def _check_zip(self) -> None: """Check and download until the file is a valid ZIP.""" end = self._length - 1 for start in reversed(range(0, end, self._chunk_size)): self._download(start, end) with self._stay(): try: # For read-only ZIP files, ZipFile only needs # methods read, seek, seekable and tell. ZipFile(self) # type: ignore except BadZipfile: pass else: break
[ "def", "_check_zip", "(", "self", ")", "->", "None", ":", "end", "=", "self", ".", "_length", "-", "1", "for", "start", "in", "reversed", "(", "range", "(", "0", ",", "end", ",", "self", ".", "_chunk_size", ")", ")", ":", "self", ".", "_download", "(", "start", ",", "end", ")", "with", "self", ".", "_stay", "(", ")", ":", "try", ":", "# For read-only ZIP files, ZipFile only needs", "# methods read, seek, seekable and tell.", "ZipFile", "(", "self", ")", "# type: ignore", "except", "BadZipfile", ":", "pass", "else", ":", "break" ]
[ 152, 4 ]
[ 165, 25 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP._stream_response
( self, start: int, end: int, base_headers: Dict[str, str] = HEADERS )
Return HTTP response to a range request from start to end.
Return HTTP response to a range request from start to end.
def _stream_response( self, start: int, end: int, base_headers: Dict[str, str] = HEADERS ) -> Response: """Return HTTP response to a range request from start to end.""" headers = base_headers.copy() headers["Range"] = f"bytes={start}-{end}" # TODO: Get range requests to be correctly cached headers["Cache-Control"] = "no-cache" return self._session.get(self._url, headers=headers, stream=True)
[ "def", "_stream_response", "(", "self", ",", "start", ":", "int", ",", "end", ":", "int", ",", "base_headers", ":", "Dict", "[", "str", ",", "str", "]", "=", "HEADERS", ")", "->", "Response", ":", "headers", "=", "base_headers", ".", "copy", "(", ")", "headers", "[", "\"Range\"", "]", "=", "f\"bytes={start}-{end}\"", "# TODO: Get range requests to be correctly cached", "headers", "[", "\"Cache-Control\"", "]", "=", "\"no-cache\"", "return", "self", ".", "_session", ".", "get", "(", "self", ".", "_url", ",", "headers", "=", "headers", ",", "stream", "=", "True", ")" ]
[ 167, 4 ]
[ 175, 73 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP._merge
( self, start: int, end: int, left: int, right: int )
Return an iterator of intervals to be fetched. Args: start (int): Start of needed interval end (int): End of needed interval left (int): Index of first overlapping downloaded data right (int): Index after last overlapping downloaded data
Return an iterator of intervals to be fetched.
def _merge( self, start: int, end: int, left: int, right: int ) -> Iterator[Tuple[int, int]]: """Return an iterator of intervals to be fetched. Args: start (int): Start of needed interval end (int): End of needed interval left (int): Index of first overlapping downloaded data right (int): Index after last overlapping downloaded data """ lslice, rslice = self._left[left:right], self._right[left:right] i = start = min([start] + lslice[:1]) end = max([end] + rslice[-1:]) for j, k in zip(lslice, rslice): if j > i: yield i, j - 1 i = k + 1 if i <= end: yield i, end self._left[left:right], self._right[left:right] = [start], [end]
[ "def", "_merge", "(", "self", ",", "start", ":", "int", ",", "end", ":", "int", ",", "left", ":", "int", ",", "right", ":", "int", ")", "->", "Iterator", "[", "Tuple", "[", "int", ",", "int", "]", "]", ":", "lslice", ",", "rslice", "=", "self", ".", "_left", "[", "left", ":", "right", "]", ",", "self", ".", "_right", "[", "left", ":", "right", "]", "i", "=", "start", "=", "min", "(", "[", "start", "]", "+", "lslice", "[", ":", "1", "]", ")", "end", "=", "max", "(", "[", "end", "]", "+", "rslice", "[", "-", "1", ":", "]", ")", "for", "j", ",", "k", "in", "zip", "(", "lslice", ",", "rslice", ")", ":", "if", "j", ">", "i", ":", "yield", "i", ",", "j", "-", "1", "i", "=", "k", "+", "1", "if", "i", "<=", "end", ":", "yield", "i", ",", "end", "self", ".", "_left", "[", "left", ":", "right", "]", ",", "self", ".", "_right", "[", "left", ":", "right", "]", "=", "[", "start", "]", ",", "[", "end", "]" ]
[ 177, 4 ]
[ 197, 72 ]
python
en
['en', 'en', 'en']
True
LazyZipOverHTTP._download
(self, start: int, end: int)
Download bytes from start to end inclusively.
Download bytes from start to end inclusively.
def _download(self, start: int, end: int) -> None: """Download bytes from start to end inclusively.""" with self._stay(): left = bisect_left(self._right, start) right = bisect_right(self._left, end) for start, end in self._merge(start, end, left, right): response = self._stream_response(start, end) response.raise_for_status() self.seek(start) for chunk in response_chunks(response, self._chunk_size): self._file.write(chunk)
[ "def", "_download", "(", "self", ",", "start", ":", "int", ",", "end", ":", "int", ")", "->", "None", ":", "with", "self", ".", "_stay", "(", ")", ":", "left", "=", "bisect_left", "(", "self", ".", "_right", ",", "start", ")", "right", "=", "bisect_right", "(", "self", ".", "_left", ",", "end", ")", "for", "start", ",", "end", "in", "self", ".", "_merge", "(", "start", ",", "end", ",", "left", ",", "right", ")", ":", "response", "=", "self", ".", "_stream_response", "(", "start", ",", "end", ")", "response", ".", "raise_for_status", "(", ")", "self", ".", "seek", "(", "start", ")", "for", "chunk", "in", "response_chunks", "(", "response", ",", "self", ".", "_chunk_size", ")", ":", "self", ".", "_file", ".", "write", "(", "chunk", ")" ]
[ 199, 4 ]
[ 209, 43 ]
python
en
['en', 'en', 'en']
True
SearchScope.create
( cls, find_links: List[str], index_urls: List[str], )
Create a SearchScope object after normalizing the `find_links`.
Create a SearchScope object after normalizing the `find_links`.
def create( cls, find_links: List[str], index_urls: List[str], ) -> "SearchScope": """ Create a SearchScope object after normalizing the `find_links`. """ # Build find_links. If an argument starts with ~, it may be # a local file relative to a home directory. So try normalizing # it and if it exists, use the normalized version. # This is deliberately conservative - it might be fine just to # blindly normalize anything starting with a ~... built_find_links: List[str] = [] for link in find_links: if link.startswith('~'): new_link = normalize_path(link) if os.path.exists(new_link): link = new_link built_find_links.append(link) # If we don't have TLS enabled, then WARN if anyplace we're looking # relies on TLS. if not has_tls(): for link in itertools.chain(index_urls, built_find_links): parsed = urllib.parse.urlparse(link) if parsed.scheme == 'https': logger.warning( 'pip is configured with locations that require ' 'TLS/SSL, however the ssl module in Python is not ' 'available.' ) break return cls( find_links=built_find_links, index_urls=index_urls, )
[ "def", "create", "(", "cls", ",", "find_links", ":", "List", "[", "str", "]", ",", "index_urls", ":", "List", "[", "str", "]", ",", ")", "->", "\"SearchScope\"", ":", "# Build find_links. If an argument starts with ~, it may be", "# a local file relative to a home directory. So try normalizing", "# it and if it exists, use the normalized version.", "# This is deliberately conservative - it might be fine just to", "# blindly normalize anything starting with a ~...", "built_find_links", ":", "List", "[", "str", "]", "=", "[", "]", "for", "link", "in", "find_links", ":", "if", "link", ".", "startswith", "(", "'~'", ")", ":", "new_link", "=", "normalize_path", "(", "link", ")", "if", "os", ".", "path", ".", "exists", "(", "new_link", ")", ":", "link", "=", "new_link", "built_find_links", ".", "append", "(", "link", ")", "# If we don't have TLS enabled, then WARN if anyplace we're looking", "# relies on TLS.", "if", "not", "has_tls", "(", ")", ":", "for", "link", "in", "itertools", ".", "chain", "(", "index_urls", ",", "built_find_links", ")", ":", "parsed", "=", "urllib", ".", "parse", ".", "urlparse", "(", "link", ")", "if", "parsed", ".", "scheme", "==", "'https'", ":", "logger", ".", "warning", "(", "'pip is configured with locations that require '", "'TLS/SSL, however the ssl module in Python is not '", "'available.'", ")", "break", "return", "cls", "(", "find_links", "=", "built_find_links", ",", "index_urls", "=", "index_urls", ",", ")" ]
[ 25, 4 ]
[ 62, 9 ]
python
en
['en', 'error', 'th']
False
SearchScope.get_index_urls_locations
(self, project_name: str)
Returns the locations found via self.index_urls Checks the url_name on the main (first in the list) index and use this url_name to produce all locations
Returns the locations found via self.index_urls
def get_index_urls_locations(self, project_name: str) -> List[str]: """Returns the locations found via self.index_urls Checks the url_name on the main (first in the list) index and use this url_name to produce all locations """ def mkurl_pypi_url(url: str) -> str: loc = posixpath.join( url, urllib.parse.quote(canonicalize_name(project_name))) # For maximum compatibility with easy_install, ensure the path # ends in a trailing slash. Although this isn't in the spec # (and PyPI can handle it without the slash) some other index # implementations might break if they relied on easy_install's # behavior. if not loc.endswith('/'): loc = loc + '/' return loc return [mkurl_pypi_url(url) for url in self.index_urls]
[ "def", "get_index_urls_locations", "(", "self", ",", "project_name", ":", "str", ")", "->", "List", "[", "str", "]", ":", "def", "mkurl_pypi_url", "(", "url", ":", "str", ")", "->", "str", ":", "loc", "=", "posixpath", ".", "join", "(", "url", ",", "urllib", ".", "parse", ".", "quote", "(", "canonicalize_name", "(", "project_name", ")", ")", ")", "# For maximum compatibility with easy_install, ensure the path", "# ends in a trailing slash. Although this isn't in the spec", "# (and PyPI can handle it without the slash) some other index", "# implementations might break if they relied on easy_install's", "# behavior.", "if", "not", "loc", ".", "endswith", "(", "'/'", ")", ":", "loc", "=", "loc", "+", "'/'", "return", "loc", "return", "[", "mkurl_pypi_url", "(", "url", ")", "for", "url", "in", "self", ".", "index_urls", "]" ]
[ 105, 4 ]
[ 125, 63 ]
python
en
['en', 'la', 'en']
True
make_headers
( keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None, proxy_basic_auth=None, disable_cache=None, )
Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'}
Shortcuts for generating request headers.
def make_headers( keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None, proxy_basic_auth=None, disable_cache=None, ): """ Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} """ headers = {} if accept_encoding: if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): accept_encoding = ",".join(accept_encoding) else: accept_encoding = ACCEPT_ENCODING headers["accept-encoding"] = accept_encoding if user_agent: headers["user-agent"] = user_agent if keep_alive: headers["connection"] = "keep-alive" if basic_auth: headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") if proxy_basic_auth: headers["proxy-authorization"] = "Basic " + b64encode( b(proxy_basic_auth) ).decode("utf-8") if disable_cache: headers["cache-control"] = "no-cache" return headers
[ "def", "make_headers", "(", "keep_alive", "=", "None", ",", "accept_encoding", "=", "None", ",", "user_agent", "=", "None", ",", "basic_auth", "=", "None", ",", "proxy_basic_auth", "=", "None", ",", "disable_cache", "=", "None", ",", ")", ":", "headers", "=", "{", "}", "if", "accept_encoding", ":", "if", "isinstance", "(", "accept_encoding", ",", "str", ")", ":", "pass", "elif", "isinstance", "(", "accept_encoding", ",", "list", ")", ":", "accept_encoding", "=", "\",\"", ".", "join", "(", "accept_encoding", ")", "else", ":", "accept_encoding", "=", "ACCEPT_ENCODING", "headers", "[", "\"accept-encoding\"", "]", "=", "accept_encoding", "if", "user_agent", ":", "headers", "[", "\"user-agent\"", "]", "=", "user_agent", "if", "keep_alive", ":", "headers", "[", "\"connection\"", "]", "=", "\"keep-alive\"", "if", "basic_auth", ":", "headers", "[", "\"authorization\"", "]", "=", "\"Basic \"", "+", "b64encode", "(", "b", "(", "basic_auth", ")", ")", ".", "decode", "(", "\"utf-8\"", ")", "if", "proxy_basic_auth", ":", "headers", "[", "\"proxy-authorization\"", "]", "=", "\"Basic \"", "+", "b64encode", "(", "b", "(", "proxy_basic_auth", ")", ")", ".", "decode", "(", "\"utf-8\"", ")", "if", "disable_cache", ":", "headers", "[", "\"cache-control\"", "]", "=", "\"no-cache\"", "return", "headers" ]
[ 25, 0 ]
[ 94, 18 ]
python
en
['en', 'error', 'th']
False
set_file_position
(body, pos)
If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use.
If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use.
def set_file_position(body, pos): """ If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use. """ if pos is not None: rewind_body(body, pos) elif getattr(body, "tell", None) is not None: try: pos = body.tell() except (IOError, OSError): # This differentiates from None, allowing us to catch # a failed `tell()` later when trying to rewind the body. pos = _FAILEDTELL return pos
[ "def", "set_file_position", "(", "body", ",", "pos", ")", ":", "if", "pos", "is", "not", "None", ":", "rewind_body", "(", "body", ",", "pos", ")", "elif", "getattr", "(", "body", ",", "\"tell\"", ",", "None", ")", "is", "not", "None", ":", "try", ":", "pos", "=", "body", ".", "tell", "(", ")", "except", "(", "IOError", ",", "OSError", ")", ":", "# This differentiates from None, allowing us to catch", "# a failed `tell()` later when trying to rewind the body.", "pos", "=", "_FAILEDTELL", "return", "pos" ]
[ 97, 0 ]
[ 112, 14 ]
python
en
['en', 'error', 'th']
False
rewind_body
(body, body_pos)
Attempt to rewind body to a certain position. Primarily used for request redirects and retries. :param body: File-like object that supports seek. :param int pos: Position to seek to in file.
Attempt to rewind body to a certain position. Primarily used for request redirects and retries.
def rewind_body(body, body_pos): """ Attempt to rewind body to a certain position. Primarily used for request redirects and retries. :param body: File-like object that supports seek. :param int pos: Position to seek to in file. """ body_seek = getattr(body, "seek", None) if body_seek is not None and isinstance(body_pos, integer_types): try: body_seek(body_pos) except (IOError, OSError): raise UnrewindableBodyError( "An error occurred when rewinding request body for redirect/retry." ) elif body_pos is _FAILEDTELL: raise UnrewindableBodyError( "Unable to record file position for rewinding " "request body during a redirect/retry." ) else: raise ValueError( "body_pos must be of type integer, instead it was %s." % type(body_pos) )
[ "def", "rewind_body", "(", "body", ",", "body_pos", ")", ":", "body_seek", "=", "getattr", "(", "body", ",", "\"seek\"", ",", "None", ")", "if", "body_seek", "is", "not", "None", "and", "isinstance", "(", "body_pos", ",", "integer_types", ")", ":", "try", ":", "body_seek", "(", "body_pos", ")", "except", "(", "IOError", ",", "OSError", ")", ":", "raise", "UnrewindableBodyError", "(", "\"An error occurred when rewinding request body for redirect/retry.\"", ")", "elif", "body_pos", "is", "_FAILEDTELL", ":", "raise", "UnrewindableBodyError", "(", "\"Unable to record file position for rewinding \"", "\"request body during a redirect/retry.\"", ")", "else", ":", "raise", "ValueError", "(", "\"body_pos must be of type integer, instead it was %s.\"", "%", "type", "(", "body_pos", ")", ")" ]
[ 115, 0 ]
[ 142, 9 ]
python
en
['en', 'error', 'th']
False
DictSorted.__setitem__
(self, key, value)
self[key] = value syntax
self[key] = value syntax
def __setitem__(self, key, value): '''self[key] = value syntax''' if key not in self.ordered_keys: self.ordered_keys.append(key) super(DictSorted, self).__setitem__(key, value)
[ "def", "__setitem__", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "not", "in", "self", ".", "ordered_keys", ":", "self", ".", "ordered_keys", ".", "append", "(", "key", ")", "super", "(", "DictSorted", ",", "self", ")", ".", "__setitem__", "(", "key", ",", "value", ")" ]
[ 137, 4 ]
[ 141, 55 ]
python
cy
['es', 'cy', 'en']
False
DictSorted.__iter__
(self)
for x in self syntax
for x in self syntax
def __iter__(self): '''for x in self syntax''' return self.ordered_keys.__iter__()
[ "def", "__iter__", "(", "self", ")", ":", "return", "self", ".", "ordered_keys", ".", "__iter__", "(", ")" ]
[ 157, 4 ]
[ 159, 43 ]
python
en
['en', 'en', 'en']
True
i18n_patterns
(*urls, prefix_default_language=True)
Add the language code prefix to every URL pattern within this function. This may only be used in the root URLconf, not in an included URLconf.
Add the language code prefix to every URL pattern within this function. This may only be used in the root URLconf, not in an included URLconf.
def i18n_patterns(*urls, prefix_default_language=True): """ Add the language code prefix to every URL pattern within this function. This may only be used in the root URLconf, not in an included URLconf. """ if not settings.USE_I18N: return list(urls) return [ URLResolver( LocalePrefixPattern(prefix_default_language=prefix_default_language), list(urls), ) ]
[ "def", "i18n_patterns", "(", "*", "urls", ",", "prefix_default_language", "=", "True", ")", ":", "if", "not", "settings", ".", "USE_I18N", ":", "return", "list", "(", "urls", ")", "return", "[", "URLResolver", "(", "LocalePrefixPattern", "(", "prefix_default_language", "=", "prefix_default_language", ")", ",", "list", "(", "urls", ")", ",", ")", "]" ]
[ 7, 0 ]
[ 19, 5 ]
python
en
['en', 'error', 'th']
False
is_language_prefix_patterns_used
(urlconf)
Return a tuple of two booleans: ( `True` if i18n_patterns() (LocalePrefixPattern) is used in the URLconf, `True` if the default language should be prefixed )
Return a tuple of two booleans: ( `True` if i18n_patterns() (LocalePrefixPattern) is used in the URLconf, `True` if the default language should be prefixed )
def is_language_prefix_patterns_used(urlconf): """ Return a tuple of two booleans: ( `True` if i18n_patterns() (LocalePrefixPattern) is used in the URLconf, `True` if the default language should be prefixed ) """ for url_pattern in get_resolver(urlconf).url_patterns: if isinstance(url_pattern.pattern, LocalePrefixPattern): return True, url_pattern.pattern.prefix_default_language return False, False
[ "def", "is_language_prefix_patterns_used", "(", "urlconf", ")", ":", "for", "url_pattern", "in", "get_resolver", "(", "urlconf", ")", ".", "url_patterns", ":", "if", "isinstance", "(", "url_pattern", ".", "pattern", ",", "LocalePrefixPattern", ")", ":", "return", "True", ",", "url_pattern", ".", "pattern", ".", "prefix_default_language", "return", "False", ",", "False" ]
[ 23, 0 ]
[ 33, 23 ]
python
en
['en', 'error', 'th']
False
std_call
(func)
Return the correct STDCALL function for certain OSR routines on Win32 platforms.
Return the correct STDCALL function for certain OSR routines on Win32 platforms.
def std_call(func): """ Return the correct STDCALL function for certain OSR routines on Win32 platforms. """ if os.name == 'nt': return lwingdal[func] else: return lgdal[func]
[ "def", "std_call", "(", "func", ")", ":", "if", "os", ".", "name", "==", "'nt'", ":", "return", "lwingdal", "[", "func", "]", "else", ":", "return", "lgdal", "[", "func", "]" ]
[ 63, 0 ]
[ 71, 26 ]
python
en
['en', 'error', 'th']
False
gdal_version
()
Return only the GDAL version number information.
Return only the GDAL version number information.
def gdal_version(): "Return only the GDAL version number information." return _version_info(b'RELEASE_NAME')
[ "def", "gdal_version", "(", ")", ":", "return", "_version_info", "(", "b'RELEASE_NAME'", ")" ]
[ 82, 0 ]
[ 84, 41 ]
python
en
['en', 'da', 'en']
True