Dataset columns (name, type, observed range):

    identifier                       string (lengths 1 to 155)
    parameters                       string (lengths 2 to 6.09k)
    docstring                        string (lengths 11 to 63.4k)
    docstring_summary                string (lengths 0 to 63.4k)
    function                         string (lengths 29 to 99.8k)
    function_tokens                  sequence
    start_point                      sequence
    end_point                        sequence
    language                         string (1 class)
    docstring_language               string (lengths 2 to 7)
    docstring_language_predictions   string (lengths 18 to 23)
    is_langid_reliable               string (2 classes)
PublicKey.load_pkcs1_openssl_pem
(cls, keyfile)
Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL. These files can be recognised in that they start with BEGIN PUBLIC KEY rather than BEGIN RSA PUBLIC KEY. The contents of the file before the "-----BEGIN PUBLIC KEY-----" and after the "-----END PUBLIC KEY-----" lines are ignored. :param keyfile: contents of a PEM-encoded file that contains the public key, from OpenSSL. :type keyfile: bytes :return: a PublicKey object
Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.
def load_pkcs1_openssl_pem(cls, keyfile):
    """Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.

    These files can be recognised in that they start with BEGIN PUBLIC KEY
    rather than BEGIN RSA PUBLIC KEY.

    The contents of the file before the "-----BEGIN PUBLIC KEY-----" and
    after the "-----END PUBLIC KEY-----" lines are ignored.

    :param keyfile: contents of a PEM-encoded file that contains the public
        key, from OpenSSL.
    :type keyfile: bytes
    :return: a PublicKey object
    """

    der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY')
    return cls.load_pkcs1_openssl_der(der)
[ "def", "load_pkcs1_openssl_pem", "(", "cls", ",", "keyfile", ")", ":", "der", "=", "rsa", ".", "pem", ".", "load_pem", "(", "keyfile", ",", "'PUBLIC KEY'", ")", "return", "cls", ".", "load_pkcs1_openssl_der", "(", "der", ")" ]
[ 305, 4 ]
[ 321, 46 ]
python
en
['en', 'fy', 'en']
True
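For context, a minimal usage sketch of the loader above, assuming the python-rsa package is installed; the file name is hypothetical (e.g. produced with `openssl rsa -in private.pem -pubout -out pubkey.pem`):

import rsa

# 'pubkey.pem' is a hypothetical OpenSSL-generated public key file.
with open('pubkey.pem', 'rb') as f:
    keydata = f.read()  # the loader expects bytes, not str

pub_key = rsa.PublicKey.load_pkcs1_openssl_pem(keydata)
print(pub_key.n, pub_key.e)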
PublicKey.load_pkcs1_openssl_der
(cls, keyfile)
Loads a PKCS#1 DER-encoded public key file from OpenSSL. :param keyfile: contents of a DER-encoded file that contains the public key, from OpenSSL. :return: a PublicKey object :rtype: PublicKey
Loads a PKCS#1 DER-encoded public key file from OpenSSL.
def load_pkcs1_openssl_der(cls, keyfile):
    """Loads a PKCS#1 DER-encoded public key file from OpenSSL.

    :param keyfile: contents of a DER-encoded file that contains the public
        key, from OpenSSL.
    :return: a PublicKey object
    :rtype: PublicKey
    """

    from rsa.asn1 import OpenSSLPubKey
    from pyasn1.codec.der import decoder
    from pyasn1.type import univ

    (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey())

    if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'):
        raise TypeError("This is not a DER-encoded OpenSSL-compatible public key")

    return cls._load_pkcs1_der(keyinfo['key'][1:])
[ "def", "load_pkcs1_openssl_der", "(", "cls", ",", "keyfile", ")", ":", "from", "rsa", ".", "asn1", "import", "OpenSSLPubKey", "from", "pyasn1", ".", "codec", ".", "der", "import", "decoder", "from", "pyasn1", ".", "type", "import", "univ", "(", "keyinfo", ",", "_", ")", "=", "decoder", ".", "decode", "(", "keyfile", ",", "asn1Spec", "=", "OpenSSLPubKey", "(", ")", ")", "if", "keyinfo", "[", "'header'", "]", "[", "'oid'", "]", "!=", "univ", ".", "ObjectIdentifier", "(", "'1.2.840.113549.1.1.1'", ")", ":", "raise", "TypeError", "(", "\"This is not a DER-encoded OpenSSL-compatible public key\"", ")", "return", "cls", ".", "_load_pkcs1_der", "(", "keyinfo", "[", "'key'", "]", "[", "1", ":", "]", ")" ]
[ 324, 4 ]
[ 343, 54 ]
python
en
['en', 'fy', 'en']
True
PrivateKey.__getstate__
(self)
Returns the key as tuple for pickling.
Returns the key as tuple for pickling.
def __getstate__(self):
    """Returns the key as tuple for pickling."""
    return self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef
[ "def", "__getstate__", "(", "self", ")", ":", "return", "self", ".", "n", ",", "self", ".", "e", ",", "self", ".", "d", ",", "self", ".", "p", ",", "self", ".", "q", ",", "self", ".", "exp1", ",", "self", ".", "exp2", ",", "self", ".", "coef" ]
[ 389, 4 ]
[ 391, 86 ]
python
en
['en', 'en', 'en']
True
PrivateKey.__setstate__
(self, state)
Sets the key from tuple.
Sets the key from tuple.
def __setstate__(self, state):
    """Sets the key from tuple."""
    self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef = state
[ "def", "__setstate__", "(", "self", ",", "state", ")", ":", "self", ".", "n", ",", "self", ".", "e", ",", "self", ".", "d", ",", "self", ".", "p", ",", "self", ".", "q", ",", "self", ".", "exp1", ",", "self", ".", "exp2", ",", "self", ".", "coef", "=", "state" ]
[ 393, 4 ]
[ 395, 87 ]
python
en
['en', 'en', 'en']
True
PrivateKey.blinded_decrypt
(self, encrypted)
Decrypts the message using blinding to prevent side-channel attacks. :param encrypted: the encrypted message :type encrypted: int :returns: the decrypted message :rtype: int
Decrypts the message using blinding to prevent side-channel attacks.
def blinded_decrypt(self, encrypted):
    """Decrypts the message using blinding to prevent side-channel attacks.

    :param encrypted: the encrypted message
    :type encrypted: int

    :returns: the decrypted message
    :rtype: int
    """

    blind_r = rsa.randnum.randint(self.n - 1)
    blinded = self.blind(encrypted, blind_r)  # blind before decrypting
    decrypted = rsa.core.decrypt_int(blinded, self.d, self.n)

    return self.unblind(decrypted, blind_r)
[ "def", "blinded_decrypt", "(", "self", ",", "encrypted", ")", ":", "blind_r", "=", "rsa", ".", "randnum", ".", "randint", "(", "self", ".", "n", "-", "1", ")", "blinded", "=", "self", ".", "blind", "(", "encrypted", ",", "blind_r", ")", "# blind before decrypting", "decrypted", "=", "rsa", ".", "core", ".", "decrypt_int", "(", "blinded", ",", "self", ".", "d", ",", "self", ".", "n", ")", "return", "self", ".", "unblind", "(", "decrypted", ",", "blind_r", ")" ]
[ 419, 4 ]
[ 433, 47 ]
python
en
['en', 'en', 'en']
True
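A small worked sketch of the blinding arithmetic this method relies on (toy numbers, not the library's internals): decrypting c·r^e instead of c means the side-channel-sensitive exponentiation sees a random value, and since r^(e·d) ≡ r (mod n), multiplying by r⁻¹ mod n afterwards recovers the plaintext. The modular inverse via pow(r, -1, n) assumes Python 3.8+.

# Toy RSA parameters (p=61, q=53): far too small for real use.
n, e, d = 3233, 17, 413
m = 65
c = pow(m, e, n)                             # encrypt

r = 7                                        # blinding factor, gcd(r, n) == 1
blinded = (c * pow(r, e, n)) % n             # blind the ciphertext
decrypted = pow(blinded, d, n)               # sensitive step sees a randomized value
recovered = (decrypted * pow(r, -1, n)) % n  # unblind with r^-1 mod n (Python 3.8+)
assert recovered == m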
PrivateKey.blinded_encrypt
(self, message)
Encrypts the message using blinding to prevent side-channel attacks. :param message: the message to encrypt :type message: int :returns: the encrypted message :rtype: int
Encrypts the message using blinding to prevent side-channel attacks.
def blinded_encrypt(self, message):
    """Encrypts the message using blinding to prevent side-channel attacks.

    :param message: the message to encrypt
    :type message: int

    :returns: the encrypted message
    :rtype: int
    """

    blind_r = rsa.randnum.randint(self.n - 1)
    blinded = self.blind(message, blind_r)  # blind before encrypting
    encrypted = rsa.core.encrypt_int(blinded, self.d, self.n)

    return self.unblind(encrypted, blind_r)
[ "def", "blinded_encrypt", "(", "self", ",", "message", ")", ":", "blind_r", "=", "rsa", ".", "randnum", ".", "randint", "(", "self", ".", "n", "-", "1", ")", "blinded", "=", "self", ".", "blind", "(", "message", ",", "blind_r", ")", "# blind before encrypting", "encrypted", "=", "rsa", ".", "core", ".", "encrypt_int", "(", "blinded", ",", "self", ".", "d", ",", "self", ".", "n", ")", "return", "self", ".", "unblind", "(", "encrypted", ",", "blind_r", ")" ]
[ 435, 4 ]
[ 448, 47 ]
python
en
['en', 'en', 'en']
True
PrivateKey._load_pkcs1_der
(cls, keyfile)
Loads a key in PKCS#1 DER format. :param keyfile: contents of a DER-encoded file that contains the private key. :type keyfile: bytes :return: a PrivateKey object First let's construct a DER encoded key: >>> import base64 >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt' >>> der = base64.standard_b64decode(b64der) This loads the file: >>> PrivateKey._load_pkcs1_der(der) PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
Loads a key in PKCS#1 DER format.
def _load_pkcs1_der(cls, keyfile):
    """Loads a key in PKCS#1 DER format.

    :param keyfile: contents of a DER-encoded file that contains the private
        key.
    :type keyfile: bytes
    :return: a PrivateKey object

    First let's construct a DER encoded key:

    >>> import base64
    >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt'
    >>> der = base64.standard_b64decode(b64der)

    This loads the file:

    >>> PrivateKey._load_pkcs1_der(der)
    PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
    """

    from pyasn1.codec.der import decoder
    (priv, _) = decoder.decode(keyfile)

    # ASN.1 contents of DER encoded private key:
    #
    # RSAPrivateKey ::= SEQUENCE {
    #     version           Version,
    #     modulus           INTEGER,  -- n
    #     publicExponent    INTEGER,  -- e
    #     privateExponent   INTEGER,  -- d
    #     prime1            INTEGER,  -- p
    #     prime2            INTEGER,  -- q
    #     exponent1         INTEGER,  -- d mod (p-1)
    #     exponent2         INTEGER,  -- d mod (q-1)
    #     coefficient       INTEGER,  -- (inverse of q) mod p
    #     otherPrimeInfos   OtherPrimeInfos OPTIONAL
    # }

    if priv[0] != 0:
        raise ValueError('Unable to read this file, version %s != 0' % priv[0])

    as_ints = map(int, priv[1:6])
    key = cls(*as_ints)

    exp1, exp2, coef = map(int, priv[6:9])

    if (key.exp1, key.exp2, key.coef) != (exp1, exp2, coef):
        warnings.warn(
            'You have provided a malformed keyfile. Either the exponents '
            'or the coefficient are incorrect. Using the correct values '
            'instead.',
            UserWarning,
        )

    return key
[ "def", "_load_pkcs1_der", "(", "cls", ",", "keyfile", ")", ":", "from", "pyasn1", ".", "codec", ".", "der", "import", "decoder", "(", "priv", ",", "_", ")", "=", "decoder", ".", "decode", "(", "keyfile", ")", "# ASN.1 contents of DER encoded private key:", "#", "# RSAPrivateKey ::= SEQUENCE {", "# version Version,", "# modulus INTEGER, -- n", "# publicExponent INTEGER, -- e", "# privateExponent INTEGER, -- d", "# prime1 INTEGER, -- p", "# prime2 INTEGER, -- q", "# exponent1 INTEGER, -- d mod (p-1)", "# exponent2 INTEGER, -- d mod (q-1)", "# coefficient INTEGER, -- (inverse of q) mod p", "# otherPrimeInfos OtherPrimeInfos OPTIONAL", "# }", "if", "priv", "[", "0", "]", "!=", "0", ":", "raise", "ValueError", "(", "'Unable to read this file, version %s != 0'", "%", "priv", "[", "0", "]", ")", "as_ints", "=", "map", "(", "int", ",", "priv", "[", "1", ":", "6", "]", ")", "key", "=", "cls", "(", "*", "as_ints", ")", "exp1", ",", "exp2", ",", "coef", "=", "map", "(", "int", ",", "priv", "[", "6", ":", "9", "]", ")", "if", "(", "key", ".", "exp1", ",", "key", ".", "exp2", ",", "key", ".", "coef", ")", "!=", "(", "exp1", ",", "exp2", ",", "coef", ")", ":", "warnings", ".", "warn", "(", "'You have provided a malformed keyfile. Either the exponents '", "'or the coefficient are incorrect. Using the correct values '", "'instead.'", ",", "UserWarning", ",", ")", "return", "key" ]
[ 451, 4 ]
[ 506, 18 ]
python
en
['en', 'fy', 'en']
True
PrivateKey._save_pkcs1_der
(self)
Saves the private key in PKCS#1 DER format. :returns: the DER-encoded private key. :rtype: bytes
Saves the private key in PKCS#1 DER format.
def _save_pkcs1_der(self):
    """Saves the private key in PKCS#1 DER format.

    :returns: the DER-encoded private key.
    :rtype: bytes
    """

    from pyasn1.type import univ, namedtype
    from pyasn1.codec.der import encoder

    class AsnPrivKey(univ.Sequence):
        componentType = namedtype.NamedTypes(
            namedtype.NamedType('version', univ.Integer()),
            namedtype.NamedType('modulus', univ.Integer()),
            namedtype.NamedType('publicExponent', univ.Integer()),
            namedtype.NamedType('privateExponent', univ.Integer()),
            namedtype.NamedType('prime1', univ.Integer()),
            namedtype.NamedType('prime2', univ.Integer()),
            namedtype.NamedType('exponent1', univ.Integer()),
            namedtype.NamedType('exponent2', univ.Integer()),
            namedtype.NamedType('coefficient', univ.Integer()),
        )

    # Create the ASN object
    asn_key = AsnPrivKey()
    asn_key.setComponentByName('version', 0)
    asn_key.setComponentByName('modulus', self.n)
    asn_key.setComponentByName('publicExponent', self.e)
    asn_key.setComponentByName('privateExponent', self.d)
    asn_key.setComponentByName('prime1', self.p)
    asn_key.setComponentByName('prime2', self.q)
    asn_key.setComponentByName('exponent1', self.exp1)
    asn_key.setComponentByName('exponent2', self.exp2)
    asn_key.setComponentByName('coefficient', self.coef)

    return encoder.encode(asn_key)
[ "def", "_save_pkcs1_der", "(", "self", ")", ":", "from", "pyasn1", ".", "type", "import", "univ", ",", "namedtype", "from", "pyasn1", ".", "codec", ".", "der", "import", "encoder", "class", "AsnPrivKey", "(", "univ", ".", "Sequence", ")", ":", "componentType", "=", "namedtype", ".", "NamedTypes", "(", "namedtype", ".", "NamedType", "(", "'version'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'modulus'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'publicExponent'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'privateExponent'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'prime1'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'prime2'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'exponent1'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'exponent2'", ",", "univ", ".", "Integer", "(", ")", ")", ",", "namedtype", ".", "NamedType", "(", "'coefficient'", ",", "univ", ".", "Integer", "(", ")", ")", ",", ")", "# Create the ASN object", "asn_key", "=", "AsnPrivKey", "(", ")", "asn_key", ".", "setComponentByName", "(", "'version'", ",", "0", ")", "asn_key", ".", "setComponentByName", "(", "'modulus'", ",", "self", ".", "n", ")", "asn_key", ".", "setComponentByName", "(", "'publicExponent'", ",", "self", ".", "e", ")", "asn_key", ".", "setComponentByName", "(", "'privateExponent'", ",", "self", ".", "d", ")", "asn_key", ".", "setComponentByName", "(", "'prime1'", ",", "self", ".", "p", ")", "asn_key", ".", "setComponentByName", "(", "'prime2'", ",", "self", ".", "q", ")", "asn_key", ".", "setComponentByName", "(", "'exponent1'", ",", "self", ".", "exp1", ")", "asn_key", ".", "setComponentByName", "(", "'exponent2'", ",", "self", ".", "exp2", ")", "asn_key", ".", "setComponentByName", "(", "'coefficient'", ",", "self", ".", "coef", ")", "return", "encoder", ".", "encode", "(", "asn_key", ")" ]
[ 508, 4 ]
[ 543, 38 ]
python
en
['en', 'en', 'en']
True
PrivateKey._load_pkcs1_pem
(cls, keyfile)
Loads a PKCS#1 PEM-encoded private key file. The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and after the "-----END RSA PRIVATE KEY-----" lines are ignored. :param keyfile: contents of a PEM-encoded file that contains the private key. :type keyfile: bytes :return: a PrivateKey object
Loads a PKCS#1 PEM-encoded private key file.
def _load_pkcs1_pem(cls, keyfile):
    """Loads a PKCS#1 PEM-encoded private key file.

    The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and
    after the "-----END RSA PRIVATE KEY-----" lines are ignored.

    :param keyfile: contents of a PEM-encoded file that contains the private
        key.
    :type keyfile: bytes
    :return: a PrivateKey object
    """

    der = rsa.pem.load_pem(keyfile, b'RSA PRIVATE KEY')
    return cls._load_pkcs1_der(der)
[ "def", "_load_pkcs1_pem", "(", "cls", ",", "keyfile", ")", ":", "der", "=", "rsa", ".", "pem", ".", "load_pem", "(", "keyfile", ",", "b'RSA PRIVATE KEY'", ")", "return", "cls", ".", "_load_pkcs1_der", "(", "der", ")" ]
[ 546, 4 ]
[ 559, 39 ]
python
en
['en', 'sq', 'en']
True
PrivateKey._save_pkcs1_pem
(self)
Saves a PKCS#1 PEM-encoded private key file. :return: contents of a PEM-encoded file that contains the private key. :rtype: bytes
Saves a PKCS#1 PEM-encoded private key file.
def _save_pkcs1_pem(self):
    """Saves a PKCS#1 PEM-encoded private key file.

    :return: contents of a PEM-encoded file that contains the private key.
    :rtype: bytes
    """

    der = self._save_pkcs1_der()
    return rsa.pem.save_pem(der, b'RSA PRIVATE KEY')
[ "def", "_save_pkcs1_pem", "(", "self", ")", ":", "der", "=", "self", ".", "_save_pkcs1_der", "(", ")", "return", "rsa", ".", "pem", ".", "save_pem", "(", "der", ",", "b'RSA PRIVATE KEY'", ")" ]
[ 561, 4 ]
[ 569, 56 ]
python
en
['en', 'hi-Latn', 'en']
True
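A hedged round-trip sketch exercising these private helpers through python-rsa's public wrappers, save_pkcs1() and load_pkcs1() (512-bit keys are insecure and used here only to keep the example fast):

import rsa

(pub_key, priv_key) = rsa.newkeys(512)   # toy key size, demo only

pem = priv_key.save_pkcs1(format='PEM')  # delegates to _save_pkcs1_pem()
assert pem.startswith(b'-----BEGIN RSA PRIVATE KEY-----')

restored = rsa.PrivateKey.load_pkcs1(pem, format='PEM')
assert restored == priv_key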
create_jwt
(project_id, private_key_file, algorithm)
Creates a JWT (https://jwt.io) to establish an MQTT connection. Args: project_id: The cloud project ID this device belongs to private_key_file: A path to a file containing either an RSA256 or ES256 private key. algorithm: The encryption algorithm to use. Either 'RS256' or 'ES256' Returns: A JWT generated from the given project_id and private key, which expires in 60 minutes. After 60 minutes, your client will be disconnected, and a new JWT will have to be generated. Raises: ValueError: If the private_key_file does not contain a known key.
Creates a JWT (https://jwt.io) to establish an MQTT connection. Args: project_id: The cloud project ID this device belongs to private_key_file: A path to a file containing either an RSA256 or ES256 private key. algorithm: The encryption algorithm to use. Either 'RS256' or 'ES256' Returns: A JWT generated from the given project_id and private key, which expires in 60 minutes. After 60 minutes, your client will be disconnected, and a new JWT will have to be generated. Raises: ValueError: If the private_key_file does not contain a known key.
def create_jwt(project_id, private_key_file, algorithm):
    """Creates a JWT (https://jwt.io) to establish an MQTT connection.

    Args:
        project_id: The cloud project ID this device belongs to
        private_key_file: A path to a file containing either an RSA256 or
            ES256 private key.
        algorithm: The encryption algorithm to use. Either 'RS256' or 'ES256'

    Returns:
        A JWT generated from the given project_id and private key, which
        expires in 60 minutes. After 60 minutes, your client will be
        disconnected, and a new JWT will have to be generated.

    Raises:
        ValueError: If the private_key_file does not contain a known key.
    """

    token = {
        # The time that the token was issued at
        'iat': datetime.datetime.utcnow(),
        # The time the token expires.
        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=60),
        # The audience field should always be set to the GCP project id.
        'aud': project_id
    }

    # Read the private key file.
    with open(private_key_file, 'r') as f:
        private_key = f.read()

    print('Creating JWT using {} from private key file {}'.format(
        algorithm, private_key_file))

    return jwt.encode(token, private_key, algorithm=algorithm)
[ "def", "create_jwt", "(", "project_id", ",", "private_key_file", ",", "algorithm", ")", ":", "token", "=", "{", "# The time that the token was issued at", "'iat'", ":", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ",", "# The time the token expires.", "'exp'", ":", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "+", "datetime", ".", "timedelta", "(", "minutes", "=", "60", ")", ",", "# The audience field should always be set to the GCP project id.", "'aud'", ":", "project_id", "}", "# Read the private key file.", "with", "open", "(", "private_key_file", ",", "'r'", ")", "as", "f", ":", "private_key", "=", "f", ".", "read", "(", ")", "print", "(", "'Creating JWT using {} from private key file {}'", ".", "format", "(", "algorithm", ",", "private_key_file", ")", ")", "return", "jwt", ".", "encode", "(", "token", ",", "private_key", ",", "algorithm", "=", "algorithm", ")" ]
[ 33, 0 ]
[ 64, 62 ]
python
en
['en', 'gd', 'en']
True
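A usage sketch of create_jwt (the project id and key path are hypothetical; assumes the PyJWT package providing jwt.encode is installed):

# Hypothetical values for illustration.
password = create_jwt(
    project_id='my-gcp-project',
    private_key_file='rsa_private.pem',
    algorithm='RS256')

# Cloud IoT Core reads the JWT from the MQTT password field; for example,
# with a paho-mqtt client one would pass it via username_pw_set():
# client.username_pw_set(username='unused', password=password)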
error_str
(rc)
Convert a Paho error to a human readable string.
Convert a Paho error to a human readable string.
def error_str(rc):
    """Convert a Paho error to a human readable string."""
    return '{}: {}'.format(rc, mqtt.error_string(rc))
[ "def", "error_str", "(", "rc", ")", ":", "return", "'{}: {}'", ".", "format", "(", "rc", ",", "mqtt", ".", "error_string", "(", "rc", ")", ")" ]
[ 67, 0 ]
[ 69, 53 ]
python
en
['en', 'gd', 'en']
True
on_connect
(unused_client, unused_userdata, unused_flags, rc)
Callback for when a device connects.
Callback for when a device connects.
def on_connect(unused_client, unused_userdata, unused_flags, rc):
    """Callback for when a device connects."""
    print('on_connect', error_str(rc))
[ "def", "on_connect", "(", "unused_client", ",", "unused_userdata", ",", "unused_flags", ",", "rc", ")", ":", "print", "(", "'on_connect'", ",", "error_str", "(", "rc", ")", ")" ]
[ 72, 0 ]
[ 74, 38 ]
python
en
['en', 'en', 'en']
True
on_disconnect
(unused_client, unused_userdata, rc)
Paho callback for when a device disconnects.
Paho callback for when a device disconnects.
def on_disconnect(unused_client, unused_userdata, rc):
    """Paho callback for when a device disconnects."""
    print('on_disconnect', error_str(rc))
[ "def", "on_disconnect", "(", "unused_client", ",", "unused_userdata", ",", "rc", ")", ":", "print", "(", "'on_disconnect'", ",", "error_str", "(", "rc", ")", ")" ]
[ 77, 0 ]
[ 79, 41 ]
python
en
['en', 'en', 'en']
True
on_publish
(unused_client, unused_userdata, unused_mid)
Paho callback when a message is sent to the broker.
Paho callback when a message is sent to the broker.
def on_publish(unused_client, unused_userdata, unused_mid):
    """Paho callback when a message is sent to the broker."""
    print('on_publish')
[ "def", "on_publish", "(", "unused_client", ",", "unused_userdata", ",", "unused_mid", ")", ":", "print", "(", "'on_publish'", ")" ]
[ 82, 0 ]
[ 84, 23 ]
python
en
['en', 'en', 'en']
True
parse_command_line_args
()
Parse command line arguments.
Parse command line arguments.
def parse_command_line_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(description=(
        'Example Google Cloud IoT Core MQTT device connection code.'))
    parser.add_argument(
        '--project_id',
        default=os.environ.get('GOOGLE_CLOUD_PROJECT'),
        help='GCP cloud project name')
    parser.add_argument(
        '--registry_id', required=True, help='Cloud IoT Core registry id')
    parser.add_argument(
        '--device_id', required=True, help='Cloud IoT Core device id')
    parser.add_argument(
        '--private_key_file', required=True, help='Path to private key file.')
    parser.add_argument(
        '--algorithm',
        choices=('RS256', 'ES256'),
        required=True,
        help='Which encryption algorithm to use to generate the JWT.')
    parser.add_argument(
        '--cloud_region', default='us-central1', help='GCP cloud region')
    parser.add_argument(
        '--ca_certs',
        default='roots.pem',
        help=('CA root from https://pki.google.com/roots.pem'))
    parser.add_argument(
        '--num_messages',
        type=int,
        default=100,
        help='Number of messages to publish.')
    parser.add_argument(
        '--message_type',
        choices=('event', 'state'),
        default='event',
        required=True,
        help=('Indicates whether the message to be published is a '
              'telemetry event or a device state message.'))
    parser.add_argument(
        '--mqtt_bridge_hostname',
        default='mqtt.googleapis.com',
        help='MQTT bridge hostname.')
    parser.add_argument(
        '--mqtt_bridge_port', default=8883, type=int, help='MQTT bridge port.')

    return parser.parse_args()
[ "def", "parse_command_line_args", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "(", "'Example Google Cloud IoT Core MQTT device connection code.'", ")", ")", "parser", ".", "add_argument", "(", "'--project_id'", ",", "default", "=", "os", ".", "environ", ".", "get", "(", "'GOOGLE_CLOUD_PROJECT'", ")", ",", "help", "=", "'GCP cloud project name'", ")", "parser", ".", "add_argument", "(", "'--registry_id'", ",", "required", "=", "True", ",", "help", "=", "'Cloud IoT Core registry id'", ")", "parser", ".", "add_argument", "(", "'--device_id'", ",", "required", "=", "True", ",", "help", "=", "'Cloud IoT Core device id'", ")", "parser", ".", "add_argument", "(", "'--private_key_file'", ",", "required", "=", "True", ",", "help", "=", "'Path to private key file.'", ")", "parser", ".", "add_argument", "(", "'--algorithm'", ",", "choices", "=", "(", "'RS256'", ",", "'ES256'", ")", ",", "required", "=", "True", ",", "help", "=", "'Which encryption algorithm to use to generate the JWT.'", ")", "parser", ".", "add_argument", "(", "'--cloud_region'", ",", "default", "=", "'us-central1'", ",", "help", "=", "'GCP cloud region'", ")", "parser", ".", "add_argument", "(", "'--ca_certs'", ",", "default", "=", "'roots.pem'", ",", "help", "=", "(", "'CA root from https://pki.google.com/roots.pem'", ")", ")", "parser", ".", "add_argument", "(", "'--num_messages'", ",", "type", "=", "int", ",", "default", "=", "100", ",", "help", "=", "'Number of messages to publish.'", ")", "parser", ".", "add_argument", "(", "'--message_type'", ",", "choices", "=", "(", "'event'", ",", "'state'", ")", ",", "default", "=", "'event'", ",", "required", "=", "True", ",", "help", "=", "(", "'Indicates whether the message to be published is a '", "'telemetry event or a device state message.'", ")", ")", "parser", ".", "add_argument", "(", "'--mqtt_bridge_hostname'", ",", "default", "=", "'mqtt.googleapis.com'", ",", "help", "=", "'MQTT bridge hostname.'", ")", "parser", ".", "add_argument", "(", "'--mqtt_bridge_port'", ",", "default", "=", "8883", ",", "type", "=", "int", ",", "help", "=", "'MQTT bridge port.'", ")", "return", "parser", ".", "parse_args", "(", ")" ]
[ 87, 0 ]
[ 135, 30 ]
python
en
['en', 'fr', 'en']
True
KMLSitemap._build_kml_sources
(self, sources)
Go through the given sources and return a 3-tuple of the application label, module name, and field name of every GeometryField encountered in the sources. If no sources are provided, then all models are used.
Go through the given sources and return a 3-tuple of the application label, module name, and field name of every GeometryField encountered in the sources.
def _build_kml_sources(self, sources):
    """
    Go through the given sources and return a 3-tuple of the application
    label, module name, and field name of every GeometryField encountered
    in the sources.

    If no sources are provided, then all models are used.
    """
    kml_sources = []
    if sources is None:
        sources = apps.get_models()
    for source in sources:
        if isinstance(source, models.base.ModelBase):
            for field in source._meta.fields:
                if isinstance(field, GeometryField):
                    kml_sources.append((source._meta.app_label,
                                        source._meta.model_name,
                                        field.name))
        elif isinstance(source, (list, tuple)):
            if len(source) != 3:
                raise ValueError('Must specify a 3-tuple of (app_label, module_name, field_name).')
            kml_sources.append(source)
        else:
            raise TypeError('KML Sources must be a model or a 3-tuple.')
    return kml_sources
[ "def", "_build_kml_sources", "(", "self", ",", "sources", ")", ":", "kml_sources", "=", "[", "]", "if", "sources", "is", "None", ":", "sources", "=", "apps", ".", "get_models", "(", ")", "for", "source", "in", "sources", ":", "if", "isinstance", "(", "source", ",", "models", ".", "base", ".", "ModelBase", ")", ":", "for", "field", "in", "source", ".", "_meta", ".", "fields", ":", "if", "isinstance", "(", "field", ",", "GeometryField", ")", ":", "kml_sources", ".", "append", "(", "(", "source", ".", "_meta", ".", "app_label", ",", "source", ".", "_meta", ".", "model_name", ",", "field", ".", "name", ")", ")", "elif", "isinstance", "(", "source", ",", "(", "list", ",", "tuple", ")", ")", ":", "if", "len", "(", "source", ")", "!=", "3", ":", "raise", "ValueError", "(", "'Must specify a 3-tuple of (app_label, module_name, field_name).'", ")", "kml_sources", ".", "append", "(", "source", ")", "else", ":", "raise", "TypeError", "(", "'KML Sources must be a model or a 3-tuple.'", ")", "return", "kml_sources" ]
[ 18, 4 ]
[ 42, 26 ]
python
en
['en', 'error', 'th']
False
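A sketch of the two accepted source forms (the app, model, and field names are hypothetical, not part of Django):

from django.contrib.gis.sitemaps import KMLSitemap
from maps.models import City  # hypothetical model with a PointField 'point'

sitemap_from_model = KMLSitemap([City])                       # scans City for GeometryFields
sitemap_from_tuple = KMLSitemap([('maps', 'city', 'point')])  # explicit (app_label, model_name, field_name)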
KMLSitemap.get_urls
(self, page=1, site=None, protocol=None)
This method is overridden so the appropriate `geo_format` attribute is placed on each URL element.
This method is overridden so the appropriate `geo_format` attribute is placed on each URL element.
def get_urls(self, page=1, site=None, protocol=None):
    """
    This method is overridden so the appropriate `geo_format` attribute
    is placed on each URL element.
    """
    urls = Sitemap.get_urls(self, page=page, site=site, protocol=protocol)
    for url in urls:
        url['geo_format'] = self.geo_format
    return urls
[ "def", "get_urls", "(", "self", ",", "page", "=", "1", ",", "site", "=", "None", ",", "protocol", "=", "None", ")", ":", "urls", "=", "Sitemap", ".", "get_urls", "(", "self", ",", "page", "=", "page", ",", "site", "=", "site", ",", "protocol", "=", "protocol", ")", "for", "url", "in", "urls", ":", "url", "[", "'geo_format'", "]", "=", "self", ".", "geo_format", "return", "urls" ]
[ 44, 4 ]
[ 52, 19 ]
python
en
['en', 'error', 'th']
False
get_args
()
Argument parser. Returns: Dictionary of arguments.
Argument parser.
def get_args():
    """Argument parser.

    Returns:
        Dictionary of arguments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--job-dir',
        type=str,
        required=True,
        help='local or GCS location for writing checkpoints and exporting '
             'models')
    parser.add_argument(
        '--num-epochs',
        type=int,
        default=20,
        help='number of times to go through the data, default=20')
    parser.add_argument(
        '--batch-size',
        default=128,
        type=int,
        help='number of records to read during each training step, default=128')
    parser.add_argument(
        '--learning-rate',
        default=.01,
        type=float,
        help='learning rate for gradient descent, default=.01')
    parser.add_argument(
        '--verbosity',
        choices=['DEBUG', 'ERROR', 'FATAL', 'INFO', 'WARN'],
        default='INFO')
    args, _ = parser.parse_known_args()
    return args
[ "def", "get_args", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--job-dir'", ",", "type", "=", "str", ",", "required", "=", "True", ",", "help", "=", "'local or GCS location for writing checkpoints and exporting '", "'models'", ")", "parser", ".", "add_argument", "(", "'--num-epochs'", ",", "type", "=", "int", ",", "default", "=", "20", ",", "help", "=", "'number of times to go through the data, default=20'", ")", "parser", ".", "add_argument", "(", "'--batch-size'", ",", "default", "=", "128", ",", "type", "=", "int", ",", "help", "=", "'number of records to read during each training step, default=128'", ")", "parser", ".", "add_argument", "(", "'--learning-rate'", ",", "default", "=", ".01", ",", "type", "=", "float", ",", "help", "=", "'learning rate for gradient descent, default=.01'", ")", "parser", ".", "add_argument", "(", "'--verbosity'", ",", "choices", "=", "[", "'DEBUG'", ",", "'ERROR'", ",", "'FATAL'", ",", "'INFO'", ",", "'WARN'", "]", ",", "default", "=", "'INFO'", ")", "args", ",", "_", "=", "parser", ".", "parse_known_args", "(", ")", "return", "args" ]
[ 28, 0 ]
[ 61, 15 ]
python
da
['fr', 'da', 'pt']
False
train_and_evaluate
(args)
Trains and evaluates the Keras model. Uses the Keras model defined in model.py and trains on data loaded and preprocessed in util.py. Saves the trained model in TensorFlow SavedModel format to the path defined in part by the --job-dir argument. Args: args: dictionary of arguments - see get_args() for details
Trains and evaluates the Keras model.
def train_and_evaluate(args):
    """Trains and evaluates the Keras model.

    Uses the Keras model defined in model.py and trains on data loaded and
    preprocessed in util.py. Saves the trained model in TensorFlow SavedModel
    format to the path defined in part by the --job-dir argument.

    Args:
        args: dictionary of arguments - see get_args() for details
    """
    train_x, train_y, eval_x, eval_y = util.load_data()

    # dimensions
    num_train_examples, input_dim = train_x.shape
    num_eval_examples = eval_x.shape[0]

    # Create the Keras Model
    keras_model = model.create_keras_model(
        input_dim=input_dim, learning_rate=args.learning_rate)

    # Pass a numpy array by passing DataFrame.values
    training_dataset = model.input_fn(
        features=train_x.values,
        labels=train_y,
        shuffle=True,
        num_epochs=args.num_epochs,
        batch_size=args.batch_size)

    # Pass a numpy array by passing DataFrame.values
    validation_dataset = model.input_fn(
        features=eval_x.values,
        labels=eval_y,
        shuffle=False,
        num_epochs=args.num_epochs,
        batch_size=num_eval_examples)

    # Setup Learning Rate decay.
    lr_decay_cb = tf.keras.callbacks.LearningRateScheduler(
        lambda epoch: args.learning_rate + 0.02 * (0.5 ** (1 + epoch)),
        verbose=True)

    # Setup TensorBoard callback.
    tensorboard_cb = tf.keras.callbacks.TensorBoard(
        os.path.join(args.job_dir, 'keras_tensorboard'),
        histogram_freq=1)

    # Train model
    keras_model.fit(
        training_dataset,
        steps_per_epoch=int(num_train_examples / args.batch_size),
        epochs=args.num_epochs,
        validation_data=validation_dataset,
        validation_steps=1,
        verbose=1,
        callbacks=[lr_decay_cb, tensorboard_cb])

    export_path = os.path.join(args.job_dir, 'keras_export')
    tf.keras.models.save_model(keras_model, export_path)
    print('Model exported to: {}'.format(export_path))
[ "def", "train_and_evaluate", "(", "args", ")", ":", "train_x", ",", "train_y", ",", "eval_x", ",", "eval_y", "=", "util", ".", "load_data", "(", ")", "# dimensions", "num_train_examples", ",", "input_dim", "=", "train_x", ".", "shape", "num_eval_examples", "=", "eval_x", ".", "shape", "[", "0", "]", "# Create the Keras Model", "keras_model", "=", "model", ".", "create_keras_model", "(", "input_dim", "=", "input_dim", ",", "learning_rate", "=", "args", ".", "learning_rate", ")", "# Pass a numpy array by passing DataFrame.values", "training_dataset", "=", "model", ".", "input_fn", "(", "features", "=", "train_x", ".", "values", ",", "labels", "=", "train_y", ",", "shuffle", "=", "True", ",", "num_epochs", "=", "args", ".", "num_epochs", ",", "batch_size", "=", "args", ".", "batch_size", ")", "# Pass a numpy array by passing DataFrame.values", "validation_dataset", "=", "model", ".", "input_fn", "(", "features", "=", "eval_x", ".", "values", ",", "labels", "=", "eval_y", ",", "shuffle", "=", "False", ",", "num_epochs", "=", "args", ".", "num_epochs", ",", "batch_size", "=", "num_eval_examples", ")", "# Setup Learning Rate decay.", "lr_decay_cb", "=", "tf", ".", "keras", ".", "callbacks", ".", "LearningRateScheduler", "(", "lambda", "epoch", ":", "args", ".", "learning_rate", "+", "0.02", "*", "(", "0.5", "**", "(", "1", "+", "epoch", ")", ")", ",", "verbose", "=", "True", ")", "# Setup TensorBoard callback.", "tensorboard_cb", "=", "tf", ".", "keras", ".", "callbacks", ".", "TensorBoard", "(", "os", ".", "path", ".", "join", "(", "args", ".", "job_dir", ",", "'keras_tensorboard'", ")", ",", "histogram_freq", "=", "1", ")", "# Train model", "keras_model", ".", "fit", "(", "training_dataset", ",", "steps_per_epoch", "=", "int", "(", "num_train_examples", "/", "args", ".", "batch_size", ")", ",", "epochs", "=", "args", ".", "num_epochs", ",", "validation_data", "=", "validation_dataset", ",", "validation_steps", "=", "1", ",", "verbose", "=", "1", ",", "callbacks", "=", "[", "lr_decay_cb", ",", "tensorboard_cb", "]", ")", "export_path", "=", "os", ".", "path", ".", "join", "(", "args", ".", "job_dir", ",", "'keras_export'", ")", "tf", ".", "keras", ".", "models", ".", "save_model", "(", "keras_model", ",", "export_path", ")", "print", "(", "'Model exported to: {}'", ".", "format", "(", "export_path", ")", ")" ]
[ 64, 0 ]
[ 123, 54 ]
python
en
['en', 'en', 'en']
True
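For orientation, a hedged sketch of how get_args() and train_and_evaluate() would typically be wired together in the trainer's entry point, assuming both live in the same module as the docstrings suggest (flag values are examples only):

if __name__ == '__main__':
    # Example invocation:
    #   python task.py --job-dir /tmp/keras-job --num-epochs 5 --batch-size 128
    args = get_args()
    train_and_evaluate(args)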
FileBasedCache._cull
(self)
Remove random cache entries if max_entries is reached at a ratio of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means that the entire cache will be purged.
Remove random cache entries if max_entries is reached at a ratio of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means that the entire cache will be purged.
def _cull(self):
    """
    Remove random cache entries if max_entries is reached at a ratio of
    num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
    that the entire cache will be purged.
    """
    filelist = self._list_cache_files()
    num_entries = len(filelist)
    if num_entries < self._max_entries:
        return  # return early if no culling is required
    if self._cull_frequency == 0:
        return self.clear()  # Clear the cache when CULL_FREQUENCY = 0

    # Delete a random selection of entries
    filelist = random.sample(filelist,
                             int(num_entries / self._cull_frequency))
    for fname in filelist:
        self._delete(fname)
[ "def", "_cull", "(", "self", ")", ":", "filelist", "=", "self", ".", "_list_cache_files", "(", ")", "num_entries", "=", "len", "(", "filelist", ")", "if", "num_entries", "<", "self", ".", "_max_entries", ":", "return", "# return early if no culling is required", "if", "self", ".", "_cull_frequency", "==", "0", ":", "return", "self", ".", "clear", "(", ")", "# Clear the cache when CULL_FREQUENCY = 0", "# Delete a random selection of entries", "filelist", "=", "random", ".", "sample", "(", "filelist", ",", "int", "(", "num_entries", "/", "self", ".", "_cull_frequency", ")", ")", "for", "fname", "in", "filelist", ":", "self", ".", "_delete", "(", "fname", ")" ]
[ 97, 4 ]
[ 113, 31 ]
python
en
['en', 'error', 'th']
False
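A worked instance of the culling ratio, with illustrative numbers (not Django's defaults):

# Suppose 360 cache files exist, _max_entries is 300 and _cull_frequency is 3.
# num_entries >= _max_entries, so culling proceeds and a random third is removed:
int(360 / 3)  # -> 120 files picked by random.sample and deleted;
              #    _cull_frequency == 0 would clear the whole cache instead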
FileBasedCache._key_to_file
(self, key, version=None)
Convert a key into a cache file path. Basically this is the root cache path joined with the md5sum of the key and a suffix.
Convert a key into a cache file path. Basically this is the root cache path joined with the md5sum of the key and a suffix.
def _key_to_file(self, key, version=None):
    """
    Convert a key into a cache file path. Basically this is the
    root cache path joined with the md5sum of the key and a suffix.
    """
    key = self.make_key(key, version=version)
    self.validate_key(key)
    return os.path.join(self._dir, ''.join(
        [hashlib.md5(key.encode()).hexdigest(), self.cache_suffix]))
[ "def", "_key_to_file", "(", "self", ",", "key", ",", "version", "=", "None", ")", ":", "key", "=", "self", ".", "make_key", "(", "key", ",", "version", "=", "version", ")", "self", ".", "validate_key", "(", "key", ")", "return", "os", ".", "path", ".", "join", "(", "self", ".", "_dir", ",", "''", ".", "join", "(", "[", "hashlib", ".", "md5", "(", "key", ".", "encode", "(", ")", ")", ".", "hexdigest", "(", ")", ",", "self", ".", "cache_suffix", "]", ")", ")" ]
[ 124, 4 ]
[ 132, 72 ]
python
en
['en', 'error', 'th']
False
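A trace of the path construction as standalone code. The key string and cache directory below are hypothetical values; cache_suffix defaults to '.djcache' in Django's FileBasedCache:

import hashlib
import os

# Assume make_key() produced this versioned key (hypothetical value).
key = ':1:views.decorators.cache.cache_page'
fname = hashlib.md5(key.encode()).hexdigest() + '.djcache'
path = os.path.join('/var/tmp/django_cache', fname)  # hypothetical cache dir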
FileBasedCache.clear
(self)
Remove all the cache files.
Remove all the cache files.
def clear(self):
    """
    Remove all the cache files.
    """
    for fname in self._list_cache_files():
        self._delete(fname)
[ "def", "clear", "(", "self", ")", ":", "for", "fname", "in", "self", ".", "_list_cache_files", "(", ")", ":", "self", ".", "_delete", "(", "fname", ")" ]
[ 134, 4 ]
[ 139, 31 ]
python
en
['en', 'error', 'th']
False
FileBasedCache._is_expired
(self, f)
Take an open cache file `f` and delete it if it's expired.
Take an open cache file `f` and delete it if it's expired.
def _is_expired(self, f):
    """
    Take an open cache file `f` and delete it if it's expired.
    """
    try:
        exp = pickle.load(f)
    except EOFError:
        exp = 0  # An empty file is considered expired.
    if exp is not None and exp < time.time():
        f.close()  # On Windows a file has to be closed before deleting
        self._delete(f.name)
        return True
    return False
[ "def", "_is_expired", "(", "self", ",", "f", ")", ":", "try", ":", "exp", "=", "pickle", ".", "load", "(", "f", ")", "except", "EOFError", ":", "exp", "=", "0", "# An empty file is considered expired.", "if", "exp", "is", "not", "None", "and", "exp", "<", "time", ".", "time", "(", ")", ":", "f", ".", "close", "(", ")", "# On Windows a file has to be closed before deleting", "self", ".", "_delete", "(", "f", ".", "name", ")", "return", "True", "return", "False" ]
[ 141, 4 ]
[ 153, 20 ]
python
en
['en', 'error', 'th']
False
FileBasedCache._list_cache_files
(self)
Get a list of paths to all the cache files. These are all the files in the root cache dir that end on the cache_suffix.
Get a list of paths to all the cache files. These are all the files in the root cache dir that end on the cache_suffix.
def _list_cache_files(self):
    """
    Get a list of paths to all the cache files. These are all the files
    in the root cache dir that end on the cache_suffix.
    """
    return [
        os.path.join(self._dir, fname)
        for fname in glob.glob1(self._dir, '*%s' % self.cache_suffix)
    ]
[ "def", "_list_cache_files", "(", "self", ")", ":", "return", "[", "os", ".", "path", ".", "join", "(", "self", ".", "_dir", ",", "fname", ")", "for", "fname", "in", "glob", ".", "glob1", "(", "self", ".", "_dir", ",", "'*%s'", "%", "self", ".", "cache_suffix", ")", "]" ]
[ 155, 4 ]
[ 163, 9 ]
python
en
['en', 'error', 'th']
False
copyfileobj
(fsrc, fdst, length=16*1024)
copy data from file-like object fsrc to file-like object fdst
copy data from file-like object fsrc to file-like object fdst
def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    while 1:
        buf = fsrc.read(length)
        if not buf:
            break
        fdst.write(buf)
[ "def", "copyfileobj", "(", "fsrc", ",", "fdst", ",", "length", "=", "16", "*", "1024", ")", ":", "while", "1", ":", "buf", "=", "fsrc", ".", "read", "(", "length", ")", "if", "not", "buf", ":", "break", "fdst", ".", "write", "(", "buf", ")" ]
[ 69, 0 ]
[ 75, 23 ]
python
en
['en', 'en', 'en']
True
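A minimal sketch using the function above: because it only needs read() and write(), it works with any file-like pair, including in-memory buffers:

import io

src = io.BytesIO(b'x' * 100000)
dst = io.BytesIO()
copyfileobj(src, dst, length=16 * 1024)  # copies in 16 KiB chunks
assert dst.getvalue() == src.getvalue()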
copyfile
(src, dst)
Copy data from src to dst
Copy data from src to dst
def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)

    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            copyfileobj(fsrc, fdst)
[ "def", "copyfile", "(", "src", ",", "dst", ")", ":", "if", "_samefile", "(", "src", ",", "dst", ")", ":", "raise", "Error", "(", "\"`%s` and `%s` are the same file\"", "%", "(", "src", ",", "dst", ")", ")", "for", "fn", "in", "[", "src", ",", "dst", "]", ":", "try", ":", "st", "=", "os", ".", "stat", "(", "fn", ")", "except", "OSError", ":", "# File most likely does not exist", "pass", "else", ":", "# XXX What about other special files? (sockets, devices...)", "if", "stat", ".", "S_ISFIFO", "(", "st", ".", "st_mode", ")", ":", "raise", "SpecialFileError", "(", "\"`%s` is a named pipe\"", "%", "fn", ")", "with", "open", "(", "src", ",", "'rb'", ")", "as", "fsrc", ":", "with", "open", "(", "dst", ",", "'wb'", ")", "as", "fdst", ":", "copyfileobj", "(", "fsrc", ",", "fdst", ")" ]
[ 89, 0 ]
[ 107, 35 ]
python
en
['en', 'en', 'en']
True
copymode
(src, dst)
Copy mode bits from src to dst
Copy mode bits from src to dst
def copymode(src, dst):
    """Copy mode bits from src to dst"""
    if hasattr(os, 'chmod'):
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        os.chmod(dst, mode)
[ "def", "copymode", "(", "src", ",", "dst", ")", ":", "if", "hasattr", "(", "os", ",", "'chmod'", ")", ":", "st", "=", "os", ".", "stat", "(", "src", ")", "mode", "=", "stat", ".", "S_IMODE", "(", "st", ".", "st_mode", ")", "os", ".", "chmod", "(", "dst", ",", "mode", ")" ]
[ 109, 0 ]
[ 114, 27 ]
python
en
['en', 'en', 'en']
True
copystat
(src, dst)
Copy all stat info (mode bits, atime, mtime, flags) from src to dst
Copy all stat info (mode bits, atime, mtime, flags) from src to dst
def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            if (not hasattr(errno, 'EOPNOTSUPP') or
                    why.errno != errno.EOPNOTSUPP):
                raise
[ "def", "copystat", "(", "src", ",", "dst", ")", ":", "st", "=", "os", ".", "stat", "(", "src", ")", "mode", "=", "stat", ".", "S_IMODE", "(", "st", ".", "st_mode", ")", "if", "hasattr", "(", "os", ",", "'utime'", ")", ":", "os", ".", "utime", "(", "dst", ",", "(", "st", ".", "st_atime", ",", "st", ".", "st_mtime", ")", ")", "if", "hasattr", "(", "os", ",", "'chmod'", ")", ":", "os", ".", "chmod", "(", "dst", ",", "mode", ")", "if", "hasattr", "(", "os", ",", "'chflags'", ")", "and", "hasattr", "(", "st", ",", "'st_flags'", ")", ":", "try", ":", "os", ".", "chflags", "(", "dst", ",", "st", ".", "st_flags", ")", "except", "OSError", "as", "why", ":", "if", "(", "not", "hasattr", "(", "errno", ",", "'EOPNOTSUPP'", ")", "or", "why", ".", "errno", "!=", "errno", ".", "EOPNOTSUPP", ")", ":", "raise" ]
[ 116, 0 ]
[ 130, 21 ]
python
en
['en', 'en', 'en']
True
copy
(src, dst)
Copy data and mode bits ("cp src dst"). The destination may be a directory.
Copy data and mode bits ("cp src dst").
def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copymode(src, dst)
[ "def", "copy", "(", "src", ",", "dst", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "dst", ")", ":", "dst", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "os", ".", "path", ".", "basename", "(", "src", ")", ")", "copyfile", "(", "src", ",", "dst", ")", "copymode", "(", "src", ",", "dst", ")" ]
[ 132, 0 ]
[ 141, 22 ]
python
en
['en', 'en', 'en']
True
copy2
(src, dst)
Copy data and all stat info ("cp -p src dst"). The destination may be a directory.
Copy data and all stat info ("cp -p src dst").
def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copystat(src, dst)
[ "def", "copy2", "(", "src", ",", "dst", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "dst", ")", ":", "dst", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "os", ".", "path", ".", "basename", "(", "src", ")", ")", "copyfile", "(", "src", ",", "dst", ")", "copystat", "(", "src", ",", "dst", ")" ]
[ 143, 0 ]
[ 152, 22 ]
python
en
['en', 'en', 'en']
True
ignore_patterns
(*patterns)
Function that can be used as copytree() ignore parameter. Patterns is a sequence of glob-style patterns that are used to exclude files
Function that can be used as copytree() ignore parameter.
def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        ignored_names = []
        for pattern in patterns:
            ignored_names.extend(fnmatch.filter(names, pattern))
        return set(ignored_names)
    return _ignore_patterns
[ "def", "ignore_patterns", "(", "*", "patterns", ")", ":", "def", "_ignore_patterns", "(", "path", ",", "names", ")", ":", "ignored_names", "=", "[", "]", "for", "pattern", "in", "patterns", ":", "ignored_names", ".", "extend", "(", "fnmatch", ".", "filter", "(", "names", ",", "pattern", ")", ")", "return", "set", "(", "ignored_names", ")", "return", "_ignore_patterns" ]
[ 154, 0 ]
[ 164, 27 ]
python
en
['en', 'en', 'en']
True
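Typical use pairs this factory with copytree() below; the paths here are hypothetical, and the destination must not exist yet:

# Skip compiled files and editor backups while copying a tree.
copytree('project/', 'backup/project/',
         ignore=ignore_patterns('*.pyc', '*~', 'tmp*'))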
copytree
(src, dst, symlinks=False, ignore=None, copy_function=copy2, ignore_dangling_symlinks=False)
Recursively copy a directory tree. The destination directory must not already exist. If exception(s) occur, an Error is raised with a list of reasons. If the optional symlinks flag is true, symbolic links in the source tree result in symbolic links in the destination tree; if it is false, the contents of the files pointed to by symbolic links are copied. If the file pointed by the symlink doesn't exist, an exception will be added in the list of errors raised in an Error exception at the end of the copy process. You can set the optional ignore_dangling_symlinks flag to true if you want to silence this exception. Notice that this has no effect on platforms that don't support os.symlink. The optional ignore argument is a callable. If given, it is called with the `src` parameter, which is the directory being visited by copytree(), and `names` which is the list of `src` contents, as returned by os.listdir(): callable(src, names) -> ignored_names Since copytree() is called recursively, the callable will be called once for each directory that is copied. It returns a list of names relative to the `src` directory that should not be copied. The optional copy_function argument is a callable that will be used to copy each file. It will be called with the source path and the destination path as arguments. By default, copy2() is used, but any function that supports the same signature (like copy()) can be used.
Recursively copy a directory tree.
def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.

    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occur. copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            errors.extend((src, dst, str(why)))
    if errors:
        raise Error(errors)
[ "def", "copytree", "(", "src", ",", "dst", ",", "symlinks", "=", "False", ",", "ignore", "=", "None", ",", "copy_function", "=", "copy2", ",", "ignore_dangling_symlinks", "=", "False", ")", ":", "names", "=", "os", ".", "listdir", "(", "src", ")", "if", "ignore", "is", "not", "None", ":", "ignored_names", "=", "ignore", "(", "src", ",", "names", ")", "else", ":", "ignored_names", "=", "set", "(", ")", "os", ".", "makedirs", "(", "dst", ")", "errors", "=", "[", "]", "for", "name", "in", "names", ":", "if", "name", "in", "ignored_names", ":", "continue", "srcname", "=", "os", ".", "path", ".", "join", "(", "src", ",", "name", ")", "dstname", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "name", ")", "try", ":", "if", "os", ".", "path", ".", "islink", "(", "srcname", ")", ":", "linkto", "=", "os", ".", "readlink", "(", "srcname", ")", "if", "symlinks", ":", "os", ".", "symlink", "(", "linkto", ",", "dstname", ")", "else", ":", "# ignore dangling symlink if the flag is on", "if", "not", "os", ".", "path", ".", "exists", "(", "linkto", ")", "and", "ignore_dangling_symlinks", ":", "continue", "# otherwise let the copy occurs. copy2 will raise an error", "copy_function", "(", "srcname", ",", "dstname", ")", "elif", "os", ".", "path", ".", "isdir", "(", "srcname", ")", ":", "copytree", "(", "srcname", ",", "dstname", ",", "symlinks", ",", "ignore", ",", "copy_function", ")", "else", ":", "# Will raise a SpecialFileError for unsupported file types", "copy_function", "(", "srcname", ",", "dstname", ")", "# catch the Error from the recursive copytree so that we can", "# continue with other files", "except", "Error", "as", "err", ":", "errors", ".", "extend", "(", "err", ".", "args", "[", "0", "]", ")", "except", "EnvironmentError", "as", "why", ":", "errors", ".", "append", "(", "(", "srcname", ",", "dstname", ",", "str", "(", "why", ")", ")", ")", "try", ":", "copystat", "(", "src", ",", "dst", ")", "except", "OSError", "as", "why", ":", "if", "WindowsError", "is", "not", "None", "and", "isinstance", "(", "why", ",", "WindowsError", ")", ":", "# Copying file access times may fail on Windows", "pass", "else", ":", "errors", ".", "extend", "(", "(", "src", ",", "dst", ",", "str", "(", "why", ")", ")", ")", "if", "errors", ":", "raise", "Error", "(", "errors", ")" ]
[ 166, 0 ]
[ 246, 27 ]
python
en
['en', 'en', 'en']
True
rmtree
(path, ignore_errors=False, onerror=None)
Recursively delete a directory tree. If ignore_errors is set, errors are ignored; otherwise, if onerror is set, it is called to handle the error with arguments (func, path, exc_info) where func is os.listdir, os.remove, or os.rmdir; path is the argument to that function that caused it to fail; and exc_info is a tuple returned by sys.exc_info(). If ignore_errors is false and onerror is None, an exception is raised.
Recursively delete a directory tree.
def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.

    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())
[ "def", "rmtree", "(", "path", ",", "ignore_errors", "=", "False", ",", "onerror", "=", "None", ")", ":", "if", "ignore_errors", ":", "def", "onerror", "(", "*", "args", ")", ":", "pass", "elif", "onerror", "is", "None", ":", "def", "onerror", "(", "*", "args", ")", ":", "raise", "try", ":", "if", "os", ".", "path", ".", "islink", "(", "path", ")", ":", "# symlinks to directories are forbidden, see bug #1669", "raise", "OSError", "(", "\"Cannot call rmtree on a symbolic link\"", ")", "except", "OSError", ":", "onerror", "(", "os", ".", "path", ".", "islink", ",", "path", ",", "sys", ".", "exc_info", "(", ")", ")", "# can't continue even if onerror hook returns", "return", "names", "=", "[", "]", "try", ":", "names", "=", "os", ".", "listdir", "(", "path", ")", "except", "os", ".", "error", ":", "onerror", "(", "os", ".", "listdir", ",", "path", ",", "sys", ".", "exc_info", "(", ")", ")", "for", "name", "in", "names", ":", "fullname", "=", "os", ".", "path", ".", "join", "(", "path", ",", "name", ")", "try", ":", "mode", "=", "os", ".", "lstat", "(", "fullname", ")", ".", "st_mode", "except", "os", ".", "error", ":", "mode", "=", "0", "if", "stat", ".", "S_ISDIR", "(", "mode", ")", ":", "rmtree", "(", "fullname", ",", "ignore_errors", ",", "onerror", ")", "else", ":", "try", ":", "os", ".", "remove", "(", "fullname", ")", "except", "os", ".", "error", ":", "onerror", "(", "os", ".", "remove", ",", "fullname", ",", "sys", ".", "exc_info", "(", ")", ")", "try", ":", "os", ".", "rmdir", "(", "path", ")", "except", "os", ".", "error", ":", "onerror", "(", "os", ".", "rmdir", ",", "path", ",", "sys", ".", "exc_info", "(", ")", ")" ]
[ 248, 0 ]
[ 294, 47 ]
python
en
['en', 'en', 'en']
True
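A sketch of an onerror handler that clears the read-only bit and retries, a common workaround for permission failures on Windows (the handler and path are assumptions, not part of the stdlib; a bare raise works here because onerror is invoked from inside rmtree's except blocks):

import os
import stat

def force_remove(func, path, exc_info):
    # If the path is read-only, make it writable and retry once.
    if not os.access(path, os.W_OK):
        os.chmod(path, stat.S_IWUSR)
        func(path)
    else:
        raise  # re-raise the exception rmtree is currently handling

rmtree('build/', onerror=force_remove)  # 'build/' is a hypothetical path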
move
(src, dst)
Recursively move a file or directory to another location. This is similar to the Unix "mv" command. If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist. If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics. If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. A lot more could be done here... A look at a mv.c shows a lot of the issues this implementation glosses over.
Recursively move a file or directory to another location. This is similar to the Unix "mv" command.
def move(src, dst): """Recursively move a file or directory to another location. This is similar to the Unix "mv" command. If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already exist. If the destination already exists but is not a directory, it may be overwritten depending on os.rename() semantics. If the destination is on our current filesystem, then rename() is used. Otherwise, src is copied to the destination and then removed. A lot more could be done here... A look at a mv.c shows a lot of the issues this implementation glosses over. """ real_dst = dst if os.path.isdir(dst): if _samefile(src, dst): # We might be on a case insensitive filesystem, # perform the rename anyway. os.rename(src, dst) return real_dst = os.path.join(dst, _basename(src)) if os.path.exists(real_dst): raise Error("Destination path '%s' already exists" % real_dst) try: os.rename(src, real_dst) except OSError: if os.path.isdir(src): if _destinsrc(src, dst): raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) copytree(src, real_dst, symlinks=True) rmtree(src) else: copy2(src, real_dst) os.unlink(src)
[ "def", "move", "(", "src", ",", "dst", ")", ":", "real_dst", "=", "dst", "if", "os", ".", "path", ".", "isdir", "(", "dst", ")", ":", "if", "_samefile", "(", "src", ",", "dst", ")", ":", "# We might be on a case insensitive filesystem,", "# perform the rename anyway.", "os", ".", "rename", "(", "src", ",", "dst", ")", "return", "real_dst", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "_basename", "(", "src", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "real_dst", ")", ":", "raise", "Error", "(", "\"Destination path '%s' already exists\"", "%", "real_dst", ")", "try", ":", "os", ".", "rename", "(", "src", ",", "real_dst", ")", "except", "OSError", ":", "if", "os", ".", "path", ".", "isdir", "(", "src", ")", ":", "if", "_destinsrc", "(", "src", ",", "dst", ")", ":", "raise", "Error", "(", "\"Cannot move a directory '%s' into itself '%s'.\"", "%", "(", "src", ",", "dst", ")", ")", "copytree", "(", "src", ",", "real_dst", ",", "symlinks", "=", "True", ")", "rmtree", "(", "src", ")", "else", ":", "copy2", "(", "src", ",", "real_dst", ")", "os", ".", "unlink", "(", "src", ")" ]
[ 302, 0 ]
[ 340, 26 ]
python
en
['en', 'en', 'en']
True
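A short sketch of the move contract described above: moving a file into an existing directory lands it inside that directory, and repeating the move raises Error. Assumes move and Error come from the same module as this record.

import os
import tempfile

src_dir = tempfile.mkdtemp()
dst_dir = tempfile.mkdtemp()
src = os.path.join(src_dir, "report.txt")
open(src, "w").close()

move(src, dst_dir)          # dst is a directory, so the file moves inside it
print(os.listdir(dst_dir))  # ['report.txt']
# Moving another 'report.txt' there would raise
# Error("Destination path ... already exists")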
_get_gid
(name)
Returns a gid, given a group name.
Returns a gid, given a group name.
def _get_gid(name): """Returns a gid, given a group name.""" if getgrnam is None or name is None: return None try: result = getgrnam(name) except KeyError: result = None if result is not None: return result[2] return None
[ "def", "_get_gid", "(", "name", ")", ":", "if", "getgrnam", "is", "None", "or", "name", "is", "None", ":", "return", "None", "try", ":", "result", "=", "getgrnam", "(", "name", ")", "except", "KeyError", ":", "result", "=", "None", "if", "result", "is", "not", "None", ":", "return", "result", "[", "2", "]", "return", "None" ]
[ 351, 0 ]
[ 361, 15 ]
python
en
['en', 'en', 'en']
True
_get_uid
(name)
Returns a uid, given a user name.
Returns a uid, given a user name.
def _get_uid(name): """Returns a uid, given a user name.""" if getpwnam is None or name is None: return None try: result = getpwnam(name) except KeyError: result = None if result is not None: return result[2] return None
[ "def", "_get_uid", "(", "name", ")", ":", "if", "getpwnam", "is", "None", "or", "name", "is", "None", ":", "return", "None", "try", ":", "result", "=", "getpwnam", "(", "name", ")", "except", "KeyError", ":", "result", "=", "None", "if", "result", "is", "not", "None", ":", "return", "result", "[", "2", "]", "return", "None" ]
[ 363, 0 ]
[ 373, 15 ]
python
en
['en', 'en', 'en']
True
_make_tarball
(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None, logger=None)
Create a (possibly compressed) tar file from all the files under 'base_dir'. 'compress' must be "gzip" (the default), "bzip2", or None. 'owner' and 'group' can be used to define an owner and a group for the archive that is being built. If not provided, the current owner and group will be used. The output tar file will be named 'base_name' + ".tar", possibly plus the appropriate compression extension (".gz", or ".bz2"). Returns the output filename.
Create a (possibly compressed) tar file from all the files under 'base_dir'.
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None, logger=None): """Create a (possibly compressed) tar file from all the files under 'base_dir'. 'compress' must be "gzip" (the default), "bzip2", or None. 'owner' and 'group' can be used to define an owner and a group for the archive that is being built. If not provided, the current owner and group will be used. The output tar file will be named 'base_name' + ".tar", possibly plus the appropriate compression extension (".gz", or ".bz2"). Returns the output filename. """ tar_compression = {'gzip': 'gz', None: ''} compress_ext = {'gzip': '.gz'} if _BZ2_SUPPORTED: tar_compression['bzip2'] = 'bz2' compress_ext['bzip2'] = '.bz2' # flags for compression program, each element of list will be an argument if compress is not None and compress not in compress_ext: raise ValueError("bad value for 'compress', or compression format not " "supported : {0}".format(compress)) archive_name = base_name + '.tar' + compress_ext.get(compress, '') archive_dir = os.path.dirname(archive_name) if not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: os.makedirs(archive_dir) # creating the tarball if logger is not None: logger.info('Creating tar archive') uid = _get_uid(owner) gid = _get_gid(group) def _set_uid_gid(tarinfo): if gid is not None: tarinfo.gid = gid tarinfo.gname = group if uid is not None: tarinfo.uid = uid tarinfo.uname = owner return tarinfo if not dry_run: tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) try: tar.add(base_dir, filter=_set_uid_gid) finally: tar.close() return archive_name
[ "def", "_make_tarball", "(", "base_name", ",", "base_dir", ",", "compress", "=", "\"gzip\"", ",", "verbose", "=", "0", ",", "dry_run", "=", "0", ",", "owner", "=", "None", ",", "group", "=", "None", ",", "logger", "=", "None", ")", ":", "tar_compression", "=", "{", "'gzip'", ":", "'gz'", ",", "None", ":", "''", "}", "compress_ext", "=", "{", "'gzip'", ":", "'.gz'", "}", "if", "_BZ2_SUPPORTED", ":", "tar_compression", "[", "'bzip2'", "]", "=", "'bz2'", "compress_ext", "[", "'bzip2'", "]", "=", "'.bz2'", "# flags for compression program, each element of list will be an argument", "if", "compress", "is", "not", "None", "and", "compress", "not", "in", "compress_ext", ":", "raise", "ValueError", "(", "\"bad value for 'compress', or compression format not \"", "\"supported : {0}\"", ".", "format", "(", "compress", ")", ")", "archive_name", "=", "base_name", "+", "'.tar'", "+", "compress_ext", ".", "get", "(", "compress", ",", "''", ")", "archive_dir", "=", "os", ".", "path", ".", "dirname", "(", "archive_name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "archive_dir", ")", ":", "if", "logger", "is", "not", "None", ":", "logger", ".", "info", "(", "\"creating %s\"", ",", "archive_dir", ")", "if", "not", "dry_run", ":", "os", ".", "makedirs", "(", "archive_dir", ")", "# creating the tarball", "if", "logger", "is", "not", "None", ":", "logger", ".", "info", "(", "'Creating tar archive'", ")", "uid", "=", "_get_uid", "(", "owner", ")", "gid", "=", "_get_gid", "(", "group", ")", "def", "_set_uid_gid", "(", "tarinfo", ")", ":", "if", "gid", "is", "not", "None", ":", "tarinfo", ".", "gid", "=", "gid", "tarinfo", ".", "gname", "=", "group", "if", "uid", "is", "not", "None", ":", "tarinfo", ".", "uid", "=", "uid", "tarinfo", ".", "uname", "=", "owner", "return", "tarinfo", "if", "not", "dry_run", ":", "tar", "=", "tarfile", ".", "open", "(", "archive_name", ",", "'w|%s'", "%", "tar_compression", "[", "compress", "]", ")", "try", ":", "tar", ".", "add", "(", "base_dir", ",", "filter", "=", "_set_uid_gid", ")", "finally", ":", "tar", ".", "close", "(", ")", "return", "archive_name" ]
[ 375, 0 ]
[ 435, 23 ]
python
en
['en', 'en', 'en']
True
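_make_tarball is private and normally reached through make_archive, but its output-name arithmetic is easy to show; the paths below are hypothetical.

import os

if not os.path.isdir("site"):
    os.makedirs("site")
open(os.path.join("site", "index.html"), "w").close()

# 'gzip' appends '.gz' after '.tar'; compress=None would produce a bare .tar
name = _make_tarball("out/site-backup", "site", compress="gzip")
print(name)  # out/site-backup.tar.gz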
_make_zipfile
(base_name, base_dir, verbose=0, dry_run=0, logger=None)
Create a zip file from all the files under 'base_dir'. The output zip file will be named 'base_name' + ".zip". Uses either the "zipfile" Python module (if available) or the InfoZIP "zip" utility (if installed and found on the default search path). If neither tool is available, raises ExecError. Returns the name of the output zip file.
Create a zip file from all the files under 'base_dir'.
def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): """Create a zip file from all the files under 'base_dir'. The output zip file will be named 'base_name' + ".zip". Uses either the "zipfile" Python module (if available) or the InfoZIP "zip" utility (if installed and found on the default search path). If neither tool is available, raises ExecError. Returns the name of the output zip file. """ zip_filename = base_name + ".zip" archive_dir = os.path.dirname(base_name) if not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: os.makedirs(archive_dir) # If zipfile module is not available, try spawning an external 'zip' # command. try: import zipfile except ImportError: zipfile = None if zipfile is None: _call_external_zip(base_dir, zip_filename, verbose, dry_run) else: if logger is not None: logger.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) if not dry_run: zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED) for dirpath, dirnames, filenames in os.walk(base_dir): for name in filenames: path = os.path.normpath(os.path.join(dirpath, name)) if os.path.isfile(path): zip.write(path, path) if logger is not None: logger.info("adding '%s'", path) zip.close() return zip_filename
[ "def", "_make_zipfile", "(", "base_name", ",", "base_dir", ",", "verbose", "=", "0", ",", "dry_run", "=", "0", ",", "logger", "=", "None", ")", ":", "zip_filename", "=", "base_name", "+", "\".zip\"", "archive_dir", "=", "os", ".", "path", ".", "dirname", "(", "base_name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "archive_dir", ")", ":", "if", "logger", "is", "not", "None", ":", "logger", ".", "info", "(", "\"creating %s\"", ",", "archive_dir", ")", "if", "not", "dry_run", ":", "os", ".", "makedirs", "(", "archive_dir", ")", "# If zipfile module is not available, try spawning an external 'zip'", "# command.", "try", ":", "import", "zipfile", "except", "ImportError", ":", "zipfile", "=", "None", "if", "zipfile", "is", "None", ":", "_call_external_zip", "(", "base_dir", ",", "zip_filename", ",", "verbose", ",", "dry_run", ")", "else", ":", "if", "logger", "is", "not", "None", ":", "logger", ".", "info", "(", "\"creating '%s' and adding '%s' to it\"", ",", "zip_filename", ",", "base_dir", ")", "if", "not", "dry_run", ":", "zip", "=", "zipfile", ".", "ZipFile", "(", "zip_filename", ",", "\"w\"", ",", "compression", "=", "zipfile", ".", "ZIP_DEFLATED", ")", "for", "dirpath", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "base_dir", ")", ":", "for", "name", "in", "filenames", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "dirpath", ",", "name", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "zip", ".", "write", "(", "path", ",", "path", ")", "if", "logger", "is", "not", "None", ":", "logger", ".", "info", "(", "\"adding '%s'\"", ",", "path", ")", "zip", ".", "close", "(", ")", "return", "zip_filename" ]
[ 454, 0 ]
[ 499, 23 ]
python
en
['en', 'en', 'en']
True
get_archive_formats
()
Returns a list of supported formats for archiving and unarchiving. Each element of the returned sequence is a tuple (name, description)
Returns a list of supported formats for archiving and unarchiving.
def get_archive_formats(): """Returns a list of supported formats for archiving and unarchiving. Each element of the returned sequence is a tuple (name, description) """ formats = [(name, registry[2]) for name, registry in _ARCHIVE_FORMATS.items()] formats.sort() return formats
[ "def", "get_archive_formats", "(", ")", ":", "formats", "=", "[", "(", "name", ",", "registry", "[", "2", "]", ")", "for", "name", ",", "registry", "in", "_ARCHIVE_FORMATS", ".", "items", "(", ")", "]", "formats", ".", "sort", "(", ")", "return", "formats" ]
[ 512, 0 ]
[ 520, 18 ]
python
en
['en', 'en', 'en']
True
register_archive_format
(name, function, extra_args=None, description='')
Registers an archive format. name is the name of the format. function is the callable that will be used to create archives. If provided, extra_args is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_archive_formats() function.
Registers an archive format.
def register_archive_format(name, function, extra_args=None, description=''): """Registers an archive format. name is the name of the format. function is the callable that will be used to create archives. If provided, extra_args is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_archive_formats() function. """ if extra_args is None: extra_args = [] if not isinstance(function, Callable): raise TypeError('The %s object is not callable' % function) if not isinstance(extra_args, (tuple, list)): raise TypeError('extra_args needs to be a sequence') for element in extra_args: if not isinstance(element, (tuple, list)) or len(element) !=2: raise TypeError('extra_args elements are : (arg_name, value)') _ARCHIVE_FORMATS[name] = (function, extra_args, description)
[ "def", "register_archive_format", "(", "name", ",", "function", ",", "extra_args", "=", "None", ",", "description", "=", "''", ")", ":", "if", "extra_args", "is", "None", ":", "extra_args", "=", "[", "]", "if", "not", "isinstance", "(", "function", ",", "Callable", ")", ":", "raise", "TypeError", "(", "'The %s object is not callable'", "%", "function", ")", "if", "not", "isinstance", "(", "extra_args", ",", "(", "tuple", ",", "list", ")", ")", ":", "raise", "TypeError", "(", "'extra_args needs to be a sequence'", ")", "for", "element", "in", "extra_args", ":", "if", "not", "isinstance", "(", "element", ",", "(", "tuple", ",", "list", ")", ")", "or", "len", "(", "element", ")", "!=", "2", ":", "raise", "TypeError", "(", "'extra_args elements are : (arg_name, value)'", ")", "_ARCHIVE_FORMATS", "[", "name", "]", "=", "(", "function", ",", "extra_args", ",", "description", ")" ]
[ 522, 0 ]
[ 541, 64 ]
python
en
['en', 'en', 'en']
True
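A sketch of registering a custom format. The archiver must tolerate the dry_run/logger (and, for non-zip formats, owner/group) keyword arguments that make_archive forwards, hence the **kwargs; the 'listing' format itself is invented for illustration.

import os

def _make_listing(base_name, base_dir, dry_run=0, logger=None, **kwargs):
    # Toy archiver: writes a plain-text file listing instead of a real archive.
    out = base_name + ".lst"
    if not dry_run:
        with open(out, "w") as f:
            for root, dirs, files in os.walk(base_dir):
                for fn in files:
                    f.write(os.path.join(root, fn) + "\n")
    return out

register_archive_format("listing", _make_listing,
                        description="plain-text file listing")
print(dict(get_archive_formats())["listing"])  # 'plain-text file listing'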
make_archive
(base_name, format, root_dir=None, base_dir=None, verbose=0, dry_run=0, owner=None, group=None, logger=None)
Create an archive file (eg. zip or tar). 'base_name' is the name of the file to create, minus any format-specific extension; 'format' is the archive format: one of "zip", "tar", "bztar" or "gztar". 'root_dir' is a directory that will be the root directory of the archive; ie. we typically chdir into 'root_dir' before creating the archive. 'base_dir' is the directory where we start archiving from; ie. 'base_dir' will be the common prefix of all files and directories in the archive. 'root_dir' and 'base_dir' both default to the current directory. Returns the name of the archive file. 'owner' and 'group' are used when creating a tar archive. By default, uses the current owner and group.
Create an archive file (eg. zip or tar).
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, dry_run=0, owner=None, group=None, logger=None): """Create an archive file (eg. zip or tar). 'base_name' is the name of the file to create, minus any format-specific extension; 'format' is the archive format: one of "zip", "tar", "bztar" or "gztar". 'root_dir' is a directory that will be the root directory of the archive; ie. we typically chdir into 'root_dir' before creating the archive. 'base_dir' is the directory where we start archiving from; ie. 'base_dir' will be the common prefix of all files and directories in the archive. 'root_dir' and 'base_dir' both default to the current directory. Returns the name of the archive file. 'owner' and 'group' are used when creating a tar archive. By default, uses the current owner and group. """ save_cwd = os.getcwd() if root_dir is not None: if logger is not None: logger.debug("changing into '%s'", root_dir) base_name = os.path.abspath(base_name) if not dry_run: os.chdir(root_dir) if base_dir is None: base_dir = os.curdir kwargs = {'dry_run': dry_run, 'logger': logger} try: format_info = _ARCHIVE_FORMATS[format] except KeyError: raise ValueError("unknown archive format '%s'" % format) func = format_info[0] for arg, val in format_info[1]: kwargs[arg] = val if format != 'zip': kwargs['owner'] = owner kwargs['group'] = group try: filename = func(base_name, base_dir, **kwargs) finally: if root_dir is not None: if logger is not None: logger.debug("changing back to '%s'", save_cwd) os.chdir(save_cwd) return filename
[ "def", "make_archive", "(", "base_name", ",", "format", ",", "root_dir", "=", "None", ",", "base_dir", "=", "None", ",", "verbose", "=", "0", ",", "dry_run", "=", "0", ",", "owner", "=", "None", ",", "group", "=", "None", ",", "logger", "=", "None", ")", ":", "save_cwd", "=", "os", ".", "getcwd", "(", ")", "if", "root_dir", "is", "not", "None", ":", "if", "logger", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"changing into '%s'\"", ",", "root_dir", ")", "base_name", "=", "os", ".", "path", ".", "abspath", "(", "base_name", ")", "if", "not", "dry_run", ":", "os", ".", "chdir", "(", "root_dir", ")", "if", "base_dir", "is", "None", ":", "base_dir", "=", "os", ".", "curdir", "kwargs", "=", "{", "'dry_run'", ":", "dry_run", ",", "'logger'", ":", "logger", "}", "try", ":", "format_info", "=", "_ARCHIVE_FORMATS", "[", "format", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"unknown archive format '%s'\"", "%", "format", ")", "func", "=", "format_info", "[", "0", "]", "for", "arg", ",", "val", "in", "format_info", "[", "1", "]", ":", "kwargs", "[", "arg", "]", "=", "val", "if", "format", "!=", "'zip'", ":", "kwargs", "[", "'owner'", "]", "=", "owner", "kwargs", "[", "'group'", "]", "=", "group", "try", ":", "filename", "=", "func", "(", "base_name", ",", "base_dir", ",", "*", "*", "kwargs", ")", "finally", ":", "if", "root_dir", "is", "not", "None", ":", "if", "logger", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"changing back to '%s'\"", ",", "save_cwd", ")", "os", ".", "chdir", "(", "save_cwd", ")", "return", "filename" ]
[ 546, 0 ]
[ 598, 19 ]
python
en
['en', 'gd', 'en']
True
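Typical make_archive usage, matching the docstring's root_dir/base_dir split (hypothetical paths):

# chdir into /home/user, then archive the 'project' subtree; entries in the
# tarball are therefore prefixed 'project/' rather than being absolute paths.
name = make_archive("/tmp/demo", "gztar",
                    root_dir="/home/user", base_dir="project")
print(name)  # /tmp/demo.tar.gz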
get_unpack_formats
()
Returns a list of supported formats for unpacking. Each element of the returned sequence is a tuple (name, extensions, description)
Returns a list of supported formats for unpacking.
def get_unpack_formats(): """Returns a list of supported formats for unpacking. Each element of the returned sequence is a tuple (name, extensions, description) """ formats = [(name, info[0], info[3]) for name, info in _UNPACK_FORMATS.items()] formats.sort() return formats
[ "def", "get_unpack_formats", "(", ")", ":", "formats", "=", "[", "(", "name", ",", "info", "[", "0", "]", ",", "info", "[", "3", "]", ")", "for", "name", ",", "info", "in", "_UNPACK_FORMATS", ".", "items", "(", ")", "]", "formats", ".", "sort", "(", ")", "return", "formats" ]
[ 601, 0 ]
[ 610, 18 ]
python
en
['en', 'en', 'en']
True
_check_unpack_options
(extensions, function, extra_args)
Checks what gets registered as an unpacker.
Checks what gets registered as an unpacker.
def _check_unpack_options(extensions, function, extra_args): """Checks what gets registered as an unpacker.""" # first make sure no other unpacker is registered for this extension existing_extensions = {} for name, info in _UNPACK_FORMATS.items(): for ext in info[0]: existing_extensions[ext] = name for extension in extensions: if extension in existing_extensions: msg = '%s is already registered for "%s"' raise RegistryError(msg % (extension, existing_extensions[extension])) if not isinstance(function, Callable): raise TypeError('The registered function must be a callable')
[ "def", "_check_unpack_options", "(", "extensions", ",", "function", ",", "extra_args", ")", ":", "# first make sure no other unpacker is registered for this extension", "existing_extensions", "=", "{", "}", "for", "name", ",", "info", "in", "_UNPACK_FORMATS", ".", "items", "(", ")", ":", "for", "ext", "in", "info", "[", "0", "]", ":", "existing_extensions", "[", "ext", "]", "=", "name", "for", "extension", "in", "extensions", ":", "if", "extension", "in", "existing_extensions", ":", "msg", "=", "'%s is already registered for \"%s\"'", "raise", "RegistryError", "(", "msg", "%", "(", "extension", ",", "existing_extensions", "[", "extension", "]", ")", ")", "if", "not", "isinstance", "(", "function", ",", "Callable", ")", ":", "raise", "TypeError", "(", "'The registered function must be a callable'", ")" ]
[ 612, 0 ]
[ 627, 69 ]
python
en
['en', 'en', 'en']
True
register_unpack_format
(name, extensions, function, extra_args=None, description='')
Registers an unpack format. `name` is the name of the format. `extensions` is a list of extensions corresponding to the format. `function` is the callable that will be used to unpack archives. The callable will receive archives to unpack. If it's unable to handle an archive, it needs to raise a ReadError exception. If provided, `extra_args` is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_unpack_formats() function.
Registers an unpack format.
def register_unpack_format(name, extensions, function, extra_args=None, description=''): """Registers an unpack format. `name` is the name of the format. `extensions` is a list of extensions corresponding to the format. `function` is the callable that will be used to unpack archives. The callable will receive archives to unpack. If it's unable to handle an archive, it needs to raise a ReadError exception. If provided, `extra_args` is a sequence of (name, value) tuples that will be passed as arguments to the callable. description can be provided to describe the format, and will be returned by the get_unpack_formats() function. """ if extra_args is None: extra_args = [] _check_unpack_options(extensions, function, extra_args) _UNPACK_FORMATS[name] = extensions, function, extra_args, description
[ "def", "register_unpack_format", "(", "name", ",", "extensions", ",", "function", ",", "extra_args", "=", "None", ",", "description", "=", "''", ")", ":", "if", "extra_args", "is", "None", ":", "extra_args", "=", "[", "]", "_check_unpack_options", "(", "extensions", ",", "function", ",", "extra_args", ")", "_UNPACK_FORMATS", "[", "name", "]", "=", "extensions", ",", "function", ",", "extra_args", ",", "description" ]
[ 630, 0 ]
[ 650, 73 ]
python
en
['en', 'fr', 'en']
True
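Registering an unpacker for the invented '.lst' listings from the earlier sketch. ReadError and _ensure_directory are assumed to be the helpers defined in this same module.

import os

def _unpack_listing(filename, extract_dir):
    # Toy unpacker: recreate each listed path as an empty file.
    if not filename.endswith(".lst"):
        raise ReadError("%s is not a listing file" % filename)
    with open(filename) as f:
        for line in f:
            target = os.path.join(extract_dir, line.strip())
            _ensure_directory(target)
            open(target, "w").close()

register_unpack_format("listing", [".lst"], _unpack_listing,
                       description="plain-text file listing")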
unregister_unpack_format
(name)
Removes the unpack format from the registry.
Removes the unpack format from the registry.
def unregister_unpack_format(name): """Removes the unpack format from the registry.""" del _UNPACK_FORMATS[name]
[ "def", "unregister_unpack_format", "(", "name", ")", ":", "del", "_UNPACK_FORMATS", "[", "name", "]" ]
[ 652, 0 ]
[ 654, 29 ]
python
en
['en', 'en', 'en']
True
_ensure_directory
(path)
Ensure that the parent directory of `path` exists
Ensure that the parent directory of `path` exists
def _ensure_directory(path): """Ensure that the parent directory of `path` exists""" dirname = os.path.dirname(path) if not os.path.isdir(dirname): os.makedirs(dirname)
[ "def", "_ensure_directory", "(", "path", ")", ":", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "dirname", ")", ":", "os", ".", "makedirs", "(", "dirname", ")" ]
[ 656, 0 ]
[ 660, 28 ]
python
en
['en', 'en', 'en']
True
_unpack_zipfile
(filename, extract_dir)
Unpack zip `filename` to `extract_dir`
Unpack zip `filename` to `extract_dir`
def _unpack_zipfile(filename, extract_dir): """Unpack zip `filename` to `extract_dir` """ try: import zipfile except ImportError: raise ReadError('zlib not supported, cannot unpack this archive.') if not zipfile.is_zipfile(filename): raise ReadError("%s is not a zip file" % filename) zip = zipfile.ZipFile(filename) try: for info in zip.infolist(): name = info.filename # don't extract absolute paths or ones with .. in them if name.startswith('/') or '..' in name: continue target = os.path.join(extract_dir, *name.split('/')) if not target: continue _ensure_directory(target) if not name.endswith('/'): # file data = zip.read(info.filename) f = open(target, 'wb') try: f.write(data) finally: f.close() del data finally: zip.close()
[ "def", "_unpack_zipfile", "(", "filename", ",", "extract_dir", ")", ":", "try", ":", "import", "zipfile", "except", "ImportError", ":", "raise", "ReadError", "(", "'zlib not supported, cannot unpack this archive.'", ")", "if", "not", "zipfile", ".", "is_zipfile", "(", "filename", ")", ":", "raise", "ReadError", "(", "\"%s is not a zip file\"", "%", "filename", ")", "zip", "=", "zipfile", ".", "ZipFile", "(", "filename", ")", "try", ":", "for", "info", "in", "zip", ".", "infolist", "(", ")", ":", "name", "=", "info", ".", "filename", "# don't extract absolute paths or ones with .. in them", "if", "name", ".", "startswith", "(", "'/'", ")", "or", "'..'", "in", "name", ":", "continue", "target", "=", "os", ".", "path", ".", "join", "(", "extract_dir", ",", "*", "name", ".", "split", "(", "'/'", ")", ")", "if", "not", "target", ":", "continue", "_ensure_directory", "(", "target", ")", "if", "not", "name", ".", "endswith", "(", "'/'", ")", ":", "# file", "data", "=", "zip", ".", "read", "(", "info", ".", "filename", ")", "f", "=", "open", "(", "target", ",", "'wb'", ")", "try", ":", "f", ".", "write", "(", "data", ")", "finally", ":", "f", ".", "close", "(", ")", "del", "data", "finally", ":", "zip", ".", "close", "(", ")" ]
[ 662, 0 ]
[ 697, 19 ]
python
en
['en', 'nl', 'ur']
False
_unpack_tarfile
(filename, extract_dir)
Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
def _unpack_tarfile(filename, extract_dir): """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` """ try: tarobj = tarfile.open(filename) except tarfile.TarError: raise ReadError( "%s is not a compressed or uncompressed tar file" % filename) try: tarobj.extractall(extract_dir) finally: tarobj.close()
[ "def", "_unpack_tarfile", "(", "filename", ",", "extract_dir", ")", ":", "try", ":", "tarobj", "=", "tarfile", ".", "open", "(", "filename", ")", "except", "tarfile", ".", "TarError", ":", "raise", "ReadError", "(", "\"%s is not a compressed or uncompressed tar file\"", "%", "filename", ")", "try", ":", "tarobj", ".", "extractall", "(", "extract_dir", ")", "finally", ":", "tarobj", ".", "close", "(", ")" ]
[ 699, 0 ]
[ 710, 22 ]
python
en
['en', 'id', 'hi']
False
unpack_archive
(filename, extract_dir=None, format=None)
Unpack an archive. `filename` is the name of the archive. `extract_dir` is the name of the target directory, where the archive is unpacked. If not provided, the current working directory is used. `format` is the archive format: one of "zip", "tar", or "gztar". Or any other registered format. If not provided, unpack_archive will use the filename extension and see if an unpacker was registered for that extension. In case none is found, a ValueError is raised.
Unpack an archive.
def unpack_archive(filename, extract_dir=None, format=None): """Unpack an archive. `filename` is the name of the archive. `extract_dir` is the name of the target directory, where the archive is unpacked. If not provided, the current working directory is used. `format` is the archive format: one of "zip", "tar", or "gztar". Or any other registered format. If not provided, unpack_archive will use the filename extension and see if an unpacker was registered for that extension. In case none is found, a ValueError is raised. """ if extract_dir is None: extract_dir = os.getcwd() if format is not None: try: format_info = _UNPACK_FORMATS[format] except KeyError: raise ValueError("Unknown unpack format '{0}'".format(format)) func = format_info[1] func(filename, extract_dir, **dict(format_info[2])) else: # we need to look at the registered unpackers supported extensions format = _find_unpack_format(filename) if format is None: raise ReadError("Unknown archive format '{0}'".format(filename)) func = _UNPACK_FORMATS[format][1] kwargs = dict(_UNPACK_FORMATS[format][2]) func(filename, extract_dir, **kwargs)
[ "def", "unpack_archive", "(", "filename", ",", "extract_dir", "=", "None", ",", "format", "=", "None", ")", ":", "if", "extract_dir", "is", "None", ":", "extract_dir", "=", "os", ".", "getcwd", "(", ")", "if", "format", "is", "not", "None", ":", "try", ":", "format_info", "=", "_UNPACK_FORMATS", "[", "format", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Unknown unpack format '{0}'\"", ".", "format", "(", "format", ")", ")", "func", "=", "format_info", "[", "1", "]", "func", "(", "filename", ",", "extract_dir", ",", "*", "*", "dict", "(", "format_info", "[", "2", "]", ")", ")", "else", ":", "# we need to look at the registered unpackers supported extensions", "format", "=", "_find_unpack_format", "(", "filename", ")", "if", "format", "is", "None", ":", "raise", "ReadError", "(", "\"Unknown archive format '{0}'\"", ".", "format", "(", "filename", ")", ")", "func", "=", "_UNPACK_FORMATS", "[", "format", "]", "[", "1", "]", "kwargs", "=", "dict", "(", "_UNPACK_FORMATS", "[", "format", "]", "[", "2", "]", ")", "func", "(", "filename", ",", "extract_dir", ",", "*", "*", "kwargs", ")" ]
[ 729, 0 ]
[ 763, 45 ]
python
de
['en', 'fr', 'de']
False
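Both dispatch modes from the docstring, with hypothetical paths:

# Extension-based: '.tar.gz' matches the registered 'gztar' extensions,
# so no explicit format argument is needed.
unpack_archive("/tmp/demo.tar.gz", extract_dir="/tmp/restored")

# Explicit format, bypassing extension lookup entirely:
unpack_archive("/tmp/payload.bin", extract_dir="/tmp/restored", format="zip")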
hello_monkey
()
Respond to incoming calls with a simple text message.
Respond to incoming calls with a simple text message.
def hello_monkey(): """Respond to incoming calls with a simple text message.""" resp = MessagingResponse() resp.message("Hello, Mobile Monkey") return str(resp)
[ "def", "hello_monkey", "(", ")", ":", "resp", "=", "MessagingResponse", "(", ")", "resp", ".", "message", "(", "\"Hello, Mobile Monkey\"", ")", "return", "str", "(", "resp", ")" ]
[ 8, 0 ]
[ 13, 20 ]
python
en
['en', 'en', 'en']
True
BaseMemcachedCache._cache
(self)
Implement transparent thread-safe access to a memcached client.
Implement transparent thread-safe access to a memcached client.
def _cache(self): """ Implement transparent thread-safe access to a memcached client. """ return self._class(self.client_servers, **self._options)
[ "def", "_cache", "(", "self", ")", ":", "return", "self", ".", "_class", "(", "self", ".", "client_servers", ",", "*", "*", "self", ".", "_options", ")" ]
[ 35, 4 ]
[ 39, 64 ]
python
en
['en', 'error', 'th']
False
BaseMemcachedCache.get_backend_timeout
(self, timeout=DEFAULT_TIMEOUT)
Memcached deals with long (> 30 days) timeouts in a special way. Call this function to obtain a safe value for your timeout.
Memcached deals with long (> 30 days) timeouts in a special way. Call this function to obtain a safe value for your timeout.
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): """ Memcached deals with long (> 30 days) timeouts in a special way. Call this function to obtain a safe value for your timeout. """ if timeout == DEFAULT_TIMEOUT: timeout = self.default_timeout if timeout is None: # Using 0 in memcache sets a non-expiring timeout. return 0 elif int(timeout) == 0: # Other cache backends treat 0 as set-and-expire. To achieve this # in memcache backends, a negative timeout must be passed. timeout = -1 if timeout > 2592000: # 60*60*24*30, 30 days # See https://github.com/memcached/memcached/wiki/Programming#expiration # "Expiration times can be set from 0, meaning "never expire", to # 30 days. Any time higher than 30 days is interpreted as a Unix # timestamp date. If you want to expire an object on January 1st of # next year, this is how you do that." # # This means that we have to switch to absolute timestamps. timeout += int(time.time()) return int(timeout)
[ "def", "get_backend_timeout", "(", "self", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "if", "timeout", "==", "DEFAULT_TIMEOUT", ":", "timeout", "=", "self", ".", "default_timeout", "if", "timeout", "is", "None", ":", "# Using 0 in memcache sets a non-expiring timeout.", "return", "0", "elif", "int", "(", "timeout", ")", "==", "0", ":", "# Other cache backends treat 0 as set-and-expire. To achieve this", "# in memcache backends, a negative timeout must be passed.", "timeout", "=", "-", "1", "if", "timeout", ">", "2592000", ":", "# 60*60*24*30, 30 days", "# See https://github.com/memcached/memcached/wiki/Programming#expiration", "# \"Expiration times can be set from 0, meaning \"never expire\", to", "# 30 days. Any time higher than 30 days is interpreted as a Unix", "# timestamp date. If you want to expire an object on January 1st of", "# next year, this is how you do that.\"", "#", "# This means that we have to switch to absolute timestamps.", "timeout", "+=", "int", "(", "time", ".", "time", "(", ")", ")", "return", "int", "(", "timeout", ")" ]
[ 41, 4 ]
[ 66, 27 ]
python
en
['en', 'error', 'th']
False
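A worked sketch of the timeout conversion, assuming `cache` is an instance of some concrete BaseMemcachedCache subclass with default_timeout=300:

import time

print(cache.get_backend_timeout())      # 300  (relative seconds)
print(cache.get_backend_timeout(None))  # 0    (memcached: never expire)
print(cache.get_backend_timeout(0))     # -1   (expire immediately)

# Beyond 30 days (2592000 s) the value becomes an absolute Unix timestamp:
forty_five_days = 60 * 60 * 24 * 45
print(cache.get_backend_timeout(forty_five_days))  # 3888000 + int(time.time())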
FilesystemTables.create
(self: "FilesystemTables", schema: Schema, table_identifier: str, spec: PartitionSpec = None, properties: dict = None, location: str = None)
Create a new table on the filesystem. Note: it is expected that the filesystem has atomic operations to ensure consistency for metadata updates. Filesystems that don't have this guarantee could lead to data loss. Location should always be None as the table location on disk is taken from `table_identifier`
Create a new table on the filesystem.
def create(self: "FilesystemTables", schema: Schema, table_identifier: str, spec: PartitionSpec = None, properties: dict = None, location: str = None) -> Table: """ Create a new table on the filesystem. Note: it is expected that the filesystem has atomic operations to ensure consistency for metadata updates. Filesystems that don't have this guarantee could lead to data loss. Location should always be None as the table location on disk is taken from `table_identifier` """ from ..base_table import BaseTable if location: raise RuntimeError("""location has to be None. Both table_identifier and location have been declared. table_identifier: {} and location: {}""".format(table_identifier, location)) full_spec, properties = super(FilesystemTables, self).default_args(spec, properties) ops = self.new_table_ops(table_identifier) metadata = TableMetadata.new_table_metadata(ops, schema, full_spec, table_identifier, properties) ops.commit(None, metadata) return BaseTable(ops, table_identifier)
[ "def", "create", "(", "self", ":", "\"FilesystemTables\"", ",", "schema", ":", "Schema", ",", "table_identifier", ":", "str", ",", "spec", ":", "PartitionSpec", "=", "None", ",", "properties", ":", "dict", "=", "None", ",", "location", ":", "str", "=", "None", ")", "->", "Table", ":", "from", ".", ".", "base_table", "import", "BaseTable", "if", "location", ":", "raise", "RuntimeError", "(", "\"\"\"location has to be None. Both table_identifier and location have been declared.\n table_identifier: {} and location: {}\"\"\"", ".", "format", "(", "table_identifier", ",", "location", ")", ")", "full_spec", ",", "properties", "=", "super", "(", "FilesystemTables", ",", "self", ")", ".", "default_args", "(", "spec", ",", "properties", ")", "ops", "=", "self", ".", "new_table_ops", "(", "table_identifier", ")", "metadata", "=", "TableMetadata", ".", "new_table_metadata", "(", "ops", ",", "schema", ",", "full_spec", ",", "table_identifier", ",", "properties", ")", "ops", ".", "commit", "(", "None", ",", "metadata", ")", "return", "BaseTable", "(", "ops", ",", "table_identifier", ")" ]
[ 37, 4 ]
[ 58, 47 ]
python
en
['en', 'error', 'th']
False
load_cdll
(name, macos10_16_path)
Loads a CDLL by name, falling back to known path on 10.16+
Loads a CDLL by name, falling back to known path on 10.16+
def load_cdll(name, macos10_16_path): """Loads a CDLL by name, falling back to known path on 10.16+""" try: # Big Sur is technically 11 but we use 10.16 due to the Big Sur # beta being labeled as 10.16. if version_info >= (10, 16): path = macos10_16_path else: path = find_library(name) if not path: raise OSError # Caught and reraised as 'ImportError' return CDLL(path, use_errno=True) except OSError: raise_from(ImportError("The library %s failed to load" % name), None)
[ "def", "load_cdll", "(", "name", ",", "macos10_16_path", ")", ":", "try", ":", "# Big Sur is technically 11 but we use 10.16 due to the Big Sur", "# beta being labeled as 10.16.", "if", "version_info", ">=", "(", "10", ",", "16", ")", ":", "path", "=", "macos10_16_path", "else", ":", "path", "=", "find_library", "(", "name", ")", "if", "not", "path", ":", "raise", "OSError", "# Caught and reraised as 'ImportError'", "return", "CDLL", "(", "path", ",", "use_errno", "=", "True", ")", "except", "OSError", ":", "raise_from", "(", "ImportError", "(", "\"The library %s failed to load\"", "%", "name", ")", ",", "None", ")" ]
[ 64, 0 ]
[ 77, 77 ]
python
en
['en', 'en', 'en']
True
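How this helper is typically invoked (this mirrors urllib3's SecureTransport bindings, but treat the exact framework paths as illustrative):

Security = load_cdll(
    "Security",
    "/System/Library/Frameworks/Security.framework/Security",
)
CoreFoundation = load_cdll(
    "CoreFoundation",
    "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)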
handle_userJoined
(bot, user, channel)
Automatically give operator status to admins
Automatically give operator status to admins
def handle_userJoined(bot, user, channel): "Automatically give operator status to admins" if permissions(user) >= 10: bot.mode(channel, True, 'o', user=get_nick(user))
[ "def", "handle_userJoined", "(", "bot", ",", "user", ",", "channel", ")", ":", "if", "permissions", "(", "user", ")", ">=", "10", ":", "bot", ".", "mode", "(", "channel", ",", "True", ",", "'o'", ",", "user", "=", "get_nick", "(", "user", ")", ")" ]
[ 1, 0 ]
[ 4, 57 ]
python
en
['en', 'en', 'en']
True
_get_all_permissions
(opts)
Return (codename, name) for all permissions in the given opts.
Return (codename, name) for all permissions in the given opts.
def _get_all_permissions(opts): """ Return (codename, name) for all permissions in the given opts. """ return [*_get_builtin_permissions(opts), *opts.permissions]
[ "def", "_get_all_permissions", "(", "opts", ")", ":", "return", "[", "*", "_get_builtin_permissions", "(", "opts", ")", ",", "*", "opts", ".", "permissions", "]" ]
[ 13, 0 ]
[ 17, 63 ]
python
en
['en', 'error', 'th']
False
_get_builtin_permissions
(opts)
Return (codename, name) for all autogenerated permissions. By default, this is ('add', 'change', 'delete', 'view')
Return (codename, name) for all autogenerated permissions. By default, this is ('add', 'change', 'delete', 'view')
def _get_builtin_permissions(opts): """ Return (codename, name) for all autogenerated permissions. By default, this is ('add', 'change', 'delete', 'view') """ perms = [] for action in opts.default_permissions: perms.append(( get_permission_codename(action, opts), 'Can %s %s' % (action, opts.verbose_name_raw) )) return perms
[ "def", "_get_builtin_permissions", "(", "opts", ")", ":", "perms", "=", "[", "]", "for", "action", "in", "opts", ".", "default_permissions", ":", "perms", ".", "append", "(", "(", "get_permission_codename", "(", "action", ",", "opts", ")", ",", "'Can %s %s'", "%", "(", "action", ",", "opts", ".", "verbose_name_raw", ")", ")", ")", "return", "perms" ]
[ 20, 0 ]
[ 31, 16 ]
python
en
['en', 'error', 'th']
False
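For a hypothetical model Article with the stock default_permissions ('add', 'change', 'delete', 'view') and verbose_name_raw == 'article', the helper would yield codename/name pairs like these:

perms = _get_builtin_permissions(Article._meta)  # Article is hypothetical
# [('add_article',    'Can add article'),
#  ('change_article', 'Can change article'),
#  ('delete_article', 'Can delete article'),
#  ('view_article',   'Can view article')]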
get_system_username
()
Return the current system user's username, or an empty string if the username could not be determined.
Return the current system user's username, or an empty string if the username could not be determined.
def get_system_username(): """ Return the current system user's username, or an empty string if the username could not be determined. """ try: result = getpass.getuser() except (ImportError, KeyError): # KeyError will be raised by os.getpwuid() (called by getuser()) # if there is no corresponding entry in the /etc/passwd file # (a very restricted chroot environment, for example). return '' return result
[ "def", "get_system_username", "(", ")", ":", "try", ":", "result", "=", "getpass", ".", "getuser", "(", ")", "except", "(", "ImportError", ",", "KeyError", ")", ":", "# KeyError will be raised by os.getpwuid() (called by getuser())", "# if there is no corresponding entry in the /etc/passwd file", "# (a very restricted chroot environment, for example).", "return", "''", "return", "result" ]
[ 88, 0 ]
[ 100, 17 ]
python
en
['en', 'error', 'th']
False
get_default_username
(check_db=True, database=DEFAULT_DB_ALIAS)
Try to determine the current system user's username to use as a default. :param check_db: If ``True``, requires that the username does not match an existing ``auth.User`` (otherwise returns an empty string). :param database: The database where the unique check will be performed. :returns: The username, or an empty string if no username can be determined or the suggested username is already taken.
Try to determine the current system user's username to use as a default.
def get_default_username(check_db=True, database=DEFAULT_DB_ALIAS): """ Try to determine the current system user's username to use as a default. :param check_db: If ``True``, requires that the username does not match an existing ``auth.User`` (otherwise returns an empty string). :param database: The database where the unique check will be performed. :returns: The username, or an empty string if no username can be determined or the suggested username is already taken. """ # This file is used in apps.py, it should not trigger models import. from django.contrib.auth import models as auth_app # If the User model has been swapped out, we can't make any assumptions # about the default user name. if auth_app.User._meta.swapped: return '' default_username = get_system_username() try: default_username = ( unicodedata.normalize('NFKD', default_username) .encode('ascii', 'ignore').decode('ascii') .replace(' ', '').lower() ) except UnicodeDecodeError: return '' # Run the username validator try: auth_app.User._meta.get_field('username').run_validators(default_username) except exceptions.ValidationError: return '' # Don't return the default username if it is already taken. if check_db and default_username: try: auth_app.User._default_manager.db_manager(database).get( username=default_username, ) except auth_app.User.DoesNotExist: pass else: return '' return default_username
[ "def", "get_default_username", "(", "check_db", "=", "True", ",", "database", "=", "DEFAULT_DB_ALIAS", ")", ":", "# This file is used in apps.py, it should not trigger models import.", "from", "django", ".", "contrib", ".", "auth", "import", "models", "as", "auth_app", "# If the User model has been swapped out, we can't make any assumptions", "# about the default user name.", "if", "auth_app", ".", "User", ".", "_meta", ".", "swapped", ":", "return", "''", "default_username", "=", "get_system_username", "(", ")", "try", ":", "default_username", "=", "(", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "default_username", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", ".", "decode", "(", "'ascii'", ")", ".", "replace", "(", "' '", ",", "''", ")", ".", "lower", "(", ")", ")", "except", "UnicodeDecodeError", ":", "return", "''", "# Run the username validator", "try", ":", "auth_app", ".", "User", ".", "_meta", ".", "get_field", "(", "'username'", ")", ".", "run_validators", "(", "default_username", ")", "except", "exceptions", ".", "ValidationError", ":", "return", "''", "# Don't return the default username if it is already taken.", "if", "check_db", "and", "default_username", ":", "try", ":", "auth_app", ".", "User", ".", "_default_manager", ".", "db_manager", "(", "database", ")", ".", "get", "(", "username", "=", "default_username", ",", ")", "except", "auth_app", ".", "User", ".", "DoesNotExist", ":", "pass", "else", ":", "return", "''", "return", "default_username" ]
[ 103, 0 ]
[ 147, 27 ]
python
en
['en', 'error', 'th']
False
init_logger
(logdir, loglevel, nologs, quiet)
Initializes the logger for system messages.
Initializes the logger for system messages.
def init_logger(logdir, loglevel, nologs, quiet): "Initializes the logger for system messages." logger = logging.getLogger() # Set the loglevel. if loglevel > 3: # Cap at 3, incase someone likes their v-key too much. loglevel = 3 levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logger.setLevel(levels[loglevel]) log.info("Loglevel is {}.".format(levels[loglevel])) logformat = "%(asctime)-14s %(levelname)-8s %(name)-8s %(message)s" formatter = logging.Formatter(logformat) # Only attach a file handler, if the --no-logs option is not enabled. if not nologs: try: logfile = os.path.join(logdir, "demibot.log") file_handler = logging.FileHandler(logfile) file_handler.setFormatter(formatter) logger.addHandler(file_handler) log.debug("Added logging file handler.") except IOError: log.error("Could not attach file handler. Only logging to stdout.") # Only attach a console handler if both nologs and quiet are disabled. if not quiet: console_handler = logging.StreamHandler(sys.stdout) console_handler.setFormatter(formatter) logger.addHandler(console_handler) log.debug("Added logging console handler.")
[ "def", "init_logger", "(", "logdir", ",", "loglevel", ",", "nologs", ",", "quiet", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "# Set the loglevel.", "if", "loglevel", ">", "3", ":", "# Cap at 3, incase someone likes their v-key too much.", "loglevel", "=", "3", "levels", "=", "[", "logging", ".", "ERROR", ",", "logging", ".", "WARN", ",", "logging", ".", "INFO", ",", "logging", ".", "DEBUG", "]", "logger", ".", "setLevel", "(", "levels", "[", "loglevel", "]", ")", "log", ".", "info", "(", "\"Loglevel is {}.\"", ".", "format", "(", "levels", "[", "loglevel", "]", ")", ")", "logformat", "=", "\"%(asctime)-14s %(levelname)-8s %(name)-8s %(message)s\"", "formatter", "=", "logging", ".", "Formatter", "(", "logformat", ")", "# Only attach a file handler, if the --no-logs option is not enabled.", "if", "not", "nologs", ":", "try", ":", "logfile", "=", "os", ".", "path", ".", "join", "(", "logdir", ",", "\"demibot.log\"", ")", "file_handler", "=", "logging", ".", "FileHandler", "(", "logfile", ")", "file_handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "file_handler", ")", "log", ".", "debug", "(", "\"Added logging file handler.\"", ")", "except", "IOError", ":", "log", ".", "error", "(", "\"Could not attach file handler. Only logging to stdout.\"", ")", "# Only attach a console handler if both nologs and quiet are disabled.", "if", "not", "quiet", ":", "console_handler", "=", "logging", ".", "StreamHandler", "(", "sys", ".", "stdout", ")", "console_handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "console_handler", ")", "log", ".", "debug", "(", "\"Added logging console handler.\"", ")" ]
[ 77, 0 ]
[ 108, 55 ]
python
en
['en', 'en', 'en']
True
ChatLogger.log
(self, msg, channel)
Write a log line with a time stamp to the logfile of the channel
Write a log line with a time stamp to the logfile of the channel
def log(self, msg, channel): "Write a log line with a time stamp to the logfile of the channel" timestamp = time.strftime("%H:%M:%S", time.localtime(time.time())) try: self.logfiles[channel].write("[{}] {}\n".format(timestamp, msg)) self.logfiles[channel].flush() except KeyError as e: log.error("KeyError: {}. Missing write permissions?".format(e)) if self.factory.logs_enabled: self.factory.logs_enabled = False
[ "def", "log", "(", "self", ",", "msg", ",", "channel", ")", ":", "timestamp", "=", "time", ".", "strftime", "(", "\"%H:%M:%S\"", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "try", ":", "self", ".", "logfiles", "[", "channel", "]", ".", "write", "(", "\"[{}] {}\\n\"", ".", "format", "(", "timestamp", ",", "msg", ")", ")", "self", ".", "logfiles", "[", "channel", "]", ".", "flush", "(", ")", "except", "KeyError", "as", "e", ":", "log", ".", "error", "(", "\"KeyError: {}. Missing write permissions?\"", ".", "format", "(", "e", ")", ")", "if", "self", ".", "factory", ".", "logs_enabled", ":", "self", ".", "factory", ".", "logs_enabled", "=", "False" ]
[ 18, 4 ]
[ 27, 49 ]
python
en
['en', 'en', 'en']
True
ChatLogger.log_url
(self, msg, channel)
Messages that contain urls are logged separately. Why not?
Messages that contain urls are logged separately. Why not?
def log_url(self, msg, channel): "Messages that contain urls are logged separately. Why not?" timestamp = time.strftime("%H:%M:%S", time.localtime(time.time())) self.logfiles["urls"].write("[{}] ({}) {}\n".format(timestamp, channel, msg)) self.logfiles["urls"].flush()
[ "def", "log_url", "(", "self", ",", "msg", ",", "channel", ")", ":", "timestamp", "=", "time", ".", "strftime", "(", "\"%H:%M:%S\"", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "self", ".", "logfiles", "[", "\"urls\"", "]", ".", "write", "(", "\"[{}] ({}) {}\\n\"", ".", "format", "(", "timestamp", ",", "channel", ",", "msg", ")", ")", "self", ".", "logfiles", "[", "\"urls\"", "]", ".", "flush", "(", ")" ]
[ 29, 4 ]
[ 34, 37 ]
python
en
['en', 'en', 'en']
True
ChatLogger.del_channel
(self, channel)
Removes a channel from the logfiles dictionary
Removes a channel from the logfiles dictionary
def del_channel(self, channel): "Removes a channel from the logfiles dictionary" # To avoid a keyerror, pop will return None if the key is not found. self.logfiles.pop(channel, None)
[ "def", "del_channel", "(", "self", ",", "channel", ")", ":", "# To avoid a keyerror, pop will return None if the key is not found.", "self", ".", "logfiles", ".", "pop", "(", "channel", ",", "None", ")" ]
[ 52, 4 ]
[ 55, 40 ]
python
en
['en', 'en', 'en']
True
ChatLogger.open_logs
(self, channels)
Creates the file handles and opens them.
Creates the file handles and opens them.
def open_logs(self, channels): "Creates the file handles and opens them." for channel in channels: self.add_channel(channel) try: self.logfiles["urls"] = open("{}/urls-{}{}".format(self.prefix, self.server, self.suffix), "a") except IOError as e: err_str = "IOError: Disabling chatlogs. Missing write permissions?" log.error("{}".format(e)) if self.factory.logs_enabled: self.factory.logs_enabled = False log.error("{}".format(err_str))
[ "def", "open_logs", "(", "self", ",", "channels", ")", ":", "for", "channel", "in", "channels", ":", "self", ".", "add_channel", "(", "channel", ")", "try", ":", "self", ".", "logfiles", "[", "\"urls\"", "]", "=", "open", "(", "\"{}/urls-{}{}\"", ".", "format", "(", "self", ".", "prefix", ",", "self", ".", "server", ",", "self", ".", "suffix", ")", ",", "\"a\"", ")", "except", "IOError", "as", "e", ":", "err_str", "=", "\"IOError: Disabling chatlogs. Missing write permissions?\"", "log", ".", "error", "(", "\"{}\"", ".", "format", "(", "e", ")", ")", "if", "self", ".", "factory", ".", "logs_enabled", ":", "self", ".", "factory", ".", "logs_enabled", "=", "False", "log", ".", "error", "(", "\"{}\"", ".", "format", "(", "err_str", ")", ")" ]
[ 57, 4 ]
[ 69, 47 ]
python
en
['en', 'en', 'en']
True
TargetPython.__init__
( self, platforms: Optional[List[str]] = None, py_version_info: Optional[Tuple[int, ...]] = None, abis: Optional[List[str]] = None, implementation: Optional[str] = None, )
:param platforms: A list of strings or None. If None, searches for packages that are supported by the current system. Otherwise, will find packages that can be built on the platforms passed in. These packages will only be downloaded for distribution: they will not be built locally. :param py_version_info: An optional tuple of ints representing the Python version information to use (e.g. `sys.version_info[:3]`). This can have length 1, 2, or 3 when provided. :param abis: A list of strings or None. This is passed to compatibility_tags.py's get_supported() function as is. :param implementation: A string or None. This is passed to compatibility_tags.py's get_supported() function as is.
:param platforms: A list of strings or None. If None, searches for packages that are supported by the current system. Otherwise, will find packages that can be built on the platforms passed in. These packages will only be downloaded for distribution: they will not be built locally. :param py_version_info: An optional tuple of ints representing the Python version information to use (e.g. `sys.version_info[:3]`). This can have length 1, 2, or 3 when provided. :param abis: A list of strings or None. This is passed to compatibility_tags.py's get_supported() function as is. :param implementation: A string or None. This is passed to compatibility_tags.py's get_supported() function as is.
def __init__( self, platforms: Optional[List[str]] = None, py_version_info: Optional[Tuple[int, ...]] = None, abis: Optional[List[str]] = None, implementation: Optional[str] = None, ) -> None: """ :param platforms: A list of strings or None. If None, searches for packages that are supported by the current system. Otherwise, will find packages that can be built on the platforms passed in. These packages will only be downloaded for distribution: they will not be built locally. :param py_version_info: An optional tuple of ints representing the Python version information to use (e.g. `sys.version_info[:3]`). This can have length 1, 2, or 3 when provided. :param abis: A list of strings or None. This is passed to compatibility_tags.py's get_supported() function as is. :param implementation: A string or None. This is passed to compatibility_tags.py's get_supported() function as is. """ # Store the given py_version_info for when we call get_supported(). self._given_py_version_info = py_version_info if py_version_info is None: py_version_info = sys.version_info[:3] else: py_version_info = normalize_version_info(py_version_info) py_version = '.'.join(map(str, py_version_info[:2])) self.abis = abis self.implementation = implementation self.platforms = platforms self.py_version = py_version self.py_version_info = py_version_info # This is used to cache the return value of get_tags(). self._valid_tags: Optional[List[Tag]] = None
[ "def", "__init__", "(", "self", ",", "platforms", ":", "Optional", "[", "List", "[", "str", "]", "]", "=", "None", ",", "py_version_info", ":", "Optional", "[", "Tuple", "[", "int", ",", "...", "]", "]", "=", "None", ",", "abis", ":", "Optional", "[", "List", "[", "str", "]", "]", "=", "None", ",", "implementation", ":", "Optional", "[", "str", "]", "=", "None", ",", ")", "->", "None", ":", "# Store the given py_version_info for when we call get_supported().", "self", ".", "_given_py_version_info", "=", "py_version_info", "if", "py_version_info", "is", "None", ":", "py_version_info", "=", "sys", ".", "version_info", "[", ":", "3", "]", "else", ":", "py_version_info", "=", "normalize_version_info", "(", "py_version_info", ")", "py_version", "=", "'.'", ".", "join", "(", "map", "(", "str", ",", "py_version_info", "[", ":", "2", "]", ")", ")", "self", ".", "abis", "=", "abis", "self", ".", "implementation", "=", "implementation", "self", ".", "platforms", "=", "platforms", "self", ".", "py_version", "=", "py_version", "self", ".", "py_version_info", "=", "py_version_info", "# This is used to cache the return value of get_tags().", "self", ".", "_valid_tags", ":", "Optional", "[", "List", "[", "Tag", "]", "]", "=", "None" ]
[ 26, 4 ]
[ 64, 52 ]
python
en
['en', 'error', 'th']
False
TargetPython.format_given
(self)
Format the given, non-None attributes for display.
Format the given, non-None attributes for display.
def format_given(self) -> str: """ Format the given, non-None attributes for display. """ display_version = None if self._given_py_version_info is not None: display_version = '.'.join( str(part) for part in self._given_py_version_info ) key_values = [ ('platforms', self.platforms), ('version_info', display_version), ('abis', self.abis), ('implementation', self.implementation), ] return ' '.join( f'{key}={value!r}' for key, value in key_values if value is not None )
[ "def", "format_given", "(", "self", ")", "->", "str", ":", "display_version", "=", "None", "if", "self", ".", "_given_py_version_info", "is", "not", "None", ":", "display_version", "=", "'.'", ".", "join", "(", "str", "(", "part", ")", "for", "part", "in", "self", ".", "_given_py_version_info", ")", "key_values", "=", "[", "(", "'platforms'", ",", "self", ".", "platforms", ")", ",", "(", "'version_info'", ",", "display_version", ")", ",", "(", "'abis'", ",", "self", ".", "abis", ")", ",", "(", "'implementation'", ",", "self", ".", "implementation", ")", ",", "]", "return", "' '", ".", "join", "(", "f'{key}={value!r}'", "for", "key", ",", "value", "in", "key_values", "if", "value", "is", "not", "None", ")" ]
[ 66, 4 ]
[ 85, 9 ]
python
en
['en', 'error', 'th']
False
TargetPython.get_tags
(self)
Return the supported PEP 425 tags to check wheel candidates against. The tags are returned in order of preference (most preferred first).
Return the supported PEP 425 tags to check wheel candidates against.
def get_tags(self) -> List[Tag]: """ Return the supported PEP 425 tags to check wheel candidates against. The tags are returned in order of preference (most preferred first). """ if self._valid_tags is None: # Pass versions=None if no py_version_info was given since # versions=None uses special default logic. py_version_info = self._given_py_version_info if py_version_info is None: version = None else: version = version_info_to_nodot(py_version_info) tags = get_supported( version=version, platforms=self.platforms, abis=self.abis, impl=self.implementation, ) self._valid_tags = tags return self._valid_tags
[ "def", "get_tags", "(", "self", ")", "->", "List", "[", "Tag", "]", ":", "if", "self", ".", "_valid_tags", "is", "None", ":", "# Pass versions=None if no py_version_info was given since", "# versions=None uses special default logic.", "py_version_info", "=", "self", ".", "_given_py_version_info", "if", "py_version_info", "is", "None", ":", "version", "=", "None", "else", ":", "version", "=", "version_info_to_nodot", "(", "py_version_info", ")", "tags", "=", "get_supported", "(", "version", "=", "version", ",", "platforms", "=", "self", ".", "platforms", ",", "abis", "=", "self", ".", "abis", ",", "impl", "=", "self", ".", "implementation", ",", ")", "self", ".", "_valid_tags", "=", "tags", "return", "self", ".", "_valid_tags" ]
[ 87, 4 ]
[ 110, 31 ]
python
en
['en', 'error', 'th']
False
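A minimal usage sketch of the two methods above. The import path pip._internal.models.target_python is an internal pip module (not public API), so it is an assumption that may break across pip versions, and the printed values are illustrative:

# Internal import path; an assumption that may change between pip versions.
from pip._internal.models.target_python import TargetPython

# Describe a hypothetical target: CPython 3.9 on manylinux2014.
target = TargetPython(
    platforms=['manylinux2014_x86_64'],
    py_version_info=(3, 9),
    implementation='cp',
)
print(target.format_given())  # platforms=['manylinux2014_x86_64'] version_info='3.9' implementation='cp'
tags = target.get_tags()      # ordered most-preferred first, then cached
print(str(tags[0]))           # e.g. cp39-cp39-manylinux2014_x86_64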
fix_method_name
(name)
Fix method names to avoid reserved word conflicts. Args: name: string, method name. Returns: The name with a '_' prefixed if the name is a reserved word.
Fix method names to avoid reserved word conflicts.
def fix_method_name(name): """Fix method names to avoid reserved word conflicts. Args: name: string, method name. Returns: The name with a '_' prefixed if the name is a reserved word. """ if keyword.iskeyword(name) or name in RESERVED_WORDS: return name + '_' else: return name
[ "def", "fix_method_name", "(", "name", ")", ":", "if", "keyword", ".", "iskeyword", "(", "name", ")", "or", "name", "in", "RESERVED_WORDS", ":", "return", "name", "+", "'_'", "else", ":", "return", "name" ]
[ 133, 0 ]
[ 145, 15 ]
python
en
['en', 'en', 'en']
True
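A quick check of the renaming rule; importing fix_method_name from googleapiclient.discovery is an assumption (it is module-level there, but not documented as public API):

from googleapiclient.discovery import fix_method_name

assert fix_method_name('import') == 'import_'  # Python keyword gets a suffix
assert fix_method_name('get') == 'get'         # ordinary names pass through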
key2param
(key)
Converts key names into parameter names. For example, converting "max-results" -> "max_results" Args: key: string, the method key name. Returns: A safe method name based on the key name.
Converts key names into parameter names.
def key2param(key): """Converts key names into parameter names. For example, converting "max-results" -> "max_results" Args: key: string, the method key name. Returns: A safe method name based on the key name. """ result = [] key = list(key) if not key[0].isalpha(): result.append('x') for c in key: if c.isalnum(): result.append(c) else: result.append('_') return ''.join(result)
[ "def", "key2param", "(", "key", ")", ":", "result", "=", "[", "]", "key", "=", "list", "(", "key", ")", "if", "not", "key", "[", "0", "]", ".", "isalpha", "(", ")", ":", "result", ".", "append", "(", "'x'", ")", "for", "c", "in", "key", ":", "if", "c", ".", "isalnum", "(", ")", ":", "result", ".", "append", "(", "c", ")", "else", ":", "result", ".", "append", "(", "'_'", ")", "return", "''", ".", "join", "(", "result", ")" ]
[ 148, 0 ]
[ 169, 24 ]
python
en
['en', 'fil', 'en']
True
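The mapping in action; the same private-import caveat applies:

from googleapiclient.discovery import key2param

assert key2param('max-results') == 'max_results'  # '-' becomes '_'
assert key2param('123abc') == 'x123abc'           # leading non-alpha gets an 'x' prefix
assert key2param('alt') == 'alt'                  # already a safe identifier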
build
(serviceName, version, http=None, discoveryServiceUrl=DISCOVERY_URI, developerKey=None, model=None, requestBuilder=HttpRequest, credentials=None, cache_discovery=True, cache=None)
Construct a Resource for interacting with an API. Construct a Resource object for interacting with an API. The serviceName and version are the names from the Discovery service. Args: serviceName: string, name of the service. version: string, the version of the service. http: httplib2.Http, An instance of httplib2.Http or something that acts like it, through which HTTP requests will be made. discoveryServiceUrl: string, a URI Template that points to the location of the discovery service. It should have two parameters {api} and {apiVersion} that, when filled in, produce an absolute URI to the discovery document for that service. developerKey: string, key obtained from https://code.google.com/apis/console. model: googleapiclient.Model, converts to and from the wire format. requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP request. credentials: oauth2client.Credentials or google.auth.credentials.Credentials, credentials to be used for authentication. cache_discovery: Boolean, whether or not to cache the discovery doc. cache: googleapiclient.discovery_cache.base.CacheBase, an optional cache object for the discovery documents. Returns: A Resource object with methods for interacting with the service.
Construct a Resource for interacting with an API.
def build(serviceName, version, http=None, discoveryServiceUrl=DISCOVERY_URI, developerKey=None, model=None, requestBuilder=HttpRequest, credentials=None, cache_discovery=True, cache=None): """Construct a Resource for interacting with an API. Construct a Resource object for interacting with an API. The serviceName and version are the names from the Discovery service. Args: serviceName: string, name of the service. version: string, the version of the service. http: httplib2.Http, An instance of httplib2.Http or something that acts like it that HTTP requests will be made through. discoveryServiceUrl: string, a URI Template that points to the location of the discovery service. It should have two parameters {api} and {apiVersion} that when filled in produce an absolute URI to the discovery document for that service. developerKey: string, key obtained from https://code.google.com/apis/console. model: googleapiclient.Model, converts to and from the wire format. requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP request. credentials: oauth2client.Credentials or google.auth.credentials.Credentials, credentials to be used for authentication. cache_discovery: Boolean, whether or not to cache the discovery doc. cache: googleapiclient.discovery_cache.base.CacheBase, an optional cache object for the discovery documents. Returns: A Resource object with methods for interacting with the service. """ params = { 'api': serviceName, 'apiVersion': version } if http is None: discovery_http = build_http() else: discovery_http = http for discovery_url in (discoveryServiceUrl, V2_DISCOVERY_URI,): requested_url = uritemplate.expand(discovery_url, params) try: content = _retrieve_discovery_doc( requested_url, discovery_http, cache_discovery, cache) return build_from_document(content, base=discovery_url, http=http, developerKey=developerKey, model=model, requestBuilder=requestBuilder, credentials=credentials) except HttpError as e: if e.resp.status == http_client.NOT_FOUND: continue else: raise e raise UnknownApiNameOrVersion( "name: %s version: %s" % (serviceName, version))
[ "def", "build", "(", "serviceName", ",", "version", ",", "http", "=", "None", ",", "discoveryServiceUrl", "=", "DISCOVERY_URI", ",", "developerKey", "=", "None", ",", "model", "=", "None", ",", "requestBuilder", "=", "HttpRequest", ",", "credentials", "=", "None", ",", "cache_discovery", "=", "True", ",", "cache", "=", "None", ")", ":", "params", "=", "{", "'api'", ":", "serviceName", ",", "'apiVersion'", ":", "version", "}", "if", "http", "is", "None", ":", "discovery_http", "=", "build_http", "(", ")", "else", ":", "discovery_http", "=", "http", "for", "discovery_url", "in", "(", "discoveryServiceUrl", ",", "V2_DISCOVERY_URI", ",", ")", ":", "requested_url", "=", "uritemplate", ".", "expand", "(", "discovery_url", ",", "params", ")", "try", ":", "content", "=", "_retrieve_discovery_doc", "(", "requested_url", ",", "discovery_http", ",", "cache_discovery", ",", "cache", ")", "return", "build_from_document", "(", "content", ",", "base", "=", "discovery_url", ",", "http", "=", "http", ",", "developerKey", "=", "developerKey", ",", "model", "=", "model", ",", "requestBuilder", "=", "requestBuilder", ",", "credentials", "=", "credentials", ")", "except", "HttpError", "as", "e", ":", "if", "e", ".", "resp", ".", "status", "==", "http_client", ".", "NOT_FOUND", ":", "continue", "else", ":", "raise", "e", "raise", "UnknownApiNameOrVersion", "(", "\"name: %s version: %s\"", "%", "(", "serviceName", ",", "version", ")", ")" ]
[ 173, 0 ]
[ 238, 57 ]
python
en
['en', 'en', 'en']
True
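A hedged usage sketch of build(). The service name/version and the API key are placeholders, and the call performs real network I/O against the Discovery service:

from googleapiclient.discovery import build

# Fetches the discovery document over HTTP, then constructs a Resource.
service = build('drive', 'v3', developerKey='YOUR_API_KEY')  # placeholder key
request = service.files().list(pageSize=10)  # illustrative Drive v3 call
response = request.execute()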
_retrieve_discovery_doc
(url, http, cache_discovery, cache=None)
Retrieves the discovery_doc from cache or the internet. Args: url: string, the URL of the discovery document. http: httplib2.Http, An instance of httplib2.Http or something that acts like it through which HTTP requests will be made. cache_discovery: Boolean, whether or not to cache the discovery doc. cache: googleapiclient.discovery_cache.base.Cache, an optional cache object for the discovery documents. Returns: A unicode string representation of the discovery document.
Retrieves the discovery_doc from cache or the internet.
def _retrieve_discovery_doc(url, http, cache_discovery, cache=None): """Retrieves the discovery_doc from cache or the internet. Args: url: string, the URL of the discovery document. http: httplib2.Http, An instance of httplib2.Http or something that acts like it through which HTTP requests will be made. cache_discovery: Boolean, whether or not to cache the discovery doc. cache: googleapiclient.discovery_cache.base.Cache, an optional cache object for the discovery documents. Returns: A unicode string representation of the discovery document. """ if cache_discovery: from . import discovery_cache from .discovery_cache import base if cache is None: cache = discovery_cache.autodetect() if cache: content = cache.get(url) if content: return content actual_url = url # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment # variable that contains the network address of the client sending the # request. If it exists then add that to the request for the discovery # document to avoid exceeding the quota on discovery requests. if 'REMOTE_ADDR' in os.environ: actual_url = _add_query_parameter(url, 'userIp', os.environ['REMOTE_ADDR']) logger.info('URL being requested: GET %s', actual_url) resp, content = http.request(actual_url) if resp.status >= 400: raise HttpError(resp, content, uri=actual_url) try: content = content.decode('utf-8') except AttributeError: pass try: service = json.loads(content) except ValueError as e: logger.error('Failed to parse as JSON: ' + content) raise InvalidJsonError() if cache_discovery and cache: cache.set(url, content) return content
[ "def", "_retrieve_discovery_doc", "(", "url", ",", "http", ",", "cache_discovery", ",", "cache", "=", "None", ")", ":", "if", "cache_discovery", ":", "from", ".", "import", "discovery_cache", "from", ".", "discovery_cache", "import", "base", "if", "cache", "is", "None", ":", "cache", "=", "discovery_cache", ".", "autodetect", "(", ")", "if", "cache", ":", "content", "=", "cache", ".", "get", "(", "url", ")", "if", "content", ":", "return", "content", "actual_url", "=", "url", "# REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment", "# variable that contains the network address of the client sending the", "# request. If it exists then add that to the request for the discovery", "# document to avoid exceeding the quota on discovery requests.", "if", "'REMOTE_ADDR'", "in", "os", ".", "environ", ":", "actual_url", "=", "_add_query_parameter", "(", "url", ",", "'userIp'", ",", "os", ".", "environ", "[", "'REMOTE_ADDR'", "]", ")", "logger", ".", "info", "(", "'URL being requested: GET %s'", ",", "actual_url", ")", "resp", ",", "content", "=", "http", ".", "request", "(", "actual_url", ")", "if", "resp", ".", "status", ">=", "400", ":", "raise", "HttpError", "(", "resp", ",", "content", ",", "uri", "=", "actual_url", ")", "try", ":", "content", "=", "content", ".", "decode", "(", "'utf-8'", ")", "except", "AttributeError", ":", "pass", "try", ":", "service", "=", "json", ".", "loads", "(", "content", ")", "except", "ValueError", "as", "e", ":", "logger", ".", "error", "(", "'Failed to parse as JSON: '", "+", "content", ")", "raise", "InvalidJsonError", "(", ")", "if", "cache_discovery", "and", "cache", ":", "cache", ".", "set", "(", "url", ",", "content", ")", "return", "content" ]
[ 241, 0 ]
[ 291, 16 ]
python
en
['en', 'en', 'en']
True
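The cache argument only needs get/set; below is a minimal in-memory stand-in for the googleapiclient.discovery_cache.base.Cache interface. The class is hypothetical, written to match the calls made in the helper above:

class DictCache(object):
    # In-memory cache honoring the get/set contract used above.
    def __init__(self):
        self._store = {}

    def get(self, url):
        # A miss returns None, matching the truthiness check in the helper.
        return self._store.get(url)

    def set(self, url, content):
        self._store[url] = content

cache = DictCache()
cache.set('https://example.com/discovery', '{"rootUrl": "https://example.com/"}')
assert cache.get('https://example.com/discovery') is not None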
build_from_document
( service, base=None, future=None, http=None, developerKey=None, model=None, requestBuilder=HttpRequest, credentials=None)
Create a Resource for interacting with an API. Same as `build()`, but constructs the Resource object from a discovery document that it is given, as opposed to retrieving one over HTTP. Args: service: string or object, the JSON discovery document describing the API. The value passed in may either be the JSON string or the deserialized JSON. base: string, base URI for all HTTP requests, usually the discovery URI. This parameter is no longer used as rootUrl and servicePath are included within the discovery document. (deprecated) future: string, discovery document with future capabilities (deprecated). http: httplib2.Http, An instance of httplib2.Http or something that acts like it, through which HTTP requests will be made. developerKey: string, Key for controlling API usage, generated from the API Console. model: Model class instance that serializes and de-serializes requests and responses. requestBuilder: Takes an http request and packages it up to be executed. credentials: oauth2client.Credentials or google.auth.credentials.Credentials, credentials to be used for authentication. Returns: A Resource object with methods for interacting with the service.
Create a Resource for interacting with an API.
def build_from_document( service, base=None, future=None, http=None, developerKey=None, model=None, requestBuilder=HttpRequest, credentials=None): """Create a Resource for interacting with an API. Same as `build()`, but constructs the Resource object from a discovery document that is it given, as opposed to retrieving one over HTTP. Args: service: string or object, the JSON discovery document describing the API. The value passed in may either be the JSON string or the deserialized JSON. base: string, base URI for all HTTP requests, usually the discovery URI. This parameter is no longer used as rootUrl and servicePath are included within the discovery document. (deprecated) future: string, discovery document with future capabilities (deprecated). http: httplib2.Http, An instance of httplib2.Http or something that acts like it that HTTP requests will be made through. developerKey: string, Key for controlling API usage, generated from the API Console. model: Model class instance that serializes and de-serializes requests and responses. requestBuilder: Takes an http request and packages it up to be executed. credentials: oauth2client.Credentials or google.auth.credentials.Credentials, credentials to be used for authentication. Returns: A Resource object with methods for interacting with the service. """ if http is not None and credentials is not None: raise ValueError('Arguments http and credentials are mutually exclusive.') if isinstance(service, six.string_types): service = json.loads(service) if 'rootUrl' not in service and (isinstance(http, (HttpMock, HttpMockSequence))): logger.error("You are using HttpMock or HttpMockSequence without" + "having the service discovery doc in cache. Try calling " + "build() without mocking once first to populate the " + "cache.") raise InvalidJsonError() base = urljoin(service['rootUrl'], service['servicePath']) schema = Schemas(service) # If the http client is not specified, then we must construct an http client # to make requests. If the service has scopes, then we also need to setup # authentication. if http is None: # Does the service require scopes? scopes = list( service.get('auth', {}).get('oauth2', {}).get('scopes', {}).keys()) # If so, then the we need to setup authentication if no developerKey is # specified. if scopes and not developerKey: # If the user didn't pass in credentials, attempt to acquire application # default credentials. if credentials is None: credentials = _auth.default_credentials() # The credentials need to be scoped. credentials = _auth.with_scopes(credentials, scopes) # Create an authorized http instance http = _auth.authorized_http(credentials) # If the service doesn't require scopes then there is no need for # authentication. else: http = build_http() if model is None: features = service.get('features', []) model = JsonModel('dataWrapper' in features) return Resource(http=http, baseUrl=base, model=model, developerKey=developerKey, requestBuilder=requestBuilder, resourceDesc=service, rootDesc=service, schema=schema)
[ "def", "build_from_document", "(", "service", ",", "base", "=", "None", ",", "future", "=", "None", ",", "http", "=", "None", ",", "developerKey", "=", "None", ",", "model", "=", "None", ",", "requestBuilder", "=", "HttpRequest", ",", "credentials", "=", "None", ")", ":", "if", "http", "is", "not", "None", "and", "credentials", "is", "not", "None", ":", "raise", "ValueError", "(", "'Arguments http and credentials are mutually exclusive.'", ")", "if", "isinstance", "(", "service", ",", "six", ".", "string_types", ")", ":", "service", "=", "json", ".", "loads", "(", "service", ")", "if", "'rootUrl'", "not", "in", "service", "and", "(", "isinstance", "(", "http", ",", "(", "HttpMock", ",", "HttpMockSequence", ")", ")", ")", ":", "logger", ".", "error", "(", "\"You are using HttpMock or HttpMockSequence without\"", "+", "\"having the service discovery doc in cache. Try calling \"", "+", "\"build() without mocking once first to populate the \"", "+", "\"cache.\"", ")", "raise", "InvalidJsonError", "(", ")", "base", "=", "urljoin", "(", "service", "[", "'rootUrl'", "]", ",", "service", "[", "'servicePath'", "]", ")", "schema", "=", "Schemas", "(", "service", ")", "# If the http client is not specified, then we must construct an http client", "# to make requests. If the service has scopes, then we also need to setup", "# authentication.", "if", "http", "is", "None", ":", "# Does the service require scopes?", "scopes", "=", "list", "(", "service", ".", "get", "(", "'auth'", ",", "{", "}", ")", ".", "get", "(", "'oauth2'", ",", "{", "}", ")", ".", "get", "(", "'scopes'", ",", "{", "}", ")", ".", "keys", "(", ")", ")", "# If so, then the we need to setup authentication if no developerKey is", "# specified.", "if", "scopes", "and", "not", "developerKey", ":", "# If the user didn't pass in credentials, attempt to acquire application", "# default credentials.", "if", "credentials", "is", "None", ":", "credentials", "=", "_auth", ".", "default_credentials", "(", ")", "# The credentials need to be scoped.", "credentials", "=", "_auth", ".", "with_scopes", "(", "credentials", ",", "scopes", ")", "# Create an authorized http instance", "http", "=", "_auth", ".", "authorized_http", "(", "credentials", ")", "# If the service doesn't require scopes then there is no need for", "# authentication.", "else", ":", "http", "=", "build_http", "(", ")", "if", "model", "is", "None", ":", "features", "=", "service", ".", "get", "(", "'features'", ",", "[", "]", ")", "model", "=", "JsonModel", "(", "'dataWrapper'", "in", "features", ")", "return", "Resource", "(", "http", "=", "http", ",", "baseUrl", "=", "base", ",", "model", "=", "model", ",", "developerKey", "=", "developerKey", ",", "requestBuilder", "=", "requestBuilder", ",", "resourceDesc", "=", "service", ",", "rootDesc", "=", "service", ",", "schema", "=", "schema", ")" ]
[ 295, 0 ]
[ 382, 72 ]
python
en
['en', 'en', 'en']
True
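Typical use of build_from_document(): construct a service from a discovery document you already have on disk, skipping build()'s HTTP fetch. The filename and key below are hypothetical:

import json
from googleapiclient.discovery import build_from_document

with open('drive_v3_discovery.json') as f:  # hypothetical local copy
    doc = json.load(f)
# Accepts either the JSON string or the already-deserialized dict.
service = build_from_document(doc, developerKey='YOUR_API_KEY')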
_cast
(value, schema_type)
Convert value to a string based on JSON Schema type. See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on JSON Schema. Args: value: any, the value to convert schema_type: string, the type that value should be interpreted as Returns: A string representation of 'value' based on the schema_type.
Convert value to a string based on JSON Schema type.
def _cast(value, schema_type): """Convert value to a string based on JSON Schema type. See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on JSON Schema. Args: value: any, the value to convert schema_type: string, the type that value should be interpreted as Returns: A string representation of 'value' based on the schema_type. """ if schema_type == 'string': if type(value) == type('') or type(value) == type(u''): return value else: return str(value) elif schema_type == 'integer': return str(int(value)) elif schema_type == 'number': return str(float(value)) elif schema_type == 'boolean': return str(bool(value)).lower() else: if type(value) == type('') or type(value) == type(u''): return value else: return str(value)
[ "def", "_cast", "(", "value", ",", "schema_type", ")", ":", "if", "schema_type", "==", "'string'", ":", "if", "type", "(", "value", ")", "==", "type", "(", "''", ")", "or", "type", "(", "value", ")", "==", "type", "(", "u''", ")", ":", "return", "value", "else", ":", "return", "str", "(", "value", ")", "elif", "schema_type", "==", "'integer'", ":", "return", "str", "(", "int", "(", "value", ")", ")", "elif", "schema_type", "==", "'number'", ":", "return", "str", "(", "float", "(", "value", ")", ")", "elif", "schema_type", "==", "'boolean'", ":", "return", "str", "(", "bool", "(", "value", ")", ")", ".", "lower", "(", ")", "else", ":", "if", "type", "(", "value", ")", "==", "type", "(", "''", ")", "or", "type", "(", "value", ")", "==", "type", "(", "u''", ")", ":", "return", "value", "else", ":", "return", "str", "(", "value", ")" ]
[ 385, 0 ]
[ 413, 23 ]
python
en
['en', 'en', 'en']
True
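The branch behavior, spelled out; importing the private helper is an assumption about module internals:

from googleapiclient.discovery import _cast

assert _cast(42, 'string') == '42'    # non-strings are str()-ified
assert _cast('7', 'integer') == '7'   # str(int('7'))
assert _cast(2.5, 'number') == '2.5'  # str(float(2.5))
assert _cast(1, 'boolean') == 'true'  # str(bool(1)).lower()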
_media_size_to_long
(maxSize)
Convert a string media size, such as 10GB or 3TB, into an integer. Args: maxSize: string, size as a string, such as 2MB or 7GB. Returns: The size as an integer value.
Convert a string media size, such as 10GB or 3TB, into an integer.
def _media_size_to_long(maxSize): """Convert a string media size, such as 10GB or 3TB into an integer. Args: maxSize: string, size as a string, such as 2MB or 7GB. Returns: The size as an integer value. """ if len(maxSize) < 2: return 0 units = maxSize[-2:].upper() bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units) if bit_shift is not None: return int(maxSize[:-2]) << bit_shift else: return int(maxSize)
[ "def", "_media_size_to_long", "(", "maxSize", ")", ":", "if", "len", "(", "maxSize", ")", "<", "2", ":", "return", "0", "units", "=", "maxSize", "[", "-", "2", ":", "]", ".", "upper", "(", ")", "bit_shift", "=", "_MEDIA_SIZE_BIT_SHIFTS", ".", "get", "(", "units", ")", "if", "bit_shift", "is", "not", "None", ":", "return", "int", "(", "maxSize", "[", ":", "-", "2", "]", ")", "<<", "bit_shift", "else", ":", "return", "int", "(", "maxSize", ")" ]
[ 416, 0 ]
[ 432, 23 ]
python
en
['en', 'en', 'en']
True
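Worked values, assuming the usual binary-unit table (KB=2**10, MB=2**20, GB=2**30, TB=2**40) behind _MEDIA_SIZE_BIT_SHIFTS; the private import is likewise an assumption:

from googleapiclient.discovery import _media_size_to_long

assert _media_size_to_long('10GB') == 10 * 1024 ** 3
assert _media_size_to_long('2MB') == 2 * 1024 ** 2
assert _media_size_to_long('500') == 500  # unrecognized suffix: parse as-is
assert _media_size_to_long('7') == 0      # shorter than two characters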
_media_path_url_from_info
(root_desc, path_url)
Creates an absolute media path URL. Constructed using the API root URI and service path from the discovery document and the relative path for the API method. Args: root_desc: Dictionary; the entire original deserialized discovery document. path_url: String; the relative URL for the API method. Relative to the API root, which is specified in the discovery document. Returns: String; the absolute URI for media upload for the API method.
Creates an absolute media path URL.
def _media_path_url_from_info(root_desc, path_url): """Creates an absolute media path URL. Constructed using the API root URI and service path from the discovery document and the relative path for the API method. Args: root_desc: Dictionary; the entire original deserialized discovery document. path_url: String; the relative URL for the API method. Relative to the API root, which is specified in the discovery document. Returns: String; the absolute URI for media upload for the API method. """ return '%(root)supload/%(service_path)s%(path)s' % { 'root': root_desc['rootUrl'], 'service_path': root_desc['servicePath'], 'path': path_url, }
[ "def", "_media_path_url_from_info", "(", "root_desc", ",", "path_url", ")", ":", "return", "'%(root)supload/%(service_path)s%(path)s'", "%", "{", "'root'", ":", "root_desc", "[", "'rootUrl'", "]", ",", "'service_path'", ":", "root_desc", "[", "'servicePath'", "]", ",", "'path'", ":", "path_url", ",", "}" ]
[ 435, 0 ]
[ 453, 3 ]
python
en
['en', 'gd', 'en']
True
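A worked example of the string formatting above, using an illustrative discovery fragment (no imports needed; this just replays the template):

root_desc = {
    'rootUrl': 'https://www.googleapis.com/',
    'servicePath': 'drive/v3/',
}
media_url = '%(root)supload/%(service_path)s%(path)s' % {
    'root': root_desc['rootUrl'],
    'service_path': root_desc['servicePath'],
    'path': 'files',
}
assert media_url == 'https://www.googleapis.com/upload/drive/v3/files'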
_fix_up_parameters
(method_desc, root_desc, http_method)
Updates parameters of an API method with values specific to this library. Specifically, adds whatever global parameters are specified by the API to the parameters for the individual method. Also adds parameters which don't appear in the discovery document, but are available to all discovery-based APIs (these are listed in STACK_QUERY_PARAMETERS). SIDE EFFECTS: This updates the parameters dictionary object in the method description. Args: method_desc: Dictionary with metadata describing an API method. Value comes from the dictionary of methods stored in the 'methods' key in the deserialized discovery document. root_desc: Dictionary; the entire original deserialized discovery document. http_method: String; the HTTP method used to call the API method described in method_desc. Returns: The updated Dictionary stored in the 'parameters' key of the method description dictionary.
Updates parameters of an API method with values specific to this library.
def _fix_up_parameters(method_desc, root_desc, http_method): """Updates parameters of an API method with values specific to this library. Specifically, adds whatever global parameters are specified by the API to the parameters for the individual method. Also adds parameters which don't appear in the discovery document, but are available to all discovery based APIs (these are listed in STACK_QUERY_PARAMETERS). SIDE EFFECTS: This updates the parameters dictionary object in the method description. Args: method_desc: Dictionary with metadata describing an API method. Value comes from the dictionary of methods stored in the 'methods' key in the deserialized discovery document. root_desc: Dictionary; the entire original deserialized discovery document. http_method: String; the HTTP method used to call the API method described in method_desc. Returns: The updated Dictionary stored in the 'parameters' key of the method description dictionary. """ parameters = method_desc.setdefault('parameters', {}) # Add in the parameters common to all methods. for name, description in six.iteritems(root_desc.get('parameters', {})): parameters[name] = description # Add in undocumented query parameters. for name in STACK_QUERY_PARAMETERS: parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy() # Add 'body' (our own reserved word) to parameters if the method supports # a request payload. if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc: body = BODY_PARAMETER_DEFAULT_VALUE.copy() body.update(method_desc['request']) parameters['body'] = body return parameters
[ "def", "_fix_up_parameters", "(", "method_desc", ",", "root_desc", ",", "http_method", ")", ":", "parameters", "=", "method_desc", ".", "setdefault", "(", "'parameters'", ",", "{", "}", ")", "# Add in the parameters common to all methods.", "for", "name", ",", "description", "in", "six", ".", "iteritems", "(", "root_desc", ".", "get", "(", "'parameters'", ",", "{", "}", ")", ")", ":", "parameters", "[", "name", "]", "=", "description", "# Add in undocumented query parameters.", "for", "name", "in", "STACK_QUERY_PARAMETERS", ":", "parameters", "[", "name", "]", "=", "STACK_QUERY_PARAMETER_DEFAULT_VALUE", ".", "copy", "(", ")", "# Add 'body' (our own reserved word) to parameters if the method supports", "# a request payload.", "if", "http_method", "in", "HTTP_PAYLOAD_METHODS", "and", "'request'", "in", "method_desc", ":", "body", "=", "BODY_PARAMETER_DEFAULT_VALUE", ".", "copy", "(", ")", "body", ".", "update", "(", "method_desc", "[", "'request'", "]", ")", "parameters", "[", "'body'", "]", "=", "body", "return", "parameters" ]
[ 456, 0 ]
[ 496, 19 ]
python
en
['en', 'en', 'en']
True
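A sketch of the side effects on a toy method description; the private import and the exact contents of STACK_QUERY_PARAMETERS are assumptions:

from googleapiclient.discovery import _fix_up_parameters

method_desc = {'request': {'$ref': 'File'}}  # method accepts a payload
root_desc = {'parameters': {'fields': {'type': 'string'}}}
params = _fix_up_parameters(method_desc, root_desc, 'POST')

assert 'fields' in params                    # inherited from the root document
assert 'body' in params                      # added: POST plus a 'request' key
assert params is method_desc['parameters']   # the method description is mutated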
_fix_up_media_upload
(method_desc, root_desc, path_url, parameters)
Adds 'media_body' and 'media_mime_type' parameters if supported by the method. SIDE EFFECTS: If the method supports media upload and has a required body, sets body to be optional (required=False) instead. Also, if there is a 'mediaUpload' in the method description, adds 'media_upload' key to parameters. Args: method_desc: Dictionary with metadata describing an API method. Value comes from the dictionary of methods stored in the 'methods' key in the deserialized discovery document. root_desc: Dictionary; the entire original deserialized discovery document. path_url: String; the relative URL for the API method. Relative to the API root, which is specified in the discovery document. parameters: A dictionary describing method parameters for the method described in method_desc. Returns: Triple (accept, max_size, media_path_url) where: - accept is a list of strings representing what content types are accepted for media upload. Defaults to empty list if not in the discovery document. - max_size is a long representing the max size in bytes allowed for a media upload. Defaults to 0L if not in the discovery document. - media_path_url is a String; the absolute URI for media upload for the API method. Constructed using the API root URI and service path from the discovery document and the relative path for the API method. If media upload is not supported, this is None.
Adds 'media_body' and 'media_mime_type' parameters if supported by the method.
def _fix_up_media_upload(method_desc, root_desc, path_url, parameters): """Adds 'media_body' and 'media_mime_type' parameters if supported by method. SIDE EFFECTS: If the method supports media upload and has a required body, sets body to be optional (required=False) instead. Also, if there is a 'mediaUpload' in the method description, adds 'media_upload' key to parameters. Args: method_desc: Dictionary with metadata describing an API method. Value comes from the dictionary of methods stored in the 'methods' key in the deserialized discovery document. root_desc: Dictionary; the entire original deserialized discovery document. path_url: String; the relative URL for the API method. Relative to the API root, which is specified in the discovery document. parameters: A dictionary describing method parameters for method described in method_desc. Returns: Triple (accept, max_size, media_path_url) where: - accept is a list of strings representing what content types are accepted for media upload. Defaults to empty list if not in the discovery document. - max_size is a long representing the max size in bytes allowed for a media upload. Defaults to 0L if not in the discovery document. - media_path_url is a String; the absolute URI for media upload for the API method. Constructed using the API root URI and service path from the discovery document and the relative path for the API method. If media upload is not supported, this is None. """ media_upload = method_desc.get('mediaUpload', {}) accept = media_upload.get('accept', []) max_size = _media_size_to_long(media_upload.get('maxSize', '')) media_path_url = None if media_upload: media_path_url = _media_path_url_from_info(root_desc, path_url) parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy() parameters['media_mime_type'] = MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE.copy() if 'body' in parameters: parameters['body']['required'] = False return accept, max_size, media_path_url
[ "def", "_fix_up_media_upload", "(", "method_desc", ",", "root_desc", ",", "path_url", ",", "parameters", ")", ":", "media_upload", "=", "method_desc", ".", "get", "(", "'mediaUpload'", ",", "{", "}", ")", "accept", "=", "media_upload", ".", "get", "(", "'accept'", ",", "[", "]", ")", "max_size", "=", "_media_size_to_long", "(", "media_upload", ".", "get", "(", "'maxSize'", ",", "''", ")", ")", "media_path_url", "=", "None", "if", "media_upload", ":", "media_path_url", "=", "_media_path_url_from_info", "(", "root_desc", ",", "path_url", ")", "parameters", "[", "'media_body'", "]", "=", "MEDIA_BODY_PARAMETER_DEFAULT_VALUE", ".", "copy", "(", ")", "parameters", "[", "'media_mime_type'", "]", "=", "MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE", ".", "copy", "(", ")", "if", "'body'", "in", "parameters", ":", "parameters", "[", "'body'", "]", "[", "'required'", "]", "=", "False", "return", "accept", ",", "max_size", ",", "media_path_url" ]
[ 499, 0 ]
[ 541, 41 ]
python
en
['en', 'en', 'en']
True
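The returned triple and the body side effect, demonstrated on a toy description; the private import is an assumption:

from googleapiclient.discovery import _fix_up_media_upload

method_desc = {'mediaUpload': {'accept': ['image/*'], 'maxSize': '10MB'}}
root_desc = {'rootUrl': 'https://www.googleapis.com/', 'servicePath': 'drive/v3/'}
parameters = {'body': {'required': True}}

accept, max_size, media_url = _fix_up_media_upload(
    method_desc, root_desc, 'files', parameters)

assert accept == ['image/*']
assert max_size == 10 * 1024 ** 2
assert media_url == 'https://www.googleapis.com/upload/drive/v3/files'
assert parameters['body']['required'] is False  # body became optional
assert 'media_body' in parameters               # new upload parameters added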
_fix_up_method_description
(method_desc, root_desc)
Updates a method description in a discovery document. SIDE EFFECTS: Changes the parameters dictionary in the method description with extra parameters which are used locally. Args: method_desc: Dictionary with metadata describing an API method. Value comes from the dictionary of methods stored in the 'methods' key in the deserialized discovery document. root_desc: Dictionary; the entire original deserialized discovery document. Returns: Tuple (path_url, http_method, method_id, accept, max_size, media_path_url) where: - path_url is a String; the relative URL for the API method. Relative to the API root, which is specified in the discovery document. - http_method is a String; the HTTP method used to call the API method described in the method description. - method_id is a String; the name of the RPC method associated with the API method, and is in the method description in the 'id' key. - accept is a list of strings representing what content types are accepted for media upload. Defaults to empty list if not in the discovery document. - max_size is a long representing the max size in bytes allowed for a media upload. Defaults to 0L if not in the discovery document. - media_path_url is a String; the absolute URI for media upload for the API method. Constructed using the API root URI and service path from the discovery document and the relative path for the API method. If media upload is not supported, this is None.
Updates a method description in a discovery document.
def _fix_up_method_description(method_desc, root_desc): """Updates a method description in a discovery document. SIDE EFFECTS: Changes the parameters dictionary in the method description with extra parameters which are used locally. Args: method_desc: Dictionary with metadata describing an API method. Value comes from the dictionary of methods stored in the 'methods' key in the deserialized discovery document. root_desc: Dictionary; the entire original deserialized discovery document. Returns: Tuple (path_url, http_method, method_id, accept, max_size, media_path_url) where: - path_url is a String; the relative URL for the API method. Relative to the API root, which is specified in the discovery document. - http_method is a String; the HTTP method used to call the API method described in the method description. - method_id is a String; the name of the RPC method associated with the API method, and is in the method description in the 'id' key. - accept is a list of strings representing what content types are accepted for media upload. Defaults to empty list if not in the discovery document. - max_size is a long representing the max size in bytes allowed for a media upload. Defaults to 0L if not in the discovery document. - media_path_url is a String; the absolute URI for media upload for the API method. Constructed using the API root URI and service path from the discovery document and the relative path for the API method. If media upload is not supported, this is None. """ path_url = method_desc['path'] http_method = method_desc['httpMethod'] method_id = method_desc['id'] parameters = _fix_up_parameters(method_desc, root_desc, http_method) # Order is important. `_fix_up_media_upload` needs `method_desc` to have a # 'parameters' key and needs to know if there is a 'body' parameter because it # also sets a 'media_body' parameter. accept, max_size, media_path_url = _fix_up_media_upload( method_desc, root_desc, path_url, parameters) return path_url, http_method, method_id, accept, max_size, media_path_url
[ "def", "_fix_up_method_description", "(", "method_desc", ",", "root_desc", ")", ":", "path_url", "=", "method_desc", "[", "'path'", "]", "http_method", "=", "method_desc", "[", "'httpMethod'", "]", "method_id", "=", "method_desc", "[", "'id'", "]", "parameters", "=", "_fix_up_parameters", "(", "method_desc", ",", "root_desc", ",", "http_method", ")", "# Order is important. `_fix_up_media_upload` needs `method_desc` to have a", "# 'parameters' key and needs to know if there is a 'body' parameter because it", "# also sets a 'media_body' parameter.", "accept", ",", "max_size", ",", "media_path_url", "=", "_fix_up_media_upload", "(", "method_desc", ",", "root_desc", ",", "path_url", ",", "parameters", ")", "return", "path_url", ",", "http_method", ",", "method_id", ",", "accept", ",", "max_size", ",", "media_path_url" ]
[ 544, 0 ]
[ 586, 75 ]
python
en
['en', 'en', 'en']
True
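How the returned tuple is unpacked in practice (createMethod below does exactly this); the private import and the toy descriptions are assumptions:

from googleapiclient.discovery import _fix_up_method_description

method_desc = {'path': 'files/{fileId}', 'httpMethod': 'GET',
               'id': 'drive.files.get'}
root_desc = {'rootUrl': 'https://www.googleapis.com/',
             'servicePath': 'drive/v3/'}

(path_url, http_method, method_id,
 accept, max_size, media_path_url) = _fix_up_method_description(
    method_desc, root_desc)

assert (path_url, http_method, method_id) == ('files/{fileId}', 'GET', 'drive.files.get')
assert media_path_url is None  # no 'mediaUpload' key, so media is unsupported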
_urljoin
(base, url)
Custom urljoin replacement supporting : before / in url.
Custom urljoin replacement supporting : before / in url.
def _urljoin(base, url): """Custom urljoin replacement supporting : before / in url.""" # In general, it's unsafe to simply join base and url. However, for # the case of discovery documents, we know: # * base will never contain params, query, or fragment # * url will never contain a scheme or net_loc. # In general, this means we can safely join on /; we just need to # ensure we end up with precisely one / joining base and url. The # exception here is the case of media uploads, where url will be an # absolute url. if url.startswith('http://') or url.startswith('https://'): return urljoin(base, url) new_base = base if base.endswith('/') else base + '/' new_url = url[1:] if url.startswith('/') else url return new_base + new_url
[ "def", "_urljoin", "(", "base", ",", "url", ")", ":", "# In general, it's unsafe to simply join base and url. However, for", "# the case of discovery documents, we know:", "# * base will never contain params, query, or fragment", "# * url will never contain a scheme or net_loc.", "# In general, this means we can safely join on /; we just need to", "# ensure we end up with precisely one / joining base and url. The", "# exception here is the case of media uploads, where url will be an", "# absolute url.", "if", "url", ".", "startswith", "(", "'http://'", ")", "or", "url", ".", "startswith", "(", "'https://'", ")", ":", "return", "urljoin", "(", "base", ",", "url", ")", "new_base", "=", "base", "if", "base", ".", "endswith", "(", "'/'", ")", "else", "base", "+", "'/'", "new_url", "=", "url", "[", "1", ":", "]", "if", "url", ".", "startswith", "(", "'/'", ")", "else", "url", "return", "new_base", "+", "new_url" ]
[ 589, 0 ]
[ 603, 27 ]
python
en
['en', 'en', 'en']
True
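The joining rules, exercised directly; the private import is an assumption:

from googleapiclient.discovery import _urljoin

base = 'https://www.googleapis.com/drive/v3/'
assert _urljoin(base, 'files') == 'https://www.googleapis.com/drive/v3/files'
assert _urljoin(base, '/files') == 'https://www.googleapis.com/drive/v3/files'
# Absolute media-upload URLs bypass the custom logic entirely:
assert _urljoin(base, 'https://upload.example.com/x') == 'https://upload.example.com/x'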
createMethod
(methodName, methodDesc, rootDesc, schema)
Creates a method for attaching to a Resource. Args: methodName: string, name of the method to use. methodDesc: object, fragment of deserialized discovery document that describes the method. rootDesc: object, the entire deserialized discovery document. schema: object, mapping of schema names to schema descriptions.
Creates a method for attaching to a Resource.
def createMethod(methodName, methodDesc, rootDesc, schema): """Creates a method for attaching to a Resource. Args: methodName: string, name of the method to use. methodDesc: object, fragment of deserialized discovery document that describes the method. rootDesc: object, the entire deserialized discovery document. schema: object, mapping of schema names to schema descriptions. """ methodName = fix_method_name(methodName) (pathUrl, httpMethod, methodId, accept, maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc) parameters = ResourceMethodParameters(methodDesc) def method(self, **kwargs): # Don't bother with doc string, it will be over-written by createMethod. for name in six.iterkeys(kwargs): if name not in parameters.argmap: raise TypeError('Got an unexpected keyword argument "%s"' % name) # Remove args that have a value of None. keys = list(kwargs.keys()) for name in keys: if kwargs[name] is None: del kwargs[name] for name in parameters.required_params: if name not in kwargs: raise TypeError('Missing required parameter "%s"' % name) for name, regex in six.iteritems(parameters.pattern_params): if name in kwargs: if isinstance(kwargs[name], six.string_types): pvalues = [kwargs[name]] else: pvalues = kwargs[name] for pvalue in pvalues: if re.match(regex, pvalue) is None: raise TypeError( 'Parameter "%s" value "%s" does not match the pattern "%s"' % (name, pvalue, regex)) for name, enums in six.iteritems(parameters.enum_params): if name in kwargs: # We need to handle the case of a repeated enum # name differently, since we want to handle both # arg='value' and arg=['value1', 'value2'] if (name in parameters.repeated_params and not isinstance(kwargs[name], six.string_types)): values = kwargs[name] else: values = [kwargs[name]] for value in values: if value not in enums: raise TypeError( 'Parameter "%s" value "%s" is not an allowed value in "%s"' % (name, value, str(enums))) actual_query_params = {} actual_path_params = {} for key, value in six.iteritems(kwargs): to_type = parameters.param_types.get(key, 'string') # For repeated parameters we cast each member of the list. if key in parameters.repeated_params and type(value) == type([]): cast_value = [_cast(x, to_type) for x in value] else: cast_value = _cast(value, to_type) if key in parameters.query_params: actual_query_params[parameters.argmap[key]] = cast_value if key in parameters.path_params: actual_path_params[parameters.argmap[key]] = cast_value body_value = kwargs.get('body', None) media_filename = kwargs.get('media_body', None) media_mime_type = kwargs.get('media_mime_type', None) if self._developerKey: actual_query_params['key'] = self._developerKey model = self._model if methodName.endswith('_media'): model = MediaModel() elif 'response' not in methodDesc: model = RawModel() headers = {} headers, params, query, body = model.request(headers, actual_path_params, actual_query_params, body_value) expanded_url = uritemplate.expand(pathUrl, params) url = _urljoin(self._baseUrl, expanded_url + query) resumable = None multipart_boundary = '' if media_filename: # Ensure we end up with a valid MediaUpload object. 
if isinstance(media_filename, six.string_types): if media_mime_type is None: logger.warning( 'media_mime_type argument not specified: trying to auto-detect for %s', media_filename) media_mime_type, _ = mimetypes.guess_type(media_filename) if media_mime_type is None: raise UnknownFileType(media_filename) if not mimeparse.best_match([media_mime_type], ','.join(accept)): raise UnacceptableMimeTypeError(media_mime_type) media_upload = MediaFileUpload(media_filename, mimetype=media_mime_type) elif isinstance(media_filename, MediaUpload): media_upload = media_filename else: raise TypeError('media_filename must be str or MediaUpload.') # Check the maxSize if media_upload.size() is not None and media_upload.size() > maxSize > 0: raise MediaUploadSizeError("Media larger than: %s" % maxSize) # Use the media path uri for media uploads expanded_url = uritemplate.expand(mediaPathUrl, params) url = _urljoin(self._baseUrl, expanded_url + query) if media_upload.resumable(): url = _add_query_parameter(url, 'uploadType', 'resumable') if media_upload.resumable(): # This is all we need to do for resumable, if the body exists it gets # sent in the first request, otherwise an empty body is sent. resumable = media_upload else: # A non-resumable upload if body is None: # This is a simple media upload headers['content-type'] = media_upload.mimetype() body = media_upload.getbytes(0, media_upload.size()) url = _add_query_parameter(url, 'uploadType', 'media') else: # This is a multipart/related upload. msgRoot = MIMEMultipart('related') # msgRoot should not write out it's own headers setattr(msgRoot, '_write_headers', lambda self: None) # attach the body as one part msg = MIMENonMultipart(*headers['content-type'].split('/')) msg.set_payload(body) msgRoot.attach(msg) # attach the media as the second part msg = MIMENonMultipart(*media_upload.mimetype().split('/')) msg['Content-Transfer-Encoding'] = 'binary' payload = media_upload.getbytes(0, media_upload.size()) msg.set_payload(payload) msgRoot.attach(msg) # encode the body: note that we can't use `as_string`, because # it plays games with `From ` lines. fp = BytesIO() g = _BytesGenerator(fp, mangle_from_=False) g.flatten(msgRoot, unixfrom=False) body = fp.getvalue() multipart_boundary = msgRoot.get_boundary() headers['content-type'] = ('multipart/related; ' 'boundary="%s"') % multipart_boundary url = _add_query_parameter(url, 'uploadType', 'multipart') logger.info('URL being requested: %s %s' % (httpMethod,url)) return self._requestBuilder(self._http, model.response, url, method=httpMethod, body=body, headers=headers, methodId=methodId, resumable=resumable) docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n'] if len(parameters.argmap) > 0: docs.append('Args:\n') # Skip undocumented params and params common to all methods. skip_parameters = list(rootDesc.get('parameters', {}).keys()) skip_parameters.extend(STACK_QUERY_PARAMETERS) all_args = list(parameters.argmap.keys()) args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])] # Move body to the front of the line. 
if 'body' in all_args: args_ordered.append('body') for name in all_args: if name not in args_ordered: args_ordered.append(name) for arg in args_ordered: if arg in skip_parameters: continue repeated = '' if arg in parameters.repeated_params: repeated = ' (repeated)' required = '' if arg in parameters.required_params: required = ' (required)' paramdesc = methodDesc['parameters'][parameters.argmap[arg]] paramdoc = paramdesc.get('description', 'A parameter') if '$ref' in paramdesc: docs.append( (' %s: object, %s%s%s\n The object takes the' ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated, schema.prettyPrintByName(paramdesc['$ref']))) else: paramtype = paramdesc.get('type', 'string') docs.append(' %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required, repeated)) enum = paramdesc.get('enum', []) enumDesc = paramdesc.get('enumDescriptions', []) if enum and enumDesc: docs.append(' Allowed values\n') for (name, desc) in zip(enum, enumDesc): docs.append(' %s - %s\n' % (name, desc)) if 'response' in methodDesc: if methodName.endswith('_media'): docs.append('\nReturns:\n The media object as a string.\n\n ') else: docs.append('\nReturns:\n An object of the form:\n\n ') docs.append(schema.prettyPrintSchema(methodDesc['response'])) setattr(method, '__doc__', ''.join(docs)) return (methodName, method)
[ "def", "createMethod", "(", "methodName", ",", "methodDesc", ",", "rootDesc", ",", "schema", ")", ":", "methodName", "=", "fix_method_name", "(", "methodName", ")", "(", "pathUrl", ",", "httpMethod", ",", "methodId", ",", "accept", ",", "maxSize", ",", "mediaPathUrl", ")", "=", "_fix_up_method_description", "(", "methodDesc", ",", "rootDesc", ")", "parameters", "=", "ResourceMethodParameters", "(", "methodDesc", ")", "def", "method", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# Don't bother with doc string, it will be over-written by createMethod.", "for", "name", "in", "six", ".", "iterkeys", "(", "kwargs", ")", ":", "if", "name", "not", "in", "parameters", ".", "argmap", ":", "raise", "TypeError", "(", "'Got an unexpected keyword argument \"%s\"'", "%", "name", ")", "# Remove args that have a value of None.", "keys", "=", "list", "(", "kwargs", ".", "keys", "(", ")", ")", "for", "name", "in", "keys", ":", "if", "kwargs", "[", "name", "]", "is", "None", ":", "del", "kwargs", "[", "name", "]", "for", "name", "in", "parameters", ".", "required_params", ":", "if", "name", "not", "in", "kwargs", ":", "raise", "TypeError", "(", "'Missing required parameter \"%s\"'", "%", "name", ")", "for", "name", ",", "regex", "in", "six", ".", "iteritems", "(", "parameters", ".", "pattern_params", ")", ":", "if", "name", "in", "kwargs", ":", "if", "isinstance", "(", "kwargs", "[", "name", "]", ",", "six", ".", "string_types", ")", ":", "pvalues", "=", "[", "kwargs", "[", "name", "]", "]", "else", ":", "pvalues", "=", "kwargs", "[", "name", "]", "for", "pvalue", "in", "pvalues", ":", "if", "re", ".", "match", "(", "regex", ",", "pvalue", ")", "is", "None", ":", "raise", "TypeError", "(", "'Parameter \"%s\" value \"%s\" does not match the pattern \"%s\"'", "%", "(", "name", ",", "pvalue", ",", "regex", ")", ")", "for", "name", ",", "enums", "in", "six", ".", "iteritems", "(", "parameters", ".", "enum_params", ")", ":", "if", "name", "in", "kwargs", ":", "# We need to handle the case of a repeated enum", "# name differently, since we want to handle both", "# arg='value' and arg=['value1', 'value2']", "if", "(", "name", "in", "parameters", ".", "repeated_params", "and", "not", "isinstance", "(", "kwargs", "[", "name", "]", ",", "six", ".", "string_types", ")", ")", ":", "values", "=", "kwargs", "[", "name", "]", "else", ":", "values", "=", "[", "kwargs", "[", "name", "]", "]", "for", "value", "in", "values", ":", "if", "value", "not", "in", "enums", ":", "raise", "TypeError", "(", "'Parameter \"%s\" value \"%s\" is not an allowed value in \"%s\"'", "%", "(", "name", ",", "value", ",", "str", "(", "enums", ")", ")", ")", "actual_query_params", "=", "{", "}", "actual_path_params", "=", "{", "}", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "kwargs", ")", ":", "to_type", "=", "parameters", ".", "param_types", ".", "get", "(", "key", ",", "'string'", ")", "# For repeated parameters we cast each member of the list.", "if", "key", "in", "parameters", ".", "repeated_params", "and", "type", "(", "value", ")", "==", "type", "(", "[", "]", ")", ":", "cast_value", "=", "[", "_cast", "(", "x", ",", "to_type", ")", "for", "x", "in", "value", "]", "else", ":", "cast_value", "=", "_cast", "(", "value", ",", "to_type", ")", "if", "key", "in", "parameters", ".", "query_params", ":", "actual_query_params", "[", "parameters", ".", "argmap", "[", "key", "]", "]", "=", "cast_value", "if", "key", "in", "parameters", ".", "path_params", ":", "actual_path_params", "[", "parameters", ".", "argmap", "[", 
"key", "]", "]", "=", "cast_value", "body_value", "=", "kwargs", ".", "get", "(", "'body'", ",", "None", ")", "media_filename", "=", "kwargs", ".", "get", "(", "'media_body'", ",", "None", ")", "media_mime_type", "=", "kwargs", ".", "get", "(", "'media_mime_type'", ",", "None", ")", "if", "self", ".", "_developerKey", ":", "actual_query_params", "[", "'key'", "]", "=", "self", ".", "_developerKey", "model", "=", "self", ".", "_model", "if", "methodName", ".", "endswith", "(", "'_media'", ")", ":", "model", "=", "MediaModel", "(", ")", "elif", "'response'", "not", "in", "methodDesc", ":", "model", "=", "RawModel", "(", ")", "headers", "=", "{", "}", "headers", ",", "params", ",", "query", ",", "body", "=", "model", ".", "request", "(", "headers", ",", "actual_path_params", ",", "actual_query_params", ",", "body_value", ")", "expanded_url", "=", "uritemplate", ".", "expand", "(", "pathUrl", ",", "params", ")", "url", "=", "_urljoin", "(", "self", ".", "_baseUrl", ",", "expanded_url", "+", "query", ")", "resumable", "=", "None", "multipart_boundary", "=", "''", "if", "media_filename", ":", "# Ensure we end up with a valid MediaUpload object.", "if", "isinstance", "(", "media_filename", ",", "six", ".", "string_types", ")", ":", "if", "media_mime_type", "is", "None", ":", "logger", ".", "warning", "(", "'media_mime_type argument not specified: trying to auto-detect for %s'", ",", "media_filename", ")", "media_mime_type", ",", "_", "=", "mimetypes", ".", "guess_type", "(", "media_filename", ")", "if", "media_mime_type", "is", "None", ":", "raise", "UnknownFileType", "(", "media_filename", ")", "if", "not", "mimeparse", ".", "best_match", "(", "[", "media_mime_type", "]", ",", "','", ".", "join", "(", "accept", ")", ")", ":", "raise", "UnacceptableMimeTypeError", "(", "media_mime_type", ")", "media_upload", "=", "MediaFileUpload", "(", "media_filename", ",", "mimetype", "=", "media_mime_type", ")", "elif", "isinstance", "(", "media_filename", ",", "MediaUpload", ")", ":", "media_upload", "=", "media_filename", "else", ":", "raise", "TypeError", "(", "'media_filename must be str or MediaUpload.'", ")", "# Check the maxSize", "if", "media_upload", ".", "size", "(", ")", "is", "not", "None", "and", "media_upload", ".", "size", "(", ")", ">", "maxSize", ">", "0", ":", "raise", "MediaUploadSizeError", "(", "\"Media larger than: %s\"", "%", "maxSize", ")", "# Use the media path uri for media uploads", "expanded_url", "=", "uritemplate", ".", "expand", "(", "mediaPathUrl", ",", "params", ")", "url", "=", "_urljoin", "(", "self", ".", "_baseUrl", ",", "expanded_url", "+", "query", ")", "if", "media_upload", ".", "resumable", "(", ")", ":", "url", "=", "_add_query_parameter", "(", "url", ",", "'uploadType'", ",", "'resumable'", ")", "if", "media_upload", ".", "resumable", "(", ")", ":", "# This is all we need to do for resumable, if the body exists it gets", "# sent in the first request, otherwise an empty body is sent.", "resumable", "=", "media_upload", "else", ":", "# A non-resumable upload", "if", "body", "is", "None", ":", "# This is a simple media upload", "headers", "[", "'content-type'", "]", "=", "media_upload", ".", "mimetype", "(", ")", "body", "=", "media_upload", ".", "getbytes", "(", "0", ",", "media_upload", ".", "size", "(", ")", ")", "url", "=", "_add_query_parameter", "(", "url", ",", "'uploadType'", ",", "'media'", ")", "else", ":", "# This is a multipart/related upload.", "msgRoot", "=", "MIMEMultipart", "(", "'related'", ")", "# msgRoot should not write out it's own headers", 
"setattr", "(", "msgRoot", ",", "'_write_headers'", ",", "lambda", "self", ":", "None", ")", "# attach the body as one part", "msg", "=", "MIMENonMultipart", "(", "*", "headers", "[", "'content-type'", "]", ".", "split", "(", "'/'", ")", ")", "msg", ".", "set_payload", "(", "body", ")", "msgRoot", ".", "attach", "(", "msg", ")", "# attach the media as the second part", "msg", "=", "MIMENonMultipart", "(", "*", "media_upload", ".", "mimetype", "(", ")", ".", "split", "(", "'/'", ")", ")", "msg", "[", "'Content-Transfer-Encoding'", "]", "=", "'binary'", "payload", "=", "media_upload", ".", "getbytes", "(", "0", ",", "media_upload", ".", "size", "(", ")", ")", "msg", ".", "set_payload", "(", "payload", ")", "msgRoot", ".", "attach", "(", "msg", ")", "# encode the body: note that we can't use `as_string`, because", "# it plays games with `From ` lines.", "fp", "=", "BytesIO", "(", ")", "g", "=", "_BytesGenerator", "(", "fp", ",", "mangle_from_", "=", "False", ")", "g", ".", "flatten", "(", "msgRoot", ",", "unixfrom", "=", "False", ")", "body", "=", "fp", ".", "getvalue", "(", ")", "multipart_boundary", "=", "msgRoot", ".", "get_boundary", "(", ")", "headers", "[", "'content-type'", "]", "=", "(", "'multipart/related; '", "'boundary=\"%s\"'", ")", "%", "multipart_boundary", "url", "=", "_add_query_parameter", "(", "url", ",", "'uploadType'", ",", "'multipart'", ")", "logger", ".", "info", "(", "'URL being requested: %s %s'", "%", "(", "httpMethod", ",", "url", ")", ")", "return", "self", ".", "_requestBuilder", "(", "self", ".", "_http", ",", "model", ".", "response", ",", "url", ",", "method", "=", "httpMethod", ",", "body", "=", "body", ",", "headers", "=", "headers", ",", "methodId", "=", "methodId", ",", "resumable", "=", "resumable", ")", "docs", "=", "[", "methodDesc", ".", "get", "(", "'description'", ",", "DEFAULT_METHOD_DOC", ")", ",", "'\\n\\n'", "]", "if", "len", "(", "parameters", ".", "argmap", ")", ">", "0", ":", "docs", ".", "append", "(", "'Args:\\n'", ")", "# Skip undocumented params and params common to all methods.", "skip_parameters", "=", "list", "(", "rootDesc", ".", "get", "(", "'parameters'", ",", "{", "}", ")", ".", "keys", "(", ")", ")", "skip_parameters", ".", "extend", "(", "STACK_QUERY_PARAMETERS", ")", "all_args", "=", "list", "(", "parameters", ".", "argmap", ".", "keys", "(", ")", ")", "args_ordered", "=", "[", "key2param", "(", "s", ")", "for", "s", "in", "methodDesc", ".", "get", "(", "'parameterOrder'", ",", "[", "]", ")", "]", "# Move body to the front of the line.", "if", "'body'", "in", "all_args", ":", "args_ordered", ".", "append", "(", "'body'", ")", "for", "name", "in", "all_args", ":", "if", "name", "not", "in", "args_ordered", ":", "args_ordered", ".", "append", "(", "name", ")", "for", "arg", "in", "args_ordered", ":", "if", "arg", "in", "skip_parameters", ":", "continue", "repeated", "=", "''", "if", "arg", "in", "parameters", ".", "repeated_params", ":", "repeated", "=", "' (repeated)'", "required", "=", "''", "if", "arg", "in", "parameters", ".", "required_params", ":", "required", "=", "' (required)'", "paramdesc", "=", "methodDesc", "[", "'parameters'", "]", "[", "parameters", ".", "argmap", "[", "arg", "]", "]", "paramdoc", "=", "paramdesc", ".", "get", "(", "'description'", ",", "'A parameter'", ")", "if", "'$ref'", "in", "paramdesc", ":", "docs", ".", "append", "(", "(", "' %s: object, %s%s%s\\n The object takes the'", "' form of:\\n\\n%s\\n\\n'", ")", "%", "(", "arg", ",", "paramdoc", ",", "required", ",", "repeated", ",", "schema", ".", 
"prettyPrintByName", "(", "paramdesc", "[", "'$ref'", "]", ")", ")", ")", "else", ":", "paramtype", "=", "paramdesc", ".", "get", "(", "'type'", ",", "'string'", ")", "docs", ".", "append", "(", "' %s: %s, %s%s%s\\n'", "%", "(", "arg", ",", "paramtype", ",", "paramdoc", ",", "required", ",", "repeated", ")", ")", "enum", "=", "paramdesc", ".", "get", "(", "'enum'", ",", "[", "]", ")", "enumDesc", "=", "paramdesc", ".", "get", "(", "'enumDescriptions'", ",", "[", "]", ")", "if", "enum", "and", "enumDesc", ":", "docs", ".", "append", "(", "' Allowed values\\n'", ")", "for", "(", "name", ",", "desc", ")", "in", "zip", "(", "enum", ",", "enumDesc", ")", ":", "docs", ".", "append", "(", "' %s - %s\\n'", "%", "(", "name", ",", "desc", ")", ")", "if", "'response'", "in", "methodDesc", ":", "if", "methodName", ".", "endswith", "(", "'_media'", ")", ":", "docs", ".", "append", "(", "'\\nReturns:\\n The media object as a string.\\n\\n '", ")", "else", ":", "docs", ".", "append", "(", "'\\nReturns:\\n An object of the form:\\n\\n '", ")", "docs", ".", "append", "(", "schema", ".", "prettyPrintSchema", "(", "methodDesc", "[", "'response'", "]", ")", ")", "setattr", "(", "method", ",", "'__doc__'", ",", "''", ".", "join", "(", "docs", ")", ")", "return", "(", "methodName", ",", "method", ")" ]
[ 695, 0 ]
[ 926, 29 ]
python
en
['en', 'en', 'en']
True
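The tokenized method above is the generated request builder that routes media uploads through the simple, multipart, or resumable paths. As a minimal sketch of the resumable path from the caller's side, assuming an already-built Drive-style `service` object (the resource and file names here are illustrative, not taken from this record):

from googleapiclient.http import MediaFileUpload

# Hypothetical resumable upload through a generated method; `service`
# and the file name are assumptions for illustration.
media = MediaFileUpload('backup.tar.gz', mimetype='application/gzip',
                        resumable=True)
request = service.files().create(body={'name': 'backup.tar.gz'},
                                 media_body=media)
response = None
while response is None:
  # next_chunk() uploads one chunk and returns (status, response);
  # response stays None until the upload completes.
  status, response = request.next_chunk()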
createNextMethod
(methodName)
Creates any _next methods for attaching to a Resource.

The _next methods allow for easy iteration through list() responses.

Args:
  methodName: string, name of the method to use.
Creates any _next methods for attaching to a Resource.
def createNextMethod(methodName):
  """Creates any _next methods for attaching to a Resource.

  The _next methods allow for easy iteration through list() responses.

  Args:
    methodName: string, name of the method to use.
  """
  methodName = fix_method_name(methodName)

  def methodNext(self, previous_request, previous_response):
    """Retrieves the next page of results.

Args:
  previous_request: The request for the previous page. (required)
  previous_response: The response from the request for the previous page. (required)

Returns:
  A request object that you can call 'execute()' on to request the next
  page. Returns None if there are no more items in the collection.
    """
    # Retrieve nextPageToken from previous_response
    # Use as pageToken in previous_request to create new request.

    if 'nextPageToken' not in previous_response or not previous_response['nextPageToken']:
      return None

    request = copy.copy(previous_request)

    pageToken = previous_response['nextPageToken']
    parsed = list(urlparse(request.uri))
    q = parse_qsl(parsed[4])

    # Find and remove old 'pageToken' value from URI
    newq = [(key, value) for (key, value) in q if key != 'pageToken']
    newq.append(('pageToken', pageToken))
    parsed[4] = urlencode(newq)
    uri = urlunparse(parsed)

    request.uri = uri

    logger.info('URL being requested: %s %s' % (methodName, uri))

    return request

  return (methodName, methodNext)
[ "def", "createNextMethod", "(", "methodName", ")", ":", "methodName", "=", "fix_method_name", "(", "methodName", ")", "def", "methodNext", "(", "self", ",", "previous_request", ",", "previous_response", ")", ":", "\"\"\"Retrieves the next page of results.\n\nArgs:\n previous_request: The request for the previous page. (required)\n previous_response: The response from the request for the previous page. (required)\n\nReturns:\n A request object that you can call 'execute()' on to request the next\n page. Returns None if there are no more items in the collection.\n \"\"\"", "# Retrieve nextPageToken from previous_response", "# Use as pageToken in previous_request to create new request.", "if", "'nextPageToken'", "not", "in", "previous_response", "or", "not", "previous_response", "[", "'nextPageToken'", "]", ":", "return", "None", "request", "=", "copy", ".", "copy", "(", "previous_request", ")", "pageToken", "=", "previous_response", "[", "'nextPageToken'", "]", "parsed", "=", "list", "(", "urlparse", "(", "request", ".", "uri", ")", ")", "q", "=", "parse_qsl", "(", "parsed", "[", "4", "]", ")", "# Find and remove old 'pageToken' value from URI", "newq", "=", "[", "(", "key", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "q", "if", "key", "!=", "'pageToken'", "]", "newq", ".", "append", "(", "(", "'pageToken'", ",", "pageToken", ")", ")", "parsed", "[", "4", "]", "=", "urlencode", "(", "newq", ")", "uri", "=", "urlunparse", "(", "parsed", ")", "request", ".", "uri", "=", "uri", "logger", ".", "info", "(", "'URL being requested: %s %s'", "%", "(", "methodName", ",", "uri", ")", ")", "return", "request", "return", "(", "methodName", ",", "methodNext", ")" ]
[ 929, 0 ]
[ 974, 33 ]
python
en
['en', 'en', 'en']
True
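In practice the generated `*_next` method pairs with the original `list` call to walk a paged collection until the page token runs out. A sketch, assuming a built `service` object with a paged `items` collection (the collection name is illustrative):

# Hypothetical paging loop; `service.items()` is an assumed collection.
request = service.items().list(pageSize=50)
while request is not None:
  response = request.execute()
  for item in response.get('items', []):
    print(item)
  # list_next returns None once 'nextPageToken' is absent or empty.
  request = service.items().list_next(request, response)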
ResourceMethodParameters.__init__
(self, method_desc)
Constructor for ResourceMethodParameters.

Sets default values and defers to set_parameters to populate.

Args:
  method_desc: Dictionary with metadata describing an API method. Value
      comes from the dictionary of methods stored in the 'methods' key in
      the deserialized discovery document.
Constructor for ResourceMethodParameters.
def __init__(self, method_desc):
  """Constructor for ResourceMethodParameters.

  Sets default values and defers to set_parameters to populate.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value
        comes from the dictionary of methods stored in the 'methods' key in
        the deserialized discovery document.
  """
  self.argmap = {}
  self.required_params = []
  self.repeated_params = []
  self.pattern_params = {}
  self.query_params = []
  # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
  #                parsing is gotten rid of.
  self.path_params = set()
  self.param_types = {}
  self.enum_params = {}

  self.set_parameters(method_desc)
[ "def", "__init__", "(", "self", ",", "method_desc", ")", ":", "self", ".", "argmap", "=", "{", "}", "self", ".", "required_params", "=", "[", "]", "self", ".", "repeated_params", "=", "[", "]", "self", ".", "pattern_params", "=", "{", "}", "self", ".", "query_params", "=", "[", "]", "# TODO(dhermes): Change path_params to a list if the extra URITEMPLATE", "# parsing is gotten rid of.", "self", ".", "path_params", "=", "set", "(", ")", "self", ".", "param_types", "=", "{", "}", "self", ".", "enum_params", "=", "{", "}", "self", ".", "set_parameters", "(", "method_desc", ")" ]
[ 632, 2 ]
[ 653, 36 ]
python
en
['da', 'en', 'en']
True
ResourceMethodParameters.set_parameters
(self, method_desc)
Populates maps and lists based on method description.

Iterates through each parameter for the method and parses the values from
the parameter dictionary.

Args:
  method_desc: Dictionary with metadata describing an API method. Value
      comes from the dictionary of methods stored in the 'methods' key in
      the deserialized discovery document.
Populates maps and lists based on method description.
def set_parameters(self, method_desc):
  """Populates maps and lists based on method description.

  Iterates through each parameter for the method and parses the values from
  the parameter dictionary.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value
        comes from the dictionary of methods stored in the 'methods' key in
        the deserialized discovery document.
  """
  for arg, desc in six.iteritems(method_desc.get('parameters', {})):
    param = key2param(arg)
    self.argmap[param] = arg

    if desc.get('pattern'):
      self.pattern_params[param] = desc['pattern']
    if desc.get('enum'):
      self.enum_params[param] = desc['enum']
    if desc.get('required'):
      self.required_params.append(param)
    if desc.get('repeated'):
      self.repeated_params.append(param)
    if desc.get('location') == 'query':
      self.query_params.append(param)
    if desc.get('location') == 'path':
      self.path_params.add(param)
    self.param_types[param] = desc.get('type', 'string')

  # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
  #                should have all path parameters already marked with
  #                'location: path'.
  for match in URITEMPLATE.finditer(method_desc['path']):
    for namematch in VARNAME.finditer(match.group(0)):
      name = key2param(namematch.group(0))
      self.path_params.add(name)
      if name in self.query_params:
        self.query_params.remove(name)
[ "def", "set_parameters", "(", "self", ",", "method_desc", ")", ":", "for", "arg", ",", "desc", "in", "six", ".", "iteritems", "(", "method_desc", ".", "get", "(", "'parameters'", ",", "{", "}", ")", ")", ":", "param", "=", "key2param", "(", "arg", ")", "self", ".", "argmap", "[", "param", "]", "=", "arg", "if", "desc", ".", "get", "(", "'pattern'", ")", ":", "self", ".", "pattern_params", "[", "param", "]", "=", "desc", "[", "'pattern'", "]", "if", "desc", ".", "get", "(", "'enum'", ")", ":", "self", ".", "enum_params", "[", "param", "]", "=", "desc", "[", "'enum'", "]", "if", "desc", ".", "get", "(", "'required'", ")", ":", "self", ".", "required_params", ".", "append", "(", "param", ")", "if", "desc", ".", "get", "(", "'repeated'", ")", ":", "self", ".", "repeated_params", ".", "append", "(", "param", ")", "if", "desc", ".", "get", "(", "'location'", ")", "==", "'query'", ":", "self", ".", "query_params", ".", "append", "(", "param", ")", "if", "desc", ".", "get", "(", "'location'", ")", "==", "'path'", ":", "self", ".", "path_params", ".", "add", "(", "param", ")", "self", ".", "param_types", "[", "param", "]", "=", "desc", ".", "get", "(", "'type'", ",", "'string'", ")", "# TODO(dhermes): Determine if this is still necessary. Discovery based APIs", "# should have all path parameters already marked with", "# 'location: path'.", "for", "match", "in", "URITEMPLATE", ".", "finditer", "(", "method_desc", "[", "'path'", "]", ")", ":", "for", "namematch", "in", "VARNAME", ".", "finditer", "(", "match", ".", "group", "(", "0", ")", ")", ":", "name", "=", "key2param", "(", "namematch", ".", "group", "(", "0", ")", ")", "self", ".", "path_params", ".", "add", "(", "name", ")", "if", "name", "in", "self", ".", "query_params", ":", "self", ".", "query_params", ".", "remove", "(", "name", ")" ]
[ 655, 2 ]
[ 692, 40 ]
python
en
['en', 'en', 'en']
True
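To see what these maps end up holding, here is a sketch with a hand-written discovery fragment; the method description is invented for illustration and assumes the module's URITEMPLATE/VARNAME patterns are in scope:

# Hypothetical discovery fragment for a single method.
method_desc = {
    'path': 'files/{fileId}',
    'parameters': {
        'fileId': {'type': 'string', 'location': 'path', 'required': True},
        'fields': {'type': 'string', 'location': 'query'},
    },
}
params = ResourceMethodParameters(method_desc)
assert params.path_params == {'fileId'}      # from 'location: path' + URITEMPLATE
assert params.query_params == ['fields']     # from 'location: query'
assert params.required_params == ['fileId']  # from 'required: True'
assert params.argmap == {'fileId': 'fileId', 'fields': 'fields'}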
Resource.__init__
(self, http, baseUrl, model, requestBuilder, developerKey, resourceDesc, rootDesc, schema)
Build a Resource from the API description.

Args:
  http: httplib2.Http, Object to make http requests with.
  baseUrl: string, base URL for the API. All requests are relative to this
      URI.
  model: googleapiclient.Model, converts to and from the wire format.
  requestBuilder: class or callable that instantiates a
      googleapiclient.HttpRequest object.
  developerKey: string, key obtained from
      https://code.google.com/apis/console
  resourceDesc: object, section of deserialized discovery document that
      describes a resource. Note that the top level discovery document
      is considered a resource.
  rootDesc: object, the entire deserialized discovery document.
  schema: object, mapping of schema names to schema descriptions.
Build a Resource from the API description.
def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
             resourceDesc, rootDesc, schema):
  """Build a Resource from the API description.

  Args:
    http: httplib2.Http, Object to make http requests with.
    baseUrl: string, base URL for the API. All requests are relative to this
        URI.
    model: googleapiclient.Model, converts to and from the wire format.
    requestBuilder: class or callable that instantiates a
        googleapiclient.HttpRequest object.
    developerKey: string, key obtained from
        https://code.google.com/apis/console
    resourceDesc: object, section of deserialized discovery document that
        describes a resource. Note that the top level discovery document
        is considered a resource.
    rootDesc: object, the entire deserialized discovery document.
    schema: object, mapping of schema names to schema descriptions.
  """
  self._dynamic_attrs = []

  self._http = http
  self._baseUrl = baseUrl
  self._model = model
  self._developerKey = developerKey
  self._requestBuilder = requestBuilder
  self._resourceDesc = resourceDesc
  self._rootDesc = rootDesc
  self._schema = schema

  self._set_service_methods()
[ "def", "__init__", "(", "self", ",", "http", ",", "baseUrl", ",", "model", ",", "requestBuilder", ",", "developerKey", ",", "resourceDesc", ",", "rootDesc", ",", "schema", ")", ":", "self", ".", "_dynamic_attrs", "=", "[", "]", "self", ".", "_http", "=", "http", "self", ".", "_baseUrl", "=", "baseUrl", "self", ".", "_model", "=", "model", "self", ".", "_developerKey", "=", "developerKey", "self", ".", "_requestBuilder", "=", "requestBuilder", "self", ".", "_resourceDesc", "=", "resourceDesc", "self", ".", "_rootDesc", "=", "rootDesc", "self", ".", "_schema", "=", "schema", "self", ".", "_set_service_methods", "(", ")" ]
[ 980, 2 ]
[ 1010, 31 ]
python
en
['en', 'en', 'en']
True
Resource._set_dynamic_attr
(self, attr_name, value)
Sets an instance attribute and tracks it in a list of dynamic attributes.

Args:
  attr_name: string; The name of the attribute to be set
  value: The value being set on the object and tracked in the dynamic cache.
Sets an instance attribute and tracks it in a list of dynamic attributes.
def _set_dynamic_attr(self, attr_name, value):
  """Sets an instance attribute and tracks it in a list of dynamic attributes.

  Args:
    attr_name: string; The name of the attribute to be set
    value: The value being set on the object and tracked in the dynamic cache.
  """
  self._dynamic_attrs.append(attr_name)
  self.__dict__[attr_name] = value
[ "def", "_set_dynamic_attr", "(", "self", ",", "attr_name", ",", "value", ")", ":", "self", ".", "_dynamic_attrs", ".", "append", "(", "attr_name", ")", "self", ".", "__dict__", "[", "attr_name", "]", "=", "value" ]
[ 1012, 2 ]
[ 1020, 36 ]
python
en
['en', 'en', 'en']
True
Resource.__getstate__
(self)
Trim the state down to something that can be pickled.

Uses the fact that the instance variable _dynamic_attrs holds attrs that
will be wiped and restored on pickle serialization.
Trim the state down to something that can be pickled.
def __getstate__(self):
  """Trim the state down to something that can be pickled.

  Uses the fact that the instance variable _dynamic_attrs holds attrs that
  will be wiped and restored on pickle serialization.
  """
  state_dict = copy.copy(self.__dict__)
  for dynamic_attr in self._dynamic_attrs:
    del state_dict[dynamic_attr]
  del state_dict['_dynamic_attrs']
  return state_dict
[ "def", "__getstate__", "(", "self", ")", ":", "state_dict", "=", "copy", ".", "copy", "(", "self", ".", "__dict__", ")", "for", "dynamic_attr", "in", "self", ".", "_dynamic_attrs", ":", "del", "state_dict", "[", "dynamic_attr", "]", "del", "state_dict", "[", "'_dynamic_attrs'", "]", "return", "state_dict" ]
[ 1022, 2 ]
[ 1032, 21 ]
python
en
['en', 'en', 'en']
True
Resource.__setstate__
(self, state)
Reconstitute the state of the object from being pickled.

Uses the fact that the instance variable _dynamic_attrs holds attrs that
will be wiped and restored on pickle serialization.
Reconstitute the state of the object from being pickled.
def __setstate__(self, state):
  """Reconstitute the state of the object from being pickled.

  Uses the fact that the instance variable _dynamic_attrs holds attrs that
  will be wiped and restored on pickle serialization.
  """
  self.__dict__.update(state)
  self._dynamic_attrs = []
  self._set_service_methods()
[ "def", "__setstate__", "(", "self", ",", "state", ")", ":", "self", ".", "__dict__", ".", "update", "(", "state", ")", "self", ".", "_dynamic_attrs", "=", "[", "]", "self", ".", "_set_service_methods", "(", ")" ]
[ 1034, 2 ]
[ 1042, 31 ]
python
en
['en', 'en', 'en']
True
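Together, `__getstate__` and `__setstate__` let a built service object survive pickling even though its method attributes are created at runtime. A sketch, assuming the underlying http object itself pickles cleanly (which depends on the httplib2 configuration in use):

import pickle

# Dynamic method attrs are dropped on dump and rebuilt on load via
# _set_service_methods(), so the restored object regains its methods.
blob = pickle.dumps(service)          # `service` is an assumed Resource
restored = pickle.loads(blob)
restored.items().list(pageSize=10)    # hypothetical call; available again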
is_text_serializer
(serializer)
Checks whether a serializer generates text or binary.
Checks whether a serializer generates text or binary.
def is_text_serializer(serializer):
    """Checks whether a serializer generates text or binary."""
    return isinstance(serializer.dumps({}), text_type)
[ "def", "is_text_serializer", "(", "serializer", ")", ":", "return", "isinstance", "(", "serializer", ".", "dumps", "(", "{", "}", ")", ",", "text_type", ")" ]
[ 10, 0 ]
[ 12, 54 ]
python
en
['en', 'en', 'en']
True
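The check is a simple probe: serialize an empty dict and inspect the result type. For example, with stdlib modules standing in for serializer objects:

import json
import pickle

print(is_text_serializer(json))    # True: json.dumps({}) returns str
print(is_text_serializer(pickle))  # False: pickle.dumps({}) returns bytes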
Serializer.load_payload
(self, payload, serializer=None)
Loads the encoded object. This function raises :class:`.BadPayload` if the payload is not valid. The ``serializer`` parameter can be used to override the serializer stored on the class. The encoded ``payload`` should always be bytes.
Loads the encoded object. This function raises :class:`.BadPayload` if the payload is not valid. The ``serializer`` parameter can be used to override the serializer stored on the class. The encoded ``payload`` should always be bytes.
def load_payload(self, payload, serializer=None):
    """Loads the encoded object. This function raises
    :class:`.BadPayload` if the payload is not valid. The
    ``serializer`` parameter can be used to override the serializer
    stored on the class. The encoded ``payload`` should always be
    bytes.
    """
    if serializer is None:
        serializer = self.serializer
        is_text = self.is_text_serializer
    else:
        is_text = is_text_serializer(serializer)

    try:
        if is_text:
            payload = payload.decode("utf-8")

        return serializer.loads(payload)
    except Exception as e:
        raise BadPayload(
            "Could not load the payload because an exception"
            " occurred on unserializing the data.",
            original_error=e,
        )
[ "def", "load_payload", "(", "self", ",", "payload", ",", "serializer", "=", "None", ")", ":", "if", "serializer", "is", "None", ":", "serializer", "=", "self", ".", "serializer", "is_text", "=", "self", ".", "is_text_serializer", "else", ":", "is_text", "=", "is_text_serializer", "(", "serializer", ")", "try", ":", "if", "is_text", ":", "payload", "=", "payload", ".", "decode", "(", "\"utf-8\"", ")", "return", "serializer", ".", "loads", "(", "payload", ")", "except", "Exception", "as", "e", ":", "raise", "BadPayload", "(", "\"Could not load the payload because an exception\"", "\" occurred on unserializing the data.\"", ",", "original_error", "=", "e", ",", ")" ]
[ 104, 4 ]
[ 125, 13 ]
python
en
['en', 'en', 'en']
True
Serializer.dump_payload
(self, obj)
Dumps the encoded object. The return value is always bytes. If the internal serializer returns text, the value will be encoded as UTF-8.
Dumps the encoded object. The return value is always bytes. If the internal serializer returns text, the value will be encoded as UTF-8.
def dump_payload(self, obj):
    """Dumps the encoded object. The return value is always bytes.
    If the internal serializer returns text, the value will be
    encoded as UTF-8.
    """
    return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs))
[ "def", "dump_payload", "(", "self", ",", "obj", ")", ":", "return", "want_bytes", "(", "self", ".", "serializer", ".", "dumps", "(", "obj", ",", "*", "*", "self", ".", "serializer_kwargs", ")", ")" ]
[ 127, 4 ]
[ 132, 79 ]
python
en
['en', 'en', 'en']
True
Serializer.make_signer
(self, salt=None)
Creates a new instance of the signer to be used. The default implementation uses the :class:`.Signer` base class.
Creates a new instance of the signer to be used. The default implementation uses the :class:`.Signer` base class.
def make_signer(self, salt=None):
    """Creates a new instance of the signer to be used. The default
    implementation uses the :class:`.Signer` base class.
    """
    if salt is None:
        salt = self.salt

    return self.signer(self.secret_key, salt=salt, **self.signer_kwargs)
[ "def", "make_signer", "(", "self", ",", "salt", "=", "None", ")", ":", "if", "salt", "is", "None", ":", "salt", "=", "self", ".", "salt", "return", "self", ".", "signer", "(", "self", ".", "secret_key", ",", "salt", "=", "salt", ",", "*", "*", "self", ".", "signer_kwargs", ")" ]
[ 134, 4 ]
[ 140, 76 ]
python
en
['en', 'en', 'en']
True
Serializer.iter_unsigners
(self, salt=None)
Iterates over all signers to be tried for unsigning. Starts with the configured signer, then constructs each signer specified in ``fallback_signers``.
Iterates over all signers to be tried for unsigning. Starts with the configured signer, then constructs each signer specified in ``fallback_signers``.
def iter_unsigners(self, salt=None):
    """Iterates over all signers to be tried for unsigning. Starts
    with the configured signer, then constructs each signer
    specified in ``fallback_signers``.
    """
    if salt is None:
        salt = self.salt

    yield self.make_signer(salt)

    for fallback in self.fallback_signers:
        if type(fallback) is dict:
            kwargs = fallback
            fallback = self.signer
        elif type(fallback) is tuple:
            fallback, kwargs = fallback
        else:
            kwargs = self.signer_kwargs

        yield fallback(self.secret_key, salt=salt, **kwargs)
[ "def", "iter_unsigners", "(", "self", ",", "salt", "=", "None", ")", ":", "if", "salt", "is", "None", ":", "salt", "=", "self", ".", "salt", "yield", "self", ".", "make_signer", "(", "salt", ")", "for", "fallback", "in", "self", ".", "fallback_signers", ":", "if", "type", "(", "fallback", ")", "is", "dict", ":", "kwargs", "=", "fallback", "fallback", "=", "self", ".", "signer", "elif", "type", "(", "fallback", ")", "is", "tuple", ":", "fallback", ",", "kwargs", "=", "fallback", "else", ":", "kwargs", "=", "self", ".", "signer_kwargs", "yield", "fallback", "(", "self", ".", "secret_key", ",", "salt", "=", "salt", ",", "*", "*", "kwargs", ")" ]
[ 142, 4 ]
[ 158, 64 ]
python
en
['en', 'en', 'en']
True
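This fallback machinery is what enables rotation: new values are signed with the current signer while tokens signed under an older configuration still unsign. A sketch of rotating the digest method, assuming the itsdangerous 1.x constructor arguments shown here:

import hashlib
from itsdangerous import Serializer

# New tokens are issued with SHA-512; SHA-1-signed tokens are still
# accepted because a fallback signer is built from the dict of kwargs.
s = Serializer(
    "secret-key",
    signer_kwargs={"digest_method": hashlib.sha512},
    fallback_signers=[{"digest_method": hashlib.sha1}],
)
token = s.dumps({"id": 42})
print(s.loads(token))  # {'id': 42}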
Serializer.dumps
(self, obj, salt=None)
Returns a signed string serialized with the internal serializer. The return value can be either a byte or unicode string depending on the format of the internal serializer.
Returns a signed string serialized with the internal serializer. The return value can be either a byte or unicode string depending on the format of the internal serializer.
def dumps(self, obj, salt=None):
    """Returns a signed string serialized with the internal
    serializer. The return value can be either a byte or unicode
    string depending on the format of the internal serializer.
    """
    payload = want_bytes(self.dump_payload(obj))
    rv = self.make_signer(salt).sign(payload)

    if self.is_text_serializer:
        rv = rv.decode("utf-8")

    return rv
[ "def", "dumps", "(", "self", ",", "obj", ",", "salt", "=", "None", ")", ":", "payload", "=", "want_bytes", "(", "self", ".", "dump_payload", "(", "obj", ")", ")", "rv", "=", "self", ".", "make_signer", "(", "salt", ")", ".", "sign", "(", "payload", ")", "if", "self", ".", "is_text_serializer", ":", "rv", "=", "rv", ".", "decode", "(", "\"utf-8\"", ")", "return", "rv" ]
[ 160, 4 ]
[ 169, 17 ]
python
en
['en', 'en', 'en']
True
Serializer.dump
(self, obj, f, salt=None)
Like :meth:`dumps` but dumps into a file. The file handle has to be compatible with what the internal serializer expects.
Like :meth:`dumps` but dumps into a file. The file handle has to be compatible with what the internal serializer expects.
def dump(self, obj, f, salt=None):
    """Like :meth:`dumps` but dumps into a file. The file handle has
    to be compatible with what the internal serializer expects.
    """
    f.write(self.dumps(obj, salt))
[ "def", "dump", "(", "self", ",", "obj", ",", "f", ",", "salt", "=", "None", ")", ":", "f", ".", "write", "(", "self", ".", "dumps", "(", "obj", ",", "salt", ")", ")" ]
[ 171, 4 ]
[ 175, 38 ]
python
en
['en', 'en', 'en']
True
Serializer.loads
(self, s, salt=None)
Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the signature validation fails.
Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the signature validation fails.
def loads(self, s, salt=None):
    """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the
    signature validation fails.
    """
    s = want_bytes(s)
    last_exception = None

    for signer in self.iter_unsigners(salt):
        try:
            return self.load_payload(signer.unsign(s))
        except BadSignature as err:
            last_exception = err

    raise last_exception
[ "def", "loads", "(", "self", ",", "s", ",", "salt", "=", "None", ")", ":", "s", "=", "want_bytes", "(", "s", ")", "last_exception", "=", "None", "for", "signer", "in", "self", ".", "iter_unsigners", "(", "salt", ")", ":", "try", ":", "return", "self", ".", "load_payload", "(", "signer", ".", "unsign", "(", "s", ")", ")", "except", "BadSignature", "as", "err", ":", "last_exception", "=", "err", "raise", "last_exception" ]
[ 177, 4 ]
[ 188, 28 ]
python
en
['en', 'la', 'en']
True
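A typical round trip through `dumps`/`loads`, with the failure path handled explicitly:

from itsdangerous import BadSignature, Serializer

s = Serializer("secret-key")
token = s.dumps({"user": "alice"})

try:
    data = s.loads(token)   # {'user': 'alice'}
except BadSignature:
    data = None             # tampered token, or wrong key/salt

assert data == {"user": "alice"}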
Serializer.load
(self, f, salt=None)
Like :meth:`loads` but loads from a file.
Like :meth:`loads` but loads from a file.
def load(self, f, salt=None):
    """Like :meth:`loads` but loads from a file."""
    return self.loads(f.read(), salt)
[ "def", "load", "(", "self", ",", "f", ",", "salt", "=", "None", ")", ":", "return", "self", ".", "loads", "(", "f", ".", "read", "(", ")", ",", "salt", ")" ]
[ 190, 4 ]
[ 192, 41 ]
python
en
['en', 'en', 'en']
True
Serializer.loads_unsafe
(self, s, salt=None)
Like :meth:`loads` but without verifying the signature. This is
potentially very dangerous to use depending on how your serializer
works. The return value is ``(signature_valid, payload)`` instead of
just the payload. The first item will be a boolean that indicates if
the signature is valid. This function never fails.

Use it for debugging only and if you know that your serializer module
is not exploitable (for example, do not use it with a pickle
serializer).

.. versionadded:: 0.15
Like :meth:`loads` but without verifying the signature. This is potentially very dangerous to use depending on how your serializer works. The return value is ``(signature_valid, payload)`` instead of just the payload. The first item will be a boolean that indicates if the signature is valid. This function never fails.
def loads_unsafe(self, s, salt=None):
    """Like :meth:`loads` but without verifying the signature. This
    is potentially very dangerous to use depending on how your
    serializer works. The return value is ``(signature_valid,
    payload)`` instead of just the payload. The first item will be a
    boolean that indicates if the signature is valid. This function
    never fails.

    Use it for debugging only and if you know that your serializer
    module is not exploitable (for example, do not use it with a
    pickle serializer).

    .. versionadded:: 0.15
    """
    return self._loads_unsafe_impl(s, salt)
[ "def", "loads_unsafe", "(", "self", ",", "s", ",", "salt", "=", "None", ")", ":", "return", "self", ".", "_loads_unsafe_impl", "(", "s", ",", "salt", ")" ]
[ 194, 4 ]
[ 208, 47 ]
python
en
['en', 'en', 'en']
True
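Because `loads_unsafe` reports validity instead of raising, it suits debugging flows where you want to inspect a rejected payload. A sketch with a deliberately corrupted token:

from itsdangerous import Serializer

s = Serializer("secret-key")
token = s.dumps({"user": "alice"})
# Flip the last character so the signature no longer matches.
tampered = token[:-1] + ("A" if token[-1] != "A" else "B")

valid, payload = s.loads_unsafe(tampered)
print(valid, payload)  # False, with the decoded payload (or None)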
Serializer._loads_unsafe_impl
(self, s, salt, load_kwargs=None, load_payload_kwargs=None)
Low level helper function to implement :meth:`loads_unsafe` in serializer subclasses.
Low level helper function to implement :meth:`loads_unsafe` in serializer subclasses.
def _loads_unsafe_impl(self, s, salt, load_kwargs=None, load_payload_kwargs=None):
    """Low level helper function to implement :meth:`loads_unsafe`
    in serializer subclasses.
    """
    try:
        return True, self.loads(s, salt=salt, **(load_kwargs or {}))
    except BadSignature as e:
        if e.payload is None:
            return False, None

        try:
            return (
                False,
                self.load_payload(e.payload, **(load_payload_kwargs or {})),
            )
        except BadPayload:
            return False, None
[ "def", "_loads_unsafe_impl", "(", "self", ",", "s", ",", "salt", ",", "load_kwargs", "=", "None", ",", "load_payload_kwargs", "=", "None", ")", ":", "try", ":", "return", "True", ",", "self", ".", "loads", "(", "s", ",", "salt", "=", "salt", ",", "*", "*", "(", "load_kwargs", "or", "{", "}", ")", ")", "except", "BadSignature", "as", "e", ":", "if", "e", ".", "payload", "is", "None", ":", "return", "False", ",", "None", "try", ":", "return", "(", "False", ",", "self", ".", "load_payload", "(", "e", ".", "payload", ",", "*", "*", "(", "load_payload_kwargs", "or", "{", "}", ")", ")", ",", ")", "except", "BadPayload", ":", "return", "False", ",", "None" ]
[ 210, 4 ]
[ 225, 34 ]
python
en
['en', 'en', 'en']
True
Serializer.load_unsafe
(self, f, *args, **kwargs)
Like :meth:`loads_unsafe` but loads from a file.

.. versionadded:: 0.15
Like :meth:`loads_unsafe` but loads from a file.
def load_unsafe(self, f, *args, **kwargs):
    """Like :meth:`loads_unsafe` but loads from a file.

    .. versionadded:: 0.15
    """
    return self.loads_unsafe(f.read(), *args, **kwargs)
[ "def", "load_unsafe", "(", "self", ",", "f", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "loads_unsafe", "(", "f", ".", "read", "(", ")", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
[ 227, 4 ]
[ 232, 59 ]
python
en
['en', 'en', 'en']
True