Columns in this dataset split:
repo: string (lengths 7 to 55)
path: string (lengths 4 to 127)
func_name: string (lengths 1 to 88)
original_string: string (lengths 75 to 19.8k)
language: string (1 class)
code: string (lengths 75 to 19.8k)
code_tokens: sequence
docstring: string (lengths 3 to 17.3k)
docstring_tokens: sequence
sha: string (length 40)
url: string (lengths 87 to 242)
partition: string (1 class)
postlund/pyatv
pyatv/mrp/srp.py
Credentials.parse
def parse(cls, detail_string):
    """Parse a string representation of Credentials."""
    split = detail_string.split(':')
    if len(split) != 4:
        raise Exception('invalid credentials')  # TODO: other exception

    ltpk = binascii.unhexlify(split[0])
    ltsk = binascii.unhexlify(split[1])
    atv_id = binascii.unhexlify(split[2])
    client_id = binascii.unhexlify(split[3])
    return Credentials(ltpk, ltsk, atv_id, client_id)
python
[ "def", "parse", "(", "cls", ",", "detail_string", ")", ":", "split", "=", "detail_string", ".", "split", "(", "':'", ")", "if", "len", "(", "split", ")", "!=", "4", ":", "raise", "Exception", "(", "'invalid credentials'", ")", "# TODO: other exception", "ltpk", "=", "binascii", ".", "unhexlify", "(", "split", "[", "0", "]", ")", "ltsk", "=", "binascii", ".", "unhexlify", "(", "split", "[", "1", "]", ")", "atv_id", "=", "binascii", ".", "unhexlify", "(", "split", "[", "2", "]", ")", "client_id", "=", "binascii", ".", "unhexlify", "(", "split", "[", "3", "]", ")", "return", "Credentials", "(", "ltpk", ",", "ltsk", ",", "atv_id", ",", "client_id", ")" ]
Parse a string representation of Credentials.
[ "Parse", "a", "string", "represention", "of", "Credentials", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L32-L42
train
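For context, Credentials.parse above expects four colon-separated hex fields (ltpk, ltsk, atv_id, client_id). A minimal round-trip sketch of that string format, with made-up key material standing in for real pairing output:

import binascii
import os

# Hypothetical key material; real values come from the pairing process
ltpk, ltsk, atv_id, client_id = (os.urandom(32), os.urandom(32),
                                 os.urandom(6), os.urandom(16))

# Serialize the same way parse() expects: hex-encoded fields joined by ':'
detail_string = ':'.join(
    binascii.hexlify(part).decode()
    for part in (ltpk, ltsk, atv_id, client_id))

# Splitting and unhexlifying recovers the original bytes,
# mirroring the body of Credentials.parse above
parts = [binascii.unhexlify(p) for p in detail_string.split(':')]
assert parts == [ltpk, ltsk, atv_id, client_id]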
postlund/pyatv
pyatv/mrp/srp.py
SRPAuthHandler.initialize
def initialize(self):
    """Initialize operation by generating new keys."""
    self._signing_key = SigningKey(os.urandom(32))
    self._auth_private = self._signing_key.to_seed()
    self._auth_public = self._signing_key.get_verifying_key().to_bytes()
    self._verify_private = curve25519.Private(secret=os.urandom(32))
    self._verify_public = self._verify_private.get_public()
    return self._auth_public, self._verify_public.serialize()
python
[ "def", "initialize", "(", "self", ")", ":", "self", ".", "_signing_key", "=", "SigningKey", "(", "os", ".", "urandom", "(", "32", ")", ")", "self", ".", "_auth_private", "=", "self", ".", "_signing_key", ".", "to_seed", "(", ")", "self", ".", "_auth_public", "=", "self", ".", "_signing_key", ".", "get_verifying_key", "(", ")", ".", "to_bytes", "(", ")", "self", ".", "_verify_private", "=", "curve25519", ".", "Private", "(", "secret", "=", "os", ".", "urandom", "(", "32", ")", ")", "self", ".", "_verify_public", "=", "self", ".", "_verify_private", ".", "get_public", "(", ")", "return", "self", ".", "_auth_public", ",", "self", ".", "_verify_public", ".", "serialize", "(", ")" ]
Initialize operation by generating new keys.
[ "Initialize", "operation", "by", "generating", "new", "keys", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L85-L92
train
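initialize() above uses an ed25519 SigningKey and a curve25519.Private binding whose imports are not shown in this row. As a rough, hedged analogue (an assumption, not what pyatv itself does), the same two 32-byte key pairs can be produced with the cryptography package:

import os
from cryptography.hazmat.primitives.asymmetric import ed25519, x25519
from cryptography.hazmat.primitives import serialization

# Ed25519 long-term "auth" key pair (counterpart of SigningKey above)
auth_private = ed25519.Ed25519PrivateKey.from_private_bytes(os.urandom(32))
auth_public = auth_private.public_key().public_bytes(
    serialization.Encoding.Raw, serialization.PublicFormat.Raw)

# Curve25519 ephemeral "verify" key pair (counterpart of curve25519.Private)
verify_private = x25519.X25519PrivateKey.from_private_bytes(os.urandom(32))
verify_public = verify_private.public_key().public_bytes(
    serialization.Encoding.Raw, serialization.PublicFormat.Raw)

# Both public keys are 32 raw bytes, like the values returned by initialize()
assert len(auth_public) == len(verify_public) == 32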
postlund/pyatv
pyatv/mrp/srp.py
SRPAuthHandler.verify1
def verify1(self, credentials, session_pub_key, encrypted):
    """First verification step."""
    # No additional hashing used
    self._shared = self._verify_private.get_shared_key(
        curve25519.Public(session_pub_key), hashfunc=lambda x: x)

    session_key = hkdf_expand('Pair-Verify-Encrypt-Salt',
                              'Pair-Verify-Encrypt-Info',
                              self._shared)

    chacha = chacha20.Chacha20Cipher(session_key, session_key)
    decrypted_tlv = tlv8.read_tlv(
        chacha.decrypt(encrypted, nounce='PV-Msg02'.encode()))

    identifier = decrypted_tlv[tlv8.TLV_IDENTIFIER]
    signature = decrypted_tlv[tlv8.TLV_SIGNATURE]

    if identifier != credentials.atv_id:
        raise Exception('incorrect device response')  # TODO: new exception

    info = session_pub_key + \
        bytes(identifier) + self._verify_public.serialize()
    ltpk = VerifyingKey(bytes(credentials.ltpk))
    ltpk.verify(bytes(signature), bytes(info))  # throws if no match

    device_info = self._verify_public.serialize() + \
        credentials.client_id + session_pub_key
    device_signature = SigningKey(credentials.ltsk).sign(device_info)

    tlv = tlv8.write_tlv({tlv8.TLV_IDENTIFIER: credentials.client_id,
                          tlv8.TLV_SIGNATURE: device_signature})
    return chacha.encrypt(tlv, nounce='PV-Msg03'.encode())
python
[ "def", "verify1", "(", "self", ",", "credentials", ",", "session_pub_key", ",", "encrypted", ")", ":", "# No additional hashing used", "self", ".", "_shared", "=", "self", ".", "_verify_private", ".", "get_shared_key", "(", "curve25519", ".", "Public", "(", "session_pub_key", ")", ",", "hashfunc", "=", "lambda", "x", ":", "x", ")", "session_key", "=", "hkdf_expand", "(", "'Pair-Verify-Encrypt-Salt'", ",", "'Pair-Verify-Encrypt-Info'", ",", "self", ".", "_shared", ")", "chacha", "=", "chacha20", ".", "Chacha20Cipher", "(", "session_key", ",", "session_key", ")", "decrypted_tlv", "=", "tlv8", ".", "read_tlv", "(", "chacha", ".", "decrypt", "(", "encrypted", ",", "nounce", "=", "'PV-Msg02'", ".", "encode", "(", ")", ")", ")", "identifier", "=", "decrypted_tlv", "[", "tlv8", ".", "TLV_IDENTIFIER", "]", "signature", "=", "decrypted_tlv", "[", "tlv8", ".", "TLV_SIGNATURE", "]", "if", "identifier", "!=", "credentials", ".", "atv_id", ":", "raise", "Exception", "(", "'incorrect device response'", ")", "# TODO: new exception", "info", "=", "session_pub_key", "+", "bytes", "(", "identifier", ")", "+", "self", ".", "_verify_public", ".", "serialize", "(", ")", "ltpk", "=", "VerifyingKey", "(", "bytes", "(", "credentials", ".", "ltpk", ")", ")", "ltpk", ".", "verify", "(", "bytes", "(", "signature", ")", ",", "bytes", "(", "info", ")", ")", "# throws if no match", "device_info", "=", "self", ".", "_verify_public", ".", "serialize", "(", ")", "+", "credentials", ".", "client_id", "+", "session_pub_key", "device_signature", "=", "SigningKey", "(", "credentials", ".", "ltsk", ")", ".", "sign", "(", "device_info", ")", "tlv", "=", "tlv8", ".", "write_tlv", "(", "{", "tlv8", ".", "TLV_IDENTIFIER", ":", "credentials", ".", "client_id", ",", "tlv8", ".", "TLV_SIGNATURE", ":", "device_signature", "}", ")", "return", "chacha", ".", "encrypt", "(", "tlv", ",", "nounce", "=", "'PV-Msg03'", ".", "encode", "(", ")", ")" ]
First verification step.
[ "First", "verification", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L94-L127
train
postlund/pyatv
pyatv/mrp/srp.py
SRPAuthHandler.verify2
def verify2(self):
    """Last verification step.

    The derived keys (output, input) are returned here.
    """
    output_key = hkdf_expand('MediaRemote-Salt',
                             'MediaRemote-Write-Encryption-Key',
                             self._shared)

    input_key = hkdf_expand('MediaRemote-Salt',
                            'MediaRemote-Read-Encryption-Key',
                            self._shared)

    log_binary(_LOGGER, 'Keys', Output=output_key, Input=input_key)
    return output_key, input_key
python
[ "def", "verify2", "(", "self", ")", ":", "output_key", "=", "hkdf_expand", "(", "'MediaRemote-Salt'", ",", "'MediaRemote-Write-Encryption-Key'", ",", "self", ".", "_shared", ")", "input_key", "=", "hkdf_expand", "(", "'MediaRemote-Salt'", ",", "'MediaRemote-Read-Encryption-Key'", ",", "self", ".", "_shared", ")", "log_binary", "(", "_LOGGER", ",", "'Keys'", ",", "Output", "=", "output_key", ",", "Input", "=", "input_key", ")", "return", "output_key", ",", "input_key" ]
Last verification step. The derived keys (output, input) are returned here.
[ "Last", "verification", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L129-L143
train
postlund/pyatv
pyatv/mrp/srp.py
SRPAuthHandler.step1
def step1(self, pin):
    """First pairing step."""
    context = SRPContext(
        'Pair-Setup', str(pin),
        prime=constants.PRIME_3072,
        generator=constants.PRIME_3072_GEN,
        hash_func=hashlib.sha512)
    self._session = SRPClientSession(
        context, binascii.hexlify(self._auth_private).decode())
python
[ "def", "step1", "(", "self", ",", "pin", ")", ":", "context", "=", "SRPContext", "(", "'Pair-Setup'", ",", "str", "(", "pin", ")", ",", "prime", "=", "constants", ".", "PRIME_3072", ",", "generator", "=", "constants", ".", "PRIME_3072_GEN", ",", "hash_func", "=", "hashlib", ".", "sha512", ")", "self", ".", "_session", "=", "SRPClientSession", "(", "context", ",", "binascii", ".", "hexlify", "(", "self", ".", "_auth_private", ")", ".", "decode", "(", ")", ")" ]
First pairing step.
[ "First", "pairing", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L145-L153
train
postlund/pyatv
pyatv/mrp/srp.py
SRPAuthHandler.step2
def step2(self, atv_pub_key, atv_salt):
    """Second pairing step."""
    pk_str = binascii.hexlify(atv_pub_key).decode()
    salt = binascii.hexlify(atv_salt).decode()
    self._client_session_key, _, _ = self._session.process(pk_str, salt)

    if not self._session.verify_proof(self._session.key_proof_hash):
        raise exceptions.AuthenticationError('proofs do not match (mitm?)')

    pub_key = binascii.unhexlify(self._session.public)
    proof = binascii.unhexlify(self._session.key_proof)
    log_binary(_LOGGER, 'Client', Public=pub_key, Proof=proof)
    return pub_key, proof
python
[ "def", "step2", "(", "self", ",", "atv_pub_key", ",", "atv_salt", ")", ":", "pk_str", "=", "binascii", ".", "hexlify", "(", "atv_pub_key", ")", ".", "decode", "(", ")", "salt", "=", "binascii", ".", "hexlify", "(", "atv_salt", ")", ".", "decode", "(", ")", "self", ".", "_client_session_key", ",", "_", ",", "_", "=", "self", ".", "_session", ".", "process", "(", "pk_str", ",", "salt", ")", "if", "not", "self", ".", "_session", ".", "verify_proof", "(", "self", ".", "_session", ".", "key_proof_hash", ")", ":", "raise", "exceptions", ".", "AuthenticationError", "(", "'proofs do not match (mitm?)'", ")", "pub_key", "=", "binascii", ".", "unhexlify", "(", "self", ".", "_session", ".", "public", ")", "proof", "=", "binascii", ".", "unhexlify", "(", "self", ".", "_session", ".", "key_proof", ")", "log_binary", "(", "_LOGGER", ",", "'Client'", ",", "Public", "=", "pub_key", ",", "Proof", "=", "proof", ")", "return", "pub_key", ",", "proof" ]
Second pairing step.
[ "Second", "pairing", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L155-L167
train
postlund/pyatv
pyatv/mrp/srp.py
SRPAuthHandler.step3
def step3(self):
    """Third pairing step."""
    ios_device_x = hkdf_expand(
        'Pair-Setup-Controller-Sign-Salt',
        'Pair-Setup-Controller-Sign-Info',
        binascii.unhexlify(self._client_session_key))

    self._session_key = hkdf_expand(
        'Pair-Setup-Encrypt-Salt',
        'Pair-Setup-Encrypt-Info',
        binascii.unhexlify(self._client_session_key))

    device_info = ios_device_x + self.pairing_id + self._auth_public
    device_signature = self._signing_key.sign(device_info)

    tlv = tlv8.write_tlv({tlv8.TLV_IDENTIFIER: self.pairing_id,
                          tlv8.TLV_PUBLIC_KEY: self._auth_public,
                          tlv8.TLV_SIGNATURE: device_signature})

    chacha = chacha20.Chacha20Cipher(self._session_key, self._session_key)
    encrypted_data = chacha.encrypt(tlv, nounce='PS-Msg05'.encode())
    log_binary(_LOGGER, 'Data', Encrypted=encrypted_data)
    return encrypted_data
python
[ "def", "step3", "(", "self", ")", ":", "ios_device_x", "=", "hkdf_expand", "(", "'Pair-Setup-Controller-Sign-Salt'", ",", "'Pair-Setup-Controller-Sign-Info'", ",", "binascii", ".", "unhexlify", "(", "self", ".", "_client_session_key", ")", ")", "self", ".", "_session_key", "=", "hkdf_expand", "(", "'Pair-Setup-Encrypt-Salt'", ",", "'Pair-Setup-Encrypt-Info'", ",", "binascii", ".", "unhexlify", "(", "self", ".", "_client_session_key", ")", ")", "device_info", "=", "ios_device_x", "+", "self", ".", "pairing_id", "+", "self", ".", "_auth_public", "device_signature", "=", "self", ".", "_signing_key", ".", "sign", "(", "device_info", ")", "tlv", "=", "tlv8", ".", "write_tlv", "(", "{", "tlv8", ".", "TLV_IDENTIFIER", ":", "self", ".", "pairing_id", ",", "tlv8", ".", "TLV_PUBLIC_KEY", ":", "self", ".", "_auth_public", ",", "tlv8", ".", "TLV_SIGNATURE", ":", "device_signature", "}", ")", "chacha", "=", "chacha20", ".", "Chacha20Cipher", "(", "self", ".", "_session_key", ",", "self", ".", "_session_key", ")", "encrypted_data", "=", "chacha", ".", "encrypt", "(", "tlv", ",", "nounce", "=", "'PS-Msg05'", ".", "encode", "(", ")", ")", "log_binary", "(", "_LOGGER", ",", "'Data'", ",", "Encrypted", "=", "encrypted_data", ")", "return", "encrypted_data" ]
Third pairing step.
[ "Third", "pairing", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L169-L191
train
postlund/pyatv
pyatv/mrp/srp.py
SRPAuthHandler.step4
def step4(self, encrypted_data):
    """Last pairing step."""
    chacha = chacha20.Chacha20Cipher(self._session_key, self._session_key)
    decrypted_tlv_bytes = chacha.decrypt(
        encrypted_data, nounce='PS-Msg06'.encode())

    if not decrypted_tlv_bytes:
        raise Exception('data decrypt failed')  # TODO: new exception
    decrypted_tlv = tlv8.read_tlv(decrypted_tlv_bytes)
    _LOGGER.debug('PS-Msg06: %s', decrypted_tlv)

    atv_identifier = decrypted_tlv[tlv8.TLV_IDENTIFIER]
    atv_signature = decrypted_tlv[tlv8.TLV_SIGNATURE]
    atv_pub_key = decrypted_tlv[tlv8.TLV_PUBLIC_KEY]
    log_binary(_LOGGER, 'Device',
               Identifier=atv_identifier,
               Signature=atv_signature,
               Public=atv_pub_key)

    # TODO: verify signature here

    return Credentials(atv_pub_key, self._signing_key.to_seed(),
                       atv_identifier, self.pairing_id)
python
[ "def", "step4", "(", "self", ",", "encrypted_data", ")", ":", "chacha", "=", "chacha20", ".", "Chacha20Cipher", "(", "self", ".", "_session_key", ",", "self", ".", "_session_key", ")", "decrypted_tlv_bytes", "=", "chacha", ".", "decrypt", "(", "encrypted_data", ",", "nounce", "=", "'PS-Msg06'", ".", "encode", "(", ")", ")", "if", "not", "decrypted_tlv_bytes", ":", "raise", "Exception", "(", "'data decrypt failed'", ")", "# TODO: new exception", "decrypted_tlv", "=", "tlv8", ".", "read_tlv", "(", "decrypted_tlv_bytes", ")", "_LOGGER", ".", "debug", "(", "'PS-Msg06: %s'", ",", "decrypted_tlv", ")", "atv_identifier", "=", "decrypted_tlv", "[", "tlv8", ".", "TLV_IDENTIFIER", "]", "atv_signature", "=", "decrypted_tlv", "[", "tlv8", ".", "TLV_SIGNATURE", "]", "atv_pub_key", "=", "decrypted_tlv", "[", "tlv8", ".", "TLV_PUBLIC_KEY", "]", "log_binary", "(", "_LOGGER", ",", "'Device'", ",", "Identifier", "=", "atv_identifier", ",", "Signature", "=", "atv_signature", ",", "Public", "=", "atv_pub_key", ")", "# TODO: verify signature here", "return", "Credentials", "(", "atv_pub_key", ",", "self", ".", "_signing_key", ".", "to_seed", "(", ")", ",", "atv_identifier", ",", "self", ".", "pairing_id", ")" ]
Last pairing step.
[ "Last", "pairing", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/srp.py#L193-L215
train
postlund/pyatv
pyatv/airplay/srp.py
hash_sha512
def hash_sha512(*indata):
    """Create SHA512 hash for input arguments."""
    hasher = hashlib.sha512()
    for data in indata:
        if isinstance(data, str):
            hasher.update(data.encode('utf-8'))
        elif isinstance(data, bytes):
            hasher.update(data)
        else:
            raise Exception('invalid input data: ' + str(data))
    return hasher.digest()
python
[ "def", "hash_sha512", "(", "*", "indata", ")", ":", "hasher", "=", "hashlib", ".", "sha512", "(", ")", "for", "data", "in", "indata", ":", "if", "isinstance", "(", "data", ",", "str", ")", ":", "hasher", ".", "update", "(", "data", ".", "encode", "(", "'utf-8'", ")", ")", "elif", "isinstance", "(", "data", ",", "bytes", ")", ":", "hasher", ".", "update", "(", "data", ")", "else", ":", "raise", "Exception", "(", "'invalid input data: '", "+", "str", "(", "data", ")", ")", "return", "hasher", ".", "digest", "(", ")" ]
Create SHA512 hash for input arguments.
[ "Create", "SHA512", "hash", "for", "input", "arguments", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L20-L30
train
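A short usage sketch for hash_sha512 above: it feeds str arguments (UTF-8 encoded) and bytes arguments into one SHA-512 digest, which is how the AES key and IV derivations later in this file use it. The placeholder shared secret below is made up:

import hashlib

shared = b'\x01' * 32  # placeholder shared secret

# hash_sha512('Pair-Verify-AES-Key', shared) is equivalent to:
digest = hashlib.sha512('Pair-Verify-AES-Key'.encode('utf-8') + shared).digest()

# Callers such as verify2() and step3() keep only the first 16 bytes as key or IV
aes_key = digest[0:16]
assert len(aes_key) == 16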
postlund/pyatv
pyatv/airplay/srp.py
aes_encrypt
def aes_encrypt(mode, aes_key, aes_iv, *data):
    """Encrypt data with AES in specified mode."""
    encryptor = Cipher(
        algorithms.AES(aes_key),
        mode(aes_iv),
        backend=default_backend()).encryptor()

    result = None
    for value in data:
        result = encryptor.update(value)
    encryptor.finalize()

    return result, None if not hasattr(encryptor, 'tag') else encryptor.tag
python
[ "def", "aes_encrypt", "(", "mode", ",", "aes_key", ",", "aes_iv", ",", "*", "data", ")", ":", "encryptor", "=", "Cipher", "(", "algorithms", ".", "AES", "(", "aes_key", ")", ",", "mode", "(", "aes_iv", ")", ",", "backend", "=", "default_backend", "(", ")", ")", ".", "encryptor", "(", ")", "result", "=", "None", "for", "value", "in", "data", ":", "result", "=", "encryptor", ".", "update", "(", "value", ")", "encryptor", ".", "finalize", "(", ")", "return", "result", ",", "None", "if", "not", "hasattr", "(", "encryptor", ",", "'tag'", ")", "else", "encryptor", ".", "tag" ]
Encrypt data with AES in specified mode.
[ "Encrypt", "data", "with", "AES", "in", "specified", "mode", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L33-L45
train
postlund/pyatv
pyatv/airplay/srp.py
new_credentials
def new_credentials():
    """Generate a new identifier and seed for authentication.

    Use the returned values in the following way:
    * The identifier shall be passed as username to SRPAuthHandler.step1
    * Seed shall be passed to SRPAuthHandler constructor
    """
    identifier = binascii.b2a_hex(os.urandom(8)).decode().upper()
    seed = binascii.b2a_hex(os.urandom(32))  # Corresponds to private key
    return identifier, seed
python
[ "def", "new_credentials", "(", ")", ":", "identifier", "=", "binascii", ".", "b2a_hex", "(", "os", ".", "urandom", "(", "8", ")", ")", ".", "decode", "(", ")", ".", "upper", "(", ")", "seed", "=", "binascii", ".", "b2a_hex", "(", "os", ".", "urandom", "(", "32", ")", ")", "# Corresponds to private key", "return", "identifier", ",", "seed" ]
Generate a new identifier and seed for authentication. Use the returned values in the following way: * The identifier shall be passed as username to SRPAuthHandler.step1 * Seed shall be passed to SRPAuthHandler constructor
[ "Generate", "a", "new", "identifier", "and", "seed", "for", "authentication", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L48-L57
train
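A minimal sketch of what new_credentials() above produces and how the two values are combined later (the '<identifier>:<SEED>' formatting mirrors AirPlayAPI.generate_credentials further down in this dump):

import binascii
import os

# Same construction as new_credentials() above
identifier = binascii.b2a_hex(os.urandom(8)).decode().upper()
seed = binascii.b2a_hex(os.urandom(32))  # hex-encoded 32-byte private seed

assert len(identifier) == 16  # 8 random bytes -> 16 hex characters
assert len(seed) == 64        # 32 random bytes -> 64 hex characters

# generate_credentials() joins the two values as '<identifier>:<SEED>',
# upper-casing the seed
credentials = '{0}:{1}'.format(identifier, seed.decode().upper())
assert credentials.count(':') == 1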
postlund/pyatv
pyatv/airplay/srp.py
SRPAuthHandler.initialize
def initialize(self, seed=None):
    """Initialize handler operation.

    This method will generate new encryption keys and must be called
    prior to doing authentication or verification.
    """
    self.seed = seed or os.urandom(32)  # Generate new seed if not provided
    signing_key = SigningKey(self.seed)
    verifying_key = signing_key.get_verifying_key()
    self._auth_private = signing_key.to_seed()
    self._auth_public = verifying_key.to_bytes()
    log_binary(_LOGGER, 'Authentication keys',
               Private=self._auth_private,
               Public=self._auth_public)
python
[ "def", "initialize", "(", "self", ",", "seed", "=", "None", ")", ":", "self", ".", "seed", "=", "seed", "or", "os", ".", "urandom", "(", "32", ")", "# Generate new seed if not provided", "signing_key", "=", "SigningKey", "(", "self", ".", "seed", ")", "verifying_key", "=", "signing_key", ".", "get_verifying_key", "(", ")", "self", ".", "_auth_private", "=", "signing_key", ".", "to_seed", "(", ")", "self", ".", "_auth_public", "=", "verifying_key", ".", "to_bytes", "(", ")", "log_binary", "(", "_LOGGER", ",", "'Authentication keys'", ",", "Private", "=", "self", ".", "_auth_private", ",", "Public", "=", "self", ".", "_auth_public", ")" ]
Initialize handler operation. This method will generate new encryption keys and must be called prior to doing authentication or verification.
[ "Initialize", "handler", "operation", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L86-L100
train
postlund/pyatv
pyatv/airplay/srp.py
SRPAuthHandler.verify1
def verify1(self):
    """First device verification step."""
    self._check_initialized()
    self._verify_private = curve25519.Private(secret=self.seed)
    self._verify_public = self._verify_private.get_public()
    log_binary(_LOGGER, 'Verification keys',
               Private=self._verify_private.serialize(),
               Public=self._verify_public.serialize())
    verify_public = self._verify_public.serialize()
    return b'\x01\x00\x00\x00' + verify_public + self._auth_public
python
[ "def", "verify1", "(", "self", ")", ":", "self", ".", "_check_initialized", "(", ")", "self", ".", "_verify_private", "=", "curve25519", ".", "Private", "(", "secret", "=", "self", ".", "seed", ")", "self", ".", "_verify_public", "=", "self", ".", "_verify_private", ".", "get_public", "(", ")", "log_binary", "(", "_LOGGER", ",", "'Verification keys'", ",", "Private", "=", "self", ".", "_verify_private", ".", "serialize", "(", ")", ",", "Public", "=", "self", ".", "_verify_public", ".", "serialize", "(", ")", ")", "verify_public", "=", "self", ".", "_verify_public", ".", "serialize", "(", ")", "return", "b'\\x01\\x00\\x00\\x00'", "+", "verify_public", "+", "self", ".", "_auth_public" ]
First device verification step.
[ "First", "device", "verification", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L102-L112
train
postlund/pyatv
pyatv/airplay/srp.py
SRPAuthHandler.verify2
def verify2(self, atv_public_key, data):
    """Last device verification step."""
    self._check_initialized()
    log_binary(_LOGGER, 'Verify', PublicSecret=atv_public_key, Data=data)

    # Generate a shared secret key
    public = curve25519.Public(atv_public_key)
    shared = self._verify_private.get_shared_key(
        public, hashfunc=lambda x: x)  # No additional hashing used
    log_binary(_LOGGER, 'Shared secret', Secret=shared)

    # Derive new AES key and IV from shared key
    aes_key = hash_sha512('Pair-Verify-AES-Key', shared)[0:16]
    aes_iv = hash_sha512('Pair-Verify-AES-IV', shared)[0:16]
    log_binary(_LOGGER, 'Pair-Verify-AES', Key=aes_key, IV=aes_iv)

    # Sign public keys and encrypt with AES
    signer = SigningKey(self._auth_private)
    signed = signer.sign(self._verify_public.serialize() + atv_public_key)
    signature, _ = aes_encrypt(modes.CTR, aes_key, aes_iv, data, signed)
    log_binary(_LOGGER, 'Signature', Signature=signature)

    # Signature is prepended with 0x00000000 (alignment?)
    return b'\x00\x00\x00\x00' + signature
python
[ "def", "verify2", "(", "self", ",", "atv_public_key", ",", "data", ")", ":", "self", ".", "_check_initialized", "(", ")", "log_binary", "(", "_LOGGER", ",", "'Verify'", ",", "PublicSecret", "=", "atv_public_key", ",", "Data", "=", "data", ")", "# Generate a shared secret key", "public", "=", "curve25519", ".", "Public", "(", "atv_public_key", ")", "shared", "=", "self", ".", "_verify_private", ".", "get_shared_key", "(", "public", ",", "hashfunc", "=", "lambda", "x", ":", "x", ")", "# No additional hashing used", "log_binary", "(", "_LOGGER", ",", "'Shared secret'", ",", "Secret", "=", "shared", ")", "# Derive new AES key and IV from shared key", "aes_key", "=", "hash_sha512", "(", "'Pair-Verify-AES-Key'", ",", "shared", ")", "[", "0", ":", "16", "]", "aes_iv", "=", "hash_sha512", "(", "'Pair-Verify-AES-IV'", ",", "shared", ")", "[", "0", ":", "16", "]", "log_binary", "(", "_LOGGER", ",", "'Pair-Verify-AES'", ",", "Key", "=", "aes_key", ",", "IV", "=", "aes_iv", ")", "# Sign public keys and encrypt with AES", "signer", "=", "SigningKey", "(", "self", ".", "_auth_private", ")", "signed", "=", "signer", ".", "sign", "(", "self", ".", "_verify_public", ".", "serialize", "(", ")", "+", "atv_public_key", ")", "signature", ",", "_", "=", "aes_encrypt", "(", "modes", ".", "CTR", ",", "aes_key", ",", "aes_iv", ",", "data", ",", "signed", ")", "log_binary", "(", "_LOGGER", ",", "'Signature'", ",", "Signature", "=", "signature", ")", "# Signature is prepended with 0x00000000 (alignment?)", "return", "b'\\x00\\x00\\x00\\x00'", "+", "signature" ]
Last device verification step.
[ "Last", "device", "verification", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L114-L137
train
postlund/pyatv
pyatv/airplay/srp.py
SRPAuthHandler.step1
def step1(self, username, password):
    """First authentication step."""
    self._check_initialized()
    context = AtvSRPContext(
        str(username), str(password),
        prime=constants.PRIME_2048,
        generator=constants.PRIME_2048_GEN)
    self.session = SRPClientSession(
        context, binascii.hexlify(self._auth_private).decode())
python
[ "def", "step1", "(", "self", ",", "username", ",", "password", ")", ":", "self", ".", "_check_initialized", "(", ")", "context", "=", "AtvSRPContext", "(", "str", "(", "username", ")", ",", "str", "(", "password", ")", ",", "prime", "=", "constants", ".", "PRIME_2048", ",", "generator", "=", "constants", ".", "PRIME_2048_GEN", ")", "self", ".", "session", "=", "SRPClientSession", "(", "context", ",", "binascii", ".", "hexlify", "(", "self", ".", "_auth_private", ")", ".", "decode", "(", ")", ")" ]
First authentication step.
[ "First", "authentication", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L139-L147
train
postlund/pyatv
pyatv/airplay/srp.py
SRPAuthHandler.step2
def step2(self, pub_key, salt):
    """Second authentication step."""
    self._check_initialized()
    pk_str = binascii.hexlify(pub_key).decode()
    salt = binascii.hexlify(salt).decode()
    self.client_session_key, _, _ = self.session.process(pk_str, salt)
    _LOGGER.debug('Client session key: %s', self.client_session_key)

    # Generate client public and session key proof.
    client_public = self.session.public
    client_session_key_proof = self.session.key_proof
    _LOGGER.debug('Client public: %s, proof: %s',
                  client_public, client_session_key_proof)

    if not self.session.verify_proof(self.session.key_proof_hash):
        raise AuthenticationError('proofs do not match (mitm?)')
    return client_public, client_session_key_proof
python
[ "def", "step2", "(", "self", ",", "pub_key", ",", "salt", ")", ":", "self", ".", "_check_initialized", "(", ")", "pk_str", "=", "binascii", ".", "hexlify", "(", "pub_key", ")", ".", "decode", "(", ")", "salt", "=", "binascii", ".", "hexlify", "(", "salt", ")", ".", "decode", "(", ")", "self", ".", "client_session_key", ",", "_", ",", "_", "=", "self", ".", "session", ".", "process", "(", "pk_str", ",", "salt", ")", "_LOGGER", ".", "debug", "(", "'Client session key: %s'", ",", "self", ".", "client_session_key", ")", "# Generate client public and session key proof.", "client_public", "=", "self", ".", "session", ".", "public", "client_session_key_proof", "=", "self", ".", "session", ".", "key_proof", "_LOGGER", ".", "debug", "(", "'Client public: %s, proof: %s'", ",", "client_public", ",", "client_session_key_proof", ")", "if", "not", "self", ".", "session", ".", "verify_proof", "(", "self", ".", "session", ".", "key_proof_hash", ")", ":", "raise", "AuthenticationError", "(", "'proofs do not match (mitm?)'", ")", "return", "client_public", ",", "client_session_key_proof" ]
Second authentication step.
[ "Second", "authentication", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L149-L165
train
postlund/pyatv
pyatv/airplay/srp.py
SRPAuthHandler.step3
def step3(self):
    """Last authentication step."""
    self._check_initialized()
    # TODO: verify: self.client_session_key same as self.session.key_b64()?
    session_key = binascii.unhexlify(self.client_session_key)

    aes_key = hash_sha512('Pair-Setup-AES-Key', session_key)[0:16]
    tmp = bytearray(hash_sha512('Pair-Setup-AES-IV', session_key)[0:16])
    tmp[-1] = tmp[-1] + 1  # Last byte must be increased by 1
    aes_iv = bytes(tmp)
    log_binary(_LOGGER, 'Pair-Setup-AES', Key=aes_key, IV=aes_iv)

    epk, tag = aes_encrypt(modes.GCM, aes_key, aes_iv, self._auth_public)
    log_binary(_LOGGER, 'Pair-Setup EPK+Tag', EPK=epk, Tag=tag)

    return epk, tag
python
[ "def", "step3", "(", "self", ")", ":", "self", ".", "_check_initialized", "(", ")", "# TODO: verify: self.client_session_key same as self.session.key_b64()?", "session_key", "=", "binascii", ".", "unhexlify", "(", "self", ".", "client_session_key", ")", "aes_key", "=", "hash_sha512", "(", "'Pair-Setup-AES-Key'", ",", "session_key", ")", "[", "0", ":", "16", "]", "tmp", "=", "bytearray", "(", "hash_sha512", "(", "'Pair-Setup-AES-IV'", ",", "session_key", ")", "[", "0", ":", "16", "]", ")", "tmp", "[", "-", "1", "]", "=", "tmp", "[", "-", "1", "]", "+", "1", "# Last byte must be increased by 1", "aes_iv", "=", "bytes", "(", "tmp", ")", "log_binary", "(", "_LOGGER", ",", "'Pair-Setup-AES'", ",", "Key", "=", "aes_key", ",", "IV", "=", "aes_iv", ")", "epk", ",", "tag", "=", "aes_encrypt", "(", "modes", ".", "GCM", ",", "aes_key", ",", "aes_iv", ",", "self", ".", "_auth_public", ")", "log_binary", "(", "_LOGGER", ",", "'Pair-Setup EPK+Tag'", ",", "EPK", "=", "epk", ",", "Tag", "=", "tag", ")", "return", "epk", ",", "tag" ]
Last authentication step.
[ "Last", "authentication", "step", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/srp.py#L167-L182
train
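A small standalone sketch of the key/IV derivation in step3 above, using plain hashlib in place of hash_sha512 and a made-up session key. The original increments the last IV byte with a bare bytearray assignment, which would raise if that byte happened to be 0xff; the modulo below is only there so this sketch always runs and is noted as a divergence:

import binascii
import hashlib

session_key = binascii.unhexlify('00' * 32)  # placeholder SRP session key

# Equivalent of hash_sha512('Pair-Setup-AES-Key', session_key)[0:16]
aes_key = hashlib.sha512(b'Pair-Setup-AES-Key' + session_key).digest()[0:16]

# Equivalent of the IV derivation: truncate to 16 bytes, then bump the last byte
tmp = bytearray(hashlib.sha512(b'Pair-Setup-AES-IV' + session_key).digest()[0:16])
tmp[-1] = (tmp[-1] + 1) % 256  # original does tmp[-1] + 1 without the wrap-around
aes_iv = bytes(tmp)

assert len(aes_key) == 16 and len(aes_iv) == 16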
postlund/pyatv
pyatv/airplay/auth.py
DeviceAuthenticator.start_authentication
async def start_authentication(self):
    """Start the authentication process.

    This method will show the expected PIN on screen.
    """
    _, code = await self.http.post_data(
        'pair-pin-start', headers=_AIRPLAY_HEADERS)
    if code != 200:
        raise DeviceAuthenticationError('pair start failed')
python
[ "async", "def", "start_authentication", "(", "self", ")", ":", "_", ",", "code", "=", "await", "self", ".", "http", ".", "post_data", "(", "'pair-pin-start'", ",", "headers", "=", "_AIRPLAY_HEADERS", ")", "if", "code", "!=", "200", ":", "raise", "DeviceAuthenticationError", "(", "'pair start failed'", ")" ]
Start the authentication process. This method will show the expected PIN on screen.
[ "Start", "the", "authentication", "process", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/auth.py#L26-L34
train
postlund/pyatv
pyatv/airplay/auth.py
DeviceAuthenticator.finish_authentication
async def finish_authentication(self, username, password):
    """Finish authentication process.

    A username (generated by new_credentials) and the PIN code shown
    on screen must be provided.
    """
    # Step 1
    self.srp.step1(username, password)
    data = await self._send_plist(
        'step1', method='pin', user=username)
    resp = plistlib.loads(data)

    # Step 2
    pub_key, key_proof = self.srp.step2(resp['pk'], resp['salt'])
    await self._send_plist(
        'step2',
        pk=binascii.unhexlify(pub_key),
        proof=binascii.unhexlify(key_proof))

    # Step 3
    epk, tag = self.srp.step3()
    await self._send_plist('step3', epk=epk, authTag=tag)
    return True
python
[ "async", "def", "finish_authentication", "(", "self", ",", "username", ",", "password", ")", ":", "# Step 1", "self", ".", "srp", ".", "step1", "(", "username", ",", "password", ")", "data", "=", "await", "self", ".", "_send_plist", "(", "'step1'", ",", "method", "=", "'pin'", ",", "user", "=", "username", ")", "resp", "=", "plistlib", ".", "loads", "(", "data", ")", "# Step 2", "pub_key", ",", "key_proof", "=", "self", ".", "srp", ".", "step2", "(", "resp", "[", "'pk'", "]", ",", "resp", "[", "'salt'", "]", ")", "await", "self", ".", "_send_plist", "(", "'step2'", ",", "pk", "=", "binascii", ".", "unhexlify", "(", "pub_key", ")", ",", "proof", "=", "binascii", ".", "unhexlify", "(", "key_proof", ")", ")", "# Step 3", "epk", ",", "tag", "=", "self", ".", "srp", ".", "step3", "(", ")", "await", "self", ".", "_send_plist", "(", "'step3'", ",", "epk", "=", "epk", ",", "authTag", "=", "tag", ")", "return", "True" ]
Finish authentication process. A username (generated by new_credentials) and the PIN code shown on screen must be provided.
[ "Finish", "authentication", "process", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/auth.py#L36-L58
train
postlund/pyatv
pyatv/airplay/auth.py
AuthenticationVerifier.verify_authed
async def verify_authed(self):
    """Verify if device is allowed to use AirPlay."""
    resp = await self._send(self.srp.verify1(), 'verify1')
    atv_public_secret = resp[0:32]
    data = resp[32:]  # TODO: what is this?
    await self._send(
        self.srp.verify2(atv_public_secret, data), 'verify2')
    return True
python
[ "async", "def", "verify_authed", "(", "self", ")", ":", "resp", "=", "await", "self", ".", "_send", "(", "self", ".", "srp", ".", "verify1", "(", ")", ",", "'verify1'", ")", "atv_public_secret", "=", "resp", "[", "0", ":", "32", "]", "data", "=", "resp", "[", "32", ":", "]", "# TODO: what is this?", "await", "self", ".", "_send", "(", "self", ".", "srp", ".", "verify2", "(", "atv_public_secret", ",", "data", ")", ",", "'verify2'", ")", "return", "True" ]
Verify if device is allowed to use AirPlay.
[ "Verify", "if", "device", "is", "allowed", "to", "use", "AirPlau", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/auth.py#L86-L94
train
postlund/pyatv
pyatv/airplay/api.py
AirPlayAPI.generate_credentials
async def generate_credentials(self):
    """Create new credentials for authentication.

    Credentials that have been authenticated shall be saved and loaded
    with load_credentials before playing anything. If credentials are
    lost, authentication must be performed again.
    """
    identifier, seed = new_credentials()
    return '{0}:{1}'.format(identifier, seed.decode().upper())
python
[ "async", "def", "generate_credentials", "(", "self", ")", ":", "identifier", ",", "seed", "=", "new_credentials", "(", ")", "return", "'{0}:{1}'", ".", "format", "(", "identifier", ",", "seed", ".", "decode", "(", ")", ".", "upper", "(", ")", ")" ]
Create new credentials for authentication. Credentials that have been authenticated shall be saved and loaded with load_credentials before playing anything. If credentials are lost, authentication must be performed again.
[ "Create", "new", "credentials", "for", "authentication", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/api.py#L25-L33
train
postlund/pyatv
pyatv/airplay/api.py
AirPlayAPI.load_credentials
async def load_credentials(self, credentials):
    """Load existing credentials."""
    split = credentials.split(':')
    self.identifier = split[0]
    self.srp.initialize(binascii.unhexlify(split[1]))
    _LOGGER.debug('Loaded AirPlay credentials: %s', credentials)
python
[ "async", "def", "load_credentials", "(", "self", ",", "credentials", ")", ":", "split", "=", "credentials", ".", "split", "(", "':'", ")", "self", ".", "identifier", "=", "split", "[", "0", "]", "self", ".", "srp", ".", "initialize", "(", "binascii", ".", "unhexlify", "(", "split", "[", "1", "]", ")", ")", "_LOGGER", ".", "debug", "(", "'Loaded AirPlay credentials: %s'", ",", "credentials", ")" ]
Load existing credentials.
[ "Load", "existing", "credentials", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/airplay/api.py#L35-L40
train
postlund/pyatv
pyatv/mrp/connection.py
MrpConnection.enable_encryption
def enable_encryption(self, output_key, input_key):
    """Enable encryption with the specified keys."""
    self._chacha = chacha20.Chacha20Cipher(output_key, input_key)
python
[ "def", "enable_encryption", "(", "self", ",", "output_key", ",", "input_key", ")", ":", "self", ".", "_chacha", "=", "chacha20", ".", "Chacha20Cipher", "(", "output_key", ",", "input_key", ")" ]
Enable encryption with the specified keys.
[ "Enable", "encryption", "with", "the", "specified", "keys", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/connection.py#L37-L39
train
postlund/pyatv
pyatv/mrp/connection.py
MrpConnection.connect
def connect(self):
    """Connect to device."""
    return self.loop.create_connection(lambda: self, self.host, self.port)
python
[ "def", "connect", "(", "self", ")", ":", "return", "self", ".", "loop", ".", "create_connection", "(", "lambda", ":", "self", ",", "self", ".", "host", ",", "self", ".", "port", ")" ]
Connect to device.
[ "Connect", "to", "device", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/connection.py#L46-L48
train
postlund/pyatv
pyatv/mrp/connection.py
MrpConnection.close
def close(self):
    """Close connection to device."""
    if self._transport:
        self._transport.close()
    self._transport = None
    self._chacha = None
python
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_transport", ":", "self", ".", "_transport", ".", "close", "(", ")", "self", ".", "_transport", "=", "None", "self", ".", "_chacha", "=", "None" ]
Close connection to device.
[ "Close", "connection", "to", "device", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/connection.py#L50-L55
train
postlund/pyatv
pyatv/mrp/connection.py
MrpConnection.send
def send(self, message):
    """Send message to device."""
    serialized = message.SerializeToString()

    log_binary(_LOGGER, '>> Send', Data=serialized)
    if self._chacha:
        serialized = self._chacha.encrypt(serialized)
        log_binary(_LOGGER, '>> Send', Encrypted=serialized)

    data = write_variant(len(serialized)) + serialized
    self._transport.write(data)
    _LOGGER.debug('>> Send: Protobuf=%s', message)
python
[ "def", "send", "(", "self", ",", "message", ")", ":", "serialized", "=", "message", ".", "SerializeToString", "(", ")", "log_binary", "(", "_LOGGER", ",", "'>> Send'", ",", "Data", "=", "serialized", ")", "if", "self", ".", "_chacha", ":", "serialized", "=", "self", ".", "_chacha", ".", "encrypt", "(", "serialized", ")", "log_binary", "(", "_LOGGER", ",", "'>> Send'", ",", "Encrypted", "=", "serialized", ")", "data", "=", "write_variant", "(", "len", "(", "serialized", ")", ")", "+", "serialized", "self", ".", "_transport", ".", "write", "(", "data", ")", "_LOGGER", ".", "debug", "(", "'>> Send: Protobuf=%s'", ",", "message", ")" ]
Send message to device.
[ "Send", "message", "to", "device", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/connection.py#L57-L68
train
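send() above length-prefixes each (possibly encrypted) serialized message with write_variant, which is not shown in this row; assuming it follows the usual protobuf-style unsigned varint framing, a sketch of such an encoder and a matching decoder looks like this:

def write_variant(number):
    """Encode an unsigned integer as a protobuf-style varint (assumed behaviour)."""
    out = b''
    while True:
        byte = number & 0x7f
        number >>= 7
        if number:
            out += bytes([byte | 0x80])  # continuation bit: more bytes follow
        else:
            out += bytes([byte])
            return out

def read_variant(data):
    """Decode a varint from the front of data, returning (value, remaining bytes)."""
    value = 0
    for i, byte in enumerate(data):
        value |= (byte & 0x7f) << (7 * i)
        if not byte & 0x80:
            return value, data[i + 1:]
    raise ValueError('incomplete varint')

payload = b'\x08\x01'  # stand-in for an encrypted, serialized message
frame = write_variant(len(payload)) + payload  # same framing as send() above
length, rest = read_variant(frame)
assert length == len(payload) and rest == payload
assert write_variant(300) == b'\xac\x02'  # multi-byte example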
postlund/pyatv
pyatv/net.py
HttpSession.get_data
async def get_data(self, path, headers=None, timeout=None):
    """Perform a GET request."""
    url = self.base_url + path
    _LOGGER.debug('GET URL: %s', url)
    resp = None
    try:
        resp = await self._session.get(
            url, headers=headers,
            timeout=DEFAULT_TIMEOUT if timeout is None else timeout)
        if resp.content_length is not None:
            resp_data = await resp.read()
        else:
            resp_data = None
        return resp_data, resp.status
    except Exception as ex:
        if resp is not None:
            resp.close()
        raise ex
    finally:
        if resp is not None:
            await resp.release()
python
[ "async", "def", "get_data", "(", "self", ",", "path", ",", "headers", "=", "None", ",", "timeout", "=", "None", ")", ":", "url", "=", "self", ".", "base_url", "+", "path", "_LOGGER", ".", "debug", "(", "'GET URL: %s'", ",", "url", ")", "resp", "=", "None", "try", ":", "resp", "=", "await", "self", ".", "_session", ".", "get", "(", "url", ",", "headers", "=", "headers", ",", "timeout", "=", "DEFAULT_TIMEOUT", "if", "timeout", "is", "None", "else", "timeout", ")", "if", "resp", ".", "content_length", "is", "not", "None", ":", "resp_data", "=", "await", "resp", ".", "read", "(", ")", "else", ":", "resp_data", "=", "None", "return", "resp_data", ",", "resp", ".", "status", "except", "Exception", "as", "ex", ":", "if", "resp", "is", "not", "None", ":", "resp", ".", "close", "(", ")", "raise", "ex", "finally", ":", "if", "resp", "is", "not", "None", ":", "await", "resp", ".", "release", "(", ")" ]
Perform a GET request.
[ "Perform", "a", "GET", "request", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/net.py#L20-L40
train
postlund/pyatv
pyatv/net.py
HttpSession.post_data
async def post_data(self, path, data=None, headers=None, timeout=None):
    """Perform a POST request."""
    url = self.base_url + path
    _LOGGER.debug('POST URL: %s', url)
    self._log_data(data, False)

    resp = None
    try:
        resp = await self._session.post(
            url, headers=headers, data=data,
            timeout=DEFAULT_TIMEOUT if timeout is None else timeout)
        if resp.content_length is not None:
            resp_data = await resp.read()
        else:
            resp_data = None
        self._log_data(resp_data, True)
        return resp_data, resp.status
    except Exception as ex:
        if resp is not None:
            resp.close()
        raise ex
    finally:
        if resp is not None:
            await resp.release()
python
[ "async", "def", "post_data", "(", "self", ",", "path", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "timeout", "=", "None", ")", ":", "url", "=", "self", ".", "base_url", "+", "path", "_LOGGER", ".", "debug", "(", "'POST URL: %s'", ",", "url", ")", "self", ".", "_log_data", "(", "data", ",", "False", ")", "resp", "=", "None", "try", ":", "resp", "=", "await", "self", ".", "_session", ".", "post", "(", "url", ",", "headers", "=", "headers", ",", "data", "=", "data", ",", "timeout", "=", "DEFAULT_TIMEOUT", "if", "timeout", "is", "None", "else", "timeout", ")", "if", "resp", ".", "content_length", "is", "not", "None", ":", "resp_data", "=", "await", "resp", ".", "read", "(", ")", "else", ":", "resp_data", "=", "None", "self", ".", "_log_data", "(", "resp_data", ",", "True", ")", "return", "resp_data", ",", "resp", ".", "status", "except", "Exception", "as", "ex", ":", "if", "resp", "is", "not", "None", ":", "resp", ".", "close", "(", ")", "raise", "ex", "finally", ":", "if", "resp", "is", "not", "None", ":", "await", "resp", ".", "release", "(", ")" ]
Perform a POST request.
[ "Perform", "a", "POST", "request", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/net.py#L42-L65
train
postlund/pyatv
pyatv/dmap/tags.py
read_uint
def read_uint(data, start, length):
    """Extract a uint from a position in a sequence."""
    return int.from_bytes(data[start:start+length], byteorder='big')
python
[ "def", "read_uint", "(", "data", ",", "start", ",", "length", ")", ":", "return", "int", ".", "from_bytes", "(", "data", "[", "start", ":", "start", "+", "length", "]", ",", "byteorder", "=", "'big'", ")" ]
Extract a uint from a position in a sequence.
[ "Extract", "a", "uint", "from", "a", "position", "in", "a", "sequence", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/tags.py#L11-L13
train
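read_uint above is a thin wrapper around int.from_bytes; a tiny usage sketch with a made-up byte sequence:

data = b'\x00\x00\x01\x00\xff'

# Equivalent to read_uint(data, start=1, length=3): big-endian unsigned integer
value = int.from_bytes(data[1:1 + 3], byteorder='big')
assert value == 256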
postlund/pyatv
pyatv/dmap/tags.py
read_bplist
def read_bplist(data, start, length):
    """Extract a binary plist from a position in a sequence."""
    # TODO: pylint doesn't find FMT_BINARY, why?
    # pylint: disable=no-member
    return plistlib.loads(data[start:start+length], fmt=plistlib.FMT_BINARY)
python
def read_bplist(data, start, length): """Extract a binary plist from a position in a sequence.""" # TODO: pylint doesn't find FMT_BINARY, why? # pylint: disable=no-member return plistlib.loads(data[start:start+length], fmt=plistlib.FMT_BINARY)
[ "def", "read_bplist", "(", "data", ",", "start", ",", "length", ")", ":", "# TODO: pylint doesn't find FMT_BINARY, why?", "# pylint: disable=no-member", "return", "plistlib", ".", "loads", "(", "data", "[", "start", ":", "start", "+", "length", "]", ",", "fmt", "=", "plistlib", ".", "FMT_BINARY", ")" ]
Extract a binary plist from a position in a sequence.
[ "Extract", "a", "binary", "plist", "from", "a", "position", "in", "a", "sequence", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/tags.py#L21-L26
train
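To illustrate read_bplist, a small sketch that builds a binary plist with plistlib and reads it back from an offset, assuming the read_bplist definition above (and its plistlib import) is in scope; the payload dictionary is made up:
import plistlib
payload = plistlib.dumps({'state': 'playing'}, fmt=plistlib.FMT_BINARY)
blob = b'\x00\x00' + payload  # pretend the plist starts at offset 2 inside a larger message
assert read_bplist(blob, 2, len(payload)) == {'state': 'playing'}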
postlund/pyatv
pyatv/dmap/tags.py
raw_tag
def raw_tag(name, value): """Create a DMAP tag with raw data.""" return name.encode('utf-8') + \ len(value).to_bytes(4, byteorder='big') + \ value
python
def raw_tag(name, value): """Create a DMAP tag with raw data.""" return name.encode('utf-8') + \ len(value).to_bytes(4, byteorder='big') + \ value
[ "def", "raw_tag", "(", "name", ",", "value", ")", ":", "return", "name", ".", "encode", "(", "'utf-8'", ")", "+", "len", "(", "value", ")", ".", "to_bytes", "(", "4", ",", "byteorder", "=", "'big'", ")", "+", "value" ]
Create a DMAP tag with raw data.
[ "Create", "a", "DMAP", "tag", "with", "raw", "data", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/tags.py#L69-L73
train
postlund/pyatv
pyatv/dmap/tags.py
string_tag
def string_tag(name, value): """Create a DMAP tag with string data.""" return name.encode('utf-8') + \ len(value).to_bytes(4, byteorder='big') + \ value.encode('utf-8')
python
def string_tag(name, value): """Create a DMAP tag with string data.""" return name.encode('utf-8') + \ len(value).to_bytes(4, byteorder='big') + \ value.encode('utf-8')
[ "def", "string_tag", "(", "name", ",", "value", ")", ":", "return", "name", ".", "encode", "(", "'utf-8'", ")", "+", "len", "(", "value", ")", ".", "to_bytes", "(", "4", ",", "byteorder", "=", "'big'", ")", "+", "value", ".", "encode", "(", "'utf-8'", ")" ]
Create a DMAP tag with string data.
[ "Create", "a", "DMAP", "tag", "with", "string", "data", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/tags.py#L76-L80
train
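The DMAP tag layout used by raw_tag and string_tag is simply a four-character name, a 32-bit big-endian length and the payload. A minimal check, assuming both definitions above are in scope (the tag names and values are illustrative):
assert string_tag('cmbe', 'select') == b'cmbe\x00\x00\x00\x06select'
assert raw_tag('cmcc', b'\x00') == b'cmcc\x00\x00\x00\x01\x00'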
postlund/pyatv
pyatv/mrp/messages.py
create
def create(message_type, priority=0): """Create a ProtocolMessage.""" message = protobuf.ProtocolMessage() message.type = message_type message.priority = priority return message
python
def create(message_type, priority=0): """Create a ProtocolMessage.""" message = protobuf.ProtocolMessage() message.type = message_type message.priority = priority return message
[ "def", "create", "(", "message_type", ",", "priority", "=", "0", ")", ":", "message", "=", "protobuf", ".", "ProtocolMessage", "(", ")", "message", ".", "type", "=", "message_type", "message", ".", "priority", "=", "priority", "return", "message" ]
Create a ProtocolMessage.
[ "Create", "a", "ProtocolMessage", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L9-L14
train
postlund/pyatv
pyatv/mrp/messages.py
device_information
def device_information(name, identifier): """Create a new DEVICE_INFO_MESSAGE.""" # pylint: disable=no-member message = create(protobuf.DEVICE_INFO_MESSAGE) info = message.inner() info.uniqueIdentifier = identifier info.name = name info.localizedModelName = 'iPhone' info.systemBuildVersion = '14G60' info.applicationBundleIdentifier = 'com.apple.TVRemote' info.applicationBundleVersion = '273.12' info.protocolVersion = 1 info.lastSupportedMessageType = 58 info.supportsExtendedMotion = True return message
python
def device_information(name, identifier): """Create a new DEVICE_INFO_MESSAGE.""" # pylint: disable=no-member message = create(protobuf.DEVICE_INFO_MESSAGE) info = message.inner() info.uniqueIdentifier = identifier info.name = name info.localizedModelName = 'iPhone' info.systemBuildVersion = '14G60' info.applicationBundleIdentifier = 'com.apple.TVRemote' info.applicationBundleVersion = '273.12' info.protocolVersion = 1 info.lastSupportedMessageType = 58 info.supportsExtendedMotion = True return message
[ "def", "device_information", "(", "name", ",", "identifier", ")", ":", "# pylint: disable=no-member", "message", "=", "create", "(", "protobuf", ".", "DEVICE_INFO_MESSAGE", ")", "info", "=", "message", ".", "inner", "(", ")", "info", ".", "uniqueIdentifier", "=", "identifier", "info", ".", "name", "=", "name", "info", ".", "localizedModelName", "=", "'iPhone'", "info", ".", "systemBuildVersion", "=", "'14G60'", "info", ".", "applicationBundleIdentifier", "=", "'com.apple.TVRemote'", "info", ".", "applicationBundleVersion", "=", "'273.12'", "info", ".", "protocolVersion", "=", "1", "info", ".", "lastSupportedMessageType", "=", "58", "info", ".", "supportsExtendedMotion", "=", "True", "return", "message" ]
Create a new DEVICE_INFO_MESSAGE.
[ "Create", "a", "new", "DEVICE_INFO_MESSAGE", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L18-L32
train
postlund/pyatv
pyatv/mrp/messages.py
set_connection_state
def set_connection_state(): """Create a new SET_CONNECTION_STATE.""" message = create(protobuf.ProtocolMessage.SET_CONNECTION_STATE_MESSAGE) message.inner().state = protobuf.SetConnectionStateMessage.Connected return message
python
def set_connection_state(): """Create a new SET_CONNECTION_STATE.""" message = create(protobuf.ProtocolMessage.SET_CONNECTION_STATE_MESSAGE) message.inner().state = protobuf.SetConnectionStateMessage.Connected return message
[ "def", "set_connection_state", "(", ")", ":", "message", "=", "create", "(", "protobuf", ".", "ProtocolMessage", ".", "SET_CONNECTION_STATE_MESSAGE", ")", "message", ".", "inner", "(", ")", ".", "state", "=", "protobuf", ".", "SetConnectionStateMessage", ".", "Connected", "return", "message" ]
Create a new SET_CONNECTION_STATE.
[ "Create", "a", "new", "SET_CONNECTION_STATE", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L40-L44
train
postlund/pyatv
pyatv/mrp/messages.py
crypto_pairing
def crypto_pairing(pairing_data): """Create a new CRYPTO_PAIRING_MESSAGE.""" message = create(protobuf.CRYPTO_PAIRING_MESSAGE) crypto = message.inner() crypto.status = 0 crypto.pairingData = tlv8.write_tlv(pairing_data) return message
python
def crypto_pairing(pairing_data): """Create a new CRYPTO_PAIRING_MESSAGE.""" message = create(protobuf.CRYPTO_PAIRING_MESSAGE) crypto = message.inner() crypto.status = 0 crypto.pairingData = tlv8.write_tlv(pairing_data) return message
[ "def", "crypto_pairing", "(", "pairing_data", ")", ":", "message", "=", "create", "(", "protobuf", ".", "CRYPTO_PAIRING_MESSAGE", ")", "crypto", "=", "message", ".", "inner", "(", ")", "crypto", ".", "status", "=", "0", "crypto", ".", "pairingData", "=", "tlv8", ".", "write_tlv", "(", "pairing_data", ")", "return", "message" ]
Create a new CRYPTO_PAIRING_MESSAGE.
[ "Create", "a", "new", "CRYPTO_PAIRING_MESSAGE", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L47-L53
train
postlund/pyatv
pyatv/mrp/messages.py
client_updates_config
def client_updates_config(artwork=True, now_playing=True, volume=True, keyboard=True): """Create a new CLIENT_UPDATES_CONFIG_MESSAGE.""" message = create(protobuf.CLIENT_UPDATES_CONFIG_MESSAGE) config = message.inner() config.artworkUpdates = artwork config.nowPlayingUpdates = now_playing config.volumeUpdates = volume config.keyboardUpdates = keyboard return message
python
def client_updates_config(artwork=True, now_playing=True, volume=True, keyboard=True): """Create a new CLIENT_UPDATES_CONFIG_MESSAGE.""" message = create(protobuf.CLIENT_UPDATES_CONFIG_MESSAGE) config = message.inner() config.artworkUpdates = artwork config.nowPlayingUpdates = now_playing config.volumeUpdates = volume config.keyboardUpdates = keyboard return message
[ "def", "client_updates_config", "(", "artwork", "=", "True", ",", "now_playing", "=", "True", ",", "volume", "=", "True", ",", "keyboard", "=", "True", ")", ":", "message", "=", "create", "(", "protobuf", ".", "CLIENT_UPDATES_CONFIG_MESSAGE", ")", "config", "=", "message", ".", "inner", "(", ")", "config", ".", "artworkUpdates", "=", "artwork", "config", ".", "nowPlayingUpdates", "=", "now_playing", "config", ".", "volumeUpdates", "=", "volume", "config", ".", "keyboardUpdates", "=", "keyboard", "return", "message" ]
Create a new CLIENT_UPDATES_CONFIG_MESSAGE.
[ "Create", "a", "new", "CLIENT_UPDATES_CONFIG_MESSAGE", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L56-L65
train
postlund/pyatv
pyatv/mrp/messages.py
register_hid_device
def register_hid_device(screen_width, screen_height, absolute=False, integrated_display=False): """Create a new REGISTER_HID_DEVICE_MESSAGE.""" message = create(protobuf.REGISTER_HID_DEVICE_MESSAGE) descriptor = message.inner().deviceDescriptor descriptor.absolute = 1 if absolute else 0 descriptor.integratedDisplay = 1 if integrated_display else 0 descriptor.screenSizeWidth = screen_width descriptor.screenSizeHeight = screen_height return message
python
def register_hid_device(screen_width, screen_height, absolute=False, integrated_display=False): """Create a new REGISTER_HID_DEVICE_MESSAGE.""" message = create(protobuf.REGISTER_HID_DEVICE_MESSAGE) descriptor = message.inner().deviceDescriptor descriptor.absolute = 1 if absolute else 0 descriptor.integratedDisplay = 1 if integrated_display else 0 descriptor.screenSizeWidth = screen_width descriptor.screenSizeHeight = screen_height return message
[ "def", "register_hid_device", "(", "screen_width", ",", "screen_height", ",", "absolute", "=", "False", ",", "integrated_display", "=", "False", ")", ":", "message", "=", "create", "(", "protobuf", ".", "REGISTER_HID_DEVICE_MESSAGE", ")", "descriptor", "=", "message", ".", "inner", "(", ")", ".", "deviceDescriptor", "descriptor", ".", "absolute", "=", "1", "if", "absolute", "else", "0", "descriptor", ".", "integratedDisplay", "=", "1", "if", "integrated_display", "else", "0", "descriptor", ".", "screenSizeWidth", "=", "screen_width", "descriptor", ".", "screenSizeHeight", "=", "screen_height", "return", "message" ]
Create a new REGISTER_HID_DEVICE_MESSAGE.
[ "Create", "a", "new", "REGISTER_HID_DEVICE_MESSAGE", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L73-L82
train
postlund/pyatv
pyatv/mrp/messages.py
send_packed_virtual_touch_event
def send_packed_virtual_touch_event(xpos, ypos, phase, device_id, finger): """Create a new SEND_PACKED_VIRTUAL_TOUCH_EVENT_MESSAGE.""" message = create(protobuf.SEND_PACKED_VIRTUAL_TOUCH_EVENT_MESSAGE) event = message.inner() # The packed version of VirtualTouchEvent contains X, Y, phase, deviceID # and finger stored as a byte array. Each value is written as 16bit little # endian integers. event.data = xpos.to_bytes(2, byteorder='little') event.data += ypos.to_bytes(2, byteorder='little') event.data += phase.to_bytes(2, byteorder='little') event.data += device_id.to_bytes(2, byteorder='little') event.data += finger.to_bytes(2, byteorder='little') return message
python
def send_packed_virtual_touch_event(xpos, ypos, phase, device_id, finger): """Create a new SEND_PACKED_VIRTUAL_TOUCH_EVENT_MESSAGE.""" message = create(protobuf.SEND_PACKED_VIRTUAL_TOUCH_EVENT_MESSAGE) event = message.inner() # The packed version of VirtualTouchEvent contains X, Y, phase, deviceID # and finger stored as a byte array. Each value is written as 16bit little # endian integers. event.data = xpos.to_bytes(2, byteorder='little') event.data += ypos.to_bytes(2, byteorder='little') event.data += phase.to_bytes(2, byteorder='little') event.data += device_id.to_bytes(2, byteorder='little') event.data += finger.to_bytes(2, byteorder='little') return message
[ "def", "send_packed_virtual_touch_event", "(", "xpos", ",", "ypos", ",", "phase", ",", "device_id", ",", "finger", ")", ":", "message", "=", "create", "(", "protobuf", ".", "SEND_PACKED_VIRTUAL_TOUCH_EVENT_MESSAGE", ")", "event", "=", "message", ".", "inner", "(", ")", "# The packed version of VirtualTouchEvent contains X, Y, phase, deviceID", "# and finger stored as a byte array. Each value is written as 16bit little", "# endian integers.", "event", ".", "data", "=", "xpos", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'little'", ")", "event", ".", "data", "+=", "ypos", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'little'", ")", "event", ".", "data", "+=", "phase", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'little'", ")", "event", ".", "data", "+=", "device_id", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'little'", ")", "event", ".", "data", "+=", "finger", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'little'", ")", "return", "message" ]
Create a new SEND_PACKED_VIRTUAL_TOUCH_EVENT_MESSAGE.
[ "Create", "a", "new", "WAKE_DEVICE_MESSAGE", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L85-L99
train
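The packed byte array described in the comments above can be reproduced without protobuf; each of the five values is a 16-bit little-endian integer, giving a 10-byte payload (the coordinates and phase below are illustrative values, not taken from the record):
xpos, ypos, phase, device_id, finger = 100, 200, 3, 1, 1
packed = b''.join(v.to_bytes(2, byteorder='little') for v in (xpos, ypos, phase, device_id, finger))
assert packed == b'\x64\x00\xc8\x00\x03\x00\x01\x00\x01\x00'
assert len(packed) == 10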
postlund/pyatv
pyatv/mrp/messages.py
send_hid_event
def send_hid_event(use_page, usage, down): """Create a new SEND_HID_EVENT_MESSAGE.""" message = create(protobuf.SEND_HID_EVENT_MESSAGE) event = message.inner() # TODO: This should be generated somehow. I guess it's mach AbsoluteTime # which is tricky to generate. The device does not seem to care much about # the value though, so hardcode something here. abstime = binascii.unhexlify(b'438922cf08020000') data = use_page.to_bytes(2, byteorder='big') data += usage.to_bytes(2, byteorder='big') data += (1 if down else 0).to_bytes(2, byteorder='big') # This is the format that the device expects. Some day I might take some # time to decode it for real, but this is fine for now. event.hidEventData = abstime + \ binascii.unhexlify(b'00000000000000000100000000000000020' + b'00000200000000300000001000000000000') + \ data + \ binascii.unhexlify(b'0000000000000001000000') return message
python
def send_hid_event(use_page, usage, down): """Create a new SEND_HID_EVENT_MESSAGE.""" message = create(protobuf.SEND_HID_EVENT_MESSAGE) event = message.inner() # TODO: This should be generated somehow. I guess it's mach AbsoluteTime # which is tricky to generate. The device does not seem to care much about # the value though, so hardcode something here. abstime = binascii.unhexlify(b'438922cf08020000') data = use_page.to_bytes(2, byteorder='big') data += usage.to_bytes(2, byteorder='big') data += (1 if down else 0).to_bytes(2, byteorder='big') # This is the format that the device expects. Some day I might take some # time to decode it for real, but this is fine for now. event.hidEventData = abstime + \ binascii.unhexlify(b'00000000000000000100000000000000020' + b'00000200000000300000001000000000000') + \ data + \ binascii.unhexlify(b'0000000000000001000000') return message
[ "def", "send_hid_event", "(", "use_page", ",", "usage", ",", "down", ")", ":", "message", "=", "create", "(", "protobuf", ".", "SEND_HID_EVENT_MESSAGE", ")", "event", "=", "message", ".", "inner", "(", ")", "# TODO: This should be generated somehow. I guess it's mach AbsoluteTime", "# which is tricky to generate. The device does not seem to care much about", "# the value though, so hardcode something here.", "abstime", "=", "binascii", ".", "unhexlify", "(", "b'438922cf08020000'", ")", "data", "=", "use_page", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'big'", ")", "data", "+=", "usage", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'big'", ")", "data", "+=", "(", "1", "if", "down", "else", "0", ")", ".", "to_bytes", "(", "2", ",", "byteorder", "=", "'big'", ")", "# This is the format that the device expects. Some day I might take some", "# time to decode it for real, but this is fine for now.", "event", ".", "hidEventData", "=", "abstime", "+", "binascii", ".", "unhexlify", "(", "b'00000000000000000100000000000000020'", "+", "b'00000200000000300000001000000000000'", ")", "+", "data", "+", "binascii", ".", "unhexlify", "(", "b'0000000000000001000000'", ")", "return", "message" ]
Create a new SEND_HID_EVENT_MESSAGE.
[ "Create", "a", "new", "SEND_HID_EVENT_MESSAGE", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L102-L124
train
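Only the six bytes in the middle of hidEventData vary between key presses: the usage page, the usage and the up/down flag, each written as a 16-bit big-endian integer. A small sketch (the usage page and usage values below are made up for illustration):
use_page, usage, down = 1, 0x8C, True
data = use_page.to_bytes(2, byteorder='big')
data += usage.to_bytes(2, byteorder='big')
data += (1 if down else 0).to_bytes(2, byteorder='big')
assert data == b'\x00\x01\x00\x8c\x00\x01'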
postlund/pyatv
pyatv/mrp/messages.py
command
def command(cmd): """Playback command request.""" message = create(protobuf.SEND_COMMAND_MESSAGE) send_command = message.inner() send_command.command = cmd return message
python
def command(cmd): """Playback command request.""" message = create(protobuf.SEND_COMMAND_MESSAGE) send_command = message.inner() send_command.command = cmd return message
[ "def", "command", "(", "cmd", ")", ":", "message", "=", "create", "(", "protobuf", ".", "SEND_COMMAND_MESSAGE", ")", "send_command", "=", "message", ".", "inner", "(", ")", "send_command", ".", "command", "=", "cmd", "return", "message" ]
Playback command request.
[ "Playback", "command", "request", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L127-L132
train
postlund/pyatv
pyatv/mrp/messages.py
repeat
def repeat(mode): """Change repeat mode of current player.""" message = command(protobuf.CommandInfo_pb2.ChangeShuffleMode) send_command = message.inner() send_command.options.externalPlayerCommand = True send_command.options.repeatMode = mode return message
python
def repeat(mode): """Change repeat mode of current player.""" message = command(protobuf.CommandInfo_pb2.ChangeShuffleMode) send_command = message.inner() send_command.options.externalPlayerCommand = True send_command.options.repeatMode = mode return message
[ "def", "repeat", "(", "mode", ")", ":", "message", "=", "command", "(", "protobuf", ".", "CommandInfo_pb2", ".", "ChangeShuffleMode", ")", "send_command", "=", "message", ".", "inner", "(", ")", "send_command", ".", "options", ".", "externalPlayerCommand", "=", "True", "send_command", ".", "options", ".", "repeatMode", "=", "mode", "return", "message" ]
Change repeat mode of current player.
[ "Change", "repeat", "mode", "of", "current", "player", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L135-L141
train
postlund/pyatv
pyatv/mrp/messages.py
shuffle
def shuffle(enable): """Change shuffle mode of current player.""" message = command(protobuf.CommandInfo_pb2.ChangeShuffleMode) send_command = message.inner() send_command.options.shuffleMode = 3 if enable else 1 return message
python
def shuffle(enable): """Change shuffle mode of current player.""" message = command(protobuf.CommandInfo_pb2.ChangeShuffleMode) send_command = message.inner() send_command.options.shuffleMode = 3 if enable else 1 return message
[ "def", "shuffle", "(", "enable", ")", ":", "message", "=", "command", "(", "protobuf", ".", "CommandInfo_pb2", ".", "ChangeShuffleMode", ")", "send_command", "=", "message", ".", "inner", "(", ")", "send_command", ".", "options", ".", "shuffleMode", "=", "3", "if", "enable", "else", "1", "return", "message" ]
Change shuffle mode of current player.
[ "Change", "shuffle", "mode", "of", "current", "player", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L144-L149
train
postlund/pyatv
pyatv/mrp/messages.py
seek_to_position
def seek_to_position(position): """Seek to an absolute position in stream.""" message = command(protobuf.CommandInfo_pb2.SeekToPlaybackPosition) send_command = message.inner() send_command.options.playbackPosition = position return message
python
def seek_to_position(position): """Seek to an absolute position in stream.""" message = command(protobuf.CommandInfo_pb2.SeekToPlaybackPosition) send_command = message.inner() send_command.options.playbackPosition = position return message
[ "def", "seek_to_position", "(", "position", ")", ":", "message", "=", "command", "(", "protobuf", ".", "CommandInfo_pb2", ".", "SeekToPlaybackPosition", ")", "send_command", "=", "message", ".", "inner", "(", ")", "send_command", ".", "options", ".", "playbackPosition", "=", "position", "return", "message" ]
Seek to an absolute position in stream.
[ "Seek", "to", "an", "absolute", "position", "in", "stream", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/messages.py#L152-L157
train
postlund/pyatv
examples/pairing.py
pair_with_device
async def pair_with_device(loop): """Make it possible to pair with device.""" my_zeroconf = Zeroconf() details = conf.AppleTV('127.0.0.1', 'Apple TV') details.add_service(conf.DmapService('login_id')) atv = pyatv.connect_to_apple_tv(details, loop) atv.pairing.pin(PIN_CODE) await atv.pairing.start(zeroconf=my_zeroconf, name=REMOTE_NAME) print('You can now pair with pyatv') # Wait for a minute to allow pairing await asyncio.sleep(60, loop=loop) await atv.pairing.stop() # Give some feedback about the process if atv.pairing.has_paired: print('Paired with device!') print('Credentials:', atv.pairing.credentials) else: print('Did not pair with device!') my_zeroconf.close()
python
async def pair_with_device(loop): """Make it possible to pair with device.""" my_zeroconf = Zeroconf() details = conf.AppleTV('127.0.0.1', 'Apple TV') details.add_service(conf.DmapService('login_id')) atv = pyatv.connect_to_apple_tv(details, loop) atv.pairing.pin(PIN_CODE) await atv.pairing.start(zeroconf=my_zeroconf, name=REMOTE_NAME) print('You can now pair with pyatv') # Wait for a minute to allow pairing await asyncio.sleep(60, loop=loop) await atv.pairing.stop() # Give some feedback about the process if atv.pairing.has_paired: print('Paired with device!') print('Credentials:', atv.pairing.credentials) else: print('Did not pair with device!') my_zeroconf.close()
[ "async", "def", "pair_with_device", "(", "loop", ")", ":", "my_zeroconf", "=", "Zeroconf", "(", ")", "details", "=", "conf", ".", "AppleTV", "(", "'127.0.0.1'", ",", "'Apple TV'", ")", "details", ".", "add_service", "(", "conf", ".", "DmapService", "(", "'login_id'", ")", ")", "atv", "=", "pyatv", ".", "connect_to_apple_tv", "(", "details", ",", "loop", ")", "atv", ".", "pairing", ".", "pin", "(", "PIN_CODE", ")", "await", "atv", ".", "pairing", ".", "start", "(", "zeroconf", "=", "my_zeroconf", ",", "name", "=", "REMOTE_NAME", ")", "print", "(", "'You can now pair with pyatv'", ")", "# Wait for a minute to allow pairing", "await", "asyncio", ".", "sleep", "(", "60", ",", "loop", "=", "loop", ")", "await", "atv", ".", "pairing", ".", "stop", "(", ")", "# Give some feedback about the process", "if", "atv", ".", "pairing", ".", "has_paired", ":", "print", "(", "'Paired with device!'", ")", "print", "(", "'Credentials:'", ",", "atv", ".", "pairing", ".", "credentials", ")", "else", ":", "print", "(", "'Did not pair with device!'", ")", "my_zeroconf", ".", "close", "(", ")" ]
Make it possible to pair with device.
[ "Make", "it", "possible", "to", "pair", "with", "device", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/examples/pairing.py#L17-L40
train
postlund/pyatv
pyatv/mrp/variant.py
read_variant
def read_variant(variant): """Read and parse a binary protobuf variant value.""" result = 0 cnt = 0 for data in variant: result |= (data & 0x7f) << (7 * cnt) cnt += 1 if not data & 0x80: return result, variant[cnt:] raise Exception('invalid variant')
python
def read_variant(variant): """Read and parse a binary protobuf variant value.""" result = 0 cnt = 0 for data in variant: result |= (data & 0x7f) << (7 * cnt) cnt += 1 if not data & 0x80: return result, variant[cnt:] raise Exception('invalid variant')
[ "def", "read_variant", "(", "variant", ")", ":", "result", "=", "0", "cnt", "=", "0", "for", "data", "in", "variant", ":", "result", "|=", "(", "data", "&", "0x7f", ")", "<<", "(", "7", "*", "cnt", ")", "cnt", "+=", "1", "if", "not", "data", "&", "0x80", ":", "return", "result", ",", "variant", "[", "cnt", ":", "]", "raise", "Exception", "(", "'invalid variant'", ")" ]
Read and parse a binary protobuf variant value.
[ "Read", "and", "parse", "a", "binary", "protobuf", "variant", "value", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/variant.py#L4-L13
train
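A worked example of the protobuf varint decoding above: 0xAC 0x02 encodes 300 (seven payload bits per byte, least significant group first), and the rest of the buffer is returned untouched. Assuming the read_variant definition above is in scope:
value, remainder = read_variant(bytes([0xAC, 0x02]) + b'tail')
assert value == 300
assert remainder == b'tail'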
postlund/pyatv
examples/manual_connect.py
print_what_is_playing
async def print_what_is_playing(loop): """Connect to device and print what is playing.""" details = conf.AppleTV(ADDRESS, NAME) details.add_service(conf.DmapService(HSGID)) print('Connecting to {}'.format(details.address)) atv = pyatv.connect_to_apple_tv(details, loop) try: print((await atv.metadata.playing())) finally: # Do not forget to logout await atv.logout()
python
async def print_what_is_playing(loop): """Connect to device and print what is playing.""" details = conf.AppleTV(ADDRESS, NAME) details.add_service(conf.DmapService(HSGID)) print('Connecting to {}'.format(details.address)) atv = pyatv.connect_to_apple_tv(details, loop) try: print((await atv.metadata.playing())) finally: # Do not forget to logout await atv.logout()
[ "async", "def", "print_what_is_playing", "(", "loop", ")", ":", "details", "=", "conf", ".", "AppleTV", "(", "ADDRESS", ",", "NAME", ")", "details", ".", "add_service", "(", "conf", ".", "DmapService", "(", "HSGID", ")", ")", "print", "(", "'Connecting to {}'", ".", "format", "(", "details", ".", "address", ")", ")", "atv", "=", "pyatv", ".", "connect_to_apple_tv", "(", "details", ",", "loop", ")", "try", ":", "print", "(", "(", "await", "atv", ".", "metadata", ".", "playing", "(", ")", ")", ")", "finally", ":", "# Do not forget to logout", "await", "atv", ".", "logout", "(", ")" ]
Connect to device and print what is playing.
[ "Connect", "to", "device", "and", "print", "what", "is", "playing", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/examples/manual_connect.py#L16-L28
train
postlund/pyatv
pyatv/mrp/protocol.py
MrpProtocol.add_listener
def add_listener(self, listener, message_type, data=None, one_shot=False): """Add a listener that will receive incoming messages.""" lst = self._one_shots if one_shot else self._listeners if message_type not in lst: lst[message_type] = [] lst[message_type].append(Listener(listener, data))
python
def add_listener(self, listener, message_type, data=None, one_shot=False): """Add a listener that will receive incoming messages.""" lst = self._one_shots if one_shot else self._listeners if message_type not in lst: lst[message_type] = [] lst[message_type].append(Listener(listener, data))
[ "def", "add_listener", "(", "self", ",", "listener", ",", "message_type", ",", "data", "=", "None", ",", "one_shot", "=", "False", ")", ":", "lst", "=", "self", ".", "_one_shots", "if", "one_shot", "else", "self", ".", "_listeners", "if", "message_type", "not", "in", "lst", ":", "lst", "[", "message_type", "]", "=", "[", "]", "lst", "[", "message_type", "]", ".", "append", "(", "Listener", "(", "listener", ",", "data", ")", ")" ]
Add a listener that will receive incoming messages.
[ "Add", "a", "listener", "that", "will", "receice", "incoming", "messages", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/protocol.py#L43-L50
train
postlund/pyatv
pyatv/mrp/protocol.py
MrpProtocol.start
async def start(self): """Connect to device and listen to incoming messages.""" if self.connection.connected: return await self.connection.connect() # In case credentials have been given externally (i.e. not by pairing # with a device), then use that client id if self.service.device_credentials: self.srp.pairing_id = Credentials.parse( self.service.device_credentials).client_id # The first message must always be DEVICE_INFORMATION, otherwise the # device will not respond with anything msg = messages.device_information( 'pyatv', self.srp.pairing_id.decode()) await self.send_and_receive(msg) self._initial_message_sent = True # This should be the first message sent after encryption has # been enabled await self.send(messages.set_ready_state()) async def _wait_for_updates(_, semaphore): # Use a counter here whenever more than one message is expected semaphore.release() # Wait for some stuff to arrive before returning semaphore = asyncio.Semaphore(value=0, loop=self.loop) self.add_listener(_wait_for_updates, protobuf.SET_STATE_MESSAGE, data=semaphore, one_shot=True) # Subscribe to updates at this stage await self.send(messages.client_updates_config()) await self.send(messages.wake_device()) try: await asyncio.wait_for( semaphore.acquire(), 1, loop=self.loop) except asyncio.TimeoutError: # This is not an issue itself, but I should do something better. # Basically this gives the device about one second to respond with # some metadata before continuing. pass
python
async def start(self): """Connect to device and listen to incoming messages.""" if self.connection.connected: return await self.connection.connect() # In case credentials have been given externally (i.e. not by pairing # with a device), then use that client id if self.service.device_credentials: self.srp.pairing_id = Credentials.parse( self.service.device_credentials).client_id # The first message must always be DEVICE_INFORMATION, otherwise the # device will not respond with anything msg = messages.device_information( 'pyatv', self.srp.pairing_id.decode()) await self.send_and_receive(msg) self._initial_message_sent = True # This should be the first message sent after encryption has # been enabled await self.send(messages.set_ready_state()) async def _wait_for_updates(_, semaphore): # Use a counter here whenever more than one message is expected semaphore.release() # Wait for some stuff to arrive before returning semaphore = asyncio.Semaphore(value=0, loop=self.loop) self.add_listener(_wait_for_updates, protobuf.SET_STATE_MESSAGE, data=semaphore, one_shot=True) # Subscribe to updates at this stage await self.send(messages.client_updates_config()) await self.send(messages.wake_device()) try: await asyncio.wait_for( semaphore.acquire(), 1, loop=self.loop) except asyncio.TimeoutError: # This is not an issue itself, but I should do something better. # Basically this gives the device about one second to respond with # some metadata before continuing. pass
[ "async", "def", "start", "(", "self", ")", ":", "if", "self", ".", "connection", ".", "connected", ":", "return", "await", "self", ".", "connection", ".", "connect", "(", ")", "# In case credentials have been given externally (i.e. not by pairing", "# with a device), then use that client id", "if", "self", ".", "service", ".", "device_credentials", ":", "self", ".", "srp", ".", "pairing_id", "=", "Credentials", ".", "parse", "(", "self", ".", "service", ".", "device_credentials", ")", ".", "client_id", "# The first message must always be DEVICE_INFORMATION, otherwise the", "# device will not respond with anything", "msg", "=", "messages", ".", "device_information", "(", "'pyatv'", ",", "self", ".", "srp", ".", "pairing_id", ".", "decode", "(", ")", ")", "await", "self", ".", "send_and_receive", "(", "msg", ")", "self", ".", "_initial_message_sent", "=", "True", "# This should be the first message sent after encryption has", "# been enabled", "await", "self", ".", "send", "(", "messages", ".", "set_ready_state", "(", ")", ")", "async", "def", "_wait_for_updates", "(", "_", ",", "semaphore", ")", ":", "# Use a counter here whenever more than one message is expected", "semaphore", ".", "release", "(", ")", "# Wait for some stuff to arrive before returning", "semaphore", "=", "asyncio", ".", "Semaphore", "(", "value", "=", "0", ",", "loop", "=", "self", ".", "loop", ")", "self", ".", "add_listener", "(", "_wait_for_updates", ",", "protobuf", ".", "SET_STATE_MESSAGE", ",", "data", "=", "semaphore", ",", "one_shot", "=", "True", ")", "# Subscribe to updates at this stage", "await", "self", ".", "send", "(", "messages", ".", "client_updates_config", "(", ")", ")", "await", "self", ".", "send", "(", "messages", ".", "wake_device", "(", ")", ")", "try", ":", "await", "asyncio", ".", "wait_for", "(", "semaphore", ".", "acquire", "(", ")", ",", "1", ",", "loop", "=", "self", ".", "loop", ")", "except", "asyncio", ".", "TimeoutError", ":", "# This is not an issue itself, but I should do something better.", "# Basically this gives the device about one second to respond with", "# some metadata before continuing.", "pass" ]
Connect to device and listen to incoming messages.
[ "Connect", "to", "device", "and", "listen", "to", "incoming", "messages", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/protocol.py#L53-L99
train
postlund/pyatv
pyatv/mrp/protocol.py
MrpProtocol.stop
def stop(self): """Disconnect from device.""" if self._outstanding: _LOGGER.warning('There were %d outstanding requests', len(self._outstanding)) self._initial_message_sent = False self._outstanding = {} self._one_shots = {} self.connection.close()
python
def stop(self): """Disconnect from device.""" if self._outstanding: _LOGGER.warning('There were %d outstanding requests', len(self._outstanding)) self._initial_message_sent = False self._outstanding = {} self._one_shots = {} self.connection.close()
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "_outstanding", ":", "_LOGGER", ".", "warning", "(", "'There were %d outstanding requests'", ",", "len", "(", "self", ".", "_outstanding", ")", ")", "self", ".", "_initial_message_sent", "=", "False", "self", ".", "_outstanding", "=", "{", "}", "self", ".", "_one_shots", "=", "{", "}", "self", ".", "connection", ".", "close", "(", ")" ]
Disconnect from device.
[ "Disconnect", "from", "device", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/protocol.py#L101-L110
train
postlund/pyatv
pyatv/mrp/protocol.py
MrpProtocol.send_and_receive
async def send_and_receive(self, message, generate_identifier=True, timeout=5): """Send a message and wait for a response.""" await self._connect_and_encrypt() # Some messages will respond with the same identifier as used in the # corresponding request. Others will not and one example is the crypto # message (for pairing). They will never include an identifer, but it # it is in turn only possible to have one of those message outstanding # at one time (i.e. it's not possible to mix up the responses). In # those cases, a "fake" identifier is used that includes the message # type instead. if generate_identifier: identifier = str(uuid.uuid4()) message.identifier = identifier else: identifier = 'type_' + str(message.type) self.connection.send(message) return await self._receive(identifier, timeout)
python
async def send_and_receive(self, message, generate_identifier=True, timeout=5): """Send a message and wait for a response.""" await self._connect_and_encrypt() # Some messages will respond with the same identifier as used in the # corresponding request. Others will not and one example is the crypto # message (for pairing). They will never include an identifer, but it # it is in turn only possible to have one of those message outstanding # at one time (i.e. it's not possible to mix up the responses). In # those cases, a "fake" identifier is used that includes the message # type instead. if generate_identifier: identifier = str(uuid.uuid4()) message.identifier = identifier else: identifier = 'type_' + str(message.type) self.connection.send(message) return await self._receive(identifier, timeout)
[ "async", "def", "send_and_receive", "(", "self", ",", "message", ",", "generate_identifier", "=", "True", ",", "timeout", "=", "5", ")", ":", "await", "self", ".", "_connect_and_encrypt", "(", ")", "# Some messages will respond with the same identifier as used in the", "# corresponding request. Others will not and one example is the crypto", "# message (for pairing). They will never include an identifer, but it", "# it is in turn only possible to have one of those message outstanding", "# at one time (i.e. it's not possible to mix up the responses). In", "# those cases, a \"fake\" identifier is used that includes the message", "# type instead.", "if", "generate_identifier", ":", "identifier", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "message", ".", "identifier", "=", "identifier", "else", ":", "identifier", "=", "'type_'", "+", "str", "(", "message", ".", "type", ")", "self", ".", "connection", ".", "send", "(", "message", ")", "return", "await", "self", ".", "_receive", "(", "identifier", ",", "timeout", ")" ]
Send a message and wait for a response.
[ "Send", "a", "message", "and", "wait", "for", "a", "response", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/protocol.py#L134-L153
train
postlund/pyatv
pyatv/dmap/__init__.py
BaseDmapAppleTV.playstatus
async def playstatus(self, use_revision=False, timeout=None): """Request raw data about what is currently playing. If use_revision=True, this command will "block" until playstatus changes on the device. Must be logged in. """ cmd_url = _PSU_CMD.format( self.playstatus_revision if use_revision else 0) resp = await self.daap.get(cmd_url, timeout=timeout) self.playstatus_revision = parser.first(resp, 'cmst', 'cmsr') return resp
python
async def playstatus(self, use_revision=False, timeout=None): """Request raw data about what is currently playing. If use_revision=True, this command will "block" until playstatus changes on the device. Must be logged in. """ cmd_url = _PSU_CMD.format( self.playstatus_revision if use_revision else 0) resp = await self.daap.get(cmd_url, timeout=timeout) self.playstatus_revision = parser.first(resp, 'cmst', 'cmsr') return resp
[ "async", "def", "playstatus", "(", "self", ",", "use_revision", "=", "False", ",", "timeout", "=", "None", ")", ":", "cmd_url", "=", "_PSU_CMD", ".", "format", "(", "self", ".", "playstatus_revision", "if", "use_revision", "else", "0", ")", "resp", "=", "await", "self", ".", "daap", ".", "get", "(", "cmd_url", ",", "timeout", "=", "timeout", ")", "self", ".", "playstatus_revision", "=", "parser", ".", "first", "(", "resp", ",", "'cmst'", ",", "'cmsr'", ")", "return", "resp" ]
Request raw data about what is currently playing. If use_revision=True, this command will "block" until playstatus changes on the device. Must be logged in.
[ "Request", "raw", "data", "about", "what", "is", "currently", "playing", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L31-L43
train
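A hedged usage sketch of the revision-based long polling described in the docstring; 'dmap_atv' is assumed to be an already logged-in BaseDmapAppleTV instance and is not constructed here:
async def wait_for_playstatus_change(dmap_atv):
    # First request uses revision 0, returns immediately and stores the
    # current revision on the instance.
    await dmap_atv.playstatus()
    # A follow-up request with use_revision=True blocks on the device until
    # the play status actually changes (or the timeout expires).
    return await dmap_atv.playstatus(use_revision=True, timeout=30)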
postlund/pyatv
pyatv/dmap/__init__.py
BaseDmapAppleTV.ctrl_int_cmd
def ctrl_int_cmd(self, cmd): """Perform a "ctrl-int" command.""" cmd_url = 'ctrl-int/1/{}?[AUTH]&prompt-id=0'.format(cmd) return self.daap.post(cmd_url)
python
def ctrl_int_cmd(self, cmd): """Perform a "ctrl-int" command.""" cmd_url = 'ctrl-int/1/{}?[AUTH]&prompt-id=0'.format(cmd) return self.daap.post(cmd_url)
[ "def", "ctrl_int_cmd", "(", "self", ",", "cmd", ")", ":", "cmd_url", "=", "'ctrl-int/1/{}?[AUTH]&prompt-id=0'", ".", "format", "(", "cmd", ")", "return", "self", ".", "daap", ".", "post", "(", "cmd_url", ")" ]
Perform a "ctrl-int" command.
[ "Perform", "a", "ctrl", "-", "int", "command", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L65-L68
train
postlund/pyatv
pyatv/dmap/__init__.py
BaseDmapAppleTV.controlprompt_cmd
def controlprompt_cmd(self, cmd): """Perform a "controlpromptentry" command.""" data = tags.string_tag('cmbe', cmd) + tags.uint8_tag('cmcc', 0) return self.daap.post(_CTRL_PROMPT_CMD, data=data)
python
def controlprompt_cmd(self, cmd): """Perform a "controlpromptentry" command.""" data = tags.string_tag('cmbe', cmd) + tags.uint8_tag('cmcc', 0) return self.daap.post(_CTRL_PROMPT_CMD, data=data)
[ "def", "controlprompt_cmd", "(", "self", ",", "cmd", ")", ":", "data", "=", "tags", ".", "string_tag", "(", "'cmbe'", ",", "cmd", ")", "+", "tags", ".", "uint8_tag", "(", "'cmcc'", ",", "0", ")", "return", "self", ".", "daap", ".", "post", "(", "_CTRL_PROMPT_CMD", ",", "data", "=", "data", ")" ]
Perform a "controlpromptentry" command.
[ "Perform", "a", "controlpromptentry", "command", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L70-L73
train
postlund/pyatv
pyatv/dmap/__init__.py
DmapRemoteControl.up
async def up(self): """Press key up.""" await self._send_commands( self._move('Down', 0, 20, 275), self._move('Move', 1, 20, 270), self._move('Move', 2, 20, 265), self._move('Move', 3, 20, 260), self._move('Move', 4, 20, 255), self._move('Move', 5, 20, 250), self._move('Up', 6, 20, 250))
python
async def up(self): """Press key up.""" await self._send_commands( self._move('Down', 0, 20, 275), self._move('Move', 1, 20, 270), self._move('Move', 2, 20, 265), self._move('Move', 3, 20, 260), self._move('Move', 4, 20, 255), self._move('Move', 5, 20, 250), self._move('Up', 6, 20, 250))
[ "async", "def", "up", "(", "self", ")", ":", "await", "self", ".", "_send_commands", "(", "self", ".", "_move", "(", "'Down'", ",", "0", ",", "20", ",", "275", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "1", ",", "20", ",", "270", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "2", ",", "20", ",", "265", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "3", ",", "20", ",", "260", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "4", ",", "20", ",", "255", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "5", ",", "20", ",", "250", ")", ",", "self", ".", "_move", "(", "'Up'", ",", "6", ",", "20", ",", "250", ")", ")" ]
Press key up.
[ "Press", "key", "up", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L94-L103
train
postlund/pyatv
pyatv/dmap/__init__.py
DmapRemoteControl.down
async def down(self): """Press key down.""" await self._send_commands( self._move('Down', 0, 20, 250), self._move('Move', 1, 20, 255), self._move('Move', 2, 20, 260), self._move('Move', 3, 20, 265), self._move('Move', 4, 20, 270), self._move('Move', 5, 20, 275), self._move('Up', 6, 20, 275))
python
async def down(self): """Press key down.""" await self._send_commands( self._move('Down', 0, 20, 250), self._move('Move', 1, 20, 255), self._move('Move', 2, 20, 260), self._move('Move', 3, 20, 265), self._move('Move', 4, 20, 270), self._move('Move', 5, 20, 275), self._move('Up', 6, 20, 275))
[ "async", "def", "down", "(", "self", ")", ":", "await", "self", ".", "_send_commands", "(", "self", ".", "_move", "(", "'Down'", ",", "0", ",", "20", ",", "250", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "1", ",", "20", ",", "255", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "2", ",", "20", ",", "260", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "3", ",", "20", ",", "265", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "4", ",", "20", ",", "270", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "5", ",", "20", ",", "275", ")", ",", "self", ".", "_move", "(", "'Up'", ",", "6", ",", "20", ",", "275", ")", ")" ]
Press key down.
[ "Press", "key", "down", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L105-L114
train
postlund/pyatv
pyatv/dmap/__init__.py
DmapRemoteControl.left
async def left(self): """Press key left.""" await self._send_commands( self._move('Down', 0, 75, 100), self._move('Move', 1, 70, 100), self._move('Move', 3, 65, 100), self._move('Move', 4, 60, 100), self._move('Move', 5, 55, 100), self._move('Move', 6, 50, 100), self._move('Up', 7, 50, 100))
python
async def left(self): """Press key left.""" await self._send_commands( self._move('Down', 0, 75, 100), self._move('Move', 1, 70, 100), self._move('Move', 3, 65, 100), self._move('Move', 4, 60, 100), self._move('Move', 5, 55, 100), self._move('Move', 6, 50, 100), self._move('Up', 7, 50, 100))
[ "async", "def", "left", "(", "self", ")", ":", "await", "self", ".", "_send_commands", "(", "self", ".", "_move", "(", "'Down'", ",", "0", ",", "75", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "1", ",", "70", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "3", ",", "65", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "4", ",", "60", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "5", ",", "55", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "6", ",", "50", ",", "100", ")", ",", "self", ".", "_move", "(", "'Up'", ",", "7", ",", "50", ",", "100", ")", ")" ]
Press key left.
[ "Press", "key", "left", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L116-L125
train
postlund/pyatv
pyatv/dmap/__init__.py
DmapRemoteControl.right
async def right(self): """Press key right.""" await self._send_commands( self._move('Down', 0, 50, 100), self._move('Move', 1, 55, 100), self._move('Move', 3, 60, 100), self._move('Move', 4, 65, 100), self._move('Move', 5, 70, 100), self._move('Move', 6, 75, 100), self._move('Up', 7, 75, 100))
python
async def right(self): """Press key right.""" await self._send_commands( self._move('Down', 0, 50, 100), self._move('Move', 1, 55, 100), self._move('Move', 3, 60, 100), self._move('Move', 4, 65, 100), self._move('Move', 5, 70, 100), self._move('Move', 6, 75, 100), self._move('Up', 7, 75, 100))
[ "async", "def", "right", "(", "self", ")", ":", "await", "self", ".", "_send_commands", "(", "self", ".", "_move", "(", "'Down'", ",", "0", ",", "50", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "1", ",", "55", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "3", ",", "60", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "4", ",", "65", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "5", ",", "70", ",", "100", ")", ",", "self", ".", "_move", "(", "'Move'", ",", "6", ",", "75", ",", "100", ")", ",", "self", ".", "_move", "(", "'Up'", ",", "7", ",", "75", ",", "100", ")", ")" ]
Press key right.
[ "Press", "key", "right", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L127-L136
train
postlund/pyatv
pyatv/dmap/__init__.py
DmapRemoteControl.set_position
def set_position(self, pos): """Seek in the current playing media.""" time_in_ms = int(pos)*1000 return self.apple_tv.set_property('dacp.playingtime', time_in_ms)
python
def set_position(self, pos): """Seek in the current playing media.""" time_in_ms = int(pos)*1000 return self.apple_tv.set_property('dacp.playingtime', time_in_ms)
[ "def", "set_position", "(", "self", ",", "pos", ")", ":", "time_in_ms", "=", "int", "(", "pos", ")", "*", "1000", "return", "self", ".", "apple_tv", ".", "set_property", "(", "'dacp.playingtime'", ",", "time_in_ms", ")" ]
Seek in the current playing media.
[ "Seek", "in", "the", "current", "playing", "media", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/__init__.py#L185-L188
train
postlund/pyatv
examples/device_auth.py
authenticate_with_device
async def authenticate_with_device(atv): """Perform device authentication and print credentials.""" credentials = await atv.airplay.generate_credentials() await atv.airplay.load_credentials(credentials) try: await atv.airplay.start_authentication() pin = input('PIN Code: ') await atv.airplay.finish_authentication(pin) print('Credentials: {0}'.format(credentials)) except exceptions.DeviceAuthenticationError: print('Failed to authenticate', file=sys.stderr)
python
async def authenticate_with_device(atv): """Perform device authentication and print credentials.""" credentials = await atv.airplay.generate_credentials() await atv.airplay.load_credentials(credentials) try: await atv.airplay.start_authentication() pin = input('PIN Code: ') await atv.airplay.finish_authentication(pin) print('Credentials: {0}'.format(credentials)) except exceptions.DeviceAuthenticationError: print('Failed to authenticate', file=sys.stderr)
[ "async", "def", "authenticate_with_device", "(", "atv", ")", ":", "credentials", "=", "await", "atv", ".", "airplay", ".", "generate_credentials", "(", ")", "await", "atv", ".", "airplay", ".", "load_credentials", "(", "credentials", ")", "try", ":", "await", "atv", ".", "airplay", ".", "start_authentication", "(", ")", "pin", "=", "input", "(", "'PIN Code: '", ")", "await", "atv", ".", "airplay", ".", "finish_authentication", "(", "pin", ")", "print", "(", "'Credentials: {0}'", ".", "format", "(", "credentials", ")", ")", "except", "exceptions", ".", "DeviceAuthenticationError", ":", "print", "(", "'Failed to authenticate'", ",", "file", "=", "sys", ".", "stderr", ")" ]
Perform device authentication and print credentials.
[ "Perform", "device", "authentication", "and", "print", "credentials", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/examples/device_auth.py#L7-L19
train
postlund/pyatv
pyatv/mrp/chacha20.py
Chacha20Cipher.encrypt
def encrypt(self, data, nounce=None): """Encrypt data with counter or specified nounce.""" if nounce is None: nounce = self._out_counter.to_bytes(length=8, byteorder='little') self._out_counter += 1 return self._enc_out.seal(b'\x00\x00\x00\x00' + nounce, data, bytes())
python
def encrypt(self, data, nounce=None): """Encrypt data with counter or specified nounce.""" if nounce is None: nounce = self._out_counter.to_bytes(length=8, byteorder='little') self._out_counter += 1 return self._enc_out.seal(b'\x00\x00\x00\x00' + nounce, data, bytes())
[ "def", "encrypt", "(", "self", ",", "data", ",", "nounce", "=", "None", ")", ":", "if", "nounce", "is", "None", ":", "nounce", "=", "self", ".", "_out_counter", ".", "to_bytes", "(", "length", "=", "8", ",", "byteorder", "=", "'little'", ")", "self", ".", "_out_counter", "+=", "1", "return", "self", ".", "_enc_out", ".", "seal", "(", "b'\\x00\\x00\\x00\\x00'", "+", "nounce", ",", "data", ",", "bytes", "(", ")", ")" ]
Encrypt data with counter or specified nounce.
[ "Encrypt", "data", "with", "counter", "or", "specified", "nounce", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/chacha20.py#L15-L21
train
postlund/pyatv
pyatv/mrp/chacha20.py
Chacha20Cipher.decrypt
def decrypt(self, data, nounce=None): """Decrypt data with counter or specified nounce.""" if nounce is None: nounce = self._in_counter.to_bytes(length=8, byteorder='little') self._in_counter += 1 decrypted = self._enc_in.open( b'\x00\x00\x00\x00' + nounce, data, bytes()) if not decrypted: raise Exception('data decrypt failed') # TODO: new exception return bytes(decrypted)
python
def decrypt(self, data, nounce=None): """Decrypt data with counter or specified nounce.""" if nounce is None: nounce = self._in_counter.to_bytes(length=8, byteorder='little') self._in_counter += 1 decrypted = self._enc_in.open( b'\x00\x00\x00\x00' + nounce, data, bytes()) if not decrypted: raise Exception('data decrypt failed') # TODO: new exception return bytes(decrypted)
[ "def", "decrypt", "(", "self", ",", "data", ",", "nounce", "=", "None", ")", ":", "if", "nounce", "is", "None", ":", "nounce", "=", "self", ".", "_in_counter", ".", "to_bytes", "(", "length", "=", "8", ",", "byteorder", "=", "'little'", ")", "self", ".", "_in_counter", "+=", "1", "decrypted", "=", "self", ".", "_enc_in", ".", "open", "(", "b'\\x00\\x00\\x00\\x00'", "+", "nounce", ",", "data", ",", "bytes", "(", ")", ")", "if", "not", "decrypted", ":", "raise", "Exception", "(", "'data decrypt failed'", ")", "# TODO: new exception", "return", "bytes", "(", "decrypted", ")" ]
Decrypt data with counter or specified nounce.
[ "Decrypt", "data", "with", "counter", "or", "specified", "nounce", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/chacha20.py#L23-L35
train
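Both methods above build the 96-bit ChaCha20-Poly1305 nonce the same way: four zero bytes followed by the 64-bit message counter in little-endian order. A quick check of that construction (the counter value is arbitrary):
counter = 3
nonce = b'\x00\x00\x00\x00' + counter.to_bytes(length=8, byteorder='little')
assert len(nonce) == 12
assert nonce == b'\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00'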
postlund/pyatv
pyatv/helpers.py
auto_connect
def auto_connect(handler, timeout=5, not_found=None, event_loop=None): """Short method for connecting to a device. This is a convenience method that creates an event loop, auto discovers devices, picks the first device found, connects to it and passes it to a user provided handler. An optional error handler can be provided that is called when no device was found. Very inflexible in many cases, but can be handy sometimes when trying things. Note 1: both handler and not_found must be coroutines Note 2: An optional loop can be passed if needed (mainly for testing) """ # A coroutine is used so we can connect to the device while being inside # the event loop async def _handle(loop): atvs = await pyatv.scan_for_apple_tvs( loop, timeout=timeout, abort_on_found=True) # Take the first device found if atvs: atv = pyatv.connect_to_apple_tv(atvs[0], loop) try: await handler(atv) finally: await atv.logout() else: if not_found is not None: await not_found() loop = event_loop if event_loop else asyncio.get_event_loop() loop.run_until_complete(_handle(loop))
python
def auto_connect(handler, timeout=5, not_found=None, event_loop=None): """Short method for connecting to a device. This is a convenience method that creates an event loop, auto discovers devices, picks the first device found, connects to it and passes it to a user provided handler. An optional error handler can be provided that is called when no device was found. Very inflexible in many cases, but can be handy sometimes when trying things. Note 1: both handler and not_found must be coroutines Note 2: An optional loop can be passed if needed (mainly for testing) """ # A coroutine is used so we can connect to the device while being inside # the event loop async def _handle(loop): atvs = await pyatv.scan_for_apple_tvs( loop, timeout=timeout, abort_on_found=True) # Take the first device found if atvs: atv = pyatv.connect_to_apple_tv(atvs[0], loop) try: await handler(atv) finally: await atv.logout() else: if not_found is not None: await not_found() loop = event_loop if event_loop else asyncio.get_event_loop() loop.run_until_complete(_handle(loop))
[ "def", "auto_connect", "(", "handler", ",", "timeout", "=", "5", ",", "not_found", "=", "None", ",", "event_loop", "=", "None", ")", ":", "# A coroutine is used so we can connect to the device while being inside", "# the event loop", "async", "def", "_handle", "(", "loop", ")", ":", "atvs", "=", "await", "pyatv", ".", "scan_for_apple_tvs", "(", "loop", ",", "timeout", "=", "timeout", ",", "abort_on_found", "=", "True", ")", "# Take the first device found", "if", "atvs", ":", "atv", "=", "pyatv", ".", "connect_to_apple_tv", "(", "atvs", "[", "0", "]", ",", "loop", ")", "try", ":", "await", "handler", "(", "atv", ")", "finally", ":", "await", "atv", ".", "logout", "(", ")", "else", ":", "if", "not_found", "is", "not", "None", ":", "await", "not_found", "(", ")", "loop", "=", "event_loop", "if", "event_loop", "else", "asyncio", ".", "get_event_loop", "(", ")", "loop", ".", "run_until_complete", "(", "_handle", "(", "loop", ")", ")" ]
Short method for connecting to a device. This is a convenience method that creates an event loop, auto discovers devices, picks the first device found, connects to it and passes it to a user provided handler. An optional error handler can be provided that is called when no device was found. Very inflexible in many cases, but can be handy sometimes when trying things. Note 1: both handler and not_found must be coroutines Note 2: An optional loop can be passed if needed (mainly for testing)
[ "Short", "method", "for", "connecting", "to", "a", "device", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/helpers.py#L7-L37
train
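A minimal usage sketch for the auto_connect helper documented in the record above (not part of the dataset itself). It assumes the pyatv package from this repository snapshot is installed and an Apple TV is reachable on the local network; print_device and no_device_found are made-up callback names.

import pyatv.helpers

async def print_device(atv):
    # Both callbacks must be coroutines, as the docstring above notes.
    playing = await atv.metadata.playing()
    print('Currently playing:', playing)

async def no_device_found():
    print('No Apple TV found on the network')

# auto_connect creates its own event loop, scans, connects to the first
# device found, hands it to the handler and awaits logout() afterwards.
pyatv.helpers.auto_connect(print_device, not_found=no_device_found)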
postlund/pyatv
pyatv/dmap/daap.py
DaapRequester.login
async def login(self): """Login to Apple TV using specified login id.""" # Do not use session.get_data(...) in login as that would end up in # an infinite loop. def _login_request(): return self.http.get_data( self._mkurl('login?[AUTH]&hasFP=1', session=False, login_id=True), headers=_DMAP_HEADERS) resp = await self._do(_login_request, is_login=True) self._session_id = parser.first(resp, 'mlog', 'mlid') _LOGGER.info('Logged in and got session id %s', self._session_id) return self._session_id
python
async def login(self): """Login to Apple TV using specified login id.""" # Do not use session.get_data(...) in login as that would end up in # an infinite loop. def _login_request(): return self.http.get_data( self._mkurl('login?[AUTH]&hasFP=1', session=False, login_id=True), headers=_DMAP_HEADERS) resp = await self._do(_login_request, is_login=True) self._session_id = parser.first(resp, 'mlog', 'mlid') _LOGGER.info('Logged in and got session id %s', self._session_id) return self._session_id
[ "async", "def", "login", "(", "self", ")", ":", "# Do not use session.get_data(...) in login as that would end up in", "# an infinte loop.", "def", "_login_request", "(", ")", ":", "return", "self", ".", "http", ".", "get_data", "(", "self", ".", "_mkurl", "(", "'login?[AUTH]&hasFP=1'", ",", "session", "=", "False", ",", "login_id", "=", "True", ")", ",", "headers", "=", "_DMAP_HEADERS", ")", "resp", "=", "await", "self", ".", "_do", "(", "_login_request", ",", "is_login", "=", "True", ")", "self", ".", "_session_id", "=", "parser", ".", "first", "(", "resp", ",", "'mlog'", ",", "'mlid'", ")", "_LOGGER", ".", "info", "(", "'Logged in and got session id %s'", ",", "self", ".", "_session_id", ")", "return", "self", ".", "_session_id" ]
Login to Apple TV using specified login id.
[ "Login", "to", "Apple", "TV", "using", "specified", "login", "id", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L41-L55
train
postlund/pyatv
pyatv/dmap/daap.py
DaapRequester.get
async def get(self, cmd, daap_data=True, timeout=None, **args): """Perform a DAAP GET command.""" def _get_request(): return self.http.get_data( self._mkurl(cmd, *args), headers=_DMAP_HEADERS, timeout=timeout) await self._assure_logged_in() return await self._do(_get_request, is_daap=daap_data)
python
async def get(self, cmd, daap_data=True, timeout=None, **args): """Perform a DAAP GET command.""" def _get_request(): return self.http.get_data( self._mkurl(cmd, *args), headers=_DMAP_HEADERS, timeout=timeout) await self._assure_logged_in() return await self._do(_get_request, is_daap=daap_data)
[ "async", "def", "get", "(", "self", ",", "cmd", ",", "daap_data", "=", "True", ",", "timeout", "=", "None", ",", "*", "*", "args", ")", ":", "def", "_get_request", "(", ")", ":", "return", "self", ".", "http", ".", "get_data", "(", "self", ".", "_mkurl", "(", "cmd", ",", "*", "args", ")", ",", "headers", "=", "_DMAP_HEADERS", ",", "timeout", "=", "timeout", ")", "await", "self", ".", "_assure_logged_in", "(", ")", "return", "await", "self", ".", "_do", "(", "_get_request", ",", "is_daap", "=", "daap_data", ")" ]
Perform a DAAP GET command.
[ "Perform", "a", "DAAP", "GET", "command", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L57-L66
train
postlund/pyatv
pyatv/dmap/daap.py
DaapRequester.get_url
def get_url(self, cmd, **args): """Expand the request URL for a request.""" return self.http.base_url + self._mkurl(cmd, *args)
python
def get_url(self, cmd, **args): """Expand the request URL for a request.""" return self.http.base_url + self._mkurl(cmd, *args)
[ "def", "get_url", "(", "self", ",", "cmd", ",", "*", "*", "args", ")", ":", "return", "self", ".", "http", ".", "base_url", "+", "self", ".", "_mkurl", "(", "cmd", ",", "*", "args", ")" ]
Expand the request URL for a request.
[ "Expand", "the", "request", "URL", "for", "a", "request", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L68-L70
train
postlund/pyatv
pyatv/dmap/daap.py
DaapRequester.post
async def post(self, cmd, data=None, timeout=None, **args): """Perform DAAP POST command with optional data.""" def _post_request(): headers = copy(_DMAP_HEADERS) headers['Content-Type'] = 'application/x-www-form-urlencoded' return self.http.post_data( self._mkurl(cmd, *args), data=data, headers=headers, timeout=timeout) await self._assure_logged_in() return await self._do(_post_request)
python
async def post(self, cmd, data=None, timeout=None, **args): """Perform DAAP POST command with optional data.""" def _post_request(): headers = copy(_DMAP_HEADERS) headers['Content-Type'] = 'application/x-www-form-urlencoded' return self.http.post_data( self._mkurl(cmd, *args), data=data, headers=headers, timeout=timeout) await self._assure_logged_in() return await self._do(_post_request)
[ "async", "def", "post", "(", "self", ",", "cmd", ",", "data", "=", "None", ",", "timeout", "=", "None", ",", "*", "*", "args", ")", ":", "def", "_post_request", "(", ")", ":", "headers", "=", "copy", "(", "_DMAP_HEADERS", ")", "headers", "[", "'Content-Type'", "]", "=", "'application/x-www-form-urlencoded'", "return", "self", ".", "http", ".", "post_data", "(", "self", ".", "_mkurl", "(", "cmd", ",", "*", "args", ")", ",", "data", "=", "data", ",", "headers", "=", "headers", ",", "timeout", "=", "timeout", ")", "await", "self", ".", "_assure_logged_in", "(", ")", "return", "await", "self", ".", "_do", "(", "_post_request", ")" ]
Perform DAAP POST command with optional data.
[ "Perform", "DAAP", "POST", "command", "with", "optional", "data", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/dmap/daap.py#L72-L84
train
postlund/pyatv
pyatv/mrp/__init__.py
MrpRemoteControl.set_repeat
def set_repeat(self, repeat_mode): """Change repeat mode.""" # TODO: extract to convert module if int(repeat_mode) == const.REPEAT_STATE_OFF: state = 1 elif int(repeat_mode) == const.REPEAT_STATE_ALL: state = 2 elif int(repeat_mode) == const.REPEAT_STATE_TRACK: state = 3 else: raise ValueError('Invalid repeat mode: ' + str(repeat_mode)) return self.protocol.send(messages.repeat(state))
python
def set_repeat(self, repeat_mode): """Change repeat mode.""" # TODO: extract to convert module if int(repeat_mode) == const.REPEAT_STATE_OFF: state = 1 elif int(repeat_mode) == const.REPEAT_STATE_ALL: state = 2 elif int(repeat_mode) == const.REPEAT_STATE_TRACK: state = 3 else: raise ValueError('Invalid repeat mode: ' + str(repeat_mode)) return self.protocol.send(messages.repeat(state))
[ "def", "set_repeat", "(", "self", ",", "repeat_mode", ")", ":", "# TODO: extract to convert module", "if", "int", "(", "repeat_mode", ")", "==", "const", ".", "REPEAT_STATE_OFF", ":", "state", "=", "1", "elif", "int", "(", "repeat_mode", ")", "==", "const", ".", "REPEAT_STATE_ALL", ":", "state", "=", "2", "elif", "int", "(", "repeat_mode", ")", "==", "const", ".", "REPEAT_STATE_TRACK", ":", "state", "=", "3", "else", ":", "raise", "ValueError", "(", "'Invalid repeat mode: '", "+", "str", "(", "repeat_mode", ")", ")", "return", "self", ".", "protocol", ".", "send", "(", "messages", ".", "repeat", "(", "state", ")", ")" ]
Change repeat mode.
[ "Change", "repeat", "mode", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L116-L128
train
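The TODO in set_repeat above mentions extracting the constant conversion into a separate module. A hedged sketch of what such a lookup could look like, assuming pyatv is installed; _REPEAT_LOOKUP and repeat_mode_to_protocol are hypothetical names, not part of the library.

from pyatv import const  # assumed import path for the constants used above

_REPEAT_LOOKUP = {
    const.REPEAT_STATE_OFF: 1,
    const.REPEAT_STATE_ALL: 2,
    const.REPEAT_STATE_TRACK: 3,
}

def repeat_mode_to_protocol(repeat_mode):
    # Same mapping as the if/elif chain in set_repeat, but table-driven.
    try:
        return _REPEAT_LOOKUP[int(repeat_mode)]
    except KeyError:
        raise ValueError('Invalid repeat mode: ' + str(repeat_mode))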
postlund/pyatv
pyatv/mrp/__init__.py
MrpPlaying.genre
def genre(self): """Genre of the currently playing song.""" if self._metadata: from pyatv.mrp.protobuf import ContentItem_pb2 transaction = ContentItem_pb2.ContentItem() transaction.ParseFromString(self._metadata)
python
def genre(self): """Genre of the currently playing song.""" if self._metadata: from pyatv.mrp.protobuf import ContentItem_pb2 transaction = ContentItem_pb2.ContentItem() transaction.ParseFromString(self._metadata)
[ "def", "genre", "(", "self", ")", ":", "if", "self", ".", "_metadata", ":", "from", "pyatv", ".", "mrp", ".", "protobuf", "import", "ContentItem_pb2", "transaction", "=", "ContentItem_pb2", ".", "ContentItem", "(", ")", "transaction", ".", "ParseFromString", "(", "self", ".", "_metadata", ")" ]
Genre of the currently playing song.
[ "Genre", "of", "the", "currently", "playing", "song", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L172-L177
train
postlund/pyatv
pyatv/mrp/__init__.py
MrpPlaying.total_time
def total_time(self): """Total play time in seconds.""" now_playing = self._setstate.nowPlayingInfo if now_playing.HasField('duration'): return int(now_playing.duration) return None
python
def total_time(self): """Total play time in seconds.""" now_playing = self._setstate.nowPlayingInfo if now_playing.HasField('duration'): return int(now_playing.duration) return None
[ "def", "total_time", "(", "self", ")", ":", "now_playing", "=", "self", ".", "_setstate", ".", "nowPlayingInfo", "if", "now_playing", ".", "HasField", "(", "'duration'", ")", ":", "return", "int", "(", "now_playing", ".", "duration", ")", "return", "None" ]
Total play time in seconds.
[ "Total", "play", "time", "in", "seconds", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L181-L187
train
postlund/pyatv
pyatv/mrp/__init__.py
MrpPlaying.shuffle
def shuffle(self): """If shuffle is enabled or not.""" info = self._get_command_info(CommandInfo_pb2.ChangeShuffleMode) return None if info is None else info.shuffleMode
python
def shuffle(self): """If shuffle is enabled or not.""" info = self._get_command_info(CommandInfo_pb2.ChangeShuffleMode) return None if info is None else info.shuffleMode
[ "def", "shuffle", "(", "self", ")", ":", "info", "=", "self", ".", "_get_command_info", "(", "CommandInfo_pb2", ".", "ChangeShuffleMode", ")", "return", "None", "if", "info", "is", "None", "else", "info", ".", "shuffleMode" ]
If shuffle is enabled or not.
[ "If", "shuffle", "is", "enabled", "or", "not", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L205-L208
train
postlund/pyatv
pyatv/mrp/__init__.py
MrpPlaying.repeat
def repeat(self): """Repeat mode.""" info = self._get_command_info(CommandInfo_pb2.ChangeRepeatMode) return None if info is None else info.repeatMode
python
def repeat(self): """Repeat mode.""" info = self._get_command_info(CommandInfo_pb2.ChangeRepeatMode) return None if info is None else info.repeatMode
[ "def", "repeat", "(", "self", ")", ":", "info", "=", "self", ".", "_get_command_info", "(", "CommandInfo_pb2", ".", "ChangeRepeatMode", ")", "return", "None", "if", "info", "is", "None", "else", "info", ".", "repeatMode" ]
Repeat mode.
[ "Repeat", "mode", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L211-L214
train
postlund/pyatv
pyatv/mrp/__init__.py
MrpMetadata.playing
async def playing(self): """Return what is currently playing.""" # TODO: This is hack-ish if self._setstate is None: await self.protocol.start() # No SET_STATE_MESSAGE received yet, use default if self._setstate is None: return MrpPlaying(protobuf.SetStateMessage(), None) return MrpPlaying(self._setstate, self._nowplaying)
python
async def playing(self): """Return what is currently playing.""" # TODO: This is hack-ish if self._setstate is None: await self.protocol.start() # No SET_STATE_MESSAGE received yet, use default if self._setstate is None: return MrpPlaying(protobuf.SetStateMessage(), None) return MrpPlaying(self._setstate, self._nowplaying)
[ "async", "def", "playing", "(", "self", ")", ":", "# TODO: This is hack-ish", "if", "self", ".", "_setstate", "is", "None", ":", "await", "self", ".", "protocol", ".", "start", "(", ")", "# No SET_STATE_MESSAGE received yet, use default", "if", "self", ".", "_setstate", "is", "None", ":", "return", "MrpPlaying", "(", "protobuf", ".", "SetStateMessage", "(", ")", ",", "None", ")", "return", "MrpPlaying", "(", "self", ".", "_setstate", ",", "self", ".", "_nowplaying", ")" ]
Return what is currently playing.
[ "Return", "what", "is", "currently", "playing", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L250-L260
train
postlund/pyatv
pyatv/mrp/__init__.py
MrpPairingHandler.stop
async def stop(self, **kwargs): """Stop pairing process.""" if not self._pin_code: raise Exception('no pin given') # TODO: new exception self.service.device_credentials = \ await self.pairing_procedure.finish_pairing(self._pin_code)
python
async def stop(self, **kwargs): """Stop pairing process.""" if not self._pin_code: raise Exception('no pin given') # TODO: new exception self.service.device_credentials = \ await self.pairing_procedure.finish_pairing(self._pin_code)
[ "async", "def", "stop", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "_pin_code", ":", "raise", "Exception", "(", "'no pin given'", ")", "# TODO: new exception", "self", ".", "service", ".", "device_credentials", "=", "await", "self", ".", "pairing_procedure", ".", "finish_pairing", "(", "self", ".", "_pin_code", ")" ]
Stop pairing process.
[ "Stop", "pairing", "process", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/__init__.py#L324-L330
train
postlund/pyatv
pyatv/mrp/tlv8.py
read_tlv
def read_tlv(data): """Parse TLV8 bytes into a dict. If value is larger than 255 bytes, it is split up in multiple chunks. So the same tag might occur several times. """ def _parse(data, pos, size, result=None): if result is None: result = {} if pos >= size: return result tag = str(data[pos]) length = data[pos+1] value = data[pos+2:pos+2+length] if tag in result: result[tag] += value # value > 255 is split up else: result[tag] = value return _parse(data, pos+2+length, size, result) return _parse(data, 0, len(data))
python
def read_tlv(data): """Parse TLV8 bytes into a dict. If value is larger than 255 bytes, it is split up in multiple chunks. So the same tag might occur several times. """ def _parse(data, pos, size, result=None): if result is None: result = {} if pos >= size: return result tag = str(data[pos]) length = data[pos+1] value = data[pos+2:pos+2+length] if tag in result: result[tag] += value # value > 255 is split up else: result[tag] = value return _parse(data, pos+2+length, size, result) return _parse(data, 0, len(data))
[ "def", "read_tlv", "(", "data", ")", ":", "def", "_parse", "(", "data", ",", "pos", ",", "size", ",", "result", "=", "None", ")", ":", "if", "result", "is", "None", ":", "result", "=", "{", "}", "if", "pos", ">=", "size", ":", "return", "result", "tag", "=", "str", "(", "data", "[", "pos", "]", ")", "length", "=", "data", "[", "pos", "+", "1", "]", "value", "=", "data", "[", "pos", "+", "2", ":", "pos", "+", "2", "+", "length", "]", "if", "tag", "in", "result", ":", "result", "[", "tag", "]", "+=", "value", "# value > 255 is split up", "else", ":", "result", "[", "tag", "]", "=", "value", "return", "_parse", "(", "data", ",", "pos", "+", "2", "+", "length", ",", "size", ",", "result", ")", "return", "_parse", "(", "data", ",", "0", ",", "len", "(", "data", ")", ")" ]
Parse TLV8 bytes into a dict. If value is larger than 255 bytes, it is split up in multiple chunks. So the same tag might occur several times.
[ "Parse", "TLV8", "bytes", "into", "a", "dict", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/tlv8.py#L19-L41
train
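A small decoding example for read_tlv above, assuming pyatv is installed so that pyatv.mrp.tlv8 is importable. The byte values are made up purely for illustration of the tag/length/value layout.

from pyatv.mrp.tlv8 import read_tlv

# Two entries: tag 0x06 with a 1-byte value and tag 0x01 with a 2-byte value.
data = bytes([0x06, 0x01, 0x03,         # tag=6, length=1, value=03
              0x01, 0x02, 0xAA, 0xBB])  # tag=1, length=2, value=AA BB
print(read_tlv(data))  # {'6': b'\x03', '1': b'\xaa\xbb'}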
postlund/pyatv
pyatv/mrp/tlv8.py
write_tlv
def write_tlv(data): """Convert a dict to TLV8 bytes.""" tlv = b'' for key, value in data.items(): tag = bytes([int(key)]) length = len(value) pos = 0 # A tag with length > 255 is added multiple times and concatenated into # one buffer when reading the TLV again. while pos < len(value): size = min(length, 255) tlv += tag tlv += bytes([size]) tlv += value[pos:pos+size] pos += size length -= size return tlv
python
def write_tlv(data): """Convert a dict to TLV8 bytes.""" tlv = b'' for key, value in data.items(): tag = bytes([int(key)]) length = len(value) pos = 0 # A tag with length > 255 is added multiple times and concatenated into # one buffer when reading the TLV again. while pos < len(value): size = min(length, 255) tlv += tag tlv += bytes([size]) tlv += value[pos:pos+size] pos += size length -= size return tlv
[ "def", "write_tlv", "(", "data", ")", ":", "tlv", "=", "b''", "for", "key", ",", "value", "in", "data", ".", "items", "(", ")", ":", "tag", "=", "bytes", "(", "[", "int", "(", "key", ")", "]", ")", "length", "=", "len", "(", "value", ")", "pos", "=", "0", "# A tag with length > 255 is added multiple times and concatenated into", "# one buffer when reading the TLV again.", "while", "pos", "<", "len", "(", "value", ")", ":", "size", "=", "min", "(", "length", ",", "255", ")", "tlv", "+=", "tag", "tlv", "+=", "bytes", "(", "[", "size", "]", ")", "tlv", "+=", "value", "[", "pos", ":", "pos", "+", "size", "]", "pos", "+=", "size", "length", "-=", "size", "return", "tlv" ]
Convert a dict to TLV8 bytes.
[ "Convert", "a", "dict", "to", "TLV8", "bytes", "." ]
655dfcda4e2f9d1c501540e18da4f480d8bf0e70
https://github.com/postlund/pyatv/blob/655dfcda4e2f9d1c501540e18da4f480d8bf0e70/pyatv/mrp/tlv8.py#L44-L61
train
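A round-trip sketch tying write_tlv and read_tlv together, again assuming pyatv is installed. It also exercises the larger-than-255-byte chunking behaviour that both docstrings describe.

from pyatv.mrp.tlv8 import read_tlv, write_tlv

long_value = b'\x42' * 300          # longer than 255 bytes
payload = write_tlv({'1': long_value})

# write_tlv emitted two chunks for tag 1; read_tlv concatenates them again.
assert read_tlv(payload) == {'1': long_value}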
tommikaikkonen/prettyprinter
prettyprinter/prettyprinter.py
comment
def comment(value, comment_text): """Annotates a value or a Doc with a comment. When printed by prettyprinter, the comment will be rendered next to the value or Doc. """ if isinstance(value, Doc): return comment_doc(value, comment_text) return comment_value(value, comment_text)
python
def comment(value, comment_text): """Annotates a value or a Doc with a comment. When printed by prettyprinter, the comment will be rendered next to the value or Doc. """ if isinstance(value, Doc): return comment_doc(value, comment_text) return comment_value(value, comment_text)
[ "def", "comment", "(", "value", ",", "comment_text", ")", ":", "if", "isinstance", "(", "value", ",", "Doc", ")", ":", "return", "comment_doc", "(", "value", ",", "comment_text", ")", "return", "comment_value", "(", "value", ",", "comment_text", ")" ]
Annotates a value or a Doc with a comment. When printed by prettyprinter, the comment will be rendered next to the value or Doc.
[ "Annotates", "a", "value", "or", "a", "Doc", "with", "a", "comment", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L156-L164
train
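A minimal usage sketch for comment above, assuming the prettyprinter package is installed and that this snapshot renders commented values passed through pprint, as the docstring states. comment is imported from the module shown in this record; it may also be re-exported at package level, but that is not assumed here.

from prettyprinter import pprint
from prettyprinter.prettyprinter import comment

# The comment text is rendered next to the annotated value in the output.
pprint([comment(1, 'first element'), 2, 3])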
tommikaikkonen/prettyprinter
prettyprinter/prettyprinter.py
register_pretty
def register_pretty(type=None, predicate=None): """Returns a decorator that registers the decorated function as the pretty printer for instances of ``type``. :param type: the type to register the pretty printer for, or a ``str`` to indicate the module and name, e.g.: ``'collections.Counter'``. :param predicate: a predicate function that takes one argument and returns a boolean indicating if the value should be handled by the registered pretty printer. Only one of ``type`` and ``predicate`` may be supplied. That means that ``predicate`` will be run on unregistered types only. The decorated function must accept exactly two positional arguments: - ``value`` to pretty print, and - ``ctx``, a context value. Here's an example of the pretty printer for OrderedDict: .. code:: python from collections import OrderedDict from prettyprinter import register_pretty, pretty_call @register_pretty(OrderedDict) def pretty_orderreddict(value, ctx): return pretty_call(ctx, OrderedDict, list(value.items())) """ if type is None and predicate is None: raise ValueError( "You must provide either the 'type' or 'predicate' argument." ) if type is not None and predicate is not None: raise ValueError( "You must provide either the 'type' or 'predicate' argument," "but not both" ) if predicate is not None: if not callable(predicate): raise ValueError( "Expected a callable for 'predicate', got {}".format( repr(predicate) ) ) def decorator(fn): sig = inspect.signature(fn) value = None ctx = None try: sig.bind(value, ctx) except TypeError: fnname = '{}.{}'.format( fn.__module__, fn.__qualname__ ) raise ValueError( "Functions decorated with register_pretty must accept " "exactly two positional parameters: 'value' and 'ctx'. " "The function signature for {} was not compatible.".format( fnname ) ) if type: if isinstance(type, str): # We don't wrap this with _run_pretty, # so that when we register this printer with an actual # class, we can call register_pretty(cls)(fn) _DEFERRED_DISPATCH_BY_NAME[type] = fn else: pretty_dispatch.register(type, partial(_run_pretty, fn)) else: assert callable(predicate) _PREDICATE_REGISTRY.append((predicate, fn)) return fn return decorator
python
def register_pretty(type=None, predicate=None): """Returns a decorator that registers the decorated function as the pretty printer for instances of ``type``. :param type: the type to register the pretty printer for, or a ``str`` to indicate the module and name, e.g.: ``'collections.Counter'``. :param predicate: a predicate function that takes one argument and returns a boolean indicating if the value should be handled by the registered pretty printer. Only one of ``type`` and ``predicate`` may be supplied. That means that ``predicate`` will be run on unregistered types only. The decorated function must accept exactly two positional arguments: - ``value`` to pretty print, and - ``ctx``, a context value. Here's an example of the pretty printer for OrderedDict: .. code:: python from collections import OrderedDict from prettyprinter import register_pretty, pretty_call @register_pretty(OrderedDict) def pretty_orderreddict(value, ctx): return pretty_call(ctx, OrderedDict, list(value.items())) """ if type is None and predicate is None: raise ValueError( "You must provide either the 'type' or 'predicate' argument." ) if type is not None and predicate is not None: raise ValueError( "You must provide either the 'type' or 'predicate' argument," "but not both" ) if predicate is not None: if not callable(predicate): raise ValueError( "Expected a callable for 'predicate', got {}".format( repr(predicate) ) ) def decorator(fn): sig = inspect.signature(fn) value = None ctx = None try: sig.bind(value, ctx) except TypeError: fnname = '{}.{}'.format( fn.__module__, fn.__qualname__ ) raise ValueError( "Functions decorated with register_pretty must accept " "exactly two positional parameters: 'value' and 'ctx'. " "The function signature for {} was not compatible.".format( fnname ) ) if type: if isinstance(type, str): # We don't wrap this with _run_pretty, # so that when we register this printer with an actual # class, we can call register_pretty(cls)(fn) _DEFERRED_DISPATCH_BY_NAME[type] = fn else: pretty_dispatch.register(type, partial(_run_pretty, fn)) else: assert callable(predicate) _PREDICATE_REGISTRY.append((predicate, fn)) return fn return decorator
[ "def", "register_pretty", "(", "type", "=", "None", ",", "predicate", "=", "None", ")", ":", "if", "type", "is", "None", "and", "predicate", "is", "None", ":", "raise", "ValueError", "(", "\"You must provide either the 'type' or 'predicate' argument.\"", ")", "if", "type", "is", "not", "None", "and", "predicate", "is", "not", "None", ":", "raise", "ValueError", "(", "\"You must provide either the 'type' or 'predicate' argument,\"", "\"but not both\"", ")", "if", "predicate", "is", "not", "None", ":", "if", "not", "callable", "(", "predicate", ")", ":", "raise", "ValueError", "(", "\"Expected a callable for 'predicate', got {}\"", ".", "format", "(", "repr", "(", "predicate", ")", ")", ")", "def", "decorator", "(", "fn", ")", ":", "sig", "=", "inspect", ".", "signature", "(", "fn", ")", "value", "=", "None", "ctx", "=", "None", "try", ":", "sig", ".", "bind", "(", "value", ",", "ctx", ")", "except", "TypeError", ":", "fnname", "=", "'{}.{}'", ".", "format", "(", "fn", ".", "__module__", ",", "fn", ".", "__qualname__", ")", "raise", "ValueError", "(", "\"Functions decorated with register_pretty must accept \"", "\"exactly two positional parameters: 'value' and 'ctx'. \"", "\"The function signature for {} was not compatible.\"", ".", "format", "(", "fnname", ")", ")", "if", "type", ":", "if", "isinstance", "(", "type", ",", "str", ")", ":", "# We don't wrap this with _run_pretty,", "# so that when we register this printer with an actual", "# class, we can call register_pretty(cls)(fn)", "_DEFERRED_DISPATCH_BY_NAME", "[", "type", "]", "=", "fn", "else", ":", "pretty_dispatch", ".", "register", "(", "type", ",", "partial", "(", "_run_pretty", ",", "fn", ")", ")", "else", ":", "assert", "callable", "(", "predicate", ")", "_PREDICATE_REGISTRY", ".", "append", "(", "(", "predicate", ",", "fn", ")", ")", "return", "fn", "return", "decorator" ]
Returns a decorator that registers the decorated function as the pretty printer for instances of ``type``. :param type: the type to register the pretty printer for, or a ``str`` to indicate the module and name, e.g.: ``'collections.Counter'``. :param predicate: a predicate function that takes one argument and returns a boolean indicating if the value should be handled by the registered pretty printer. Only one of ``type`` and ``predicate`` may be supplied. That means that ``predicate`` will be run on unregistered types only. The decorated function must accept exactly two positional arguments: - ``value`` to pretty print, and - ``ctx``, a context value. Here's an example of the pretty printer for OrderedDict: .. code:: python from collections import OrderedDict from prettyprinter import register_pretty, pretty_call @register_pretty(OrderedDict) def pretty_orderreddict(value, ctx): return pretty_call(ctx, OrderedDict, list(value.items()))
[ "Returns", "a", "decorator", "that", "registers", "the", "decorated", "function", "as", "the", "pretty", "printer", "for", "instances", "of", "type", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L462-L544
train
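Building on the OrderedDict example in the docstring above, a hedged sketch that registers a printer for a user-defined class; Point and pretty_point are made-up names, while register_pretty and pretty_call come straight from the docstring's own import line.

from prettyprinter import register_pretty, pretty_call, pprint

class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y

@register_pretty(Point)
def pretty_point(value, ctx):
    # Renders instances as constructor-style calls, roughly Point(x=1, y=2).
    return pretty_call(ctx, Point, x=value.x, y=value.y)

pprint(Point(1, 2))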
tommikaikkonen/prettyprinter
prettyprinter/prettyprinter.py
commentdoc
def commentdoc(text): """Returns a Doc representing a comment `text`. `text` is treated as words, and any whitespace may be used to break the comment to multiple lines.""" if not text: raise ValueError( 'Expected non-empty comment str, got {}'.format(repr(text)) ) commentlines = [] for line in text.splitlines(): alternating_words_ws = list(filter(None, WHITESPACE_PATTERN_TEXT.split(line))) starts_with_whitespace = bool( WHITESPACE_PATTERN_TEXT.match(alternating_words_ws[0]) ) if starts_with_whitespace: prefix = alternating_words_ws[0] alternating_words_ws = alternating_words_ws[1:] else: prefix = NIL if len(alternating_words_ws) % 2 == 0: # The last part must be whitespace. alternating_words_ws = alternating_words_ws[:-1] for idx, tup in enumerate(zip(alternating_words_ws, cycle([False, True]))): part, is_ws = tup if is_ws: alternating_words_ws[idx] = flat_choice( when_flat=part, when_broken=always_break( concat([ HARDLINE, '# ', ]) ) ) commentlines.append( concat([ '# ', prefix, fill(alternating_words_ws) ]) ) outer = identity if len(commentlines) > 1: outer = always_break return annotate( Token.COMMENT_SINGLE, outer(concat(intersperse(HARDLINE, commentlines))) )
python
def commentdoc(text): """Returns a Doc representing a comment `text`. `text` is treated as words, and any whitespace may be used to break the comment to multiple lines.""" if not text: raise ValueError( 'Expected non-empty comment str, got {}'.format(repr(text)) ) commentlines = [] for line in text.splitlines(): alternating_words_ws = list(filter(None, WHITESPACE_PATTERN_TEXT.split(line))) starts_with_whitespace = bool( WHITESPACE_PATTERN_TEXT.match(alternating_words_ws[0]) ) if starts_with_whitespace: prefix = alternating_words_ws[0] alternating_words_ws = alternating_words_ws[1:] else: prefix = NIL if len(alternating_words_ws) % 2 == 0: # The last part must be whitespace. alternating_words_ws = alternating_words_ws[:-1] for idx, tup in enumerate(zip(alternating_words_ws, cycle([False, True]))): part, is_ws = tup if is_ws: alternating_words_ws[idx] = flat_choice( when_flat=part, when_broken=always_break( concat([ HARDLINE, '# ', ]) ) ) commentlines.append( concat([ '# ', prefix, fill(alternating_words_ws) ]) ) outer = identity if len(commentlines) > 1: outer = always_break return annotate( Token.COMMENT_SINGLE, outer(concat(intersperse(HARDLINE, commentlines))) )
[ "def", "commentdoc", "(", "text", ")", ":", "if", "not", "text", ":", "raise", "ValueError", "(", "'Expected non-empty comment str, got {}'", ".", "format", "(", "repr", "(", "text", ")", ")", ")", "commentlines", "=", "[", "]", "for", "line", "in", "text", ".", "splitlines", "(", ")", ":", "alternating_words_ws", "=", "list", "(", "filter", "(", "None", ",", "WHITESPACE_PATTERN_TEXT", ".", "split", "(", "line", ")", ")", ")", "starts_with_whitespace", "=", "bool", "(", "WHITESPACE_PATTERN_TEXT", ".", "match", "(", "alternating_words_ws", "[", "0", "]", ")", ")", "if", "starts_with_whitespace", ":", "prefix", "=", "alternating_words_ws", "[", "0", "]", "alternating_words_ws", "=", "alternating_words_ws", "[", "1", ":", "]", "else", ":", "prefix", "=", "NIL", "if", "len", "(", "alternating_words_ws", ")", "%", "2", "==", "0", ":", "# The last part must be whitespace.", "alternating_words_ws", "=", "alternating_words_ws", "[", ":", "-", "1", "]", "for", "idx", ",", "tup", "in", "enumerate", "(", "zip", "(", "alternating_words_ws", ",", "cycle", "(", "[", "False", ",", "True", "]", ")", ")", ")", ":", "part", ",", "is_ws", "=", "tup", "if", "is_ws", ":", "alternating_words_ws", "[", "idx", "]", "=", "flat_choice", "(", "when_flat", "=", "part", ",", "when_broken", "=", "always_break", "(", "concat", "(", "[", "HARDLINE", ",", "'# '", ",", "]", ")", ")", ")", "commentlines", ".", "append", "(", "concat", "(", "[", "'# '", ",", "prefix", ",", "fill", "(", "alternating_words_ws", ")", "]", ")", ")", "outer", "=", "identity", "if", "len", "(", "commentlines", ")", ">", "1", ":", "outer", "=", "always_break", "return", "annotate", "(", "Token", ".", "COMMENT_SINGLE", ",", "outer", "(", "concat", "(", "intersperse", "(", "HARDLINE", ",", "commentlines", ")", ")", ")", ")" ]
Returns a Doc representing a comment `text`. `text` is treated as words, and any whitespace may be used to break the comment to multiple lines.
[ "Returns", "a", "Doc", "representing", "a", "comment", "text", ".", "text", "is", "treated", "as", "words", "and", "any", "whitespace", "may", "be", "used", "to", "break", "the", "comment", "to", "multiple", "lines", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L599-L654
train
tommikaikkonen/prettyprinter
prettyprinter/prettyprinter.py
build_fncall
def build_fncall( ctx, fndoc, argdocs=(), kwargdocs=(), hug_sole_arg=False, trailing_comment=None, ): """Builds a doc that looks like a function call, from docs that represent the function, arguments and keyword arguments. If ``hug_sole_arg`` is True, and the represented functional call is done with a single non-keyword argument, the function call parentheses will hug the sole argument doc without newlines and indentation in break mode. This makes a difference in calls like this:: > hug_sole_arg = False frozenset( [ 1, 2, 3, 4, 5 ] ) > hug_sole_arg = True frozenset([ 1, 2, 3, 4, 5, ]) If ``trailing_comment`` is provided, the text is rendered as a comment after the last argument and before the closing parenthesis. This will force the function call to be broken to multiple lines. """ if callable(fndoc): fndoc = general_identifier(fndoc) has_comment = bool(trailing_comment) argdocs = list(argdocs) kwargdocs = list(kwargdocs) kwargdocs = [ # Propagate any comments to the kwarg doc. ( comment_doc( concat([ keyword_arg(binding), ASSIGN_OP, doc.doc ]), doc.annotation.value ) if is_commented(doc) else concat([ keyword_arg(binding), ASSIGN_OP, doc ]) ) for binding, doc in kwargdocs ] if not (argdocs or kwargdocs): return concat([ fndoc, LPAREN, RPAREN, ]) if ( hug_sole_arg and not kwargdocs and len(argdocs) == 1 and not is_commented(argdocs[0]) ): return group( concat([ fndoc, LPAREN, argdocs[0], RPAREN ]) ) allarg_docs = [*argdocs, *kwargdocs] if trailing_comment: allarg_docs.append(commentdoc(trailing_comment)) parts = [] for idx, doc in enumerate(allarg_docs): last = idx == len(allarg_docs) - 1 if is_commented(doc): has_comment = True comment_str = doc.annotation.value doc = doc.doc else: comment_str = None part = concat([doc, NIL if last else COMMA]) if comment_str: part = group( flat_choice( when_flat=concat([ part, ' ', commentdoc(comment_str) ]), when_broken=concat([ commentdoc(comment_str), HARDLINE, part, ]), ) ) if not last: part = concat([part, HARDLINE if has_comment else LINE]) parts.append(part) outer = ( always_break if has_comment else group ) return outer( concat([ fndoc, LPAREN, nest( ctx.indent, concat([ SOFTLINE, concat(parts), ]) ), SOFTLINE, RPAREN ]) )
python
def build_fncall( ctx, fndoc, argdocs=(), kwargdocs=(), hug_sole_arg=False, trailing_comment=None, ): """Builds a doc that looks like a function call, from docs that represent the function, arguments and keyword arguments. If ``hug_sole_arg`` is True, and the represented functional call is done with a single non-keyword argument, the function call parentheses will hug the sole argument doc without newlines and indentation in break mode. This makes a difference in calls like this:: > hug_sole_arg = False frozenset( [ 1, 2, 3, 4, 5 ] ) > hug_sole_arg = True frozenset([ 1, 2, 3, 4, 5, ]) If ``trailing_comment`` is provided, the text is rendered as a comment after the last argument and before the closing parenthesis. This will force the function call to be broken to multiple lines. """ if callable(fndoc): fndoc = general_identifier(fndoc) has_comment = bool(trailing_comment) argdocs = list(argdocs) kwargdocs = list(kwargdocs) kwargdocs = [ # Propagate any comments to the kwarg doc. ( comment_doc( concat([ keyword_arg(binding), ASSIGN_OP, doc.doc ]), doc.annotation.value ) if is_commented(doc) else concat([ keyword_arg(binding), ASSIGN_OP, doc ]) ) for binding, doc in kwargdocs ] if not (argdocs or kwargdocs): return concat([ fndoc, LPAREN, RPAREN, ]) if ( hug_sole_arg and not kwargdocs and len(argdocs) == 1 and not is_commented(argdocs[0]) ): return group( concat([ fndoc, LPAREN, argdocs[0], RPAREN ]) ) allarg_docs = [*argdocs, *kwargdocs] if trailing_comment: allarg_docs.append(commentdoc(trailing_comment)) parts = [] for idx, doc in enumerate(allarg_docs): last = idx == len(allarg_docs) - 1 if is_commented(doc): has_comment = True comment_str = doc.annotation.value doc = doc.doc else: comment_str = None part = concat([doc, NIL if last else COMMA]) if comment_str: part = group( flat_choice( when_flat=concat([ part, ' ', commentdoc(comment_str) ]), when_broken=concat([ commentdoc(comment_str), HARDLINE, part, ]), ) ) if not last: part = concat([part, HARDLINE if has_comment else LINE]) parts.append(part) outer = ( always_break if has_comment else group ) return outer( concat([ fndoc, LPAREN, nest( ctx.indent, concat([ SOFTLINE, concat(parts), ]) ), SOFTLINE, RPAREN ]) )
[ "def", "build_fncall", "(", "ctx", ",", "fndoc", ",", "argdocs", "=", "(", ")", ",", "kwargdocs", "=", "(", ")", ",", "hug_sole_arg", "=", "False", ",", "trailing_comment", "=", "None", ",", ")", ":", "if", "callable", "(", "fndoc", ")", ":", "fndoc", "=", "general_identifier", "(", "fndoc", ")", "has_comment", "=", "bool", "(", "trailing_comment", ")", "argdocs", "=", "list", "(", "argdocs", ")", "kwargdocs", "=", "list", "(", "kwargdocs", ")", "kwargdocs", "=", "[", "# Propagate any comments to the kwarg doc.", "(", "comment_doc", "(", "concat", "(", "[", "keyword_arg", "(", "binding", ")", ",", "ASSIGN_OP", ",", "doc", ".", "doc", "]", ")", ",", "doc", ".", "annotation", ".", "value", ")", "if", "is_commented", "(", "doc", ")", "else", "concat", "(", "[", "keyword_arg", "(", "binding", ")", ",", "ASSIGN_OP", ",", "doc", "]", ")", ")", "for", "binding", ",", "doc", "in", "kwargdocs", "]", "if", "not", "(", "argdocs", "or", "kwargdocs", ")", ":", "return", "concat", "(", "[", "fndoc", ",", "LPAREN", ",", "RPAREN", ",", "]", ")", "if", "(", "hug_sole_arg", "and", "not", "kwargdocs", "and", "len", "(", "argdocs", ")", "==", "1", "and", "not", "is_commented", "(", "argdocs", "[", "0", "]", ")", ")", ":", "return", "group", "(", "concat", "(", "[", "fndoc", ",", "LPAREN", ",", "argdocs", "[", "0", "]", ",", "RPAREN", "]", ")", ")", "allarg_docs", "=", "[", "*", "argdocs", ",", "*", "kwargdocs", "]", "if", "trailing_comment", ":", "allarg_docs", ".", "append", "(", "commentdoc", "(", "trailing_comment", ")", ")", "parts", "=", "[", "]", "for", "idx", ",", "doc", "in", "enumerate", "(", "allarg_docs", ")", ":", "last", "=", "idx", "==", "len", "(", "allarg_docs", ")", "-", "1", "if", "is_commented", "(", "doc", ")", ":", "has_comment", "=", "True", "comment_str", "=", "doc", ".", "annotation", ".", "value", "doc", "=", "doc", ".", "doc", "else", ":", "comment_str", "=", "None", "part", "=", "concat", "(", "[", "doc", ",", "NIL", "if", "last", "else", "COMMA", "]", ")", "if", "comment_str", ":", "part", "=", "group", "(", "flat_choice", "(", "when_flat", "=", "concat", "(", "[", "part", ",", "' '", ",", "commentdoc", "(", "comment_str", ")", "]", ")", ",", "when_broken", "=", "concat", "(", "[", "commentdoc", "(", "comment_str", ")", ",", "HARDLINE", ",", "part", ",", "]", ")", ",", ")", ")", "if", "not", "last", ":", "part", "=", "concat", "(", "[", "part", ",", "HARDLINE", "if", "has_comment", "else", "LINE", "]", ")", "parts", ".", "append", "(", "part", ")", "outer", "=", "(", "always_break", "if", "has_comment", "else", "group", ")", "return", "outer", "(", "concat", "(", "[", "fndoc", ",", "LPAREN", ",", "nest", "(", "ctx", ".", "indent", ",", "concat", "(", "[", "SOFTLINE", ",", "concat", "(", "parts", ")", ",", "]", ")", ")", ",", "SOFTLINE", ",", "RPAREN", "]", ")", ")" ]
Builds a doc that looks like a function call, from docs that represent the function, arguments and keyword arguments. If ``hug_sole_arg`` is True, and the represented functional call is done with a single non-keyword argument, the function call parentheses will hug the sole argument doc without newlines and indentation in break mode. This makes a difference in calls like this:: > hug_sole_arg = False frozenset( [ 1, 2, 3, 4, 5 ] ) > hug_sole_arg = True frozenset([ 1, 2, 3, 4, 5, ]) If ``trailing_comment`` is provided, the text is rendered as a comment after the last argument and before the closing parenthesis. This will force the function call to be broken to multiple lines.
[ "Builds", "a", "doc", "that", "looks", "like", "a", "function", "call", "from", "docs", "that", "represent", "the", "function", "arguments", "and", "keyword", "arguments", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L849-L1003
train
tommikaikkonen/prettyprinter
prettyprinter/prettyprinter.py
PrettyContext.assoc
def assoc(self, key, value): """ Return a modified PrettyContext with ``key`` set to ``value`` """ return self._replace(user_ctx={ **self.user_ctx, key: value, })
python
def assoc(self, key, value): """ Return a modified PrettyContext with ``key`` set to ``value`` """ return self._replace(user_ctx={ **self.user_ctx, key: value, })
[ "def", "assoc", "(", "self", ",", "key", ",", "value", ")", ":", "return", "self", ".", "_replace", "(", "user_ctx", "=", "{", "*", "*", "self", ".", "user_ctx", ",", "key", ":", "value", ",", "}", ")" ]
Return a modified PrettyContext with ``key`` set to ``value``
[ "Return", "a", "modified", "PrettyContext", "with", "key", "set", "to", "value" ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/prettyprinter.py#L297-L304
train
tommikaikkonen/prettyprinter
prettyprinter/doc.py
align
def align(doc): """Aligns each new line in ``doc`` with the first new line. """ validate_doc(doc) def evaluator(indent, column, page_width, ribbon_width): return Nest(column - indent, doc) return contextual(evaluator)
python
def align(doc): """Aligns each new line in ``doc`` with the first new line. """ validate_doc(doc) def evaluator(indent, column, page_width, ribbon_width): return Nest(column - indent, doc) return contextual(evaluator)
[ "def", "align", "(", "doc", ")", ":", "validate_doc", "(", "doc", ")", "def", "evaluator", "(", "indent", ",", "column", ",", "page_width", ",", "ribbon_width", ")", ":", "return", "Nest", "(", "column", "-", "indent", ",", "doc", ")", "return", "contextual", "(", "evaluator", ")" ]
Aligns each new line in ``doc`` with the first new line.
[ "Aligns", "each", "new", "line", "in", "doc", "with", "the", "first", "new", "line", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/doc.py#L57-L64
train
tommikaikkonen/prettyprinter
prettyprinter/layout.py
smart_fitting_predicate
def smart_fitting_predicate( page_width, ribbon_frac, min_nesting_level, max_width, triplestack ): """ Lookahead until the last doc at the current indentation level. Pretty, but not as fast. """ chars_left = max_width while chars_left >= 0: if not triplestack: return True indent, mode, doc = triplestack.pop() if doc is NIL: continue elif isinstance(doc, str): chars_left -= len(doc) elif isinstance(doc, Concat): # Recursive call in Strictly Pretty: docs within Concat # are processed in order, with keeping the current # indentation and mode. # We want the leftmost element at the top of the stack, # so we append the concatenated documents in reverse order. triplestack.extend( (indent, mode, doc) for doc in reversed(doc.docs) ) elif isinstance(doc, Annotated): triplestack.append((indent, mode, doc.doc)) elif isinstance(doc, Fill): # Same as the Concat case. triplestack.extend( (indent, mode, doc) for doc in reversed(doc.docs) ) elif isinstance(doc, Nest): # Nest is a combination of an indent and a doc. # Increase indentation, then add the doc for processing. triplestack.append((indent + doc.indent, mode, doc.doc)) elif isinstance(doc, AlwaysBreak): return False elif doc is HARDLINE: # In the fast algorithm, when we see a line, # we return True. Here, as long as the minimum indentation # level is satisfied, we continue processing the next line. # This causes the longer runtime. if indent > min_nesting_level: chars_left = page_width - indent else: return True elif isinstance(doc, FlatChoice): if mode is FLAT_MODE: triplestack.append((indent, mode, doc.when_flat)) elif mode is BREAK_MODE: triplestack.append((indent, mode, doc.when_broken)) else: raise ValueError elif isinstance(doc, Group): # Group just changes the mode. triplestack.append((indent, FLAT_MODE, doc.doc)) elif isinstance(doc, Contextual): ribbon_width = max(0, min(page_width, round(ribbon_frac * page_width))) evaluated_doc = doc.fn( indent=indent, column=max_width - chars_left, page_width=page_width, ribbon_width=ribbon_width, ) normalized = normalize_doc(evaluated_doc) triplestack.append((indent, mode, normalized)) elif isinstance(doc, SAnnotationPush): continue elif isinstance(doc, SAnnotationPop): continue else: raise ValueError((indent, mode, doc)) return False
python
def smart_fitting_predicate( page_width, ribbon_frac, min_nesting_level, max_width, triplestack ): """ Lookahead until the last doc at the current indentation level. Pretty, but not as fast. """ chars_left = max_width while chars_left >= 0: if not triplestack: return True indent, mode, doc = triplestack.pop() if doc is NIL: continue elif isinstance(doc, str): chars_left -= len(doc) elif isinstance(doc, Concat): # Recursive call in Strictly Pretty: docs within Concat # are processed in order, with keeping the current # indentation and mode. # We want the leftmost element at the top of the stack, # so we append the concatenated documents in reverse order. triplestack.extend( (indent, mode, doc) for doc in reversed(doc.docs) ) elif isinstance(doc, Annotated): triplestack.append((indent, mode, doc.doc)) elif isinstance(doc, Fill): # Same as the Concat case. triplestack.extend( (indent, mode, doc) for doc in reversed(doc.docs) ) elif isinstance(doc, Nest): # Nest is a combination of an indent and a doc. # Increase indentation, then add the doc for processing. triplestack.append((indent + doc.indent, mode, doc.doc)) elif isinstance(doc, AlwaysBreak): return False elif doc is HARDLINE: # In the fast algorithm, when we see a line, # we return True. Here, as long as the minimum indentation # level is satisfied, we continue processing the next line. # This causes the longer runtime. if indent > min_nesting_level: chars_left = page_width - indent else: return True elif isinstance(doc, FlatChoice): if mode is FLAT_MODE: triplestack.append((indent, mode, doc.when_flat)) elif mode is BREAK_MODE: triplestack.append((indent, mode, doc.when_broken)) else: raise ValueError elif isinstance(doc, Group): # Group just changes the mode. triplestack.append((indent, FLAT_MODE, doc.doc)) elif isinstance(doc, Contextual): ribbon_width = max(0, min(page_width, round(ribbon_frac * page_width))) evaluated_doc = doc.fn( indent=indent, column=max_width - chars_left, page_width=page_width, ribbon_width=ribbon_width, ) normalized = normalize_doc(evaluated_doc) triplestack.append((indent, mode, normalized)) elif isinstance(doc, SAnnotationPush): continue elif isinstance(doc, SAnnotationPop): continue else: raise ValueError((indent, mode, doc)) return False
[ "def", "smart_fitting_predicate", "(", "page_width", ",", "ribbon_frac", ",", "min_nesting_level", ",", "max_width", ",", "triplestack", ")", ":", "chars_left", "=", "max_width", "while", "chars_left", ">=", "0", ":", "if", "not", "triplestack", ":", "return", "True", "indent", ",", "mode", ",", "doc", "=", "triplestack", ".", "pop", "(", ")", "if", "doc", "is", "NIL", ":", "continue", "elif", "isinstance", "(", "doc", ",", "str", ")", ":", "chars_left", "-=", "len", "(", "doc", ")", "elif", "isinstance", "(", "doc", ",", "Concat", ")", ":", "# Recursive call in Strictly Pretty: docs within Concat", "# are processed in order, with keeping the current", "# indentation and mode.", "# We want the leftmost element at the top of the stack,", "# so we append the concatenated documents in reverse order.", "triplestack", ".", "extend", "(", "(", "indent", ",", "mode", ",", "doc", ")", "for", "doc", "in", "reversed", "(", "doc", ".", "docs", ")", ")", "elif", "isinstance", "(", "doc", ",", "Annotated", ")", ":", "triplestack", ".", "append", "(", "(", "indent", ",", "mode", ",", "doc", ".", "doc", ")", ")", "elif", "isinstance", "(", "doc", ",", "Fill", ")", ":", "# Same as the Concat case.", "triplestack", ".", "extend", "(", "(", "indent", ",", "mode", ",", "doc", ")", "for", "doc", "in", "reversed", "(", "doc", ".", "docs", ")", ")", "elif", "isinstance", "(", "doc", ",", "Nest", ")", ":", "# Nest is a combination of an indent and a doc.", "# Increase indentation, then add the doc for processing.", "triplestack", ".", "append", "(", "(", "indent", "+", "doc", ".", "indent", ",", "mode", ",", "doc", ".", "doc", ")", ")", "elif", "isinstance", "(", "doc", ",", "AlwaysBreak", ")", ":", "return", "False", "elif", "doc", "is", "HARDLINE", ":", "# In the fast algorithm, when we see a line,", "# we return True. Here, as long as the minimum indentation", "# level is satisfied, we continue processing the next line.", "# This causes the longer runtime.", "if", "indent", ">", "min_nesting_level", ":", "chars_left", "=", "page_width", "-", "indent", "else", ":", "return", "True", "elif", "isinstance", "(", "doc", ",", "FlatChoice", ")", ":", "if", "mode", "is", "FLAT_MODE", ":", "triplestack", ".", "append", "(", "(", "indent", ",", "mode", ",", "doc", ".", "when_flat", ")", ")", "elif", "mode", "is", "BREAK_MODE", ":", "triplestack", ".", "append", "(", "(", "indent", ",", "mode", ",", "doc", ".", "when_broken", ")", ")", "else", ":", "raise", "ValueError", "elif", "isinstance", "(", "doc", ",", "Group", ")", ":", "# Group just changes the mode.", "triplestack", ".", "append", "(", "(", "indent", ",", "FLAT_MODE", ",", "doc", ".", "doc", ")", ")", "elif", "isinstance", "(", "doc", ",", "Contextual", ")", ":", "ribbon_width", "=", "max", "(", "0", ",", "min", "(", "page_width", ",", "round", "(", "ribbon_frac", "*", "page_width", ")", ")", ")", "evaluated_doc", "=", "doc", ".", "fn", "(", "indent", "=", "indent", ",", "column", "=", "max_width", "-", "chars_left", ",", "page_width", "=", "page_width", ",", "ribbon_width", "=", "ribbon_width", ",", ")", "normalized", "=", "normalize_doc", "(", "evaluated_doc", ")", "triplestack", ".", "append", "(", "(", "indent", ",", "mode", ",", "normalized", ")", ")", "elif", "isinstance", "(", "doc", ",", "SAnnotationPush", ")", ":", "continue", "elif", "isinstance", "(", "doc", ",", "SAnnotationPop", ")", ":", "continue", "else", ":", "raise", "ValueError", "(", "(", "indent", ",", "mode", ",", "doc", ")", ")", "return", "False" ]
Lookahead until the last doc at the current indentation level. Pretty, but not as fast.
[ "Lookahead", "until", "the", "last", "doc", "at", "the", "current", "indentation", "level", ".", "Pretty", "but", "not", "as", "fast", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/layout.py#L124-L208
train
tommikaikkonen/prettyprinter
prettyprinter/color.py
set_default_style
def set_default_style(style): """Sets default global style to be used by ``prettyprinter.cpprint``. :param style: the style to set, either subclass of ``pygments.styles.Style`` or one of ``'dark'``, ``'light'`` """ global default_style if style == 'dark': style = default_dark_style elif style == 'light': style = default_light_style if not issubclass(style, Style): raise TypeError( "style must be a subclass of pygments.styles.Style or " "one of 'dark', 'light'. Got {}".format(repr(style)) ) default_style = style
python
def set_default_style(style): """Sets default global style to be used by ``prettyprinter.cpprint``. :param style: the style to set, either subclass of ``pygments.styles.Style`` or one of ``'dark'``, ``'light'`` """ global default_style if style == 'dark': style = default_dark_style elif style == 'light': style = default_light_style if not issubclass(style, Style): raise TypeError( "style must be a subclass of pygments.styles.Style or " "one of 'dark', 'light'. Got {}".format(repr(style)) ) default_style = style
[ "def", "set_default_style", "(", "style", ")", ":", "global", "default_style", "if", "style", "==", "'dark'", ":", "style", "=", "default_dark_style", "elif", "style", "==", "'light'", ":", "style", "=", "default_light_style", "if", "not", "issubclass", "(", "style", ",", "Style", ")", ":", "raise", "TypeError", "(", "\"style must be a subclass of pygments.styles.Style or \"", "\"one of 'dark', 'light'. Got {}\"", ".", "format", "(", "repr", "(", "style", ")", ")", ")", "default_style", "=", "style" ]
Sets default global style to be used by ``prettyprinter.cpprint``. :param style: the style to set, either subclass of ``pygments.styles.Style`` or one of ``'dark'``, ``'light'``
[ "Sets", "default", "global", "style", "to", "be", "used", "by", "prettyprinter", ".", "cpprint", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/color.py#L134-L151
train
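A small usage sketch for set_default_style above, assuming prettyprinter is installed; the import path mirrors this record's module.

from prettyprinter import cpprint
from prettyprinter.color import set_default_style

set_default_style('light')   # or 'dark', or any pygments.styles.Style subclass
cpprint({'status': 'ok', 'count': 3})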
tommikaikkonen/prettyprinter
prettyprinter/utils.py
intersperse
def intersperse(x, ys): """ Returns an iterable where ``x`` is inserted between each element of ``ys`` :type ys: Iterable """ it = iter(ys) try: y = next(it) except StopIteration: return yield y for y in it: yield x yield y
python
def intersperse(x, ys): """ Returns an iterable where ``x`` is inserted between each element of ``ys`` :type ys: Iterable """ it = iter(ys) try: y = next(it) except StopIteration: return yield y for y in it: yield x yield y
[ "def", "intersperse", "(", "x", ",", "ys", ")", ":", "it", "=", "iter", "(", "ys", ")", "try", ":", "y", "=", "next", "(", "it", ")", "except", "StopIteration", ":", "return", "yield", "y", "for", "y", "in", "it", ":", "yield", "x", "yield", "y" ]
Returns an iterable where ``x`` is inserted between each element of ``ys`` :type ys: Iterable
[ "Returns", "an", "iterable", "where", "x", "is", "inserted", "between", "each", "element", "of", "ys" ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/utils.py#L5-L23
train
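intersperse above is a small pure helper, so a direct example is easy; the import assumes prettyprinter is installed.

from prettyprinter.utils import intersperse

print(list(intersperse(0, [1, 2, 3])))   # [1, 0, 2, 0, 3]
print(list(intersperse(0, [])))          # [] (an empty iterable yields nothing)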
tommikaikkonen/prettyprinter
prettyprinter/__init__.py
pprint
def pprint( object, stream=_UNSET_SENTINEL, indent=_UNSET_SENTINEL, width=_UNSET_SENTINEL, depth=_UNSET_SENTINEL, *, compact=False, ribbon_width=_UNSET_SENTINEL, max_seq_len=_UNSET_SENTINEL, sort_dict_keys=_UNSET_SENTINEL, end='\n' ): """Pretty print a Python value ``object`` to ``stream``, which defaults to ``sys.stdout``. The output will not be colored. :param indent: number of spaces to add for each level of nesting. :param stream: the output stream, defaults to ``sys.stdout`` :param width: a soft maximum allowed number of columns in the output, which the layout algorithm attempts to stay under. :param depth: maximum depth to print nested structures :param ribbon_width: a soft maximum allowed number of columns in the output, after indenting the line :param max_seq_len: a maximum sequence length that applies to subclasses of lists, sets, frozensets, tuples and dicts. A trailing comment that indicates the number of truncated elements. Setting max_seq_len to ``None`` disables truncation. :param sort_dict_keys: a ``bool`` value indicating if dict keys should be sorted in the output. Defaults to ``False``, in which case the default order is used, which is the insertion order in CPython 3.6+. """ sdocs = python_to_sdocs( object, **_merge_defaults( indent=indent, width=width, depth=depth, ribbon_width=ribbon_width, max_seq_len=max_seq_len, sort_dict_keys=sort_dict_keys, ) ) stream = ( # This is not in _default_config in case # sys.stdout changes. sys.stdout if stream is _UNSET_SENTINEL else stream ) default_render_to_stream(stream, sdocs) if end: stream.write(end)
python
def pprint( object, stream=_UNSET_SENTINEL, indent=_UNSET_SENTINEL, width=_UNSET_SENTINEL, depth=_UNSET_SENTINEL, *, compact=False, ribbon_width=_UNSET_SENTINEL, max_seq_len=_UNSET_SENTINEL, sort_dict_keys=_UNSET_SENTINEL, end='\n' ): """Pretty print a Python value ``object`` to ``stream``, which defaults to ``sys.stdout``. The output will not be colored. :param indent: number of spaces to add for each level of nesting. :param stream: the output stream, defaults to ``sys.stdout`` :param width: a soft maximum allowed number of columns in the output, which the layout algorithm attempts to stay under. :param depth: maximum depth to print nested structures :param ribbon_width: a soft maximum allowed number of columns in the output, after indenting the line :param max_seq_len: a maximum sequence length that applies to subclasses of lists, sets, frozensets, tuples and dicts. A trailing comment that indicates the number of truncated elements. Setting max_seq_len to ``None`` disables truncation. :param sort_dict_keys: a ``bool`` value indicating if dict keys should be sorted in the output. Defaults to ``False``, in which case the default order is used, which is the insertion order in CPython 3.6+. """ sdocs = python_to_sdocs( object, **_merge_defaults( indent=indent, width=width, depth=depth, ribbon_width=ribbon_width, max_seq_len=max_seq_len, sort_dict_keys=sort_dict_keys, ) ) stream = ( # This is not in _default_config in case # sys.stdout changes. sys.stdout if stream is _UNSET_SENTINEL else stream ) default_render_to_stream(stream, sdocs) if end: stream.write(end)
[ "def", "pprint", "(", "object", ",", "stream", "=", "_UNSET_SENTINEL", ",", "indent", "=", "_UNSET_SENTINEL", ",", "width", "=", "_UNSET_SENTINEL", ",", "depth", "=", "_UNSET_SENTINEL", ",", "*", ",", "compact", "=", "False", ",", "ribbon_width", "=", "_UNSET_SENTINEL", ",", "max_seq_len", "=", "_UNSET_SENTINEL", ",", "sort_dict_keys", "=", "_UNSET_SENTINEL", ",", "end", "=", "'\\n'", ")", ":", "sdocs", "=", "python_to_sdocs", "(", "object", ",", "*", "*", "_merge_defaults", "(", "indent", "=", "indent", ",", "width", "=", "width", ",", "depth", "=", "depth", ",", "ribbon_width", "=", "ribbon_width", ",", "max_seq_len", "=", "max_seq_len", ",", "sort_dict_keys", "=", "sort_dict_keys", ",", ")", ")", "stream", "=", "(", "# This is not in _default_config in case", "# sys.stdout changes.", "sys", ".", "stdout", "if", "stream", "is", "_UNSET_SENTINEL", "else", "stream", ")", "default_render_to_stream", "(", "stream", ",", "sdocs", ")", "if", "end", ":", "stream", ".", "write", "(", "end", ")" ]
Pretty print a Python value ``object`` to ``stream``, which defaults to ``sys.stdout``. The output will not be colored. :param indent: number of spaces to add for each level of nesting. :param stream: the output stream, defaults to ``sys.stdout`` :param width: a soft maximum allowed number of columns in the output, which the layout algorithm attempts to stay under. :param depth: maximum depth to print nested structures :param ribbon_width: a soft maximum allowed number of columns in the output, after indenting the line :param max_seq_len: a maximum sequence length that applies to subclasses of lists, sets, frozensets, tuples and dicts. A trailing comment that indicates the number of truncated elements. Setting max_seq_len to ``None`` disables truncation. :param sort_dict_keys: a ``bool`` value indicating if dict keys should be sorted in the output. Defaults to ``False``, in which case the default order is used, which is the insertion order in CPython 3.6+.
[ "Pretty", "print", "a", "Python", "value", "object", "to", "stream", "which", "defaults", "to", "sys", ".", "stdout", ".", "The", "output", "will", "not", "be", "colored", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L142-L195
train
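A minimal usage sketch for the pprint record above, built only from the parameters listed in its docstring; the sample data is invented.

from prettyprinter import pprint

data = {'name': 'example', 'tags': ['a', 'b', 'c'], 'nested': {'x': 1, 'y': 2}}

# Uncolored output to sys.stdout: keep lines under 40 columns,
# truncate sequences longer than 10 elements, and sort dict keys.
pprint(data, width=40, max_seq_len=10, sort_dict_keys=True)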
tommikaikkonen/prettyprinter
prettyprinter/__init__.py
cpprint
def cpprint( object, stream=_UNSET_SENTINEL, indent=_UNSET_SENTINEL, width=_UNSET_SENTINEL, depth=_UNSET_SENTINEL, *, compact=False, ribbon_width=_UNSET_SENTINEL, max_seq_len=_UNSET_SENTINEL, sort_dict_keys=_UNSET_SENTINEL, style=None, end='\n' ): """Pretty print a Python value ``object`` to ``stream``, which defaults to sys.stdout. The output will be colored and syntax highlighted. :param indent: number of spaces to add for each level of nesting. :param stream: the output stream, defaults to sys.stdout :param width: a soft maximum allowed number of columns in the output, which the layout algorithm attempts to stay under. :param depth: maximum depth to print nested structures :param ribbon_width: a soft maximum allowed number of columns in the output, after indenting the line :param max_seq_len: a maximum sequence length that applies to subclasses of lists, sets, frozensets, tuples and dicts. A trailing comment that indicates the number of truncated elements. Setting max_seq_len to ``None`` disables truncation. :param sort_dict_keys: a ``bool`` value indicating if dict keys should be sorted in the output. Defaults to ``False``, in which case the default order is used, which is the insertion order in CPython 3.6+. :param style: one of ``'light'``, ``'dark'`` or a subclass of ``pygments.styles.Style``. If omitted, will use the default style. If the default style is not changed by the user with :func:`~prettyprinter.set_default_style`, the default is ``'dark'``. """ sdocs = python_to_sdocs( object, **_merge_defaults( indent=indent, width=width, depth=depth, ribbon_width=ribbon_width, max_seq_len=max_seq_len, sort_dict_keys=sort_dict_keys, ) ) stream = ( # This is not in _default_config in case # sys.stdout changes. sys.stdout if stream is _UNSET_SENTINEL else stream ) colored_render_to_stream(stream, sdocs, style=style) if end: stream.write(end)
python
def cpprint( object, stream=_UNSET_SENTINEL, indent=_UNSET_SENTINEL, width=_UNSET_SENTINEL, depth=_UNSET_SENTINEL, *, compact=False, ribbon_width=_UNSET_SENTINEL, max_seq_len=_UNSET_SENTINEL, sort_dict_keys=_UNSET_SENTINEL, style=None, end='\n' ): """Pretty print a Python value ``object`` to ``stream``, which defaults to sys.stdout. The output will be colored and syntax highlighted. :param indent: number of spaces to add for each level of nesting. :param stream: the output stream, defaults to sys.stdout :param width: a soft maximum allowed number of columns in the output, which the layout algorithm attempts to stay under. :param depth: maximum depth to print nested structures :param ribbon_width: a soft maximum allowed number of columns in the output, after indenting the line :param max_seq_len: a maximum sequence length that applies to subclasses of lists, sets, frozensets, tuples and dicts. A trailing comment that indicates the number of truncated elements. Setting max_seq_len to ``None`` disables truncation. :param sort_dict_keys: a ``bool`` value indicating if dict keys should be sorted in the output. Defaults to ``False``, in which case the default order is used, which is the insertion order in CPython 3.6+. :param style: one of ``'light'``, ``'dark'`` or a subclass of ``pygments.styles.Style``. If omitted, will use the default style. If the default style is not changed by the user with :func:`~prettyprinter.set_default_style`, the default is ``'dark'``. """ sdocs = python_to_sdocs( object, **_merge_defaults( indent=indent, width=width, depth=depth, ribbon_width=ribbon_width, max_seq_len=max_seq_len, sort_dict_keys=sort_dict_keys, ) ) stream = ( # This is not in _default_config in case # sys.stdout changes. sys.stdout if stream is _UNSET_SENTINEL else stream ) colored_render_to_stream(stream, sdocs, style=style) if end: stream.write(end)
[ "def", "cpprint", "(", "object", ",", "stream", "=", "_UNSET_SENTINEL", ",", "indent", "=", "_UNSET_SENTINEL", ",", "width", "=", "_UNSET_SENTINEL", ",", "depth", "=", "_UNSET_SENTINEL", ",", "*", ",", "compact", "=", "False", ",", "ribbon_width", "=", "_UNSET_SENTINEL", ",", "max_seq_len", "=", "_UNSET_SENTINEL", ",", "sort_dict_keys", "=", "_UNSET_SENTINEL", ",", "style", "=", "None", ",", "end", "=", "'\\n'", ")", ":", "sdocs", "=", "python_to_sdocs", "(", "object", ",", "*", "*", "_merge_defaults", "(", "indent", "=", "indent", ",", "width", "=", "width", ",", "depth", "=", "depth", ",", "ribbon_width", "=", "ribbon_width", ",", "max_seq_len", "=", "max_seq_len", ",", "sort_dict_keys", "=", "sort_dict_keys", ",", ")", ")", "stream", "=", "(", "# This is not in _default_config in case", "# sys.stdout changes.", "sys", ".", "stdout", "if", "stream", "is", "_UNSET_SENTINEL", "else", "stream", ")", "colored_render_to_stream", "(", "stream", ",", "sdocs", ",", "style", "=", "style", ")", "if", "end", ":", "stream", ".", "write", "(", "end", ")" ]
Pretty print a Python value ``object`` to ``stream``, which defaults to sys.stdout. The output will be colored and syntax highlighted. :param indent: number of spaces to add for each level of nesting. :param stream: the output stream, defaults to sys.stdout :param width: a soft maximum allowed number of columns in the output, which the layout algorithm attempts to stay under. :param depth: maximum depth to print nested structures :param ribbon_width: a soft maximum allowed number of columns in the output, after indenting the line :param max_seq_len: a maximum sequence length that applies to subclasses of lists, sets, frozensets, tuples and dicts. A trailing comment that indicates the number of truncated elements. Setting max_seq_len to ``None`` disables truncation. :param sort_dict_keys: a ``bool`` value indicating if dict keys should be sorted in the output. Defaults to ``False``, in which case the default order is used, which is the insertion order in CPython 3.6+. :param style: one of ``'light'``, ``'dark'`` or a subclass of ``pygments.styles.Style``. If omitted, will use the default style. If the default style is not changed by the user with :func:`~prettyprinter.set_default_style`, the default is ``'dark'``.
[ "Pretty", "print", "a", "Python", "value", "object", "to", "stream", "which", "defaults", "to", "sys", ".", "stdout", ".", "The", "output", "will", "be", "colored", "and", "syntax", "highlighted", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L198-L257
train
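The colored variant differs mainly in the style keyword; a short sketch with the same kind of invented data, also exercising the stream parameter from the docstring.

import io

from prettyprinter import cpprint

data = {'status': 'ok', 'count': 3}

# Colored, syntax-highlighted output using the built-in light style.
cpprint(data, style='light')

# Any writable text stream can be passed explicitly.
buffer = io.StringIO()
cpprint(data, stream=buffer, width=60)
print(buffer.getvalue())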
tommikaikkonen/prettyprinter
prettyprinter/__init__.py
install_extras
def install_extras( include=ALL_EXTRAS, *, exclude=EMPTY_SET, raise_on_error=False, warn_on_error=True ): """Installs extras. Installing an extra means registering pretty printers for objects from third party libraries and/or enabling integrations with other python programs. - ``'attrs'`` - automatically pretty prints classes created using the ``attrs`` package. - ``'dataclasses'`` - automatically pretty prints classes created using the ``dataclasses`` module. - ``'django'`` - automatically pretty prints Model and QuerySet subclasses defined in your Django apps. - ``numpy`` - automatically pretty prints numpy scalars with explicit types, and, for numpy>=1.14, numpy arrays. - ``'requests'`` - automatically pretty prints Requests, Responses, Sessions, etc. - ``'ipython'`` - makes prettyprinter the default printer in the IPython shell. - ``'python'`` - makes prettyprinter the default printer in the default Python shell. - ``'ipython_repr_pretty'`` - automatically prints objects that define a ``_repr_pretty_`` method to integrate with `IPython.lib.pretty <http://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html#extending>`_. :param include: an iterable of strs representing the extras to include. All extras are included by default. :param exclude: an iterable of strs representing the extras to exclude. """ # noqa include = set(include) exclude = set(exclude) unexisting_extras = (include | exclude) - ALL_EXTRAS if unexisting_extras: raise ValueError( "The following extras don't exist: {}".format( ', '.join(unexisting_extras) ) ) extras_to_install = (ALL_EXTRAS & include) - exclude for extra in extras_to_install: module_name = 'prettyprinter.extras.' + extra try: extra_module = import_module(module_name) except ImportError as e: if raise_on_error: raise e if warn_on_error: warnings.warn( "Failed to import '{0}' PrettyPrinter extra. " "If you don't need it, call install_extras with " "exclude=['{0}']".format(extra) ) else: try: extra_module.install() except Exception as exc: if raise_on_error: raise exc elif warn_on_error: warnings.warn( "Failed to install '{0}' PrettyPrinter extra. " "If you don't need it, call install_extras with " "exclude=['{0}']".format(extra) )
python
def install_extras( include=ALL_EXTRAS, *, exclude=EMPTY_SET, raise_on_error=False, warn_on_error=True ): """Installs extras. Installing an extra means registering pretty printers for objects from third party libraries and/or enabling integrations with other python programs. - ``'attrs'`` - automatically pretty prints classes created using the ``attrs`` package. - ``'dataclasses'`` - automatically pretty prints classes created using the ``dataclasses`` module. - ``'django'`` - automatically pretty prints Model and QuerySet subclasses defined in your Django apps. - ``numpy`` - automatically pretty prints numpy scalars with explicit types, and, for numpy>=1.14, numpy arrays. - ``'requests'`` - automatically pretty prints Requests, Responses, Sessions, etc. - ``'ipython'`` - makes prettyprinter the default printer in the IPython shell. - ``'python'`` - makes prettyprinter the default printer in the default Python shell. - ``'ipython_repr_pretty'`` - automatically prints objects that define a ``_repr_pretty_`` method to integrate with `IPython.lib.pretty <http://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html#extending>`_. :param include: an iterable of strs representing the extras to include. All extras are included by default. :param exclude: an iterable of strs representing the extras to exclude. """ # noqa include = set(include) exclude = set(exclude) unexisting_extras = (include | exclude) - ALL_EXTRAS if unexisting_extras: raise ValueError( "The following extras don't exist: {}".format( ', '.join(unexisting_extras) ) ) extras_to_install = (ALL_EXTRAS & include) - exclude for extra in extras_to_install: module_name = 'prettyprinter.extras.' + extra try: extra_module = import_module(module_name) except ImportError as e: if raise_on_error: raise e if warn_on_error: warnings.warn( "Failed to import '{0}' PrettyPrinter extra. " "If you don't need it, call install_extras with " "exclude=['{0}']".format(extra) ) else: try: extra_module.install() except Exception as exc: if raise_on_error: raise exc elif warn_on_error: warnings.warn( "Failed to install '{0}' PrettyPrinter extra. " "If you don't need it, call install_extras with " "exclude=['{0}']".format(extra) )
[ "def", "install_extras", "(", "include", "=", "ALL_EXTRAS", ",", "*", ",", "exclude", "=", "EMPTY_SET", ",", "raise_on_error", "=", "False", ",", "warn_on_error", "=", "True", ")", ":", "# noqa", "include", "=", "set", "(", "include", ")", "exclude", "=", "set", "(", "exclude", ")", "unexisting_extras", "=", "(", "include", "|", "exclude", ")", "-", "ALL_EXTRAS", "if", "unexisting_extras", ":", "raise", "ValueError", "(", "\"The following extras don't exist: {}\"", ".", "format", "(", "', '", ".", "join", "(", "unexisting_extras", ")", ")", ")", "extras_to_install", "=", "(", "ALL_EXTRAS", "&", "include", ")", "-", "exclude", "for", "extra", "in", "extras_to_install", ":", "module_name", "=", "'prettyprinter.extras.'", "+", "extra", "try", ":", "extra_module", "=", "import_module", "(", "module_name", ")", "except", "ImportError", "as", "e", ":", "if", "raise_on_error", ":", "raise", "e", "if", "warn_on_error", ":", "warnings", ".", "warn", "(", "\"Failed to import '{0}' PrettyPrinter extra. \"", "\"If you don't need it, call install_extras with \"", "\"exclude=['{0}']\"", ".", "format", "(", "extra", ")", ")", "else", ":", "try", ":", "extra_module", ".", "install", "(", ")", "except", "Exception", "as", "exc", ":", "if", "raise_on_error", ":", "raise", "exc", "elif", "warn_on_error", ":", "warnings", ".", "warn", "(", "\"Failed to install '{0}' PrettyPrinter extra. \"", "\"If you don't need it, call install_extras with \"", "\"exclude=['{0}']\"", ".", "format", "(", "extra", ")", ")" ]
Installs extras. Installing an extra means registering pretty printers for objects from third party libraries and/or enabling integrations with other python programs. - ``'attrs'`` - automatically pretty prints classes created using the ``attrs`` package. - ``'dataclasses'`` - automatically pretty prints classes created using the ``dataclasses`` module. - ``'django'`` - automatically pretty prints Model and QuerySet subclasses defined in your Django apps. - ``numpy`` - automatically pretty prints numpy scalars with explicit types, and, for numpy>=1.14, numpy arrays. - ``'requests'`` - automatically pretty prints Requests, Responses, Sessions, etc. - ``'ipython'`` - makes prettyprinter the default printer in the IPython shell. - ``'python'`` - makes prettyprinter the default printer in the default Python shell. - ``'ipython_repr_pretty'`` - automatically prints objects that define a ``_repr_pretty_`` method to integrate with `IPython.lib.pretty <http://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html#extending>`_. :param include: an iterable of strs representing the extras to include. All extras are included by default. :param exclude: an iterable of strs representing the extras to exclude.
[ "Installs", "extras", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L273-L341
train
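Based on the extras enumerated in the install_extras docstring above, a hedged example of enabling only a subset; the chosen subset is arbitrary.

from prettyprinter import install_extras

# Register printers only for the libraries a project actually uses and
# skip anything that is not installed without emitting a warning.
install_extras(include=['attrs', 'dataclasses', 'requests'], warn_on_error=False)

# Alternatively, take everything except the interactive-shell hooks.
install_extras(exclude=['ipython', 'python', 'ipython_repr_pretty'])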
tommikaikkonen/prettyprinter
prettyprinter/__init__.py
set_default_config
def set_default_config( *, style=_UNSET_SENTINEL, max_seq_len=_UNSET_SENTINEL, width=_UNSET_SENTINEL, ribbon_width=_UNSET_SENTINEL, depth=_UNSET_SENTINEL, sort_dict_keys=_UNSET_SENTINEL ): """ Sets the default configuration values used when calling `pprint`, `cpprint`, or `pformat`, if those values weren't explicitly provided. Only overrides the values provided in the keyword arguments. """ global _default_config if style is not _UNSET_SENTINEL: set_default_style(style) new_defaults = {**_default_config} if max_seq_len is not _UNSET_SENTINEL: new_defaults['max_seq_len'] = max_seq_len if width is not _UNSET_SENTINEL: new_defaults['width'] = width if ribbon_width is not _UNSET_SENTINEL: new_defaults['ribbon_width'] = ribbon_width if depth is not _UNSET_SENTINEL: new_defaults['depth'] = depth if sort_dict_keys is not _UNSET_SENTINEL: new_defaults['sort_dict_keys'] = sort_dict_keys _default_config = new_defaults return new_defaults
python
def set_default_config( *, style=_UNSET_SENTINEL, max_seq_len=_UNSET_SENTINEL, width=_UNSET_SENTINEL, ribbon_width=_UNSET_SENTINEL, depth=_UNSET_SENTINEL, sort_dict_keys=_UNSET_SENTINEL ): """ Sets the default configuration values used when calling `pprint`, `cpprint`, or `pformat`, if those values weren't explicitly provided. Only overrides the values provided in the keyword arguments. """ global _default_config if style is not _UNSET_SENTINEL: set_default_style(style) new_defaults = {**_default_config} if max_seq_len is not _UNSET_SENTINEL: new_defaults['max_seq_len'] = max_seq_len if width is not _UNSET_SENTINEL: new_defaults['width'] = width if ribbon_width is not _UNSET_SENTINEL: new_defaults['ribbon_width'] = ribbon_width if depth is not _UNSET_SENTINEL: new_defaults['depth'] = depth if sort_dict_keys is not _UNSET_SENTINEL: new_defaults['sort_dict_keys'] = sort_dict_keys _default_config = new_defaults return new_defaults
[ "def", "set_default_config", "(", "*", ",", "style", "=", "_UNSET_SENTINEL", ",", "max_seq_len", "=", "_UNSET_SENTINEL", ",", "width", "=", "_UNSET_SENTINEL", ",", "ribbon_width", "=", "_UNSET_SENTINEL", ",", "depth", "=", "_UNSET_SENTINEL", ",", "sort_dict_keys", "=", "_UNSET_SENTINEL", ")", ":", "global", "_default_config", "if", "style", "is", "not", "_UNSET_SENTINEL", ":", "set_default_style", "(", "style", ")", "new_defaults", "=", "{", "*", "*", "_default_config", "}", "if", "max_seq_len", "is", "not", "_UNSET_SENTINEL", ":", "new_defaults", "[", "'max_seq_len'", "]", "=", "max_seq_len", "if", "width", "is", "not", "_UNSET_SENTINEL", ":", "new_defaults", "[", "'width'", "]", "=", "width", "if", "ribbon_width", "is", "not", "_UNSET_SENTINEL", ":", "new_defaults", "[", "'ribbon_width'", "]", "=", "ribbon_width", "if", "depth", "is", "not", "_UNSET_SENTINEL", ":", "new_defaults", "[", "'depth'", "]", "=", "depth", "if", "sort_dict_keys", "is", "not", "_UNSET_SENTINEL", ":", "new_defaults", "[", "'sort_dict_keys'", "]", "=", "sort_dict_keys", "_default_config", "=", "new_defaults", "return", "new_defaults" ]
Sets the default configuration values used when calling `pprint`, `cpprint`, or `pformat`, if those values weren't explicitly provided. Only overrides the values provided in the keyword arguments.
[ "Sets", "the", "default", "configuration", "values", "used", "when", "calling", "pprint", "cpprint", "or", "pformat", "if", "those", "values", "weren", "t", "explicitly", "provided", ".", "Only", "overrides", "the", "values", "provided", "in", "the", "keyword", "arguments", "." ]
6b405884b8085eaf867e81c02b7b662b463ac5a0
https://github.com/tommikaikkonen/prettyprinter/blob/6b405884b8085eaf867e81c02b7b662b463ac5a0/prettyprinter/__init__.py#L344-L382
train
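A sketch of adjusting the process-wide defaults documented in set_default_config; the values are arbitrary examples.

from prettyprinter import pprint, set_default_config

# Later pprint/cpprint/pformat calls pick these up unless overridden per call.
set_default_config(width=100, ribbon_width=80, max_seq_len=50, sort_dict_keys=True)

pprint({'b': 2, 'a': 1})  # rendered with the new defaults, keys sorted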
bcdev/jpy
setup.py
package_maven
def package_maven(): """ Run maven package lifecycle """ if not os.getenv('JAVA_HOME'): # make sure Maven uses the same JDK which we have used to compile # and link the C-code os.environ['JAVA_HOME'] = jdk_home_dir mvn_goal = 'package' log.info("Executing Maven goal '" + mvn_goal + "'") code = subprocess.call(['mvn', 'clean', mvn_goal, '-DskipTests'], shell=platform.system() == 'Windows') if code: exit(code) # # Copy JAR results to lib/*.jar # if not os.path.exists(lib_dir): os.mkdir(lib_dir) target_dir = os.path.join(base_dir, 'target') jar_files = glob.glob(os.path.join(target_dir, '*.jar')) jar_files = [f for f in jar_files if not (f.endswith('-sources.jar') or f.endswith('-javadoc.jar'))] if not jar_files: log.error('Maven did not generate any JAR artifacts') exit(1) for jar_file in jar_files: build_dir = _build_dir() log.info("Copying " + jar_file + " -> " + build_dir + "") shutil.copy(jar_file, build_dir)
python
def package_maven(): """ Run maven package lifecycle """ if not os.getenv('JAVA_HOME'): # make sure Maven uses the same JDK which we have used to compile # and link the C-code os.environ['JAVA_HOME'] = jdk_home_dir mvn_goal = 'package' log.info("Executing Maven goal '" + mvn_goal + "'") code = subprocess.call(['mvn', 'clean', mvn_goal, '-DskipTests'], shell=platform.system() == 'Windows') if code: exit(code) # # Copy JAR results to lib/*.jar # if not os.path.exists(lib_dir): os.mkdir(lib_dir) target_dir = os.path.join(base_dir, 'target') jar_files = glob.glob(os.path.join(target_dir, '*.jar')) jar_files = [f for f in jar_files if not (f.endswith('-sources.jar') or f.endswith('-javadoc.jar'))] if not jar_files: log.error('Maven did not generate any JAR artifacts') exit(1) for jar_file in jar_files: build_dir = _build_dir() log.info("Copying " + jar_file + " -> " + build_dir + "") shutil.copy(jar_file, build_dir)
[ "def", "package_maven", "(", ")", ":", "if", "not", "os", ".", "getenv", "(", "'JAVA_HOME'", ")", ":", "# make sure Maven uses the same JDK which we have used to compile", "# and link the C-code", "os", ".", "environ", "[", "'JAVA_HOME'", "]", "=", "jdk_home_dir", "mvn_goal", "=", "'package'", "log", ".", "info", "(", "\"Executing Maven goal '\"", "+", "mvn_goal", "+", "\"'\"", ")", "code", "=", "subprocess", ".", "call", "(", "[", "'mvn'", ",", "'clean'", ",", "mvn_goal", ",", "'-DskipTests'", "]", ",", "shell", "=", "platform", ".", "system", "(", ")", "==", "'Windows'", ")", "if", "code", ":", "exit", "(", "code", ")", "#", "# Copy JAR results to lib/*.jar", "#", "if", "not", "os", ".", "path", ".", "exists", "(", "lib_dir", ")", ":", "os", ".", "mkdir", "(", "lib_dir", ")", "target_dir", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "'target'", ")", "jar_files", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "target_dir", ",", "'*.jar'", ")", ")", "jar_files", "=", "[", "f", "for", "f", "in", "jar_files", "if", "not", "(", "f", ".", "endswith", "(", "'-sources.jar'", ")", "or", "f", ".", "endswith", "(", "'-javadoc.jar'", ")", ")", "]", "if", "not", "jar_files", ":", "log", ".", "error", "(", "'Maven did not generate any JAR artifacts'", ")", "exit", "(", "1", ")", "for", "jar_file", "in", "jar_files", ":", "build_dir", "=", "_build_dir", "(", ")", "log", ".", "info", "(", "\"Copying \"", "+", "jar_file", "+", "\" -> \"", "+", "build_dir", "+", "\"\"", ")", "shutil", ".", "copy", "(", "jar_file", ",", "build_dir", ")" ]
Run maven package lifecycle
[ "Run", "maven", "package", "lifecycle" ]
ae813df536807fb839650a0b359aa90f8344dd79
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/setup.py#L153-L184
train
bcdev/jpy
setup.py
_write_jpy_config
def _write_jpy_config(target_dir=None, install_dir=None): """ Write out a well-formed jpyconfig.properties file for easier Java integration in a given location. """ if not target_dir: target_dir = _build_dir() args = [sys.executable, os.path.join(target_dir, 'jpyutil.py'), '--jvm_dll', jvm_dll_file, '--java_home', jdk_home_dir, '--log_level', 'DEBUG', '--req_java', '--req_py'] if install_dir: args.append('--install_dir') args.append(install_dir) log.info('Writing jpy configuration to %s using install_dir %s' % (target_dir, install_dir)) return subprocess.call(args)
python
def _write_jpy_config(target_dir=None, install_dir=None): """ Write out a well-formed jpyconfig.properties file for easier Java integration in a given location. """ if not target_dir: target_dir = _build_dir() args = [sys.executable, os.path.join(target_dir, 'jpyutil.py'), '--jvm_dll', jvm_dll_file, '--java_home', jdk_home_dir, '--log_level', 'DEBUG', '--req_java', '--req_py'] if install_dir: args.append('--install_dir') args.append(install_dir) log.info('Writing jpy configuration to %s using install_dir %s' % (target_dir, install_dir)) return subprocess.call(args)
[ "def", "_write_jpy_config", "(", "target_dir", "=", "None", ",", "install_dir", "=", "None", ")", ":", "if", "not", "target_dir", ":", "target_dir", "=", "_build_dir", "(", ")", "args", "=", "[", "sys", ".", "executable", ",", "os", ".", "path", ".", "join", "(", "target_dir", ",", "'jpyutil.py'", ")", ",", "'--jvm_dll'", ",", "jvm_dll_file", ",", "'--java_home'", ",", "jdk_home_dir", ",", "'--log_level'", ",", "'DEBUG'", ",", "'--req_java'", ",", "'--req_py'", "]", "if", "install_dir", ":", "args", ".", "append", "(", "'--install_dir'", ")", "args", ".", "append", "(", "install_dir", ")", "log", ".", "info", "(", "'Writing jpy configuration to %s using install_dir %s'", "%", "(", "target_dir", ",", "install_dir", ")", ")", "return", "subprocess", ".", "call", "(", "args", ")" ]
Write out a well-formed jpyconfig.properties file for easier Java integration in a given location.
[ "Write", "out", "a", "well", "-", "formed", "jpyconfig", ".", "properties", "file", "for", "easier", "Java", "integration", "in", "a", "given", "location", "." ]
ae813df536807fb839650a0b359aa90f8344dd79
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/setup.py#L216-L236
train
bcdev/jpy
jpyutil.py
_get_module_path
def _get_module_path(name, fail=False, install_path=None): """ Find the path to the jpy jni modules. """ import imp module = imp.find_module(name) if not module and fail: raise RuntimeError("can't find module '" + name + "'") path = module[1] if not path and fail: raise RuntimeError("module '" + name + "' is missing a file path") if install_path: return os.path.join(install_path, os.path.split(path)[1]) return path
python
def _get_module_path(name, fail=False, install_path=None): """ Find the path to the jpy jni modules. """ import imp module = imp.find_module(name) if not module and fail: raise RuntimeError("can't find module '" + name + "'") path = module[1] if not path and fail: raise RuntimeError("module '" + name + "' is missing a file path") if install_path: return os.path.join(install_path, os.path.split(path)[1]) return path
[ "def", "_get_module_path", "(", "name", ",", "fail", "=", "False", ",", "install_path", "=", "None", ")", ":", "import", "imp", "module", "=", "imp", ".", "find_module", "(", "name", ")", "if", "not", "module", "and", "fail", ":", "raise", "RuntimeError", "(", "\"can't find module '\"", "+", "name", "+", "\"'\"", ")", "path", "=", "module", "[", "1", "]", "if", "not", "path", "and", "fail", ":", "raise", "RuntimeError", "(", "\"module '\"", "+", "name", "+", "\"' is missing a file path\"", ")", "if", "install_path", ":", "return", "os", ".", "path", ".", "join", "(", "install_path", ",", "os", ".", "path", ".", "split", "(", "path", ")", "[", "1", "]", ")", "return", "path" ]
Find the path to the jpy jni modules.
[ "Find", "the", "path", "to", "the", "jpy", "jni", "modules", "." ]
ae813df536807fb839650a0b359aa90f8344dd79
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/jpyutil.py#L99-L113
train
bcdev/jpy
jpyutil.py
init_jvm
def init_jvm(java_home=None, jvm_dll=None, jvm_maxmem=None, jvm_classpath=None, jvm_properties=None, jvm_options=None, config_file=None, config=None): """ Creates a configured Java virtual machine which will be used by jpy. :param java_home: The Java JRE or JDK home directory used to search JVM shared library, if 'jvm_dll' is omitted. :param jvm_dll: The JVM shared library file. My be inferred from 'java_home'. :param jvm_maxmem: The JVM maximum heap space, e.g. '400M', '8G'. Refer to the java executable '-Xmx' option. :param jvm_classpath: The JVM search paths for Java class files. Separated by colons (Unix) or semicolons (Windows). Refer to the java executable '-cp' option. :param jvm_properties: A dictionary of key -> value pairs passed to the JVM as Java system properties. Refer to the java executable '-D' option. :param jvm_options: A list of extra options for the JVM. Refer to the java executable options. :param config_file: Extra configuration file (e.g. 'jpyconfig.py') to be loaded if 'config' parameter is omitted. :param config: An optional default configuration object providing default attributes for the 'jvm_maxmem', 'jvm_classpath', 'jvm_properties', 'jvm_options' parameters. :return: a tuple (cdll, actual_jvm_options) on success, None otherwise. """ if not config: config = _get_python_api_config(config_file=config_file) cdll = preload_jvm_dll(jvm_dll_file=jvm_dll, java_home_dir=java_home, config_file=config_file, config=config, fail=False) import jpy if not jpy.has_jvm(): jvm_options = get_jvm_options(jvm_maxmem=jvm_maxmem, jvm_classpath=jvm_classpath, jvm_properties=jvm_properties, jvm_options=jvm_options, config=config) logger.debug('Creating JVM with options %s' % repr(jvm_options)) jpy.create_jvm(options=jvm_options) else: jvm_options = None # print('jvm_dll =', jvm_dll) # print('jvm_options =', jvm_options) return cdll, jvm_options
python
def init_jvm(java_home=None, jvm_dll=None, jvm_maxmem=None, jvm_classpath=None, jvm_properties=None, jvm_options=None, config_file=None, config=None): """ Creates a configured Java virtual machine which will be used by jpy. :param java_home: The Java JRE or JDK home directory used to search JVM shared library, if 'jvm_dll' is omitted. :param jvm_dll: The JVM shared library file. My be inferred from 'java_home'. :param jvm_maxmem: The JVM maximum heap space, e.g. '400M', '8G'. Refer to the java executable '-Xmx' option. :param jvm_classpath: The JVM search paths for Java class files. Separated by colons (Unix) or semicolons (Windows). Refer to the java executable '-cp' option. :param jvm_properties: A dictionary of key -> value pairs passed to the JVM as Java system properties. Refer to the java executable '-D' option. :param jvm_options: A list of extra options for the JVM. Refer to the java executable options. :param config_file: Extra configuration file (e.g. 'jpyconfig.py') to be loaded if 'config' parameter is omitted. :param config: An optional default configuration object providing default attributes for the 'jvm_maxmem', 'jvm_classpath', 'jvm_properties', 'jvm_options' parameters. :return: a tuple (cdll, actual_jvm_options) on success, None otherwise. """ if not config: config = _get_python_api_config(config_file=config_file) cdll = preload_jvm_dll(jvm_dll_file=jvm_dll, java_home_dir=java_home, config_file=config_file, config=config, fail=False) import jpy if not jpy.has_jvm(): jvm_options = get_jvm_options(jvm_maxmem=jvm_maxmem, jvm_classpath=jvm_classpath, jvm_properties=jvm_properties, jvm_options=jvm_options, config=config) logger.debug('Creating JVM with options %s' % repr(jvm_options)) jpy.create_jvm(options=jvm_options) else: jvm_options = None # print('jvm_dll =', jvm_dll) # print('jvm_options =', jvm_options) return cdll, jvm_options
[ "def", "init_jvm", "(", "java_home", "=", "None", ",", "jvm_dll", "=", "None", ",", "jvm_maxmem", "=", "None", ",", "jvm_classpath", "=", "None", ",", "jvm_properties", "=", "None", ",", "jvm_options", "=", "None", ",", "config_file", "=", "None", ",", "config", "=", "None", ")", ":", "if", "not", "config", ":", "config", "=", "_get_python_api_config", "(", "config_file", "=", "config_file", ")", "cdll", "=", "preload_jvm_dll", "(", "jvm_dll_file", "=", "jvm_dll", ",", "java_home_dir", "=", "java_home", ",", "config_file", "=", "config_file", ",", "config", "=", "config", ",", "fail", "=", "False", ")", "import", "jpy", "if", "not", "jpy", ".", "has_jvm", "(", ")", ":", "jvm_options", "=", "get_jvm_options", "(", "jvm_maxmem", "=", "jvm_maxmem", ",", "jvm_classpath", "=", "jvm_classpath", ",", "jvm_properties", "=", "jvm_properties", ",", "jvm_options", "=", "jvm_options", ",", "config", "=", "config", ")", "logger", ".", "debug", "(", "'Creating JVM with options %s'", "%", "repr", "(", "jvm_options", ")", ")", "jpy", ".", "create_jvm", "(", "options", "=", "jvm_options", ")", "else", ":", "jvm_options", "=", "None", "# print('jvm_dll =', jvm_dll)", "# print('jvm_options =', jvm_options)", "return", "cdll", ",", "jvm_options" ]
Creates a configured Java virtual machine which will be used by jpy. :param java_home: The Java JRE or JDK home directory used to search JVM shared library, if 'jvm_dll' is omitted. :param jvm_dll: The JVM shared library file. May be inferred from 'java_home'. :param jvm_maxmem: The JVM maximum heap space, e.g. '400M', '8G'. Refer to the java executable '-Xmx' option. :param jvm_classpath: The JVM search paths for Java class files. Separated by colons (Unix) or semicolons (Windows). Refer to the java executable '-cp' option. :param jvm_properties: A dictionary of key -> value pairs passed to the JVM as Java system properties. Refer to the java executable '-D' option. :param jvm_options: A list of extra options for the JVM. Refer to the java executable options. :param config_file: Extra configuration file (e.g. 'jpyconfig.py') to be loaded if 'config' parameter is omitted. :param config: An optional default configuration object providing default attributes for the 'jvm_maxmem', 'jvm_classpath', 'jvm_properties', 'jvm_options' parameters. :return: a tuple (cdll, actual_jvm_options) on success, None otherwise.
[ "Creates", "a", "configured", "Java", "virtual", "machine", "which", "will", "be", "used", "by", "jpy", "." ]
ae813df536807fb839650a0b359aa90f8344dd79
https://github.com/bcdev/jpy/blob/ae813df536807fb839650a0b359aa90f8344dd79/jpyutil.py#L411-L459
train
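A usage sketch for jpyutil.init_jvm assembled from the parameters in its docstring. The class-path value, the system property and the follow-up jpy.get_type call are illustrative assumptions, not taken from this record.

import jpyutil

# Start (or attach to) a JVM with a 512 MB heap; the class path is a placeholder.
cdll, jvm_options = jpyutil.init_jvm(jvm_maxmem='512M',
                                     jvm_classpath='target/classes',
                                     jvm_properties={'file.encoding': 'UTF-8'})

import jpy  # safe to import once the JVM exists

# Assumed jpy API: look up a Java type and call it from Python.
String = jpy.get_type('java.lang.String')
print(String('hello from the JVM').toUpperCase())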
KeepSafe/android-resource-remover
android_clean_app.py
run_lint_command
def run_lint_command(): """ Run lint command in the shell and save results to lint-result.xml """ lint, app_dir, lint_result, ignore_layouts = parse_args() if not lint_result: if not distutils.spawn.find_executable(lint): raise Exception( '`%s` executable could not be found and path to lint result not specified. See --help' % lint) lint_result = os.path.join(app_dir, 'lint-result.xml') call_result = subprocess.call([lint, app_dir, '--xml', lint_result]) if call_result > 0: print('Running the command failed with result %s. Try running it from the console.' ' Arguments for subprocess.call: %s' % (call_result, [lint, app_dir, '--xml', lint_result])) else: if not os.path.isabs(lint_result): lint_result = os.path.join(app_dir, lint_result) lint_result = os.path.abspath(lint_result) return lint_result, app_dir, ignore_layouts
python
def run_lint_command(): """ Run lint command in the shell and save results to lint-result.xml """ lint, app_dir, lint_result, ignore_layouts = parse_args() if not lint_result: if not distutils.spawn.find_executable(lint): raise Exception( '`%s` executable could not be found and path to lint result not specified. See --help' % lint) lint_result = os.path.join(app_dir, 'lint-result.xml') call_result = subprocess.call([lint, app_dir, '--xml', lint_result]) if call_result > 0: print('Running the command failed with result %s. Try running it from the console.' ' Arguments for subprocess.call: %s' % (call_result, [lint, app_dir, '--xml', lint_result])) else: if not os.path.isabs(lint_result): lint_result = os.path.join(app_dir, lint_result) lint_result = os.path.abspath(lint_result) return lint_result, app_dir, ignore_layouts
[ "def", "run_lint_command", "(", ")", ":", "lint", ",", "app_dir", ",", "lint_result", ",", "ignore_layouts", "=", "parse_args", "(", ")", "if", "not", "lint_result", ":", "if", "not", "distutils", ".", "spawn", ".", "find_executable", "(", "lint", ")", ":", "raise", "Exception", "(", "'`%s` executable could not be found and path to lint result not specified. See --help'", "%", "lint", ")", "lint_result", "=", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'lint-result.xml'", ")", "call_result", "=", "subprocess", ".", "call", "(", "[", "lint", ",", "app_dir", ",", "'--xml'", ",", "lint_result", "]", ")", "if", "call_result", ">", "0", ":", "print", "(", "'Running the command failed with result %s. Try running it from the console.'", "' Arguments for subprocess.call: %s'", "%", "(", "call_result", ",", "[", "lint", ",", "app_dir", ",", "'--xml'", ",", "lint_result", "]", ")", ")", "else", ":", "if", "not", "os", ".", "path", ".", "isabs", "(", "lint_result", ")", ":", "lint_result", "=", "os", ".", "path", ".", "join", "(", "app_dir", ",", "lint_result", ")", "lint_result", "=", "os", ".", "path", ".", "abspath", "(", "lint_result", ")", "return", "lint_result", ",", "app_dir", ",", "ignore_layouts" ]
Run lint command in the shell and save results to lint-result.xml
[ "Run", "lint", "command", "in", "the", "shell", "and", "save", "results", "to", "lint", "-", "result", ".", "xml" ]
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L87-L105
train
KeepSafe/android-resource-remover
android_clean_app.py
parse_lint_result
def parse_lint_result(lint_result_path, manifest_path): """ Parse lint-result.xml and create Issue for every problem found except unused strings referenced in AndroidManifest """ unused_string_pattern = re.compile('The resource `R\.string\.([^`]+)` appears to be unused') mainfest_string_refs = get_manifest_string_refs(manifest_path) root = etree.parse(lint_result_path).getroot() issues = [] for issue_xml in root.findall('.//issue[@id="UnusedResources"]'): message = issue_xml.get('message') unused_string = re.match(unused_string_pattern, issue_xml.get('message')) has_string_in_manifest = unused_string and unused_string.group(1) in mainfest_string_refs if not has_string_in_manifest: issues.extend(_get_issues_from_location(UnusedResourceIssue, issue_xml.findall('location'), message)) for issue_xml in root.findall('.//issue[@id="ExtraTranslation"]'): message = issue_xml.get('message') if re.findall(ExtraTranslationIssue.pattern, message): issues.extend(_get_issues_from_location(ExtraTranslationIssue, issue_xml.findall('location'), message)) return issues
python
def parse_lint_result(lint_result_path, manifest_path): """ Parse lint-result.xml and create Issue for every problem found except unused strings referenced in AndroidManifest """ unused_string_pattern = re.compile('The resource `R\.string\.([^`]+)` appears to be unused') mainfest_string_refs = get_manifest_string_refs(manifest_path) root = etree.parse(lint_result_path).getroot() issues = [] for issue_xml in root.findall('.//issue[@id="UnusedResources"]'): message = issue_xml.get('message') unused_string = re.match(unused_string_pattern, issue_xml.get('message')) has_string_in_manifest = unused_string and unused_string.group(1) in mainfest_string_refs if not has_string_in_manifest: issues.extend(_get_issues_from_location(UnusedResourceIssue, issue_xml.findall('location'), message)) for issue_xml in root.findall('.//issue[@id="ExtraTranslation"]'): message = issue_xml.get('message') if re.findall(ExtraTranslationIssue.pattern, message): issues.extend(_get_issues_from_location(ExtraTranslationIssue, issue_xml.findall('location'), message)) return issues
[ "def", "parse_lint_result", "(", "lint_result_path", ",", "manifest_path", ")", ":", "unused_string_pattern", "=", "re", ".", "compile", "(", "'The resource `R\\.string\\.([^`]+)` appears to be unused'", ")", "mainfest_string_refs", "=", "get_manifest_string_refs", "(", "manifest_path", ")", "root", "=", "etree", ".", "parse", "(", "lint_result_path", ")", ".", "getroot", "(", ")", "issues", "=", "[", "]", "for", "issue_xml", "in", "root", ".", "findall", "(", "'.//issue[@id=\"UnusedResources\"]'", ")", ":", "message", "=", "issue_xml", ".", "get", "(", "'message'", ")", "unused_string", "=", "re", ".", "match", "(", "unused_string_pattern", ",", "issue_xml", ".", "get", "(", "'message'", ")", ")", "has_string_in_manifest", "=", "unused_string", "and", "unused_string", ".", "group", "(", "1", ")", "in", "mainfest_string_refs", "if", "not", "has_string_in_manifest", ":", "issues", ".", "extend", "(", "_get_issues_from_location", "(", "UnusedResourceIssue", ",", "issue_xml", ".", "findall", "(", "'location'", ")", ",", "message", ")", ")", "for", "issue_xml", "in", "root", ".", "findall", "(", "'.//issue[@id=\"ExtraTranslation\"]'", ")", ":", "message", "=", "issue_xml", ".", "get", "(", "'message'", ")", "if", "re", ".", "findall", "(", "ExtraTranslationIssue", ".", "pattern", ",", "message", ")", ":", "issues", ".", "extend", "(", "_get_issues_from_location", "(", "ExtraTranslationIssue", ",", "issue_xml", ".", "findall", "(", "'location'", ")", ",", "message", ")", ")", "return", "issues" ]
Parse lint-result.xml and create Issue for every problem found except unused strings referenced in AndroidManifest
[ "Parse", "lint", "-", "result", ".", "xml", "and", "create", "Issue", "for", "every", "problem", "found", "except", "unused", "strings", "referenced", "in", "AndroidManifest" ]
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L138-L163
train
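To make the XPath-style queries in parse_lint_result concrete, a self-contained sketch over a synthetic lint-result.xml snippet; the XML content is invented for illustration.

from lxml import etree

LINT_XML = b"""<issues>
  <issue id="UnusedResources"
         message="The resource `R.string.title` appears to be unused">
    <location file="res/values/strings.xml" line="3"/>
  </issue>
</issues>"""

root = etree.fromstring(LINT_XML)
for issue in root.findall('.//issue[@id="UnusedResources"]'):
    message = issue.get('message')
    for location in issue.findall('location'):
        print(message, '->', location.get('file'))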
KeepSafe/android-resource-remover
android_clean_app.py
remove_resource_file
def remove_resource_file(issue, filepath, ignore_layouts): """ Delete a file from the filesystem """ if os.path.exists(filepath) and (ignore_layouts is False or issue.elements[0][0] != 'layout'): print('removing resource: {0}'.format(filepath)) os.remove(os.path.abspath(filepath))
python
def remove_resource_file(issue, filepath, ignore_layouts): """ Delete a file from the filesystem """ if os.path.exists(filepath) and (ignore_layouts is False or issue.elements[0][0] != 'layout'): print('removing resource: {0}'.format(filepath)) os.remove(os.path.abspath(filepath))
[ "def", "remove_resource_file", "(", "issue", ",", "filepath", ",", "ignore_layouts", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "filepath", ")", "and", "(", "ignore_layouts", "is", "False", "or", "issue", ".", "elements", "[", "0", "]", "[", "0", "]", "!=", "'layout'", ")", ":", "print", "(", "'removing resource: {0}'", ".", "format", "(", "filepath", ")", ")", "os", ".", "remove", "(", "os", ".", "path", ".", "abspath", "(", "filepath", ")", ")" ]
Delete a file from the filesystem
[ "Delete", "a", "file", "from", "the", "filesystem" ]
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L166-L172
train
KeepSafe/android-resource-remover
android_clean_app.py
remove_resource_value
def remove_resource_value(issue, filepath): """ Read an xml file and remove an element which is unused, then save the file back to the filesystem """ if os.path.exists(filepath): for element in issue.elements: print('removing {0} from resource {1}'.format(element, filepath)) parser = etree.XMLParser(remove_blank_text=False, remove_comments=False, remove_pis=False, strip_cdata=False, resolve_entities=False) tree = etree.parse(filepath, parser) root = tree.getroot() for unused_value in root.findall('.//{0}[@name="{1}"]'.format(element[0], element[1])): root.remove(unused_value) with open(filepath, 'wb') as resource: tree.write(resource, encoding='utf-8', xml_declaration=True)
python
def remove_resource_value(issue, filepath): """ Read an xml file and remove an element which is unused, then save the file back to the filesystem """ if os.path.exists(filepath): for element in issue.elements: print('removing {0} from resource {1}'.format(element, filepath)) parser = etree.XMLParser(remove_blank_text=False, remove_comments=False, remove_pis=False, strip_cdata=False, resolve_entities=False) tree = etree.parse(filepath, parser) root = tree.getroot() for unused_value in root.findall('.//{0}[@name="{1}"]'.format(element[0], element[1])): root.remove(unused_value) with open(filepath, 'wb') as resource: tree.write(resource, encoding='utf-8', xml_declaration=True)
[ "def", "remove_resource_value", "(", "issue", ",", "filepath", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "filepath", ")", ":", "for", "element", "in", "issue", ".", "elements", ":", "print", "(", "'removing {0} from resource {1}'", ".", "format", "(", "element", ",", "filepath", ")", ")", "parser", "=", "etree", ".", "XMLParser", "(", "remove_blank_text", "=", "False", ",", "remove_comments", "=", "False", ",", "remove_pis", "=", "False", ",", "strip_cdata", "=", "False", ",", "resolve_entities", "=", "False", ")", "tree", "=", "etree", ".", "parse", "(", "filepath", ",", "parser", ")", "root", "=", "tree", ".", "getroot", "(", ")", "for", "unused_value", "in", "root", ".", "findall", "(", "'.//{0}[@name=\"{1}\"]'", ".", "format", "(", "element", "[", "0", "]", ",", "element", "[", "1", "]", ")", ")", ":", "root", ".", "remove", "(", "unused_value", ")", "with", "open", "(", "filepath", ",", "'wb'", ")", "as", "resource", ":", "tree", ".", "write", "(", "resource", ",", "encoding", "=", "'utf-8'", ",", "xml_declaration", "=", "True", ")" ]
Read an xml file and remove an element which is unused, then save the file back to the filesystem
[ "Read", "an", "xml", "file", "and", "remove", "an", "element", "which", "is", "unused", "then", "save", "the", "file", "back", "to", "the", "filesystem" ]
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L175-L189
train
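The element removal in remove_resource_value reduces to an attribute-matched findall followed by a write-back; here is a standalone, in-memory sketch of the same step. The resource names are invented.

from lxml import etree

XML = b"""<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">Demo</string>
    <string name="obsolete_title">Unused</string>
</resources>"""

root = etree.fromstring(XML)

# Drop the unused <string name="obsolete_title"> entry, keep everything else.
for unused in root.findall('.//string[@name="obsolete_title"]'):
    root.remove(unused)

print(etree.tostring(root, encoding='utf-8', xml_declaration=True).decode('utf-8'))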
KeepSafe/android-resource-remover
android_clean_app.py
remove_unused_resources
def remove_unused_resources(issues, app_dir, ignore_layouts): """ Remove the file or the value inside the file depending if the whole file is unused or not. """ for issue in issues: filepath = os.path.join(app_dir, issue.filepath) if issue.remove_file: remove_resource_file(issue, filepath, ignore_layouts) else: remove_resource_value(issue, filepath)
python
def remove_unused_resources(issues, app_dir, ignore_layouts): """ Remove the file or the value inside the file depending if the whole file is unused or not. """ for issue in issues: filepath = os.path.join(app_dir, issue.filepath) if issue.remove_file: remove_resource_file(issue, filepath, ignore_layouts) else: remove_resource_value(issue, filepath)
[ "def", "remove_unused_resources", "(", "issues", ",", "app_dir", ",", "ignore_layouts", ")", ":", "for", "issue", "in", "issues", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "app_dir", ",", "issue", ".", "filepath", ")", "if", "issue", ".", "remove_file", ":", "remove_resource_file", "(", "issue", ",", "filepath", ",", "ignore_layouts", ")", "else", ":", "remove_resource_value", "(", "issue", ",", "filepath", ")" ]
Remove the file or the value inside the file, depending on whether the whole file is unused.
[ "Remove", "the", "file", "or", "the", "value", "inside", "the", "file", "depending", "if", "the", "whole", "file", "is", "unused", "or", "not", "." ]
f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8
https://github.com/KeepSafe/android-resource-remover/blob/f2b4fb5a6822da79c9b166e3250ca6bdc6ee06e8/android_clean_app.py#L192-L201
train
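Putting the last few records together, a hedged end-to-end sketch of the script's flow when a lint report already exists. It assumes android_clean_app.py is importable as a module; all paths are placeholders.

import os

import android_clean_app as cleaner

app_dir = '/path/to/android/project'                     # placeholder
lint_result = os.path.join(app_dir, 'lint-result.xml')   # produced by a prior lint run
manifest = os.path.join(app_dir, 'AndroidManifest.xml')  # placeholder

issues = cleaner.parse_lint_result(lint_result, manifest)
cleaner.remove_unused_resources(issues, app_dir, ignore_layouts=True)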
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/caches/__init__.py
_encryption_context_hash
def _encryption_context_hash(hasher, encryption_context): """Generates the expected hash for the provided encryption context. :param hasher: Existing hasher to use :type hasher: cryptography.hazmat.primitives.hashes.Hash :param dict encryption_context: Encryption context to hash :returns: Complete hash :rtype: bytes """ serialized_encryption_context = serialize_encryption_context(encryption_context) hasher.update(serialized_encryption_context) return hasher.finalize()
python
def _encryption_context_hash(hasher, encryption_context): """Generates the expected hash for the provided encryption context. :param hasher: Existing hasher to use :type hasher: cryptography.hazmat.primitives.hashes.Hash :param dict encryption_context: Encryption context to hash :returns: Complete hash :rtype: bytes """ serialized_encryption_context = serialize_encryption_context(encryption_context) hasher.update(serialized_encryption_context) return hasher.finalize()
[ "def", "_encryption_context_hash", "(", "hasher", ",", "encryption_context", ")", ":", "serialized_encryption_context", "=", "serialize_encryption_context", "(", "encryption_context", ")", "hasher", ".", "update", "(", "serialized_encryption_context", ")", "return", "hasher", ".", "finalize", "(", ")" ]
Generates the expected hash for the provided encryption context. :param hasher: Existing hasher to use :type hasher: cryptography.hazmat.primitives.hashes.Hash :param dict encryption_context: Encryption context to hash :returns: Complete hash :rtype: bytes
[ "Generates", "the", "expected", "hash", "for", "the", "provided", "encryption", "context", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/caches/__init__.py#L51-L62
train
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/caches/__init__.py
build_encryption_materials_cache_key
def build_encryption_materials_cache_key(partition, request): """Generates a cache key for an encrypt request. :param bytes partition: Partition name for which to generate key :param request: Request for which to generate key :type request: aws_encryption_sdk.materials_managers.EncryptionMaterialsRequest :returns: cache key :rtype: bytes """ if request.algorithm is None: _algorithm_info = b"\x00" else: _algorithm_info = b"\x01" + request.algorithm.id_as_bytes() hasher = _new_cache_key_hasher() _partition_hash = _partition_name_hash(hasher=hasher.copy(), partition_name=partition) _ec_hash = _encryption_context_hash(hasher=hasher.copy(), encryption_context=request.encryption_context) hasher.update(_partition_hash) hasher.update(_algorithm_info) hasher.update(_ec_hash) return hasher.finalize()
python
def build_encryption_materials_cache_key(partition, request): """Generates a cache key for an encrypt request. :param bytes partition: Partition name for which to generate key :param request: Request for which to generate key :type request: aws_encryption_sdk.materials_managers.EncryptionMaterialsRequest :returns: cache key :rtype: bytes """ if request.algorithm is None: _algorithm_info = b"\x00" else: _algorithm_info = b"\x01" + request.algorithm.id_as_bytes() hasher = _new_cache_key_hasher() _partition_hash = _partition_name_hash(hasher=hasher.copy(), partition_name=partition) _ec_hash = _encryption_context_hash(hasher=hasher.copy(), encryption_context=request.encryption_context) hasher.update(_partition_hash) hasher.update(_algorithm_info) hasher.update(_ec_hash) return hasher.finalize()
[ "def", "build_encryption_materials_cache_key", "(", "partition", ",", "request", ")", ":", "if", "request", ".", "algorithm", "is", "None", ":", "_algorithm_info", "=", "b\"\\x00\"", "else", ":", "_algorithm_info", "=", "b\"\\x01\"", "+", "request", ".", "algorithm", ".", "id_as_bytes", "(", ")", "hasher", "=", "_new_cache_key_hasher", "(", ")", "_partition_hash", "=", "_partition_name_hash", "(", "hasher", "=", "hasher", ".", "copy", "(", ")", ",", "partition_name", "=", "partition", ")", "_ec_hash", "=", "_encryption_context_hash", "(", "hasher", "=", "hasher", ".", "copy", "(", ")", ",", "encryption_context", "=", "request", ".", "encryption_context", ")", "hasher", ".", "update", "(", "_partition_hash", ")", "hasher", ".", "update", "(", "_algorithm_info", ")", "hasher", ".", "update", "(", "_ec_hash", ")", "return", "hasher", ".", "finalize", "(", ")" ]
Generates a cache key for an encrypt request. :param bytes partition: Partition name for which to generate key :param request: Request for which to generate key :type request: aws_encryption_sdk.materials_managers.EncryptionMaterialsRequest :returns: cache key :rtype: bytes
[ "Generates", "a", "cache", "key", "for", "an", "encrypt", "request", "." ]
d182155d5fb1ef176d9e7d0647679737d5146495
https://github.com/aws/aws-encryption-sdk-python/blob/d182155d5fb1ef176d9e7d0647679737d5146495/src/aws_encryption_sdk/caches/__init__.py#L65-L86
train
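The two cache-key records above share one pattern: a base hasher is copied, fed a single field, and finalized, and the per-field digests are then hashed together. The sketch below reproduces that composition with the cryptography package; the SHA-512 digest and the placeholder field values are assumptions for illustration, not a statement of the SDK's exact key format.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

def _hash(hasher, data):
    hasher.update(data)
    return hasher.finalize()

base = hashes.Hash(hashes.SHA512(), backend=default_backend())  # digest choice is assumed

partition_hash = _hash(base.copy(), b'my-partition')                 # placeholder partition name
context_hash = _hash(base.copy(), b'serialized-encryption-context')  # placeholder serialization
algorithm_info = b'\x00'  # "no algorithm requested" marker, mirroring the code above

cache_key = _hash(base.copy(), partition_hash + algorithm_info + context_hash)
print(cache_key.hex())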