idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
59,400
def get_url(self, cmd, **args):
    """Expand the request URL for a request.

    Prepends the HTTP session's base URL to the URL built for ``cmd``.

    NOTE(review): keyword arguments are collected with ``**args`` but
    spread to ``_mkurl`` with ``*args``, which passes only the keyword
    NAMES (dict iteration yields keys) — confirm this matches the
    ``_mkurl`` contract.
    """
    return self.http.base_url + self._mkurl(cmd, *args)
Expand the request URL for a request .
59,401
async def post(self, cmd, data=None, timeout=None, **args):
    """Perform DAAP POST command with optional data.

    Ensures a login session exists before issuing the request and
    delegates the actual call through ``self._do`` (presumably for
    retry/relogin handling — confirm against ``_do``).
    """
    def _post_request():
        # Copy the shared DMAP headers so the mutation below does not
        # leak into other requests.
        headers = copy(_DMAP_HEADERS)
        headers['Content-Type'] = 'application/x-www-form-urlencoded'
        return self.http.post_data(
            self._mkurl(cmd, *args),
            data=data,
            headers=headers,
            timeout=timeout)

    await self._assure_logged_in()
    return await self._do(_post_request)
Perform DAAP POST command with optional data .
59,402
def set_repeat(self, repeat_mode):
    """Change repeat mode.

    Maps the public ``const.REPEAT_STATE_*`` values onto the numeric
    codes used by the wire protocol (1=off, 2=all, 3=track) and sends
    the corresponding repeat message.

    Raises ValueError for any unrecognized mode.
    """
    if int(repeat_mode) == const.REPEAT_STATE_OFF:
        state = 1
    elif int(repeat_mode) == const.REPEAT_STATE_ALL:
        state = 2
    elif int(repeat_mode) == const.REPEAT_STATE_TRACK:
        state = 3
    else:
        raise ValueError('Invalid repeat mode: ' + str(repeat_mode))
    return self.protocol.send(messages.repeat(state))
Change repeat mode .
59,403
def genre(self):
    """Genre of the currently playing song.

    NOTE(review): the parsed ``ContentItem`` is never returned, so this
    method always yields None as written — the body looks truncated;
    confirm against the upstream implementation.
    """
    if self._metadata:
        # Imported lazily so the protobuf module is only loaded when
        # metadata is actually present.
        from pyatv.mrp.protobuf import ContentItem_pb2
        transaction = ContentItem_pb2.ContentItem()
        transaction.ParseFromString(self._metadata)
Genre of the currently playing song .
59,404
def total_time(self):
    """Total play time in seconds.

    Returns None when the now-playing info carries no duration field.
    """
    now_playing = self._setstate.nowPlayingInfo
    if now_playing.HasField('duration'):
        return int(now_playing.duration)
    return None
Total play time in seconds .
59,405
def shuffle(self):
    """If shuffle is enabled or not.

    Returns the protocol's shuffle mode value, or None when no
    ChangeShuffleMode command info is available.
    """
    info = self._get_command_info(CommandInfo_pb2.ChangeShuffleMode)
    return None if info is None else info.shuffleMode
If shuffle is enabled or not .
59,406
def repeat(self):
    """Repeat mode.

    Returns the protocol's repeat mode value, or None when no
    ChangeRepeatMode command info is available.
    """
    info = self._get_command_info(CommandInfo_pb2.ChangeRepeatMode)
    return None if info is None else info.repeatMode
Repeat mode .
59,407
async def playing(self):
    """Return what is currently playing.

    Starts the protocol if no state has been received yet; if state is
    still missing afterwards, an empty playing state is returned.
    """
    if self._setstate is None:
        await self.protocol.start()
        if self._setstate is None:
            # Nothing received even after starting: report empty state.
            return MrpPlaying(protobuf.SetStateMessage(), None)
    return MrpPlaying(self._setstate, self._nowplaying)
Return what is currently playing .
59,408
async def stop(self, **kwargs):
    """Stop pairing process.

    Finishes pairing with the previously supplied PIN and stores the
    resulting credentials on the service.

    NOTE(review): raising bare ``Exception`` makes this hard for
    callers to catch selectively — a more specific type would help.
    """
    if not self._pin_code:
        raise Exception('no pin given')
    self.service.device_credentials = \
        await self.pairing_procedure.finish_pairing(self._pin_code)
Stop pairing process .
59,409
def read_tlv(data):
    """Parse TLV8 bytes into a dict.

    Keys are the decimal string form of the one-byte tag; values are
    the raw value bytes. Consecutive TLVs sharing a tag (values longer
    than 255 bytes are split into fragments) are concatenated back into
    a single value.

    Args:
        data: bytes-like TLV8 payload.

    Returns:
        dict mapping str(tag) -> bytes.
    """
    # Iterative parse: the original recursed once per TLV, which can
    # exceed Python's recursion limit on payloads with many small TLVs.
    result = {}
    pos = 0
    size = len(data)
    while pos < size:
        tag = str(data[pos])
        length = data[pos + 1]
        value = data[pos + 2:pos + 2 + length]
        if tag in result:
            # Continuation fragment of an oversized value.
            result[tag] += value
        else:
            result[tag] = value
        pos += 2 + length
    return result
Parse TLV8 bytes into a dict .
59,410
def write_tlv(data):
    """Convert a dict to TLV8 bytes.

    Keys are numeric tags (as str or int); values are bytes. Values
    longer than 255 bytes are emitted as several consecutive TLVs with
    the same tag, each carrying at most 255 bytes.
    """
    chunks = []
    for key, value in data.items():
        tag = bytes([int(key)])
        remaining = len(value)
        offset = 0
        while offset < len(value):
            fragment = min(remaining, 255)
            chunks.append(tag)
            chunks.append(bytes([fragment]))
            chunks.append(value[offset:offset + fragment])
            offset += fragment
            remaining -= fragment
    # Join once at the end instead of growing a bytes object in place.
    return b''.join(chunks)
Convert a dict to TLV8 bytes .
59,411
def comment(value, comment_text):
    """Annotates a value or a Doc with a comment.

    Dispatches to ``comment_doc`` for Doc instances and to
    ``comment_value`` for plain values.
    """
    handler = comment_doc if isinstance(value, Doc) else comment_value
    return handler(value, comment_text)
Annotates a value or a Doc with a comment .
59,412
def register_pretty(type=None, predicate=None):
    """Returns a decorator that registers the decorated function as the
    pretty printer for instances of ``type``.

    Exactly one of ``type`` (a class, or its dotted name as a string
    for deferred registration) or ``predicate`` (a callable tested
    against values) must be provided.

    Raises ValueError when neither or both are given, when ``predicate``
    is not callable, or when the decorated function does not accept
    exactly the two positional parameters ``(value, ctx)``.

    NOTE(review): the parameter ``type`` shadows the builtin by design
    of the public API.
    """
    if type is None and predicate is None:
        raise ValueError(
            "You must provide either the 'type' or 'predicate' argument."
        )
    if type is not None and predicate is not None:
        raise ValueError(
            "You must provide either the 'type' or 'predicate' argument,"
            "but not both"
        )
    if predicate is not None:
        if not callable(predicate):
            raise ValueError(
                "Expected a callable for 'predicate', got {}".format(
                    repr(predicate)
                )
            )

    def decorator(fn):
        sig = inspect.signature(fn)
        value = None
        ctx = None
        try:
            # Validate up front that the printer can be called with the
            # two positional arguments it will receive at dispatch time.
            sig.bind(value, ctx)
        except TypeError:
            fnname = '{}.{}'.format(fn.__module__, fn.__qualname__)
            raise ValueError(
                "Functions decorated with register_pretty must accept "
                "exactly two positional parameters: 'value' and 'ctx'. "
                "The function signature for {} was not compatible.".format(
                    fnname
                )
            )
        if type:
            if isinstance(type, str):
                # String name: defer registration until the type is seen.
                _DEFERRED_DISPATCH_BY_NAME[type] = fn
            else:
                pretty_dispatch.register(type, partial(_run_pretty, fn))
        else:
            assert callable(predicate)
            _PREDICATE_REGISTRY.append((predicate, fn))
        return fn
    return decorator
Returns a decorator that registers the decorated function as the pretty printer for instances of type .
59,413
def commentdoc(text):
    """Returns a Doc representing a comment ``text``.

    The text is treated as words, and any whitespace between words may
    be used as a break point when the comment must be wrapped onto
    multiple lines (each continuation line re-prefixed with '# ').

    Raises ValueError for empty text.
    """
    if not text:
        raise ValueError(
            'Expected non-empty comment str, got {}'.format(repr(text))
        )
    commentlines = []
    for line in text.splitlines():
        # Split the line into an alternating word/whitespace sequence,
        # dropping empty fragments produced by the split.
        alternating_words_ws = list(
            filter(None, WHITESPACE_PATTERN_TEXT.split(line))
        )
        starts_with_whitespace = bool(
            WHITESPACE_PATTERN_TEXT.match(alternating_words_ws[0])
        )
        if starts_with_whitespace:
            # Preserve leading indentation as a fixed prefix.
            prefix = alternating_words_ws[0]
            alternating_words_ws = alternating_words_ws[1:]
        else:
            prefix = NIL
        if len(alternating_words_ws) % 2 == 0:
            # Drop a trailing whitespace fragment so the sequence ends
            # on a word.
            alternating_words_ws = alternating_words_ws[:-1]
        # Replace each whitespace fragment with a choice: keep it when
        # the line fits, otherwise break and restart with '# '.
        for idx, tup in enumerate(
            zip(alternating_words_ws, cycle([False, True]))
        ):
            part, is_ws = tup
            if is_ws:
                alternating_words_ws[idx] = flat_choice(
                    when_flat=part,
                    when_broken=always_break(
                        concat([
                            HARDLINE,
                            '# ',
                        ])
                    )
                )
        commentlines.append(
            concat(['# ', prefix, fill(alternating_words_ws)])
        )
    outer = identity
    if len(commentlines) > 1:
        # Multi-line comments must never be flattened onto one line.
        outer = always_break
    return annotate(
        Token.COMMENT_SINGLE,
        outer(concat(intersperse(HARDLINE, commentlines)))
    )
Returns a Doc representing a comment text . text is treated as words and any whitespace may be used to break the comment to multiple lines .
59,414
def build_fncall(
    ctx,
    fndoc,
    argdocs=(),
    kwargdocs=(),
    hug_sole_arg=False,
    trailing_comment=None,
):
    """Builds a doc that looks like a function call from docs that
    represent the function, its positional arguments and its keyword
    arguments.

    ``kwargdocs`` is an iterable of (binding, doc) pairs. When
    ``hug_sole_arg`` is true and there is exactly one uncommented
    positional argument, it is kept on the same line as the parens.
    ``trailing_comment`` appends a comment after the last argument and
    forces the call to break over multiple lines.
    """
    if callable(fndoc):
        fndoc = general_identifier(fndoc)
    has_comment = bool(trailing_comment)
    argdocs = list(argdocs)
    kwargdocs = list(kwargdocs)
    # Render each keyword argument as "name=value", re-attaching any
    # comment annotation to the combined doc.
    kwargdocs = [
        (
            comment_doc(
                concat([keyword_arg(binding), ASSIGN_OP, doc.doc]),
                doc.annotation.value
            )
            if is_commented(doc)
            else concat([keyword_arg(binding), ASSIGN_OP, doc])
        )
        for binding, doc in kwargdocs
    ]
    if not (argdocs or kwargdocs):
        # No arguments at all: "fn()".
        return concat([
            fndoc,
            LPAREN,
            RPAREN,
        ])
    if (
        hug_sole_arg and
        not kwargdocs and
        len(argdocs) == 1 and
        not is_commented(argdocs[0])
    ):
        # Single argument hugs the parentheses: "fn(arg)".
        return group(concat([fndoc, LPAREN, argdocs[0], RPAREN]))
    allarg_docs = [*argdocs, *kwargdocs]
    if trailing_comment:
        allarg_docs.append(commentdoc(trailing_comment))
    parts = []
    for idx, doc in enumerate(allarg_docs):
        last = idx == len(allarg_docs) - 1
        if is_commented(doc):
            # Any commented argument forces the call to break.
            has_comment = True
            comment_str = doc.annotation.value
            doc = doc.doc
        else:
            comment_str = None
        part = concat([doc, NIL if last else COMMA])
        if comment_str:
            # Comment goes inline when flat, above the argument when
            # broken.
            part = group(
                flat_choice(
                    when_flat=concat([part, ' ', commentdoc(comment_str)]),
                    when_broken=concat([
                        commentdoc(comment_str),
                        HARDLINE,
                        part,
                    ]),
                )
            )
        if not last:
            part = concat([part, HARDLINE if has_comment else LINE])
        parts.append(part)
    outer = (always_break if has_comment else group)
    return outer(
        concat([
            fndoc,
            LPAREN,
            nest(
                ctx.indent,
                concat([
                    SOFTLINE,
                    concat(parts),
                ])
            ),
            SOFTLINE,
            RPAREN
        ])
    )
Builds a doc that looks like a function call from docs that represent the function arguments and keyword arguments .
59,415
def assoc(self, key, value):
    """Return a modified PrettyContext with ``key`` set to ``value``.

    The receiver is not mutated; a new context with a copied user dict
    is returned.
    """
    return self._replace(user_ctx={
        **self.user_ctx,
        key: value,
    })
Return a modified PrettyContext with key set to value
59,416
def align(doc):
    """Aligns each new line in ``doc`` with the first new line.

    Implemented as a contextual doc that, at render time, nests the
    inner doc by the current column relative to the current indent.
    """
    validate_doc(doc)

    def evaluator(indent, column, page_width, ribbon_width):
        return Nest(column - indent, doc)

    return contextual(evaluator)
Aligns each new line in doc with the first new line .
59,417
def smart_fitting_predicate(
    page_width,
    ribbon_frac,
    min_nesting_level,
    max_width,
    triplestack
):
    """Lookahead until the last doc at the current indentation level.

    Walks the (indent, mode, doc) triple stack, consuming character
    budget, to decide whether the pending content fits within
    ``max_width``. Pretty, but not as fast as a simpler predicate.

    Returns True when everything fits (or a hard line returns to a
    shallower nesting level), False otherwise.
    """
    chars_left = max_width
    while chars_left >= 0:
        if not triplestack:
            return True
        indent, mode, doc = triplestack.pop()
        if doc is NIL:
            continue
        elif isinstance(doc, str):
            chars_left -= len(doc)
        elif isinstance(doc, Concat):
            # Push children in reverse so they pop in document order.
            triplestack.extend(
                (indent, mode, doc)
                for doc in reversed(doc.docs)
            )
        elif isinstance(doc, Annotated):
            triplestack.append((indent, mode, doc.doc))
        elif isinstance(doc, Fill):
            triplestack.extend(
                (indent, mode, doc)
                for doc in reversed(doc.docs)
            )
        elif isinstance(doc, Nest):
            triplestack.append((indent + doc.indent, mode, doc.doc))
        elif isinstance(doc, AlwaysBreak):
            return False
        elif doc is HARDLINE:
            if indent > min_nesting_level:
                # Still nested deeper than where we started: keep
                # scanning with a fresh line budget.
                chars_left = page_width - indent
            else:
                # Back at (or above) the starting level: fits.
                return True
        elif isinstance(doc, FlatChoice):
            if mode is FLAT_MODE:
                triplestack.append((indent, mode, doc.when_flat))
            elif mode is BREAK_MODE:
                triplestack.append((indent, mode, doc.when_broken))
            else:
                raise ValueError
        elif isinstance(doc, Group):
            # Groups are tried flat during fitting.
            triplestack.append((indent, FLAT_MODE, doc.doc))
        elif isinstance(doc, Contextual):
            ribbon_width = max(
                0,
                min(page_width, round(ribbon_frac * page_width))
            )
            evaluated_doc = doc.fn(
                indent=indent,
                column=max_width - chars_left,
                page_width=page_width,
                ribbon_width=ribbon_width,
            )
            normalized = normalize_doc(evaluated_doc)
            triplestack.append((indent, mode, normalized))
        elif isinstance(doc, SAnnotationPush):
            # Styling markers consume no width.
            continue
        elif isinstance(doc, SAnnotationPop):
            continue
        else:
            raise ValueError((indent, mode, doc))
    return False
Lookahead until the last doc at the current indentation level . Pretty but not as fast .
59,418
def set_default_style(style):
    """Sets default global style to be used by ``prettyprinter.cpprint``.

    ``style`` may be a pygments Style subclass or one of the shortcut
    strings 'dark' / 'light'.

    Raises TypeError for anything else.
    """
    global default_style

    if style == 'dark':
        style = default_dark_style
    elif style == 'light':
        style = default_light_style

    if not issubclass(style, Style):
        raise TypeError(
            "style must be a subclass of pygments.styles.Style or "
            "one of 'dark', 'light'. Got {}".format(repr(style))
        )
    default_style = style
Sets default global style to be used by prettyprinter . cpprint .
59,419
def intersperse(x, ys):
    """Yield the elements of ``ys`` with ``x`` inserted between each
    adjacent pair.

    Yields nothing for an empty ``ys``.
    """
    first = True
    for item in ys:
        if not first:
            yield x
        yield item
        first = False
Returns an iterable where x is inserted between each element of ys
59,420
def pprint(
    object,
    stream=_UNSET_SENTINEL,
    indent=_UNSET_SENTINEL,
    width=_UNSET_SENTINEL,
    depth=_UNSET_SENTINEL,
    *,
    compact=False,
    ribbon_width=_UNSET_SENTINEL,
    max_seq_len=_UNSET_SENTINEL,
    sort_dict_keys=_UNSET_SENTINEL,
    end='\n'
):
    """Pretty print a Python value ``object`` to ``stream``, which
    defaults to sys.stdout. The output will not be colored.

    Unset parameters fall back to the global defaults via
    ``_merge_defaults``.

    NOTE(review): ``compact`` is accepted but never used here —
    presumably kept for pprint-API compatibility; confirm.
    """
    sdocs = python_to_sdocs(
        object,
        **_merge_defaults(
            indent=indent,
            width=width,
            depth=depth,
            ribbon_width=ribbon_width,
            max_seq_len=max_seq_len,
            sort_dict_keys=sort_dict_keys,
        )
    )
    stream = (
        sys.stdout
        if stream is _UNSET_SENTINEL
        else stream
    )
    default_render_to_stream(stream, sdocs)
    if end:
        stream.write(end)
Pretty print a Python value object to stream which defaults to sys . stdout . The output will not be colored .
59,421
def cpprint(
    object,
    stream=_UNSET_SENTINEL,
    indent=_UNSET_SENTINEL,
    width=_UNSET_SENTINEL,
    depth=_UNSET_SENTINEL,
    *,
    compact=False,
    ribbon_width=_UNSET_SENTINEL,
    max_seq_len=_UNSET_SENTINEL,
    sort_dict_keys=_UNSET_SENTINEL,
    style=None,
    end='\n'
):
    """Pretty print a Python value ``object`` to ``stream``, which
    defaults to sys.stdout. The output will be colored and syntax
    highlighted (using ``style``, or the global default when None).

    NOTE(review): ``compact`` is accepted but never used here —
    presumably kept for pprint-API compatibility; confirm.
    """
    sdocs = python_to_sdocs(
        object,
        **_merge_defaults(
            indent=indent,
            width=width,
            depth=depth,
            ribbon_width=ribbon_width,
            max_seq_len=max_seq_len,
            sort_dict_keys=sort_dict_keys,
        )
    )
    stream = (
        sys.stdout
        if stream is _UNSET_SENTINEL
        else stream
    )
    colored_render_to_stream(stream, sdocs, style=style)
    if end:
        stream.write(end)
Pretty print a Python value object to stream which defaults to sys . stdout . The output will be colored and syntax highlighted .
59,422
def install_extras(
    include=ALL_EXTRAS,
    *,
    exclude=EMPTY_SET,
    raise_on_error=False,
    warn_on_error=True
):
    """Installs extras.

    Imports and installs each extra in ``(ALL_EXTRAS & include) -
    exclude``. Import or install failures are re-raised when
    ``raise_on_error`` is true, otherwise optionally reported as
    warnings.

    Raises ValueError when an unknown extra name is requested.
    """
    include = set(include)
    exclude = set(exclude)
    unexisting_extras = (include | exclude) - ALL_EXTRAS
    if unexisting_extras:
        raise ValueError(
            "The following extras don't exist: {}".format(
                ', '.join(unexisting_extras)
            )
        )
    extras_to_install = (ALL_EXTRAS & include) - exclude
    for extra in extras_to_install:
        module_name = 'prettyprinter.extras.' + extra
        try:
            extra_module = import_module(module_name)
        except ImportError as e:
            if raise_on_error:
                raise e
            if warn_on_error:
                warnings.warn(
                    "Failed to import '{0}' PrettyPrinter extra. "
                    "If you don't need it, call install_extras with "
                    "exclude=['{0}']".format(extra)
                )
        else:
            # Only attempt installation when the import succeeded.
            try:
                extra_module.install()
            except Exception as exc:
                if raise_on_error:
                    raise exc
                elif warn_on_error:
                    warnings.warn(
                        "Failed to install '{0}' PrettyPrinter extra. "
                        "If you don't need it, call install_extras with "
                        "exclude=['{0}']".format(extra)
                    )
Installs extras .
59,423
def set_default_config(
    *,
    style=_UNSET_SENTINEL,
    max_seq_len=_UNSET_SENTINEL,
    width=_UNSET_SENTINEL,
    ribbon_width=_UNSET_SENTINEL,
    depth=_UNSET_SENTINEL,
    sort_dict_keys=_UNSET_SENTINEL
):
    """Sets the default configuration values used when calling
    ``pprint``, ``cpprint`` or ``pformat``, if those values weren't
    explicitly provided. Only overrides the values provided as keyword
    arguments; unset ones keep their current defaults.

    Returns the new defaults dict.
    """
    global _default_config

    if style is not _UNSET_SENTINEL:
        set_default_style(style)
    # Copy-and-update so concurrent readers never see a half-built dict.
    new_defaults = {**_default_config}
    if max_seq_len is not _UNSET_SENTINEL:
        new_defaults['max_seq_len'] = max_seq_len
    if width is not _UNSET_SENTINEL:
        new_defaults['width'] = width
    if ribbon_width is not _UNSET_SENTINEL:
        new_defaults['ribbon_width'] = ribbon_width
    if depth is not _UNSET_SENTINEL:
        new_defaults['depth'] = depth
    if sort_dict_keys is not _UNSET_SENTINEL:
        new_defaults['sort_dict_keys'] = sort_dict_keys
    _default_config = new_defaults
    return new_defaults
Sets the default configuration values used when calling pprint, cpprint, or pformat, if those values weren't explicitly provided. Only overrides the values provided in the keyword arguments.
59,424
def package_maven():
    """Run maven package lifecycle.

    Runs ``mvn clean package -DskipTests`` (with JAVA_HOME defaulted to
    the module-level ``jdk_home_dir`` when unset) and copies the
    produced JAR artifacts — excluding sources/javadoc JARs — into the
    build directory. Exits the process on failure.

    Relies on the module-level globals ``jdk_home_dir``, ``lib_dir``,
    ``base_dir`` and ``log``.
    """
    if not os.getenv('JAVA_HOME'):
        os.environ['JAVA_HOME'] = jdk_home_dir
    mvn_goal = 'package'
    log.info("Executing Maven goal '" + mvn_goal + "'")
    # shell=True is required on Windows where 'mvn' is a batch file.
    code = subprocess.call(
        ['mvn', 'clean', mvn_goal, '-DskipTests'],
        shell=platform.system() == 'Windows')
    if code:
        exit(code)
    if not os.path.exists(lib_dir):
        os.mkdir(lib_dir)
    target_dir = os.path.join(base_dir, 'target')
    jar_files = glob.glob(os.path.join(target_dir, '*.jar'))
    # Skip the auxiliary artifacts Maven also produces.
    jar_files = [
        f for f in jar_files
        if not (f.endswith('-sources.jar') or f.endswith('-javadoc.jar'))
    ]
    if not jar_files:
        log.error('Maven did not generate any JAR artifacts')
        exit(1)
    for jar_file in jar_files:
        build_dir = _build_dir()
        log.info("Copying " + jar_file + " -> " + build_dir + "")
        shutil.copy(jar_file, build_dir)
Run maven package lifecycle
59,425
def _write_jpy_config(target_dir=None, install_dir=None):
    """Write out a well-formed jpyconfig.properties file for easier
    Java integration in a given location.

    Invokes ``jpyutil.py`` from ``target_dir`` (defaulting to the build
    directory) in a subprocess and returns its exit code.

    Relies on the module-level globals ``jvm_dll_file``, ``jdk_home_dir``
    and ``log``.
    """
    if not target_dir:
        target_dir = _build_dir()
    args = [
        sys.executable,
        os.path.join(target_dir, 'jpyutil.py'),
        '--jvm_dll', jvm_dll_file,
        '--java_home', jdk_home_dir,
        '--log_level', 'DEBUG',
        '--req_java',
        '--req_py'
    ]
    if install_dir:
        args.append('--install_dir')
        args.append(install_dir)
    log.info(
        'Writing jpy configuration to %s using install_dir %s'
        % (target_dir, install_dir))
    return subprocess.call(args)
Write out a well - formed jpyconfig . properties file for easier Java integration in a given location .
59,426
def _get_module_path ( name , fail = False , install_path = None ) : import imp module = imp . find_module ( name ) if not module and fail : raise RuntimeError ( "can't find module '" + name + "'" ) path = module [ 1 ] if not path and fail : raise RuntimeError ( "module '" + name + "' is missing a file path" ) if install_path : return os . path . join ( install_path , os . path . split ( path ) [ 1 ] ) return path
Find the path to the jpy jni modules .
59,427
def init_jvm(java_home=None,
             jvm_dll=None,
             jvm_maxmem=None,
             jvm_classpath=None,
             jvm_properties=None,
             jvm_options=None,
             config_file=None,
             config=None):
    """Creates a configured Java virtual machine which will be used by
    jpy.

    Loads the Python API config if not given, preloads the JVM shared
    library, and creates the JVM unless one already exists.

    Returns:
        (cdll, jvm_options) — the preloaded JVM DLL handle and the
        options used for creation, or None for the options when a JVM
        was already running.
    """
    if not config:
        config = _get_python_api_config(config_file=config_file)

    # Preload the JVM shared library so the jpy extension can bind to it.
    cdll = preload_jvm_dll(
        jvm_dll_file=jvm_dll,
        java_home_dir=java_home,
        config_file=config_file,
        config=config,
        fail=False)

    # Imported here because jpy can only be imported after the DLL preload.
    import jpy

    if not jpy.has_jvm():
        jvm_options = get_jvm_options(
            jvm_maxmem=jvm_maxmem,
            jvm_classpath=jvm_classpath,
            jvm_properties=jvm_properties,
            jvm_options=jvm_options,
            config=config)
        logger.debug('Creating JVM with options %s' % repr(jvm_options))
        jpy.create_jvm(options=jvm_options)
    else:
        jvm_options = None

    return cdll, jvm_options
Creates a configured Java virtual machine which will be used by jpy .
59,428
def run_lint_command():
    """Run lint command in the shell and save results to
    lint-result.xml.

    When no result path was supplied, runs the lint executable and
    writes results into the app directory; otherwise just resolves the
    supplied path against the app directory.

    Returns:
        (lint_result, app_dir, ignore_layouts) tuple.

    Raises:
        Exception: when the lint executable cannot be found and no
            result path was given.
    """
    # shutil.which replaces distutils.spawn.find_executable; distutils
    # was removed from the standard library in Python 3.12.
    import shutil

    lint, app_dir, lint_result, ignore_layouts = parse_args()
    if not lint_result:
        if not shutil.which(lint):
            raise Exception(
                '`%s` executable could not be found and path to lint result not specified. See --help' % lint)
        lint_result = os.path.join(app_dir, 'lint-result.xml')
        call_result = subprocess.call([lint, app_dir, '--xml', lint_result])
        if call_result > 0:
            print('Running the command failed with result %s. Try running it from the console.'
                  ' Arguments for subprocess.call: %s'
                  % (call_result, [lint, app_dir, '--xml', lint_result]))
    else:
        # A user-supplied result path is resolved relative to app_dir.
        if not os.path.isabs(lint_result):
            lint_result = os.path.join(app_dir, lint_result)
        lint_result = os.path.abspath(lint_result)
    return lint_result, app_dir, ignore_layouts
Run lint command in the shell and save results to lint - result . xml
59,429
def parse_lint_result(lint_result_path, manifest_path):
    """Parse lint-result.xml and create an Issue for every problem
    found, except unused strings that are referenced in
    AndroidManifest.

    Args:
        lint_result_path: path to the lint XML report.
        manifest_path: path to AndroidManifest.xml.

    Returns:
        list of issue objects.
    """
    # Raw string: the original used plain '\.' escapes, which are
    # invalid escape sequences in a non-raw string literal.
    unused_string_pattern = re.compile(
        r'The resource `R\.string\.([^`]+)` appears to be unused')
    manifest_string_refs = get_manifest_string_refs(manifest_path)
    root = etree.parse(lint_result_path).getroot()
    issues = []
    for issue_xml in root.findall('.//issue[@id="UnusedResources"]'):
        message = issue_xml.get('message')
        # Reuse the already-fetched message instead of re-reading the
        # attribute.
        unused_string = re.match(unused_string_pattern, message)
        has_string_in_manifest = (
            unused_string
            and unused_string.group(1) in manifest_string_refs)
        if not has_string_in_manifest:
            issues.extend(_get_issues_from_location(
                UnusedResourceIssue,
                issue_xml.findall('location'),
                message))
    for issue_xml in root.findall('.//issue[@id="ExtraTranslation"]'):
        message = issue_xml.get('message')
        if re.findall(ExtraTranslationIssue.pattern, message):
            issues.extend(_get_issues_from_location(
                ExtraTranslationIssue,
                issue_xml.findall('location'),
                message))
    return issues
Parse lint - result . xml and create Issue for every problem found except unused strings referenced in AndroidManifest
59,430
def remove_resource_file(issue, filepath, ignore_layouts):
    """Delete a file from the filesystem.

    Skips files that no longer exist; when ``ignore_layouts`` is true,
    layout resources are left alone.
    """
    if os.path.exists(filepath) and (
            ignore_layouts is False or issue.elements[0][0] != 'layout'):
        print('removing resource: {0}'.format(filepath))
        os.remove(os.path.abspath(filepath))
Delete a file from the filesystem
59,431
def remove_resource_value(issue, filepath):
    """Read an xml file and remove an element which is unused, then
    save the file back to the filesystem.

    Each (tag, name) pair in ``issue.elements`` is removed from the
    document root before rewriting the file.
    """
    if os.path.exists(filepath):
        for element in issue.elements:
            print('removing {0} from resource {1}'.format(element, filepath))
            # Parse as conservatively as possible so that rewriting the
            # file does not alter unrelated content (comments, CDATA,
            # entities, whitespace).
            parser = etree.XMLParser(
                remove_blank_text=False,
                remove_comments=False,
                remove_pis=False,
                strip_cdata=False,
                resolve_entities=False)
            tree = etree.parse(filepath, parser)
            root = tree.getroot()
            for unused_value in root.findall(
                    './/{0}[@name="{1}"]'.format(element[0], element[1])):
                root.remove(unused_value)
            with open(filepath, 'wb') as resource:
                tree.write(
                    resource, encoding='utf-8', xml_declaration=True)
Read an xml file and remove an element which is unused then save the file back to the filesystem
59,432
def remove_unused_resources(issues, app_dir, ignore_layouts):
    """Remove the file, or the value inside the file, depending on
    whether the whole file is unused or not.
    """
    for issue in issues:
        filepath = os.path.join(app_dir, issue.filepath)
        if issue.remove_file:
            remove_resource_file(issue, filepath, ignore_layouts)
        else:
            remove_resource_value(issue, filepath)
Remove the file or the value inside the file depending if the whole file is unused or not .
59,433
def _encryption_context_hash(hasher, encryption_context):
    """Generates the expected hash for the provided encryption context.

    The context is serialized into its canonical byte form before
    hashing so equal contexts always hash equally.
    """
    serialized_encryption_context = serialize_encryption_context(
        encryption_context)
    hasher.update(serialized_encryption_context)
    return hasher.finalize()
Generates the expected hash for the provided encryption context .
59,434
def build_encryption_materials_cache_key(partition, request):
    """Generates a cache key for an encrypt request.

    The key is a hash over the partition name hash, an algorithm marker
    (b"\\x00" when no algorithm was requested, otherwise b"\\x01" plus
    the algorithm id), and the encryption context hash.
    """
    if request.algorithm is None:
        _algorithm_info = b"\x00"
    else:
        _algorithm_info = b"\x01" + request.algorithm.id_as_bytes()

    hasher = _new_cache_key_hasher()
    # Sub-hashes are computed on copies so the outer hasher state stays
    # untouched until the final update sequence.
    _partition_hash = _partition_name_hash(
        hasher=hasher.copy(), partition_name=partition)
    _ec_hash = _encryption_context_hash(
        hasher=hasher.copy(), encryption_context=request.encryption_context)

    hasher.update(_partition_hash)
    hasher.update(_algorithm_info)
    hasher.update(_ec_hash)
    return hasher.finalize()
Generates a cache key for an encrypt request .
59,435
def _encrypted_data_keys_hash(hasher, encrypted_data_keys):
    """Generates the expected hash for the provided encrypted data
    keys.

    Each EDK is serialized and hashed independently; the per-key hashes
    are sorted before joining so the result is order-independent.
    """
    hashed_keys = []
    for edk in encrypted_data_keys:
        serialized_edk = serialize_encrypted_data_key(edk)
        _hasher = hasher.copy()
        _hasher.update(serialized_edk)
        hashed_keys.append(_hasher.finalize())
    return b"".join(sorted(hashed_keys))
Generates the expected hash for the provided encrypted data keys .
59,436
def build_decryption_materials_cache_key(partition, request):
    """Generates a cache key for a decrypt request.

    The key hashes, in order: the partition name hash, the algorithm
    id, the (order-independent) encrypted data keys hash, a fixed
    512-bit pad, and the encryption context hash.
    """
    hasher = _new_cache_key_hasher()
    # Sub-hashes are computed on copies so the outer hasher state stays
    # untouched until the final update sequence.
    _partition_hash = _partition_name_hash(
        hasher=hasher.copy(), partition_name=partition)
    _algorithm_info = request.algorithm.id_as_bytes()
    _edks_hash = _encrypted_data_keys_hash(
        hasher=hasher.copy(), encrypted_data_keys=request.encrypted_data_keys)
    _ec_hash = _encryption_context_hash(
        hasher=hasher.copy(), encryption_context=request.encryption_context)

    hasher.update(_partition_hash)
    hasher.update(_algorithm_info)
    hasher.update(_edks_hash)
    hasher.update(_512_BIT_PAD)
    hasher.update(_ec_hash)
    return hasher.finalize()
Generates a cache key for a decrypt request .
59,437
def cycle_file(source_plaintext_filename):
    """Encrypts and then decrypts a file under a custom static master
    key provider.

    Writes ``<source>.encrypted`` and ``<source>.decrypted`` next to
    the source file, verifies the round-trip byte-for-byte, and checks
    that the decryption header preserved the encryption context.

    Returns:
        (ciphertext_filename, cycled_plaintext_filename) tuple.
    """
    # Random key ID: the provider generates a random key per unknown ID.
    key_id = os.urandom(8)
    master_key_provider = StaticRandomMasterKeyProvider()
    master_key_provider.add_master_key(key_id)

    ciphertext_filename = source_plaintext_filename + ".encrypted"
    cycled_plaintext_filename = source_plaintext_filename + ".decrypted"

    # Encrypt by streaming chunks from plaintext to ciphertext.
    with open(source_plaintext_filename, "rb") as plaintext, \
            open(ciphertext_filename, "wb") as ciphertext:
        with aws_encryption_sdk.stream(
                mode="e",
                source=plaintext,
                key_provider=master_key_provider) as encryptor:
            for chunk in encryptor:
                ciphertext.write(chunk)

    # Decrypt by streaming chunks back to a new plaintext file.
    with open(ciphertext_filename, "rb") as ciphertext, \
            open(cycled_plaintext_filename, "wb") as plaintext:
        with aws_encryption_sdk.stream(
                mode="d",
                source=ciphertext,
                key_provider=master_key_provider) as decryptor:
            for chunk in decryptor:
                plaintext.write(chunk)

    assert filecmp.cmp(source_plaintext_filename, cycled_plaintext_filename)
    # All context pairs written during encryption must survive decryption.
    assert all(
        pair in decryptor.header.encryption_context.items()
        for pair in encryptor.header.encryption_context.items()
    )
    return ciphertext_filename, cycled_plaintext_filename
Encrypts and then decrypts a file under a custom static master key provider .
59,438
def _get_raw_key(self, key_id):
    """Returns a static, randomly-generated symmetric key for the
    specified key ID.

    The key is generated on first use and cached so the same ID always
    maps to the same key within this provider instance.
    """
    try:
        static_key = self._static_keys[key_id]
    except KeyError:
        static_key = os.urandom(32)
        self._static_keys[key_id] = static_key
    return WrappingKey(
        wrapping_algorithm=WrappingAlgorithm.AES_256_GCM_IV12_TAG16_NO_PADDING,
        wrapping_key=static_key,
        wrapping_key_type=EncryptionKeyType.SYMMETRIC,
    )
Returns a static randomly - generated symmetric key for the specified key ID .
59,439
def stream_length(self):
    """Returns the length of the source stream, determining it if not
    already known.

    Raises:
        NotSupportedError: when the source stream is not seekable.
    """
    if self._stream_length is None:
        try:
            current_position = self.source_stream.tell()
            # Seek to the end to learn the length, then restore the
            # original position.
            self.source_stream.seek(0, 2)
            self._stream_length = self.source_stream.tell()
            self.source_stream.seek(current_position, 0)
        except Exception as error:
            # Deliberately broad: any failure to seek/tell means the
            # stream cannot report a length.
            raise NotSupportedError(error)
    return self._stream_length
Returns the length of the source stream determining it if not already known .
59,440
def read(self, b=-1):
    """Returns either the requested number of bytes or the entire
    stream.

    ``b`` of None or any negative value means "read everything".

    Raises:
        ValueError: when the stream has already been closed.
    """
    # Normalize "read all" requests to -1.
    if b is None or b < 0:
        b = -1
    _LOGGER.debug("Stream read called, requesting %d bytes", b)
    output = io.BytesIO()

    if not self._message_prepped:
        self._prep_message()

    if self.closed:
        raise ValueError("I/O operation on closed file")

    if b >= 0:
        self._read_bytes(b)
        output.write(self.output_buffer[:b])
        self.output_buffer = self.output_buffer[b:]
    else:
        # Drain the stream line by line until exhausted.
        while True:
            line = self.readline()
            if not line:
                break
            output.write(line)

    self.bytes_read += output.tell()
    _LOGGER.debug(
        "Returning %d bytes of %d bytes requested", output.tell(), b)
    return output.getvalue()
Returns either the requested number of bytes or the entire stream .
59,441
def readline(self):
    """Read a chunk of the output.

    Reads ``self.line_length`` bytes; a short read indicates the end of
    the stream (logged, and the short chunk is still returned).
    """
    _LOGGER.info("reading line")
    line = self.read(self.line_length)
    if len(line) < self.line_length:
        _LOGGER.info("all lines read")
    return line
Read a chunk of the output
59,442
def next(self):
    """Provides hook for Python2 iterator functionality.

    Raises StopIteration once the stream is closed or exhausted.
    """
    _LOGGER.debug("reading next")
    if self.closed:
        _LOGGER.debug("stream is closed")
        raise StopIteration()

    line = self.readline()
    if not line:
        _LOGGER.debug("nothing more to read")
        raise StopIteration()

    return line
Provides hook for Python2 iterator functionality .
59,443
def ciphertext_length(self):
    """Returns the length of the resulting ciphertext message in bytes,
    computed from the message header and the source plaintext length.
    """
    return aws_encryption_sdk.internal.formatting.ciphertext_length(
        header=self.header, plaintext_length=self.stream_length)
Returns the length of the resulting ciphertext message in bytes .
59,444
def _write_header(self):
    """Builds the message header and writes it to the output stream.

    The header authentication is computed over the serialized header
    just appended to the output buffer.

    NOTE(review): ``header=self.output_buffer`` hashes the whole buffer
    — assumes the buffer is empty before this call so it contains
    exactly the serialized header; confirm against callers.
    """
    self.output_buffer += serialize_header(
        header=self._header, signer=self.signer)
    self.output_buffer += serialize_header_auth(
        algorithm=self._encryption_materials.algorithm,
        header=self.output_buffer,
        data_encryption_key=self._derived_data_key,
        signer=self.signer,
    )
Builds the message header and writes it to the output stream .
59,445
def _read_bytes_to_non_framed_body(self, b):
    """Reads the requested number of bytes from source to a streaming
    non-framed message body.

    A short read from the plaintext cache indicates end of source: the
    encryptor is finalized, the close record (and footer, when signing)
    is appended, and the message is marked complete.

    Raises:
        SerializationError: when the total plaintext exceeds the
            non-framed message size limit.
    """
    _LOGGER.debug("Reading %d bytes", b)
    plaintext = self.__unframed_plaintext_cache.read(b)
    plaintext_length = len(plaintext)
    if self.tell() + len(plaintext) > MAX_NON_FRAMED_SIZE:
        raise SerializationError("Source too large for non-framed message")

    ciphertext = self.encryptor.update(plaintext)
    self._bytes_encrypted += plaintext_length
    if self.signer is not None:
        self.signer.update(ciphertext)

    if len(plaintext) < b:
        # Short read: the source is exhausted, finish the message.
        _LOGGER.debug(
            "Closing encryptor after receiving only %d bytes of %d bytes requested",
            plaintext_length,
            b)
        closing = self.encryptor.finalize()
        if self.signer is not None:
            self.signer.update(closing)
        closing += serialize_non_framed_close(
            tag=self.encryptor.tag, signer=self.signer)
        if self.signer is not None:
            closing += serialize_footer(self.signer)
        self.__message_complete = True
        return ciphertext + closing

    return ciphertext
Reads the requested number of bytes from source to a streaming non - framed message body .
59,446
def _read_bytes_to_framed_body(self, b):
    """Reads the requested number of bytes from source to a streaming
    framed message body.

    Positive requests are rounded up to whole frames. A short (or
    negative/"read all") read triggers finalization: a final frame is
    always written, followed by the footer when signing.
    """
    _LOGGER.debug("collecting %d bytes", b)
    _b = b

    if b > 0:
        # Round the request up to a whole number of frames.
        _frames_to_read = math.ceil(b / float(self.config.frame_length))
        b = int(_frames_to_read * self.config.frame_length)
    _LOGGER.debug(
        "%d bytes requested; reading %d bytes after normalizing to frame length",
        _b,
        b)

    plaintext = self.source_stream.read(b)
    plaintext_length = len(plaintext)
    _LOGGER.debug("%d bytes read from source", plaintext_length)

    finalize = False
    if b < 0 or plaintext_length < b:
        _LOGGER.debug("Final plaintext read from source")
        finalize = True

    output = b""
    final_frame_written = False

    # Keep writing frames while plaintext remains; on finalize, loop
    # until the final frame has been written (even if it is empty).
    while (
        (not finalize and plaintext)
        or (finalize and not final_frame_written)
    ):
        current_plaintext_length = len(plaintext)
        is_final_frame = (
            finalize and current_plaintext_length < self.config.frame_length)

        bytes_in_frame = min(
            current_plaintext_length, self.config.frame_length)
        _LOGGER.debug(
            "Writing %d bytes into%s frame %d",
            bytes_in_frame,
            " final" if is_final_frame else "",
            self.sequence_number,
        )
        self._bytes_encrypted += bytes_in_frame
        # serialize_frame consumes a frame's worth of plaintext and
        # returns the remainder.
        ciphertext, plaintext = serialize_frame(
            algorithm=self._encryption_materials.algorithm,
            plaintext=plaintext,
            message_id=self._header.message_id,
            data_encryption_key=self._derived_data_key,
            frame_length=self.config.frame_length,
            sequence_number=self.sequence_number,
            is_final_frame=is_final_frame,
            signer=self.signer,
        )
        final_frame_written = is_final_frame
        output += ciphertext
        self.sequence_number += 1

    if finalize:
        _LOGGER.debug("Writing footer")
        if self.signer is not None:
            output += serialize_footer(self.signer)
        self.__message_complete = True
    return output
Reads the requested number of bytes from source to a streaming framed message body .
59,447
def _read_header(self):
    """Reads the message header from the input stream.

    Deserializes the header, enforces the configured maximum frame
    size, obtains decryption materials, sets up the verifier (when a
    verification key is present), validates the header authentication,
    and derives the data encryption key.

    Returns:
        (header, header_auth) tuple.

    Raises:
        CustomMaximumValueExceeded: when the header frame length
            exceeds the configured maximum body length.
    """
    header, raw_header = deserialize_header(self.source_stream)
    self.__unframed_bytes_read += len(raw_header)

    if (
        self.config.max_body_length is not None
        and header.content_type == ContentType.FRAMED_DATA
        and header.frame_length > self.config.max_body_length
    ):
        raise CustomMaximumValueExceeded(
            "Frame Size in header found larger than custom value: {found:d} > {custom:d}".format(
                found=header.frame_length, custom=self.config.max_body_length
            )
        )

    decrypt_materials_request = DecryptionMaterialsRequest(
        encrypted_data_keys=header.encrypted_data_keys,
        algorithm=header.algorithm,
        encryption_context=header.encryption_context,
    )
    decryption_materials = self.config.materials_manager.decrypt_materials(
        request=decrypt_materials_request)
    if decryption_materials.verification_key is None:
        self.verifier = None
    else:
        self.verifier = Verifier.from_key_bytes(
            algorithm=header.algorithm,
            key_bytes=decryption_materials.verification_key)
    if self.verifier is not None:
        # The raw header bytes are part of the signed content.
        self.verifier.update(raw_header)

    header_auth = deserialize_header_auth(
        stream=self.source_stream,
        algorithm=header.algorithm,
        verifier=self.verifier)
    self._derived_data_key = derive_data_encryption_key(
        source_key=decryption_materials.data_key.data_key,
        algorithm=header.algorithm,
        message_id=header.message_id)
    validate_header(
        header=header,
        header_auth=header_auth,
        raw_header=raw_header,
        data_key=self._derived_data_key)
    return header, header_auth
Reads the message header from the input stream .
59,448
def _read_bytes_from_non_framed_body(self, b):
    """Read and decrypt the entire non-framed message body.

    The whole body is always read in a single pass, regardless of the number
    of bytes requested.

    :param int b: Number of bytes requested (used only for logging)
    :returns: Decrypted plaintext
    :rtype: bytes
    :raises SerializationError: if the stream holds less body data than the
        header declared
    """
    _LOGGER.debug("starting non-framed body read")
    bytes_to_read = self.body_length
    _LOGGER.debug("%d bytes requested; reading %d bytes", b, bytes_to_read)
    ciphertext = self.source_stream.read(bytes_to_read)
    if len(self.output_buffer) + len(ciphertext) < self.body_length:
        raise SerializationError("Total message body contents less than specified in body description")
    if self.verifier is not None:
        self.verifier.update(ciphertext)
    # The auth tag immediately follows the body ciphertext.
    tag = deserialize_tag(stream=self.source_stream, header=self._header, verifier=self.verifier)
    aad_content_string = aws_encryption_sdk.internal.utils.get_aad_content_string(
        content_type=self._header.content_type, is_final_frame=True
    )
    associated_data = assemble_content_aad(
        message_id=self._header.message_id,
        aad_content_string=aad_content_string,
        seq_num=1,
        length=self.body_length,
    )
    self.decryptor = Decryptor(
        algorithm=self._header.algorithm,
        key=self._derived_data_key,
        associated_data=associated_data,
        iv=self._unframed_body_iv,
        tag=tag,
    )
    plaintext = self.decryptor.update(ciphertext)
    # finalize authenticates the ciphertext against the tag.
    plaintext += self.decryptor.finalize()
    self.footer = deserialize_footer(stream=self.source_stream, verifier=self.verifier)
    return plaintext
Reads and decrypts the entire non-framed message body in one pass, regardless of the number of bytes requested.
59,449
def _read_bytes_from_framed_body(self, b):
    """Read and decrypt at least *b* bytes from a framed message body.

    Whole frames are always consumed, so more than *b* bytes may be returned.

    :param int b: Number of bytes requested
    :returns: Decrypted plaintext
    :rtype: bytes
    :raises SerializationError: if frames arrive out of sequence
    """
    plaintext = b""
    final_frame = False
    _LOGGER.debug("collecting %d bytes", b)
    while len(plaintext) < b and not final_frame:
        _LOGGER.debug("Reading frame")
        frame_data, final_frame = deserialize_frame(stream=self.source_stream, header=self._header, verifier=self.verifier)
        _LOGGER.debug("Read complete for frame %d", frame_data.sequence_number)
        # Frames must arrive in strictly increasing sequence order.
        if frame_data.sequence_number != self.last_sequence_number + 1:
            raise SerializationError("Malformed message: frames out of order")
        self.last_sequence_number += 1
        aad_content_string = aws_encryption_sdk.internal.utils.get_aad_content_string(
            content_type=self._header.content_type, is_final_frame=frame_data.final_frame
        )
        associated_data = assemble_content_aad(
            message_id=self._header.message_id,
            aad_content_string=aad_content_string,
            seq_num=frame_data.sequence_number,
            length=len(frame_data.ciphertext),
        )
        plaintext += decrypt(
            algorithm=self._header.algorithm,
            key=self._derived_data_key,
            encrypted_data=frame_data,
            associated_data=associated_data,
        )
        plaintext_length = len(plaintext)
        _LOGGER.debug("bytes collected: %d", plaintext_length)
    if final_frame:
        # The footer (signature), if any, immediately follows the final frame.
        _LOGGER.debug("Reading footer")
        self.footer = deserialize_footer(stream=self.source_stream, verifier=self.verifier)
    return plaintext
Reads the requested number of bytes from a streaming framed message body .
59,450
def close(self):
    """Close out the stream, refusing to close before the footer was read."""
    _LOGGER.debug("Closing stream")
    # A missing footer attribute means decryption never completed, so any
    # message signature was never verified.
    try:
        self.footer
    except AttributeError:
        raise SerializationError("Footer not read")
    super(StreamDecryptor, self).close()
Closes out the stream .
59,451
def _region_from_key_id ( key_id , default_region = None ) : try : region_name = key_id . split ( ":" , 4 ) [ 3 ] except IndexError : if default_region is None : raise UnknownRegionError ( "No default region found and no region determinable from key id: {}" . format ( key_id ) ) region_name = default_region return region_name
Determine the target region from a key ID falling back to a default region if provided .
59,452
def _process_config(self):
    """Traverse the configuration, adding regional clients and master keys
    as needed.
    """
    self._user_agent_adding_config = botocore.config.Config(user_agent_extra=USER_AGENT_SUFFIX)
    if self.config.region_names:
        # Explicit region list: the first entry becomes the default region.
        self.add_regional_clients_from_list(self.config.region_names)
        self.default_region = self.config.region_names[0]
    else:
        # Otherwise fall back to the botocore session's configured region.
        self.default_region = self.config.botocore_session.get_config_variable("region")
        if self.default_region is not None:
            self.add_regional_client(self.default_region)
    # Master keys are added last, after the default region is known.
    if self.config.key_ids:
        self.add_master_keys_from_list(self.config.key_ids)
Traverses the config and adds master keys and regional clients as needed .
59,453
def _wrap_client ( self , region_name , method , * args , ** kwargs ) : try : return method ( * args , ** kwargs ) except botocore . exceptions . BotoCoreError : self . _regional_clients . pop ( region_name ) _LOGGER . error ( 'Removing regional client "%s" from cache due to BotoCoreError on %s call' , region_name , method . __name__ ) raise
Proxies all calls to a KMS client's methods and removes misbehaving clients from the regional-client cache.
59,454
def _register_client ( self , client , region_name ) : for item in client . meta . method_to_api_mapping : method = getattr ( client , item ) wrapped_method = functools . partial ( self . _wrap_client , region_name , method ) setattr ( client , item , wrapped_method )
Uses functools . partial to wrap all methods on a client with the self . _wrap_client method
59,455
def add_regional_client(self, region_name):
    """Create and cache a KMS client for *region_name* if one is not already
    cached.
    """
    if region_name in self._regional_clients:
        return
    session = boto3.session.Session(region_name=region_name, botocore_session=self.config.botocore_session)
    client = session.client("kms", config=self._user_agent_adding_config)
    self._register_client(client, region_name)
    self._regional_clients[region_name] = client
Adds a regional client for the specified region if it does not already exist .
59,456
def _client(self, key_id):
    """Return a cached Boto3 KMS client for the region encoded in *key_id*,
    creating the client first if necessary.
    """
    region = _region_from_key_id(key_id, self.default_region)
    self.add_regional_client(region)
    return self._regional_clients[region]
Returns a Boto3 KMS client for the appropriate region .
59,457
def _new_master_key(self, key_id):
    """Build a :class:`KMSMasterKey` for *key_id*, backed by a client for the
    key's region.
    """
    # The client lookup needs a str; the config keeps the caller's value.
    return KMSMasterKey(config=KMSMasterKeyConfig(key_id=key_id, client=self._client(to_str(key_id))))
Returns a KMSMasterKey for the specified key_id .
59,458
def _generate_data_key(self, algorithm, encryption_context=None):
    """Ask KMS to generate a data key under this master key.

    :param algorithm: Algorithm suite that determines the key length
    :param dict encryption_context: Encryption context to bind (optional)
    :returns: Generated data key (plaintext and ciphertext)
    :raises GenerateKeyError: if the KMS call fails or the response is
        incomplete
    """
    request = {"KeyId": self._key_id, "NumberOfBytes": algorithm.kdf_input_len}
    if encryption_context is not None:
        request["EncryptionContext"] = encryption_context
    if self.config.grant_tokens:
        request["GrantTokens"] = self.config.grant_tokens
    try:
        response = self.config.client.generate_data_key(**request)
        plaintext = response["Plaintext"]
        ciphertext = response["CiphertextBlob"]
        key_id = response["KeyId"]
    except (ClientError, KeyError):
        message = "Master Key {key_id} unable to generate data key".format(key_id=self._key_id)
        _LOGGER.exception(message)
        raise GenerateKeyError(message)
    return DataKey(
        key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id),
        data_key=plaintext,
        encrypted_data_key=ciphertext,
    )
Generates data key and returns plaintext and ciphertext of key .
59,459
def _encrypt_data_key(self, data_key, algorithm, encryption_context=None):
    """Encrypt a plaintext data key under this master key via KMS.

    :param data_key: Data key whose plaintext is to be encrypted
    :param algorithm: Algorithm suite in use (unused by KMS encrypt)
    :param dict encryption_context: Encryption context to bind (optional)
    :returns: Encrypted data key
    :raises EncryptKeyError: if the KMS call fails or the response is
        incomplete
    """
    request = {"KeyId": self._key_id, "Plaintext": data_key.data_key}
    if encryption_context:
        request["EncryptionContext"] = encryption_context
    if self.config.grant_tokens:
        request["GrantTokens"] = self.config.grant_tokens
    try:
        response = self.config.client.encrypt(**request)
        ciphertext = response["CiphertextBlob"]
        key_id = response["KeyId"]
    except (ClientError, KeyError):
        message = "Master Key {key_id} unable to encrypt data key".format(key_id=self._key_id)
        _LOGGER.exception(message)
        raise EncryptKeyError(message)
    return EncryptedDataKey(
        key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id), encrypted_data_key=ciphertext
    )
Encrypts a data key and returns the ciphertext .
59,460
def serialize_encrypted_data_key(encrypted_data_key):
    """Serialize an encrypted data key into the message wire format: three
    length-prefixed fields (provider ID, provider info, EDK ciphertext).

    :param encrypted_data_key: Encrypted data key to serialize
    :returns: Serialized encrypted data key
    :rtype: bytes
    """
    provider_id = encrypted_data_key.key_provider.provider_id
    provider_info = encrypted_data_key.key_provider.key_info
    edk = encrypted_data_key.encrypted_data_key
    fmt = ">H{}sH{}sH{}s".format(len(provider_id), len(provider_info), len(edk))
    return struct.pack(
        fmt,
        len(provider_id),
        to_bytes(provider_id),
        len(provider_info),
        to_bytes(provider_info),
        len(edk),
        edk,
    )
Serializes an encrypted data key .
59,461
def serialize_header(header, signer=None):
    """Serialize a message header object into bytes, feeding the result to
    *signer* if one is provided.

    :param header: Complete message header
    :param signer: Signer to update with the serialized header (optional)
    :returns: Serialized header
    :rtype: bytes
    """
    ec_serialized = aws_encryption_sdk.internal.formatting.encryption_context.serialize_encryption_context(
        header.encryption_context
    )
    header_start_format = (
        ">"    # big endian
        "B"    # version
        "B"    # type
        "H"    # algorithm ID
        "16s"  # message ID
        "H"    # serialized encryption context length
        "{}s"  # serialized encryption context
    ).format(len(ec_serialized))
    header_bytes = bytearray()
    header_bytes.extend(
        struct.pack(
            header_start_format,
            header.version.value,
            header.type.value,
            header.algorithm.algorithm_id,
            header.message_id,
            len(ec_serialized),
            ec_serialized,
        )
    )
    # Encrypted data keys: 2-byte count followed by each serialized EDK.
    serialized_data_keys = bytearray()
    for data_key in header.encrypted_data_keys:
        serialized_data_keys.extend(serialize_encrypted_data_key(data_key))
    header_bytes.extend(struct.pack(">H", len(header.encrypted_data_keys)))
    header_bytes.extend(serialized_data_keys)
    header_close_format = (
        ">"   # big endian
        "B"   # content type
        "4x"  # reserved (4 zero bytes)
        "B"   # IV length
        "I"   # frame length
    )
    header_bytes.extend(
        struct.pack(header_close_format, header.content_type.value, header.algorithm.iv_len, header.frame_length)
    )
    output = bytes(header_bytes)
    if signer is not None:
        signer.update(output)
    return output
Serializes a header object .
59,462
def serialize_header_auth(algorithm, header, data_encryption_key, signer=None):
    """Create serialized header authentication data by encrypting an empty
    plaintext with the serialized header as associated data.

    :param algorithm: Algorithm suite in use
    :param bytes header: Serialized message header
    :param bytes data_encryption_key: Derived data encryption key
    :param signer: Signer to update with the result (optional)
    :returns: Serialized header authentication data (IV followed by tag)
    :rtype: bytes
    """
    header_auth = encrypt(
        algorithm=algorithm,
        key=data_encryption_key,
        plaintext=b"",
        associated_data=header,
        iv=header_auth_iv(algorithm),
    )
    output = struct.pack(
        ">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len, tag_len=algorithm.tag_len),
        header_auth.iv,
        header_auth.tag,
    )
    if signer is not None:
        signer.update(output)
    return output
Creates serialized header authentication data .
59,463
def serialize_non_framed_open(algorithm, iv, plaintext_length, signer=None):
    """Serialize the opening block of a non-framed message body: the IV
    followed by the 8-byte plaintext length.

    :param algorithm: Algorithm suite in use (provides the IV length)
    :param bytes iv: Body IV
    :param int plaintext_length: Length of the message plaintext
    :param signer: Signer to update with the result (optional)
    :returns: Serialized opening block
    :rtype: bytes
    """
    opening = struct.pack(">{iv_length}sQ".format(iv_length=algorithm.iv_len), iv, plaintext_length)
    if signer:
        signer.update(opening)
    return opening
Serializes the opening block for a non - framed message body .
59,464
def serialize_non_framed_close(tag, signer=None):
    """Serialize the closing block (auth tag) of a non-framed message body.

    :param bytes tag: Auth tag produced for the body
    :param signer: Signer to update with the result (optional)
    :returns: Serialized closing block
    :rtype: bytes
    """
    closing = struct.pack("{}s".format(len(tag)), tag)
    if signer:
        signer.update(closing)
    return closing
Serializes the closing block for a non - framed message body .
59,465
def serialize_frame(
    algorithm, plaintext, message_id, data_encryption_key, frame_length, sequence_number, is_final_frame, signer=None
):
    """Break one frame off the message plaintext, encrypt and serialize it,
    and return the serialized frame plus the remaining plaintext.

    :param algorithm: Algorithm suite in use
    :param bytes plaintext: Remaining message plaintext
    :param bytes message_id: Message ID
    :param bytes data_encryption_key: Derived data encryption key
    :param int frame_length: Length of a standard frame
    :param int sequence_number: 1-based sequence number of this frame
    :param bool is_final_frame: Whether this is the final frame
    :param signer: Signer to update with the serialized frame (optional)
    :returns: (serialized frame, remaining plaintext) tuple
    :raises SerializationError: if the sequence number is out of range
    """
    if sequence_number < 1:
        raise SerializationError("Frame sequence number must be greater than 0")
    if sequence_number > aws_encryption_sdk.internal.defaults.MAX_FRAME_COUNT:
        raise SerializationError("Max frame count exceeded")
    if is_final_frame:
        content_string = ContentAADString.FINAL_FRAME_STRING_ID
    else:
        content_string = ContentAADString.FRAME_STRING_ID
    # Consume at most one frame's worth of plaintext.
    frame_plaintext = plaintext[:frame_length]
    frame_ciphertext = encrypt(
        algorithm=algorithm,
        key=data_encryption_key,
        plaintext=frame_plaintext,
        associated_data=aws_encryption_sdk.internal.formatting.encryption_context.assemble_content_aad(
            message_id=message_id,
            aad_content_string=content_string,
            seq_num=sequence_number,
            length=len(frame_plaintext),
        ),
        iv=frame_iv(algorithm, sequence_number),
    )
    plaintext = plaintext[frame_length:]
    if is_final_frame:
        # The final frame carries an end-of-sequence marker and an explicit
        # ciphertext length in addition to the standard fields.
        _LOGGER.debug("Serializing final frame")
        packed_frame = struct.pack(
            ">II{iv_len}sI{content_len}s{auth_len}s".format(
                iv_len=algorithm.iv_len, content_len=len(frame_ciphertext.ciphertext), auth_len=algorithm.auth_len
            ),
            SequenceIdentifier.SEQUENCE_NUMBER_END.value,
            sequence_number,
            frame_ciphertext.iv,
            len(frame_ciphertext.ciphertext),
            frame_ciphertext.ciphertext,
            frame_ciphertext.tag,
        )
    else:
        _LOGGER.debug("Serializing frame")
        packed_frame = struct.pack(
            ">I{iv_len}s{content_len}s{auth_len}s".format(
                iv_len=algorithm.iv_len, content_len=frame_length, auth_len=algorithm.auth_len
            ),
            sequence_number,
            frame_ciphertext.iv,
            frame_ciphertext.ciphertext,
            frame_ciphertext.tag,
        )
    if signer is not None:
        signer.update(packed_frame)
    return packed_frame, plaintext
Receives a message plaintext breaks off a frame encrypts and serializes the frame and returns the encrypted frame and the remaining plaintext .
59,466
def serialize_footer(signer):
    """Serialize the message footer: a length-prefixed signature, or an empty
    byte string when the message is unsigned.

    :param signer: Signer used while writing the message, or None
    :returns: Serialized footer
    :rtype: bytes
    """
    if signer is None:
        return b""
    signature = signer.finalize()
    return struct.pack(">H{sig_len}s".format(sig_len=len(signature)), len(signature), signature)
Finalizes the signer that was used while writing the message to produce the signature, then serializes that signature with a two-byte length prefix.
59,467
def serialize_raw_master_key_prefix(raw_master_key):
    """Produce the key_info prefix a RawMasterKey always uses for keys that
    carry additional information (symmetric wrapping only).

    :param raw_master_key: RawMasterKey for which to produce the prefix
    :returns: Serialized key_info prefix
    :rtype: bytes
    """
    wrapping_algorithm = raw_master_key.config.wrapping_key.wrapping_algorithm
    if wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC:
        # Asymmetric keys carry no extra info; the prefix is just the key id.
        return to_bytes(raw_master_key.key_id)
    return struct.pack(
        ">{}sII".format(len(raw_master_key.key_id)),
        to_bytes(raw_master_key.key_id),
        wrapping_algorithm.algorithm.tag_len * 8,  # tag length in bits
        wrapping_algorithm.algorithm.iv_len,
    )
Produces the prefix that a RawMasterKey will always use for the key_info value of keys which require additional information .
59,468
def serialize_wrapped_key(key_provider, wrapping_algorithm, wrapping_key_id, encrypted_wrapped_key):
    """Serialize EncryptedData into a wrapped EncryptedDataKey.

    :param key_provider: Key provider that produced the wrapped key
    :param wrapping_algorithm: Wrapping algorithm used
    :param bytes wrapping_key_id: ID of the wrapping key
    :param encrypted_wrapped_key: EncryptedData to serialize
    :returns: Wrapped encrypted data key
    """
    if encrypted_wrapped_key.iv is None:
        # Asymmetric wrapping: no IV/tag; the key id passes through as-is.
        key_info = wrapping_key_id
        key_ciphertext = encrypted_wrapped_key.ciphertext
    else:
        # Symmetric wrapping: pack tag length (bits), IV length, and IV
        # after the key id, and append the tag to the ciphertext.
        key_info = struct.pack(
            ">{key_id_len}sII{iv_len}s".format(
                key_id_len=len(wrapping_key_id), iv_len=wrapping_algorithm.algorithm.iv_len
            ),
            to_bytes(wrapping_key_id),
            len(encrypted_wrapped_key.tag) * 8,
            wrapping_algorithm.algorithm.iv_len,
            encrypted_wrapped_key.iv,
        )
        key_ciphertext = encrypted_wrapped_key.ciphertext + encrypted_wrapped_key.tag
    return EncryptedDataKey(
        key_provider=MasterKeyInfo(provider_id=key_provider.provider_id, key_info=key_info),
        encrypted_data_key=key_ciphertext,
    )
Serializes EncryptedData into a Wrapped EncryptedDataKey .
59,469
def assemble_content_aad(message_id, aad_content_string, seq_num, length):
    """Assemble the Body AAD for a message body structure.

    :param bytes message_id: Message ID
    :param aad_content_string: ContentAADString member for this body type
    :param int seq_num: Frame sequence number
    :param int length: Content length
    :returns: Packed Body AAD
    :rtype: bytes
    :raises SerializationError: if aad_content_string is not a
        ContentAADString
    """
    if not isinstance(aad_content_string, aws_encryption_sdk.identifiers.ContentAADString):
        raise SerializationError("Unknown aad_content_string")
    return struct.pack(
        ">16s{}sIQ".format(len(aad_content_string.value)),
        message_id,
        aad_content_string.value,
        seq_num,
        length,
    )
Assembles the Body AAD string for a message body structure .
59,470
def serialize_encryption_context(encryption_context):
    """Serialize an encryption-context dictionary into a byte string:
    a 2-byte entry count followed by length-prefixed key/value pairs in
    key-sorted order.

    :param dict encryption_context: Encryption context to serialize
    :returns: Serialized encryption context
    :rtype: bytes
    :raises SerializationError: if the context has too many entries, cannot
        be encoded, or serializes too large
    """
    if not encryption_context:
        return bytes()
    entry_count = len(encryption_context)
    if entry_count > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
        raise SerializationError("The encryption context contains too many elements.")
    pairs = []
    for key, value in encryption_context.items():
        try:
            if isinstance(key, bytes):
                key = codecs.decode(key)
            if isinstance(value, bytes):
                value = codecs.decode(value)
            pairs.append(
                (
                    aws_encryption_sdk.internal.str_ops.to_bytes(key),
                    aws_encryption_sdk.internal.str_ops.to_bytes(value),
                )
            )
        except Exception:
            raise SerializationError(
                "Cannot encode dictionary key or value using {}.".format(aws_encryption_sdk.internal.defaults.ENCODING)
            )
    serialized = bytearray(struct.pack(">H", entry_count))
    # Entries are serialized in deterministic, key-sorted order.
    for key, value in sorted(pairs, key=lambda pair: pair[0]):
        serialized.extend(
            struct.pack(
                ">H{key_size}sH{value_size}s".format(key_size=len(key), value_size=len(value)),
                len(key),
                key,
                len(value),
                value,
            )
        )
    if len(serialized) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
        raise SerializationError("The serialized context is too large.")
    return bytes(serialized)
Serializes the contents of a dictionary into a byte string .
59,471
def read_short(source, offset):
    """Read a big-endian unsigned short from *source* at *offset*.

    :param bytes source: Byte string to read from
    :param int offset: Offset at which to read
    :returns: (value, new offset) tuple
    :rtype: tuple of int
    :raises SerializationError: if the bytes cannot be unpacked
    """
    try:
        (value,) = struct.unpack_from(">H", source, offset)
    except struct.error:
        raise SerializationError("Bad format of serialized context.")
    return value, offset + struct.calcsize(">H")
Reads a number from a byte array .
59,472
def read_string(source, offset, length):
    """Decode *length* bytes of *source*, starting at *offset*, as a string.

    :param bytes source: Byte string to read from
    :param int offset: Offset at which to start reading
    :param int length: Number of bytes to read
    :returns: (decoded string, new offset) tuple
    :raises SerializationError: if decoding fails
    """
    end = offset + length
    try:
        decoded = codecs.decode(source[offset:end], aws_encryption_sdk.internal.defaults.ENCODING)
    except Exception:
        raise SerializationError("Bad format of serialized context.")
    return decoded, end
Reads a string from a byte string .
59,473
def deserialize_encryption_context(serialized_encryption_context):
    """Deserialize a byte string into an encryption-context dictionary.

    :param bytes serialized_encryption_context: Serialized context to parse
    :returns: Deserialized encryption context
    :rtype: dict
    :raises SerializationError: if the input is too long, malformed, has
        duplicate keys, or carries trailing data
    """
    if len(serialized_encryption_context) > aws_encryption_sdk.internal.defaults.MAX_BYTE_ARRAY_SIZE:
        raise SerializationError("Serialized context is too long.")
    if serialized_encryption_context == b"":
        _LOGGER.debug("No encryption context data found")
        return {}
    offset = 0
    encryption_context = {}
    dict_size, offset = read_short(source=serialized_encryption_context, offset=offset)
    _LOGGER.debug("Found %d keys", dict_size)
    for _ in range(dict_size):
        key_size, offset = read_short(source=serialized_encryption_context, offset=offset)
        key, offset = read_string(source=serialized_encryption_context, offset=offset, length=key_size)
        value_size, offset = read_short(source=serialized_encryption_context, offset=offset)
        value, offset = read_string(source=serialized_encryption_context, offset=offset, length=value_size)
        if key in encryption_context:
            raise SerializationError("Duplicate key in serialized context.")
        encryption_context[key] = value
    # Every byte of the input must have been consumed.
    if offset != len(serialized_encryption_context):
        raise SerializationError("Formatting error: Extra data in serialized context.")
    return encryption_context
Deserializes the contents of a byte string into a dictionary .
59,474
def owns_data_key(self, data_key: DataKey) -> bool:
    """Determine whether *data_key* was produced by a null or zero provider."""
    provider = data_key.key_provider.provider_id
    return provider in self._allowed_provider_ids
Determine whether the data key is owned by a null or zero provider .
59,475
def frame_iv(algorithm, sequence_number):
    """Build the deterministic IV for a body frame: zero padding followed by
    the big-endian frame sequence number in the last four bytes.

    :param algorithm: Algorithm suite in use (provides the IV length)
    :param int sequence_number: Frame sequence number
    :returns: Frame IV
    :rtype: bytes
    :raises ActionNotAllowedError: if the sequence number is out of range
    """
    if not 1 <= sequence_number <= MAX_FRAME_COUNT:
        raise ActionNotAllowedError(
            "Invalid frame sequence number: {actual}\nMust be between 1 and {max}".format(
                actual=sequence_number, max=MAX_FRAME_COUNT
            )
        )
    return b"\x00" * (algorithm.iv_len - 4) + struct.pack(">I", sequence_number)
Builds the deterministic IV for a body frame .
59,476
def valid_kdf(self, kdf):
    """Determine whether *kdf* can be used with this encryption suite.

    :param kdf: KDFSuite to evaluate
    :returns: True if the KDF is compatible
    :rtype: bool
    :raises InvalidAlgorithmError: if the suite's data key is longer than
        the KDF's input length
    """
    # A KDF with no input-length constraint accepts any suite.
    if kdf.input_length is not None and self.data_key_length > kdf.input_length(self):
        raise InvalidAlgorithmError(
            "Invalid Algorithm definition: data_key_len must not be greater than kdf_input_len"
        )
    return True
Determine whether a KDFSuite can be used with this EncryptionSuite .
59,477
def header_length(header):
    """Calculate the serialized header length (including header auth) from a
    complete header.

    :param header: Complete message header
    :returns: Length of the serialized header plus header authentication data
    :rtype: int
    """
    # Header auth contributes one IV and one auth tag beyond the header itself.
    return len(serialize_header(header)) + header.algorithm.iv_len + header.algorithm.auth_len
Calculates the ciphertext message header length given a complete header .
59,478
def _non_framed_body_length ( header , plaintext_length ) : body_length = header . algorithm . iv_len body_length += 8 body_length += plaintext_length body_length += header . algorithm . auth_len return body_length
Calculates the length of a non - framed message body given a complete header .
59,479
def _standard_frame_length ( header ) : frame_length = 4 frame_length += header . algorithm . iv_len frame_length += header . frame_length frame_length += header . algorithm . auth_len return frame_length
Calculates the length of a standard ciphertext frame given a complete header .
59,480
def _final_frame_length ( header , final_frame_bytes ) : final_frame_length = 4 final_frame_length += 4 final_frame_length += header . algorithm . iv_len final_frame_length += 4 final_frame_length += final_frame_bytes final_frame_length += header . algorithm . auth_len return final_frame_length
Calculates the length of a final ciphertext frame given a complete header and the number of bytes of ciphertext in the final frame .
59,481
def body_length(header, plaintext_length):
    """Calculate the ciphertext body length for *plaintext_length* bytes of
    plaintext, given a complete header.

    :param header: Complete message header
    :param int plaintext_length: Length of the message plaintext
    :returns: Message body length
    :rtype: int
    """
    if header.frame_length == 0:
        # A frame length of zero marks a non-framed message.
        return _non_framed_body_length(header, plaintext_length)
    frames, final_frame_bytes = divmod(plaintext_length, header.frame_length)
    # An exact multiple of the frame length still ends with an empty final frame.
    return frames * _standard_frame_length(header) + _final_frame_length(header, final_frame_bytes)
Calculates the ciphertext message body length given a complete header .
59,482
def footer_length(header):
    """Calculate the message footer length from a complete header: zero for
    unsigned algorithms, otherwise a 2-byte length prefix plus the signature.

    :param header: Complete message header
    :returns: Footer length
    :rtype: int
    """
    if header.algorithm.signing_algorithm_info is None:
        return 0
    return 2 + header.algorithm.signature_len
Calculates the ciphertext message footer length given a complete header .
59,483
def ciphertext_length(header, plaintext_length):
    """Calculate the total ciphertext message length (header + body + footer)
    from a complete header.

    :param header: Complete message header
    :param int plaintext_length: Length of the message plaintext
    :returns: Complete ciphertext message length
    :rtype: int
    """
    return header_length(header) + body_length(header, plaintext_length) + footer_length(header)
Calculates the complete ciphertext message length given a complete header .
59,484
def owns_data_key(self, data_key):
    """Determine whether *data_key* was produced by this RawMasterKey.

    :param data_key: Data key to evaluate
    :returns: True if this master key owns the data key
    :rtype: bool
    """
    expected_key_info_len = -1
    if (
        self.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC
        and data_key.key_provider == self.key_provider
    ):
        # Asymmetric keys match on the full key provider alone.
        return True
    elif self.config.wrapping_key.wrapping_algorithm.encryption_type is EncryptionType.SYMMETRIC:
        # Symmetric keys serialize an IV after the prefix, so ownership is
        # matched on provider id, total key_info length, and prefix.
        expected_key_info_len = (
            len(self._key_info_prefix) + self.config.wrapping_key.wrapping_algorithm.algorithm.iv_len
        )
        if (
            data_key.key_provider.provider_id == self.provider_id
            and len(data_key.key_provider.key_info) == expected_key_info_len
            and data_key.key_provider.key_info.startswith(self._key_info_prefix)
        ):
            return True
    _LOGGER.debug(
        (
            "RawMasterKey does not own data_key: %s\n"
            "Expected provider_id: %s\n"
            "Expected key_info len: %s\n"
            "Expected key_info prefix: %s"
        ),
        data_key,
        self.provider_id,
        expected_key_info_len,
        self._key_info_prefix,
    )
    return False
Determines if data_key object is owned by this RawMasterKey .
59,485
def _encrypt_data_key(self, data_key, algorithm, encryption_context):
    """Encrypt *data_key* with the configured wrapping key and serialize the
    result into an EncryptedDataKey.

    :param data_key: Data key whose plaintext is to be wrapped
    :param algorithm: Algorithm suite in use
    :param dict encryption_context: Encryption context to bind
    :returns: Wrapped encrypted data key
    """
    wrapped = self.config.wrapping_key.encrypt(
        plaintext_data_key=data_key.data_key, encryption_context=encryption_context
    )
    return aws_encryption_sdk.internal.formatting.serialize.serialize_wrapped_key(
        key_provider=self.key_provider,
        wrapping_algorithm=self.config.wrapping_key.wrapping_algorithm,
        wrapping_key_id=self.key_id,
        encrypted_wrapped_key=wrapped,
    )
Performs the provider - specific key encryption actions .
59,486
def put_encryption_materials(self, cache_key, encryption_materials, plaintext_length, entry_hints=None):
    """Build a cache entry without retaining it: this no-op cache never
    stores materials, it only wraps them in an entry for the caller.
    """
    return CryptoMaterialsCacheEntry(cache_key=cache_key, value=encryption_materials)
Does not add encryption materials to the cache since there is no cache to which to add them .
59,487
def _set_signature_type(self):
    """Check that the algorithm's signing info implements EllipticCurve and
    return that interface as the reference signature type.

    :raises NotSupportedError: if the signing algorithm info does not
        implement the elliptic-curve interface
    """
    try:
        verify_interface(ec.EllipticCurve, self.algorithm.signing_algorithm_info)
    except InterfaceNotImplemented:
        raise NotSupportedError("Unsupported signing algorithm info")
    return ec.EllipticCurve
Ensures that the algorithm signature type is a known type and sets a reference value .
59,488
def from_key_bytes(cls, algorithm, key_bytes):
    """Build a Signer from an algorithm suite and a DER-encoded private key.

    :param algorithm: Algorithm suite to use
    :param bytes key_bytes: DER-encoded, unencrypted private signing key
    :returns: Signer instance
    """
    loaded_key = serialization.load_der_private_key(data=key_bytes, password=None, backend=default_backend())
    return cls(algorithm, loaded_key)
Builds a Signer from an algorithm suite and a raw signing key .
59,489
def key_bytes(self):
    """Return the signing key serialized as DER-encoded, unencrypted PKCS8.

    :returns: Raw signing key bytes
    :rtype: bytes
    """
    return self.key.private_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
Returns the raw signing key .
59,490
def finalize(self):
    """Finalize the internal hasher and return the signature over the digest.

    :returns: Signature bytes
    """
    digest = self._hasher.finalize()
    return _ecc_static_length_signature(key=self.key, algorithm=self.algorithm, digest=digest)
Finalizes the signer and returns the signature .
59,491
def from_encoded_point(cls, algorithm, encoded_point):
    """Build a Verifier from a base64-encoded compressed ECC curve point.

    :param algorithm: Algorithm suite that defines the curve
    :param encoded_point: Base64-encoded compressed point
    :returns: Verifier instance
    """
    point_bytes = base64.b64decode(encoded_point)
    public_numbers = _ecc_public_numbers_from_compressed_point(
        curve=algorithm.signing_algorithm_info(), compressed_point=point_bytes
    )
    return cls(algorithm=algorithm, key=public_numbers.public_key(default_backend()))
Creates a Verifier object based on the supplied algorithm and encoded compressed ECC curve point .
59,492
def from_key_bytes(cls, algorithm, key_bytes):
    """Build a Verifier from an algorithm suite and a DER-encoded public key.

    :param algorithm: Algorithm suite to use
    :param bytes key_bytes: DER-encoded public verification key
    :returns: Verifier instance
    """
    loaded_key = serialization.load_der_public_key(data=key_bytes, backend=default_backend())
    return cls(algorithm=algorithm, key=loaded_key)
Creates a Verifier object based on the supplied algorithm and raw verification key .
59,493
def key_bytes(self):
    """Return the verification key as DER-encoded SubjectPublicKeyInfo.

    :returns: Raw verification key bytes
    :rtype: bytes
    """
    return self.key.public_bytes(encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo)
Returns the raw verification key .
59,494
def verify(self, signature):
    """Verify *signature* against the accumulated verifier state.

    :param bytes signature: Signature to verify
    """
    digest = self._hasher.finalize()
    self.key.verify(
        signature=signature,
        data=digest,
        signature_algorithm=ec.ECDSA(Prehashed(self.algorithm.signing_hash_type())),
    )
Verifies the signature against the current cryptographic verifier state .
59,495
def encrypt(self, plaintext_data_key, encryption_context):
    """Encrypt a data key using this wrapping key.

    :param bytes plaintext_data_key: Data key to encrypt
    :param dict encryption_context: Encryption context to authenticate
        (used only for symmetric wrapping)
    :returns: Encrypted data (IV and tag are None for asymmetric wrapping)
    """
    if self.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC:
        if self.wrapping_key_type is EncryptionKeyType.PRIVATE:
            # A private key was supplied; encrypt with its public half.
            encrypted_key = self._wrapping_key.public_key().encrypt(
                plaintext=plaintext_data_key, padding=self.wrapping_algorithm.padding
            )
        else:
            encrypted_key = self._wrapping_key.encrypt(
                plaintext=plaintext_data_key, padding=self.wrapping_algorithm.padding
            )
        return EncryptedData(iv=None, ciphertext=encrypted_key, tag=None)
    # Symmetric wrapping authenticates the serialized encryption context.
    serialized_encryption_context = serialize_encryption_context(encryption_context=encryption_context)
    iv = os.urandom(self.wrapping_algorithm.algorithm.iv_len)
    return encrypt(
        algorithm=self.wrapping_algorithm.algorithm,
        key=self._derived_wrapping_key,
        plaintext=plaintext_data_key,
        associated_data=serialized_encryption_context,
        iv=iv,
    )
Encrypts a data key using a direct wrapping key .
59,496
def decrypt(self, encrypted_wrapped_data_key, encryption_context):
    """Decrypt a wrapped, encrypted data key.

    :param encrypted_wrapped_data_key: Encrypted data (ciphertext, plus
        IV/tag for symmetric wrapping)
    :param dict encryption_context: Encryption context used at encrypt time
    :returns: Plaintext data key
    :rtype: bytes
    :raises IncorrectMasterKeyError: if this wrapping key holds only a
        public key
    """
    if self.wrapping_key_type is EncryptionKeyType.PUBLIC:
        raise IncorrectMasterKeyError("Public key cannot decrypt")
    if self.wrapping_key_type is EncryptionKeyType.PRIVATE:
        # Asymmetric: decrypt directly with the private key.
        return self._wrapping_key.decrypt(
            ciphertext=encrypted_wrapped_data_key.ciphertext, padding=self.wrapping_algorithm.padding
        )
    # Symmetric: the serialized encryption context is the associated data.
    serialized_encryption_context = serialize_encryption_context(encryption_context=encryption_context)
    return decrypt(
        algorithm=self.wrapping_algorithm.algorithm,
        key=self._derived_wrapping_key,
        encrypted_data=encrypted_wrapped_data_key,
        associated_data=serialized_encryption_context,
    )
Decrypts a wrapped encrypted data key .
59,497
def _generate_data_key(self, algorithm: AlgorithmSuite, encryption_context: Dict[Text, Text]) -> DataKey:
    """Produce the deterministic counting data key (code points 1..n,
    UTF-8 encoded) used by this test master key.
    """
    key_material = b"".join(chr(i).encode("utf-8") for i in range(1, algorithm.data_key_len + 1))
    return DataKey(key_provider=self.key_provider, data_key=key_material, encrypted_data_key=self._encrypted_data_key)
Perform the provider - specific data key generation task .
59,498
def _encrypt_data_key(self, data_key: DataKey, algorithm: AlgorithmSuite, encryption_context: Dict[Text, Text]) -> NoReturn:
    """Unsupported operation: this master key cannot encrypt data keys.

    :raises NotImplementedError: always
    """
    raise NotImplementedError("CountingMasterKey does not support encrypt_data_key")
Encrypt a data key and return the ciphertext .
59,499
def validate_header(header, header_auth, raw_header, data_key):
    """Validate the deserialized header against its authentication data by
    decrypting the empty header-auth ciphertext with the raw header bytes as
    associated data.

    :param header: Deserialized message header
    :param header_auth: Deserialized header authentication data (IV and tag)
    :param bytes raw_header: Raw header bytes exactly as read
    :param bytes data_key: Derived data encryption key
    :raises SerializationError: if header authentication fails
    """
    _LOGGER.debug("Starting header validation")
    try:
        decrypt(
            algorithm=header.algorithm,
            key=data_key,
            encrypted_data=EncryptedData(header_auth.iv, b"", header_auth.tag),
            associated_data=raw_header,
        )
    except InvalidTag:
        raise SerializationError("Header authorization failed")
Validates the header using the header authentication data .