idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
---|---|---|
59,500 |
def _deserialize_encrypted_data_keys(stream):
    """Deserialize the encrypted data keys from a message header stream.

    :param stream: Source stream positioned at the encrypted data key count
    :returns: Deserialized EncryptedDataKey objects
    :rtype: set
    """
    (encrypted_data_key_count,) = unpack_values(">H", stream)
    encrypted_data_keys = set()  # idiom: set() instead of set([])
    for _ in range(encrypted_data_key_count):
        (key_provider_length,) = unpack_values(">H", stream)
        (key_provider_identifier,) = unpack_values(">{}s".format(key_provider_length), stream)
        (key_provider_information_length,) = unpack_values(">H", stream)
        (key_provider_information,) = unpack_values(
            ">{}s".format(key_provider_information_length), stream
        )
        (encrypted_data_key_length,) = unpack_values(">H", stream)
        encrypted_data_key = stream.read(encrypted_data_key_length)
        encrypted_data_keys.add(
            EncryptedDataKey(
                key_provider=MasterKeyInfo(
                    provider_id=to_str(key_provider_identifier),
                    key_info=key_provider_information,
                ),
                encrypted_data_key=encrypted_data_key,
            )
        )
    return encrypted_data_keys
|
Deserialize some encrypted data keys from a stream .
|
59,501 |
def _verified_iv_length ( iv_length , algorithm_suite ) : if iv_length != algorithm_suite . iv_len : raise SerializationError ( "Specified IV length ({length}) does not match algorithm IV length ({algorithm})" . format ( length = iv_length , algorithm = algorithm_suite ) ) return iv_length
|
Verify an IV length for an algorithm suite .
|
59,502 |
def _verified_frame_length(frame_length, content_type):
    """Verify a frame length value against the message content type.

    Framed messages must stay within MAX_FRAME_SIZE; non-framed messages
    must carry a zero frame length.
    """
    if content_type == ContentType.FRAMED_DATA and frame_length > MAX_FRAME_SIZE:
        raise SerializationError(
            "Specified frame length larger than allowed maximum: {found} > {max}".format(
                found=frame_length, max=MAX_FRAME_SIZE
            )
        )
    if content_type == ContentType.NO_FRAMING and frame_length != 0:
        raise SerializationError("Non-zero frame length found for non-framed message")
    return frame_length
|
Verify a frame length value for a message content type .
|
59,503 |
def deserialize_header(stream):
    """Deserialize the message header from a source stream.

    Returns the parsed MessageHeader together with the raw header bytes
    (captured through a tee) for later header authentication.
    """
    _LOGGER.debug("Starting header deserialization")
    tee = io.BytesIO()
    tee_stream = TeeStream(stream, tee)

    version_id, message_type_id = unpack_values(">BB", tee_stream)
    header = {}
    header["version"] = _verified_version_from_id(version_id)
    header["type"] = _verified_message_type_from_id(message_type_id)

    algorithm_id, message_id, ser_encryption_context_length = unpack_values(">H16sH", tee_stream)
    header["algorithm"] = _verified_algorithm_from_id(algorithm_id)
    header["message_id"] = message_id
    header["encryption_context"] = deserialize_encryption_context(
        tee_stream.read(ser_encryption_context_length)
    )

    header["encrypted_data_keys"] = _deserialize_encrypted_data_keys(tee_stream)

    (content_type_id,) = unpack_values(">B", tee_stream)
    header["content_type"] = _verified_content_type_from_id(content_type_id)

    (content_aad_length,) = unpack_values(">I", tee_stream)
    header["content_aad_length"] = _verified_content_aad_length(content_aad_length)

    (iv_length,) = unpack_values(">B", tee_stream)
    header["header_iv_length"] = _verified_iv_length(iv_length, header["algorithm"])

    (frame_length,) = unpack_values(">I", tee_stream)
    header["frame_length"] = _verified_frame_length(frame_length, header["content_type"])

    return MessageHeader(**header), tee.getvalue()
|
Deserializes the header from a source stream
|
59,504 |
def deserialize_header_auth(stream, algorithm, verifier=None):
    """Deserialize a MessageHeaderAuthentication (IV + auth tag) from a stream."""
    _LOGGER.debug("Starting header auth deserialization")
    fmt = ">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len, tag_len=algorithm.tag_len)
    return MessageHeaderAuthentication(*unpack_values(fmt, stream, verifier))
|
Deserializes a MessageHeaderAuthentication object from a source stream .
|
59,505 |
def deserialize_non_framed_values(stream, header, verifier=None):
    """Deserialize the IV and encrypted-content length of a non-framed body."""
    _LOGGER.debug("Starting non-framed body iv/tag deserialization")
    data_iv, data_length = unpack_values(
        ">{}sQ".format(header.algorithm.iv_len), stream, verifier
    )
    return data_iv, data_length
|
Deserializes the IV and body length from a non-framed stream.
|
59,506 |
def deserialize_tag(stream, header, verifier=None):
    """Deserialize the auth tag trailing a non-framed message body."""
    (data_tag,) = unpack_values(
        format_string=">{auth_len}s".format(auth_len=header.algorithm.auth_len),
        stream=stream,
        verifier=verifier,
    )
    return data_tag
|
Deserialize the Tag value from a non - framed stream .
|
59,507 |
def deserialize_frame(stream, header, verifier=None):
    """Deserialize a single frame (regular or final) from a message body.

    Returns the parsed MessageFrameBody plus a flag indicating whether
    this was the final frame.
    """
    _LOGGER.debug("Starting frame deserialization")
    frame_data = {}
    final_frame = False
    (sequence_number,) = unpack_values(">I", stream, verifier)
    if sequence_number == SequenceIdentifier.SEQUENCE_NUMBER_END.value:
        # The end marker is immediately followed by the real sequence number.
        _LOGGER.debug("Deserializing final frame")
        (sequence_number,) = unpack_values(">I", stream, verifier)
        final_frame = True
    else:
        _LOGGER.debug("Deserializing frame sequence number %d", int(sequence_number))
    frame_data["final_frame"] = final_frame
    frame_data["sequence_number"] = sequence_number

    (frame_iv,) = unpack_values(
        ">{iv_len}s".format(iv_len=header.algorithm.iv_len), stream, verifier
    )
    frame_data["iv"] = frame_iv

    if final_frame:
        # Only the final frame serializes an explicit content length.
        (content_length,) = unpack_values(">I", stream, verifier)
        if content_length >= header.frame_length:
            raise SerializationError(
                "Invalid final frame length: {final} >= {normal}".format(
                    final=content_length, normal=header.frame_length
                )
            )
    else:
        content_length = header.frame_length

    frame_content, frame_tag = unpack_values(
        ">{content_len}s{auth_len}s".format(
            content_len=content_length, auth_len=header.algorithm.auth_len
        ),
        stream,
        verifier,
    )
    frame_data["ciphertext"] = frame_content
    frame_data["tag"] = frame_tag
    return MessageFrameBody(**frame_data), final_frame
|
Deserializes a frame from a body .
|
59,508 |
def deserialize_footer(stream, verifier=None):
    """Deserialize the message footer (signature) and verify it.

    When no verifier is supplied, no footer is expected and an empty
    signature is returned immediately.
    """
    _LOGGER.debug("Starting footer deserialization")
    signature = b""
    if verifier is None:
        return MessageFooter(signature=signature)
    try:
        (sig_len,) = unpack_values(">H", stream)
        (signature,) = unpack_values(">{sig_len}s".format(sig_len=sig_len), stream)
    except SerializationError:
        raise SerializationError("No signature found in message")
    if verifier:
        verifier.verify(signature)
    return MessageFooter(signature=signature)
|
Deserializes a footer .
|
59,509 |
def deserialize_wrapped_key(wrapping_algorithm, wrapping_key_id, wrapped_encrypted_key):
    """Extract EncryptedData from a wrapped EncryptedDataKey.

    When the provider info is exactly the wrapping key id, the data key is
    raw ciphertext (no iv/tag). Otherwise the provider info suffix carries
    the tag length (bits), iv length, and iv.
    """
    key_info = wrapped_encrypted_key.key_provider.key_info
    if wrapping_key_id == key_info:
        return EncryptedData(iv=None, ciphertext=wrapped_encrypted_key.encrypted_data_key, tag=None)

    if not key_info.startswith(wrapping_key_id):
        raise SerializationError("Master Key mismatch for wrapped data key")
    _key_info = key_info[len(wrapping_key_id):]
    try:
        tag_len, iv_len = struct.unpack(">II", _key_info[:8])
    except struct.error:
        raise SerializationError("Malformed key info: key info missing data")
    tag_len //= 8  # serialized in bits; convert to bytes
    if iv_len != wrapping_algorithm.algorithm.iv_len:
        raise SerializationError("Wrapping AlgorithmSuite mismatch for wrapped data key")
    iv = _key_info[8:]
    if len(iv) != iv_len:
        raise SerializationError("Malformed key info: incomplete iv")
    ciphertext = wrapped_encrypted_key.encrypted_data_key[:-1 * tag_len]
    tag = wrapped_encrypted_key.encrypted_data_key[-1 * tag_len:]
    if not ciphertext or len(tag) != tag_len:
        raise SerializationError("Malformed key info: incomplete ciphertext or tag")
    return EncryptedData(iv=iv, ciphertext=ciphertext, tag=tag)
|
Extracts and deserializes EncryptedData from a Wrapped EncryptedDataKey .
|
59,510 |
def validate_frame_length(frame_length, algorithm):
    """Validate that a frame length is legal for the selected algorithm.

    It must be a non-negative multiple of the cipher's block size and no
    larger than the SDK maximum.
    """
    block_size = algorithm.encryption_algorithm.block_size
    if frame_length < 0 or frame_length % block_size != 0:
        raise SerializationError(
            "Frame size must be a non-negative multiple of the block size of the crypto algorithm: {block_size}".format(
                block_size=block_size
            )
        )
    if frame_length > aws_encryption_sdk.internal.defaults.MAX_FRAME_SIZE:
        raise SerializationError(
            "Frame size too large: {frame} > {max}".format(
                frame=frame_length, max=aws_encryption_sdk.internal.defaults.MAX_FRAME_SIZE
            )
        )
|
Validates that frame length is within the defined limits and is compatible with the selected algorithm .
|
59,511 |
def get_aad_content_string(content_type, is_final_frame):
    """Return the Body AAD content string for a message body section."""
    if content_type == ContentType.NO_FRAMING:
        return ContentAADString.NON_FRAMED_STRING_ID
    if content_type == ContentType.FRAMED_DATA:
        if is_final_frame:
            return ContentAADString.FINAL_FRAME_STRING_ID
        return ContentAADString.FRAME_STRING_ID
    raise UnknownIdentityError("Unhandled content type")
|
Prepares the appropriate Body AAD Value for a message body .
|
59,512 |
def prepare_data_keys(primary_master_key, master_keys, algorithm, encryption_context):
    """Generate a data key with the primary master key and encrypt it under every master key.

    Returns the plaintext data key plus the set of EncryptedDataKey
    objects destined for the message header.
    """
    encrypted_data_keys = set()
    data_encryption_key = primary_master_key.generate_data_key(algorithm, encryption_context)
    _LOGGER.debug("encryption data generated with master key: %s", data_encryption_key.key_provider)
    for master_key in master_keys:
        if master_key is primary_master_key:
            # The primary already produced the key; record its encrypted form directly.
            encrypted_data_keys.add(
                EncryptedDataKey(
                    key_provider=data_encryption_key.key_provider,
                    encrypted_data_key=data_encryption_key.encrypted_data_key,
                )
            )
            continue
        encrypted_key = master_key.encrypt_data_key(
            data_key=data_encryption_key, algorithm=algorithm, encryption_context=encryption_context
        )
        encrypted_data_keys.add(encrypted_key)
        _LOGGER.debug("encryption key encrypted with master key: %s", master_key.key_provider)
    return data_encryption_key, encrypted_data_keys
|
Prepares a DataKey to be used for encrypting message and list of EncryptedDataKey objects to be serialized into header .
|
59,513 |
def prep_stream_data(data):
    """Wrap raw str/bytes or an existing stream for use as a source stream."""
    if isinstance(data, (six.string_types, six.binary_type)):
        source = io.BytesIO(to_bytes(data))
    else:
        source = data
    return InsistentReaderBytesIO(source)
|
Take an input and prepare it for use as a stream .
|
59,514 |
def source_data_key_length_check(source_data_key, algorithm):
    """Raise InvalidDataKeyError unless the data key length equals the algorithm's KDF input length."""
    actual = len(source_data_key.data_key)
    if actual != algorithm.kdf_input_len:
        raise InvalidDataKeyError(
            "Invalid Source Data Key length {actual} for algorithm required: {required}".format(
                actual=actual, required=algorithm.kdf_input_len
            )
        )
|
Validates that the supplied source_data_key's data_key is the correct length for the supplied algorithm's kdf_input_len value.
|
59,515 |
def encrypt(algorithm, key, plaintext, associated_data, iv):
    """Encrypt one frame body and return it as EncryptedData."""
    enc = Encryptor(algorithm, key, associated_data, iv)
    body = enc.update(plaintext) + enc.finalize()
    return EncryptedData(enc.iv, body, enc.tag)
|
Encrypts a frame body .
|
59,516 |
def decrypt(algorithm, key, encrypted_data, associated_data):
    """Decrypt one frame body and return the plaintext."""
    dec = Decryptor(algorithm, key, associated_data, encrypted_data.iv, encrypted_data.tag)
    return dec.update(encrypted_data.ciphertext) + dec.finalize()
|
Decrypts a frame body .
|
59,517 |
def _master_key_provider() -> KMSMasterKeyProvider:
    """Build the decrypt oracle's V0 master key provider (KMS + null + counting)."""
    provider = KMSMasterKeyProvider()
    provider.add_master_key_provider(NullMasterKey())
    provider.add_master_key_provider(CountingMasterKey())
    return provider
|
Build the V0 master key provider .
|
59,518 |
def basic_decrypt() -> Response:
    """Chalice handler: decrypt the raw request body and return the plaintext.

    Any failure becomes a 400 response carrying the error text; the broad
    exception handler is intentional for this test oracle.
    """
    APP.log.debug("Request:")
    APP.log.debug(json.dumps(APP.current_request.to_dict()))
    APP.log.debug("Ciphertext:")
    APP.log.debug(APP.current_request.raw_body)
    try:
        ciphertext = APP.current_request.raw_body
        plaintext, _header = aws_encryption_sdk.decrypt(
            source=ciphertext, key_provider=_master_key_provider()
        )
        APP.log.debug("Plaintext:")
        APP.log.debug(plaintext)
        response = Response(
            body=plaintext,
            headers={"Content-Type": "application/octet-stream"},
            status_code=200,
        )
    except Exception as error:  # deliberate broad catch: oracle reports errors as 400s
        response = Response(body=str(error), status_code=400)
    APP.log.debug("Response:")
    APP.log.debug(json.dumps(response.to_dict(binary_types=["application/octet-stream"])))
    return response
|
Basic decrypt handler for decrypt oracle v0 .
|
59,519 |
def read(self, b=None):
    """Read up to ``b`` bytes from the source, mirroring them into the tee stream."""
    chunk = self.__wrapped__.read(b)
    self.__tee.write(chunk)
    return chunk
|
Reads data from source copying it into tee before returning .
|
59,520 |
def read(self, b=-1):
    """Read insistently: keep pulling from the source until ``b`` bytes are
    collected or the source stream is exhausted."""
    remaining = b
    buffer = io.BytesIO()
    while True:
        try:
            chunk = to_bytes(self.__wrapped__.read(remaining))
        except ValueError:
            # A closed source ends the read; any other ValueError propagates.
            if self.__wrapped__.closed:
                break
            raise
        if not chunk:
            break
        buffer.write(chunk)
        remaining -= len(chunk)
        if remaining <= 0:
            break
    return buffer.getvalue()
|
Keep reading from source stream until either the source stream is done or the requested number of bytes have been obtained .
|
59,521 |
def _ecc_static_length_signature(key, algorithm, digest):
    """Sign a pre-computed digest, retrying until the DER signature has the expected static length.

    When the encoded length is wrong, ``s`` is replaced by ``order - s``
    before re-encoding, which can yield the desired-length encoding.
    """
    pre_hashed_algorithm = ec.ECDSA(Prehashed(algorithm.signing_hash_type()))
    signature = b""
    while len(signature) != algorithm.signature_len:
        _LOGGER.debug(
            "Signature length %d is not desired length %d. Recalculating.",
            len(signature),
            algorithm.signature_len,
        )
        signature = key.sign(digest, pre_hashed_algorithm)
        if len(signature) != algorithm.signature_len:
            _LOGGER.debug(
                "Signature length %d is not desired length %d. Negating s.",
                len(signature),
                algorithm.signature_len,
            )
            r, s = decode_dss_signature(signature)
            s = _ECC_CURVE_PARAMETERS[algorithm.signing_algorithm_info.name].order - s
            signature = encode_dss_signature(r, s)
    return signature
|
Calculates an elliptic curve signature with a static length using pre - calculated hash .
|
59,522 |
def generate_ecc_signing_key(algorithm):
    """Generate an ECC private key on the suite's signing curve.

    Raises NotSupportedError when the suite's curve does not implement
    the EllipticCurve interface.
    """
    try:
        verify_interface(ec.EllipticCurve, algorithm.signing_algorithm_info)
        return ec.generate_private_key(
            curve=algorithm.signing_algorithm_info(), backend=default_backend()
        )
    except InterfaceNotImplemented:
        raise NotSupportedError("Unsupported signing algorithm info")
|
Returns an ECC signing key .
|
59,523 |
def derive_data_encryption_key(source_key, algorithm, message_id):
    """Derive the data encryption key via the suite's KDF.

    Suites without a KDF use the source key directly.
    """
    if algorithm.kdf_type is None:
        return source_key
    return algorithm.kdf_type(
        algorithm=algorithm.kdf_hash_type(),
        length=algorithm.data_key_len,
        salt=None,
        info=struct.pack(">H16s", algorithm.algorithm_id, message_id),
        backend=default_backend(),
    ).derive(source_key)
|
Derives the data encryption key using the defined algorithm .
|
59,524 |
def encrypt(**kwargs):
    """Encrypt and serialize the provided plaintext in one shot.

    Returns the ciphertext and the message header.
    """
    with StreamEncryptor(**kwargs) as encryptor:
        ciphertext = encryptor.read()
    return ciphertext, encryptor.header
|
Encrypts and serializes provided plaintext .
|
59,525 |
def decrypt(**kwargs):
    """Deserialize and decrypt the provided ciphertext in one shot.

    Returns the plaintext and the message header.
    """
    with StreamDecryptor(**kwargs) as decryptor:
        plaintext = decryptor.read()
    return plaintext, decryptor.header
|
Deserializes and decrypts provided ciphertext .
|
59,526 |
def cycle_file(key_arn, source_plaintext_filename, botocore_session=None):
    """Encrypt a file under a KMS provider plus a custom static provider, then decrypt with each.

    Both providers encrypt the plaintext, so either alone can decrypt it.
    Returns the ciphertext filename and both round-tripped plaintext filenames.
    """
    ciphertext_filename = source_plaintext_filename + ".encrypted"
    cycled_kms_plaintext_filename = source_plaintext_filename + ".kms.decrypted"
    cycled_static_plaintext_filename = source_plaintext_filename + ".static.decrypted"

    kms_kwargs = dict(key_ids=[key_arn])
    if botocore_session is not None:
        kms_kwargs["botocore_session"] = botocore_session
    kms_master_key_provider = aws_encryption_sdk.KMSMasterKeyProvider(**kms_kwargs)

    # Static provider with a random key id, chained under the KMS provider.
    static_key_id = os.urandom(8)
    static_master_key_provider = StaticRandomMasterKeyProvider()
    static_master_key_provider.add_master_key(static_key_id)
    kms_master_key_provider.add_master_key_provider(static_master_key_provider)

    with open(source_plaintext_filename, "rb") as plaintext, open(ciphertext_filename, "wb") as ciphertext:
        with aws_encryption_sdk.stream(
            source=plaintext, mode="e", key_provider=kms_master_key_provider
        ) as encryptor:
            for chunk in encryptor:
                ciphertext.write(chunk)

    with open(ciphertext_filename, "rb") as ciphertext, open(cycled_kms_plaintext_filename, "wb") as plaintext:
        with aws_encryption_sdk.stream(
            source=ciphertext, mode="d", key_provider=aws_encryption_sdk.KMSMasterKeyProvider(**kms_kwargs)
        ) as kms_decryptor:
            for chunk in kms_decryptor:
                plaintext.write(chunk)

    with open(ciphertext_filename, "rb") as ciphertext, open(cycled_static_plaintext_filename, "wb") as plaintext:
        with aws_encryption_sdk.stream(
            source=ciphertext, mode="d", key_provider=static_master_key_provider
        ) as static_decryptor:
            for chunk in static_decryptor:
                plaintext.write(chunk)

    assert filecmp.cmp(source_plaintext_filename, cycled_kms_plaintext_filename)
    assert filecmp.cmp(source_plaintext_filename, cycled_static_plaintext_filename)
    assert all(
        pair in kms_decryptor.header.encryption_context.items()
        for pair in encryptor.header.encryption_context.items()
    )
    assert all(
        pair in static_decryptor.header.encryption_context.items()
        for pair in encryptor.header.encryption_context.items()
    )
    return ciphertext_filename, cycled_kms_plaintext_filename, cycled_static_plaintext_filename
|
Encrypts and then decrypts a file using a KMS master key provider and a custom static master key provider . Both master key providers are used to encrypt the plaintext file so either one alone can decrypt it .
|
59,527 |
def _get_raw_key(self, key_id):
    """Return a WrappingKey for ``key_id``, generating and caching a random RSA key on first use."""
    try:
        static_key = self._static_keys[key_id]
    except KeyError:
        # First request for this key id: generate and cache a 4096-bit RSA key.
        private_key = rsa.generate_private_key(
            public_exponent=65537, key_size=4096, backend=default_backend()
        )
        static_key = private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption(),
        )
        self._static_keys[key_id] = static_key
    return WrappingKey(
        wrapping_algorithm=WrappingAlgorithm.RSA_OAEP_SHA1_MGF1,
        wrapping_key=static_key,
        wrapping_key_type=EncryptionKeyType.PRIVATE,
    )
|
Retrieves a static randomly generated RSA key for the specified key id .
|
59,528 |
def month_boundaries(dt=None):
    """Return a 2-tuple of datetimes for the first and last day of ``dt``'s month.

    :param dt: reference date; defaults to today
    """
    dt = dt or date.today()
    # monthrange returns (first weekday, number of days); only the count is needed.
    _, ndays = calendar.monthrange(dt.year, dt.month)
    start = datetime(dt.year, dt.month, 1)
    return (start, start + timedelta(ndays - 1))
|
Return a 2-tuple containing the datetime instances for the first and last dates of the current month, or using dt as a reference.
|
59,529 |
def css_class_cycler():
    """Build a defaultdict keyed by EventType abbreviation whose values cycle even/odd CSS classes."""
    fmt = 'evt-{0}-{1}'.format
    pairs = (
        (e.abbr, itertools.cycle((fmt(e.abbr, 'even'), fmt(e.abbr, 'odd'))))
        for e in EventType.objects.all()
    )
    return defaultdict(default_css_class_cycler, pairs)
|
Return a dictionary keyed by EventType abbreviations whose values are an iterable or cycle of CSS class names .
|
59,530 |
def create_event(title, event_type, description='', start_time=None, end_time=None, note=None, **rrule_params):
    """Create an Event, optionally creating its EventType and associated Occurrences.

    ``event_type`` may be an EventType instance or an ``(abbr, label)`` tuple.
    Occurrence creation rules match ``Event.add_occurrences``.
    """
    if isinstance(event_type, tuple):
        # Idiom fix: the 'created' flag from get_or_create was unused.
        event_type, _ = EventType.objects.get_or_create(abbr=event_type[0], label=event_type[1])
    event = Event.objects.create(title=title, description=description, event_type=event_type)
    if note is not None:
        event.notes.create(note=note)
    start_time = start_time or datetime.now().replace(minute=0, second=0, microsecond=0)
    end_time = end_time or (start_time + swingtime_settings.DEFAULT_OCCURRENCE_DURATION)
    event.add_occurrences(start_time, end_time, **rrule_params)
    return event
|
Convenience function to create an Event optionally create an EventType and associated Occurrence s . Occurrence creation rules match those for Event . add_occurrences .
|
59,531 |
def add_occurrences(self, start_time, end_time, **rrule_params):
    """Add one or more occurrences using a dateutil.rrule-style API.

    Without ``count`` or ``until`` a single occurrence is created;
    otherwise occurrences follow the rrule (defaulting to daily).
    """
    count = rrule_params.get('count')
    until = rrule_params.get('until')
    if not (count or until):
        self.occurrence_set.create(start_time=start_time, end_time=end_time)
        return
    rrule_params.setdefault('freq', rrule.DAILY)
    delta = end_time - start_time
    occurrences = [
        Occurrence(start_time=ev, end_time=ev + delta, event=self)
        for ev in rrule.rrule(dtstart=start_time, **rrule_params)
    ]
    self.occurrence_set.bulk_create(occurrences)
|
Add one or more occurrences to the event using an API comparable to dateutil.rrule.
|
59,532 |
def daily_occurrences(self, dt=None):
    """Shortcut for ``Occurrence.objects.daily_occurrences`` scoped to this event."""
    return Occurrence.objects.daily_occurrences(dt=dt, event=self)
|
Convenience method wrapping Occurrence . objects . daily_occurrences .
|
59,533 |
def daily_occurrences(self, dt=None, event=None):
    """Return a queryset of occurrences overlapping the day of ``dt`` (defaults to now).

    Matches occurrences that start that day, end that day, or span it entirely.
    """
    dt = dt or datetime.now()
    start = datetime(dt.year, dt.month, dt.day)
    end = start.replace(hour=23, minute=59, second=59)
    qs = self.filter(
        models.Q(start_time__gte=start, start_time__lte=end)
        | models.Q(end_time__gte=start, end_time__lte=end)
        | models.Q(start_time__lt=start, end_time__gt=end)
    )
    return qs.filter(event=event) if event else qs
|
Returns a queryset of for instances that have any overlap with a particular day .
|
59,534 |
def event_listing(request, template='swingtime/event_list.html', events=None, **extra_context):
    """Render a listing of events; defaults to all events when none are supplied."""
    extra_context['events'] = events or Event.objects.all()
    return render(request, template, extra_context)
|
View all events .
|
59,535 |
def event_view(request, pk, template='swingtime/event_detail.html',
               event_form_class=forms.EventForm,
               recurrence_form_class=forms.MultipleOccurrenceForm):
    """Show an Event; handle '_update' (edit event) and '_add' (add occurrences) POSTs."""
    event = get_object_or_404(Event, pk=pk)
    event_form = recurrence_form = None
    if request.method == 'POST':
        if '_update' in request.POST:
            event_form = event_form_class(request.POST, instance=event)
            if event_form.is_valid():
                event_form.save(event)
                return http.HttpResponseRedirect(request.path)
        elif '_add' in request.POST:
            recurrence_form = recurrence_form_class(request.POST)
            if recurrence_form.is_valid():
                recurrence_form.save(event)
                return http.HttpResponseRedirect(request.path)
        else:
            return http.HttpResponseBadRequest('Bad Request')
    # Invalid bound forms (if any) are re-rendered; otherwise fresh forms.
    data = {
        'event': event,
        'event_form': event_form or event_form_class(instance=event),
        'recurrence_form': recurrence_form or recurrence_form_class(initial={'dtstart': datetime.now()}),
    }
    return render(request, template, data)
|
View an Event instance and optionally update either the event or its occurrences .
|
59,536 |
def occurrence_view(request, event_pk, pk, template='swingtime/occurrence_detail.html',
                    form_class=forms.SingleOccurrenceForm):
    """Show a specific occurrence and apply updates on a valid POST."""
    occurrence = get_object_or_404(Occurrence, pk=pk, event__pk=event_pk)
    if request.method == 'POST':
        form = form_class(request.POST, instance=occurrence)
        if form.is_valid():
            form.save()
            return http.HttpResponseRedirect(request.path)
    else:
        form = form_class(instance=occurrence)
    return render(request, template, {'occurrence': occurrence, 'form': form})
|
View a specific occurrence and optionally handle any updates .
|
59,537 |
def add_event(request, template='swingtime/add_event.html',
              event_form_class=forms.EventForm,
              recurrence_form_class=forms.MultipleOccurrenceForm):
    """Create a new Event plus one or more occurrences; GET may seed ``dtstart``."""
    dtstart = None
    if request.method == 'POST':
        event_form = event_form_class(request.POST)
        recurrence_form = recurrence_form_class(request.POST)
        if event_form.is_valid() and recurrence_form.is_valid():
            event = event_form.save()
            recurrence_form.save(event)
            return http.HttpResponseRedirect(event.get_absolute_url())
    else:
        if 'dtstart' in request.GET:
            try:
                dtstart = parser.parse(request.GET['dtstart'])
            except (TypeError, ValueError) as exc:
                # Unparseable dtstart falls back to 'now' below.
                logging.warning(exc)
        dtstart = dtstart or datetime.now()
        event_form = event_form_class()
        recurrence_form = recurrence_form_class(initial={'dtstart': dtstart})
    return render(
        request, template,
        {'dtstart': dtstart, 'event_form': event_form, 'recurrence_form': recurrence_form}
    )
|
Add a new Event instance and 1 or more associated Occurrence s .
|
59,538 |
def _datetime_view(request, template, dt, timeslot_factory=None, items=None, params=None):
    """Render a time-slot grid for day ``dt`` with prev/next-day navigation."""
    timeslot_factory = timeslot_factory or utils.create_timeslot_table
    params = params or {}
    context = {
        'day': dt,
        'next_day': dt + timedelta(days=+1),
        'prev_day': dt + timedelta(days=-1),
        'timeslots': timeslot_factory(dt, items, **params),
    }
    return render(request, template, context)
|
Build a time slot grid representation for the given datetime dt . See utils . create_timeslot_table documentation for items and params .
|
59,539 |
def month_view(request, year, month, template='swingtime/monthly_view.html', queryset=None):
    """Render a traditional month calendar grid with temporal navigation context."""
    year, month = int(year), int(month)
    cal = calendar.monthcalendar(year, month)
    dtstart = datetime(year, month, 1)
    last_day = max(cal[-1])
    dtend = datetime(year, month, last_day)

    queryset = queryset._clone() if queryset is not None else Occurrence.objects.select_related()
    occurrences = queryset.filter(start_time__year=year, start_time__month=month)

    def start_day(o):
        return o.start_time.day

    by_day = dict([(dt, list(o)) for dt, o in itertools.groupby(occurrences, start_day)])
    data = {
        'today': datetime.now(),
        'calendar': [[(d, by_day.get(d, [])) for d in row] for row in cal],
        'this_month': dtstart,
        'next_month': dtstart + timedelta(days=+last_day),
        'last_month': dtstart + timedelta(days=-1),
    }
    return render(request, template, data)
|
Render a traditional calendar grid view with temporal navigation variables.
|
59,540 |
def cast(self, value, custom_formatters=None, strict=True):
    """Cast ``value`` to this schema's type, honoring nullability and the default.

    Empty strings cast to None for non-string schemas.
    """
    if value is None:
        if not self.nullable:
            raise InvalidSchemaValue("Null value for non-nullable schema", value, self.type)
        return self.default
    cast_mapping = self.get_cast_mapping(custom_formatters=custom_formatters, strict=strict)
    if self.type is not SchemaType.STRING and value == '':
        return None
    cast_callable = cast_mapping[self.type]
    try:
        return cast_callable(value)
    except ValueError:
        raise InvalidSchemaValue("Failed to cast value {value} to type {type}", value, self.type)
|
Cast value to schema type
|
59,541 |
def unmarshal(self, value, custom_formatters=None, strict=True):
    """Cast ``value`` and validate it against the schema's enum, warning if deprecated."""
    if self.deprecated:
        warnings.warn("The schema is deprecated", DeprecationWarning)
    casted = self.cast(value, custom_formatters=custom_formatters, strict=strict)
    if casted is None and not self.required:
        return None
    if self.enum and casted not in self.enum:
        raise InvalidSchemaValue("Value {value} not in enum choices: {type}", value, self.enum)
    return casted
|
Unmarshal parameter from the value .
|
59,542 |
def get_operation_pattern(server_url, request_url_pattern):
    """Return the request URL pattern with the server URL prefix removed.

    Absolute server URLs are stripped from the full pattern; relative ones
    from its path-and-query portion.
    """
    # Robustness fix: the original's ``server_url[-1] == "/"`` raised
    # IndexError on an empty server URL; endswith is safe and equivalent.
    if server_url.endswith("/"):
        server_url = server_url[:-1]
    if is_absolute(server_url):
        return request_url_pattern.replace(server_url, "", 1)
    return path_qs(request_url_pattern).replace(server_url, "", 1)
|
Return an updated request URL pattern with the server URL removed .
|
59,543 |
def check(definition, data, *args, **kwargs):
    """Check whether ``data`` conforms to ``definition`` via its checker."""
    checker = checker_factory(definition)
    return checker(data, *args, **kwargs)
|
Checks if the input follows the definition
|
59,544 |
def check(self, data):
    """Return True if ``data`` (joined into a string when iterable) matches the regexp."""
    if isinstance(data, Iterable):
        data = "".join(str(x) for x in data)
    try:
        data = str(data)
    except UnicodeDecodeError:
        return False
    return bool(data and self.__regexp.match(data))
|
returns True if any match any regexp
|
59,545 |
def _build_item_closure(itemset, productionset):
    """Build the LR(0) closure of ``itemset`` over ``productionset``.

    Repeatedly adds items for productions whose left side matches some
    item's next symbol, until a fixed point is reached.
    """
    if not isinstance(itemset, LR0ItemSet):
        raise TypeError
    import copy
    resultset = copy.copy(itemset)
    changed = True
    while changed:
        changed = False
        for currentitem in resultset.itemlist:
            nextsymbol = currentitem.next_symbol()
            # NOTE(review): 'break' abandons the remaining items once one
            # complete item is seen — confirm this is intended vs. 'continue'.
            if nextsymbol is None:
                break
            for rule in productionset.productions:
                newitem = LR0Item(rule)
                if rule.leftside[0] == nextsymbol and newitem not in resultset.itemlist:
                    resultset.append_item(newitem)
                    changed = True
    return resultset
|
Build input itemset closure
|
59,546 |
def item_set_goto(itemset, inputsymbol, productionset):
    """Compute goto(itemset, inputsymbol): advance matching items past the symbol and close the result."""
    resultset = LR0ItemSet()
    for item in itemset.itemlist:
        if item.next_symbol() == inputsymbol:
            resultset.append_item(LR0Item(item.rule, item.position + 1))
    return _build_item_closure(resultset, productionset)
|
returns an itemset locate inside itemset every element with inputsymbol following cursor for every located item append its itemclosure
|
59,547 |
def _slr_build_parser_table(productionset):
    """Build an SLR parser table from the grammar's production set.

    Raises on shift/reduce or reduce/reduce conflicts (more than one
    action for a (state, symbol) pair).
    """
    result = ParserTable()
    statesset = build_states_sets(productionset)
    for itemindex, itemset in enumerate(statesset):
        LOG.debug("_slr_build_parser_table: Evaluating itemset:" + str(itemset))
        for symbol in productionset.getSymbols() + [EndSymbol()]:
            numberoptions = 0
            for lritem in itemset.itemlist:
                # Shift: terminal with an outgoing transition on this symbol.
                if isinstance(symbol, TerminalSymbol) and lritem.next_symbol() == symbol and itemset.has_transition(symbol):
                    destinationstate = statesset.index(itemset.get_transition(symbol))
                    result.append(itemindex, symbol, "Shift", destinationstate)
                    numberoptions += 1
                # Goto: non-terminal with an outgoing transition on this symbol.
                if isinstance(symbol, NonTerminalSymbol) and lritem.next_symbol() == symbol and itemset.has_transition(symbol):
                    destinationstate = statesset.index(itemset.get_transition(symbol))
                    result.append_goto(itemindex, symbol, destinationstate)
                # Reduce: completed item; add one reduce per FOLLOW entry.
                if lritem.previous_symbol() == symbol and lritem.is_last_position() and symbol != Extended_S:
                    for x in productionset.next_lookup(symbol):
                        if isinstance(x, Grammar):
                            result.append(itemindex, TerminalSymbol(x), "Reduce", None, lritem.rule)
                        elif isinstance(x, Symbol):
                            result.append(itemindex, x, "Reduce", None, lritem.rule)
                        else:
                            raise TypeError(x)
                    numberoptions += 1
                # Accept: end of the augmented start production.
                if symbol == EndSymbol() and lritem.previous_symbol() == productionset.initialsymbol and lritem.next_symbol() == EndSymbol():
                    result.append(itemindex, symbol, "Accept", None)
                    numberoptions += 1
            if not numberoptions:
                LOG.info("No rule found to generate a new parsertable entry ")
                LOG.debug("symbol: " + str(symbol))
                LOG.debug("itemset: " + str(itemset))
            elif numberoptions > 1:
                raise Exception("LR Conflict %s" % symbol)
    return result
|
SLR method to build parser table
|
59,548 |
def append(self, state, symbol, action, destinationstate, production=None):
    """Append a new rule (ACTION entry) to the parser table.

    :param state: source state index
    :param symbol: grammar symbol the action applies to
    :param action: one of None, "Accept", "Shift", "Reduce"
    :param destinationstate: target state for Shift (ignored otherwise)
    :param production: the production to reduce by; required when action == "Reduce"
    :raises TypeError: on an unknown action, a missing production for Reduce,
        or a non-Symbol symbol
    """
    if action not in (None, "Accept", "Shift", "Reduce"):
        raise TypeError
    rule = {"action": action, "dest": destinationstate}
    if action == "Reduce":
        # BUG FIX: the original tested `rule is None`, which can never be true
        # because `rule` was just assigned a dict. The intent (per the error
        # message) is to require the `production` argument for Reduce entries.
        if production is None:
            raise TypeError("Expected production parameter")
        rule["rule"] = production
    # Unwrap single-element grammar wrappers into a plain TerminalSymbol.
    while isinstance(symbol, TerminalSymbol) and isinstance(symbol.gd, Iterable) and len(symbol.gd) == 1 and isinstance(list(symbol.gd)[0], Grammar):
        symbol = TerminalSymbol(list(symbol.gd)[0])
    if not isinstance(symbol, Symbol):
        raise TypeError("Expected symbol, got %s" % symbol)
    self[state][symbol] = rule
|
Appends a new rule
|
59,549 |
def insert ( self , state , token ) :
    """Look up the table entry for (state, token) and return the action dict.

    Returns {"action": "Fail"} when no terminal in the state matches the token.
    :raises Exception: when more than one terminal symbol matches the token.
    """
    if token == EndSymbol ( ) :
        return self [ state ] [ EndSymbol ( ) ]
    from pydsl . check import check
    # Candidate terminals in this state whose grammar definition accepts the token.
    symbol_list = [ x for x in self [ state ] if isinstance ( x , TerminalSymbol ) and check ( x . gd , [ token ] ) ]
    if not symbol_list :
        return { "action" : "Fail" }
    if len ( symbol_list ) > 1 :
        raise Exception ( "Multiple symbols matches input" )
    symbol = symbol_list [ 0 ]
    return self [ state ] [ symbol ]
|
Changes the internal state and returns the corresponding action.
|
59,550 |
def append_item(self, item):
    """Append a new item to this set; only LR0Item instances are accepted."""
    if isinstance(item, LR0Item):
        self.itemlist.append(item)
    else:
        raise TypeError
|
Append new item to set
|
59,551 |
def append_transition(self, symbol, targetset):
    """Register a transition for *symbol* unless one is already present."""
    if symbol not in self.transitions:
        self.transitions[symbol] = targetset
|
Appends a transition
|
59,552 |
def __parse ( self , tokenlist ) :
    """Run the shift/reduce automaton over tokenlist; True iff the input is accepted."""
    tokenlist = [ x for x in tokenlist ]
    if not isinstance ( tokenlist , list ) :
        raise TypeError ( "Expected list, got %s" % tokenlist . __class__ . __name__ )
    LOG . debug ( "get_trees: checking list: " + str ( tokenlist ) )
    # Stack of (state, symbol) pairs; state 0 with the augmented start symbol.
    stack = [ ( 0 , Extended_S ) ]
    while True :
        state = stack [ - 1 ] [ 0 ]
        # Current lookahead: next pending token, or EndSymbol once input is exhausted.
        if len ( tokenlist ) :
            token = tokenlist [ 0 ]
        else :
            token = EndSymbol ( )
        newdic = self . __parsertable . insert ( state , token )
        action = newdic [ "action" ]
        if action == "Fail" :
            return False
        elif action == "Accept" :
            return True
        if action == "Reduce" :
            reductionrule = newdic [ "rule" ]
            # Pop one stack entry per symbol on the production's right side.
            for rsymbol in reversed ( reductionrule . rightside ) :
                state , symbol = stack . pop ( )
            state = stack [ - 1 ] [ 0 ]
            # GOTO on the reduced nonterminal, then push it.
            state = self . __parsertable . goto ( state , reductionrule . leftside [ 0 ] )
            stack . append ( ( state , reductionrule . leftside [ 0 ] ) )
        elif action == "Shift" :
            # Consume the token and move to the destination state.
            stack . append ( ( newdic [ 'dest' ] , tokenlist . pop ( 0 ) ) )
        else :
            raise ValueError ( "Unknown action" )
    return False
|
see parent docstring
|
59,553 |
def graph_from_alphabet ( alphabet , base ) :
    """Build a directed graph connecting *alphabet* down to *base* through
    intermediate alphabets (edges point from each alphabet toward its parts)."""
    if not isinstance ( alphabet , Choice ) :
        raise TypeError ( alphabet . __class__ . __name__ )
    if not isinstance ( base , Choice ) :
        raise TypeError ( base . __class__ . __name__ )
    import networkx
    result = networkx . DiGraph ( )
    current_alphabet = alphabet
    # Worklist of alphabets/elements still to expand.
    pending_stack = set ( current_alphabet )
    while pending_stack :
        current_alphabet = pending_stack . pop ( )
        if current_alphabet == base :
            continue
        if current_alphabet in base :
            # Directly a member of the base alphabet: terminal edge.
            result . add_edge ( current_alphabet , base )
        elif isinstance ( current_alphabet , Choice ) :
            for element in current_alphabet :
                if element in base :
                    result . add_edge ( current_alphabet , base )
                else :
                    result . add_edge ( current_alphabet , element )
                    pending_stack . add ( element )
        elif current_alphabet . alphabet :
            # Non-choice grammar defined over another alphabet: follow it down.
            result . add_edge ( current_alphabet , current_alphabet . alphabet )
            pending_stack . add ( current_alphabet . alphabet )
    return result
|
Creates a graph that connects the base with the target through alphabets. If every target is connected to any of the inputs, it creates the independent paths.
|
59,554 |
def is_subset(a, b):
    """True when interval *a* lies inside *b* and *b* is strictly larger.

    "Excluding same size": identical intervals are not considered subsets.
    """
    wider_on_right = b.left <= a.left and b.right > a.right
    wider_on_left = b.left < a.left and b.right >= a.right
    return wider_on_right or wider_on_left
|
Excluding same size
|
59,555 |
def digraph_walker_backwards ( graph , element , call_back ) :
    """Visit *element* and, recursively, all of its predecessors, invoking
    call_back(graph, node) on each.

    NOTE(review): each predecessor receives call_back here AND again at the
    top of its own recursive call, and nodes reachable along multiple paths
    are visited multiple times — confirm callers tolerate duplicate visits.
    """
    call_back ( graph , element )
    for predecessor in graph . predecessors ( element ) :
        call_back ( graph , predecessor )
    for predecessor in graph . predecessors ( element ) :
        digraph_walker_backwards ( graph , predecessor , call_back )
|
Visits every element guaranteeing that the previous elements have been visited before
|
59,556 |
def first_lookup ( self , symbol , size = 1 ) :
    """Return a Choice with the FIRST set (first terminal grammar definitions)
    producible by *symbol*.

    :raises KeyError: when the symbol produces nothing in this grammar.
    """
    # Terminals (and the null symbol) are their own FIRST set.
    if isinstance ( symbol , ( TerminalSymbol , NullSymbol ) ) :
        return [ symbol . gd ]
    result = [ ]
    for production in self . productions :
        if production . leftside [ 0 ] != symbol :
            continue
        for right_symbol in production . rightside :
            # Guard against direct left recursion on the same symbol.
            if right_symbol == symbol :
                break
            current_symbol_first = self . first_lookup ( right_symbol , size )
            import collections
            from pydsl . grammar . definition import String
            # Flatten iterable FIRST results (but a String counts as one grammar).
            if isinstance ( current_symbol_first , collections . Iterable ) and not isinstance ( current_symbol_first , String ) :
                result += current_symbol_first
            else :
                result . append ( current_symbol_first )
            # Only continue to the next right-side symbol when this one is nullable.
            if isinstance ( current_symbol_first , String ) or not isinstance ( current_symbol_first , collections . Iterable ) or ( NullSymbol not in current_symbol_first ) :
                break
    if not result :
        raise KeyError ( "Symbol doesn't exist in this grammar" )
    return Choice ( result )
|
Returns a Grammar Definition with the first n terminal symbols produced by the input symbol
|
59,557 |
def next_lookup ( self , symbol ) :
    """Return the FOLLOW set: the terminal choices that may appear right
    after *symbol* within this grammar definition."""
    result = [ ]
    # The initial symbol may be followed by end-of-input.
    if symbol == self . initialsymbol :
        result . append ( EndSymbol ( ) )
    for production in self . productions :
        if symbol in production . rightside :
            nextindex = production . rightside . index ( symbol ) + 1
            while nextindex < len ( production . rightside ) :
                nextsymbol = production . rightside [ nextindex ]
                firstlist = self . first_lookup ( nextsymbol )
                # FIRST(next) minus the null symbol contributes to FOLLOW(symbol).
                cleanfirstlist = Choice ( [ x for x in firstlist if x != NullSymbol ( ) ] )
                result . append ( cleanfirstlist )
                if NullSymbol ( ) not in firstlist :
                    break
                # NOTE(review): `nextindex` is never advanced, so a nullable
                # symbol here loops forever — confirm whether nullable
                # right-side symbols can occur in practice.
            else :
                # Loop ended without break (symbol is last, or everything after
                # it is nullable): FOLLOW(leftside) also follows this symbol.
                result += self . next_lookup ( production . leftside [ 0 ] )
    return result
|
Returns the next TerminalSymbols produced by the input symbol within this grammar definition
|
59,558 |
def main_production(self):
    """Return the production whose left side is the initial symbol.

    :raises IndexError: when no such production exists.
    """
    for production in self.productions:
        if production.leftside[0] == self._initialsymbol:
            return production
    raise IndexError
|
Returns main rule
|
59,559 |
def getSymbols(self):
    """Return every symbol used by the productions (first-seen order,
    deduplicated) followed by the terminal symbols."""
    seen = []
    for production in self.productions:
        for candidate in production.leftside + production.rightside:
            if candidate not in seen:
                seen.append(candidate)
    return seen + self.terminal_symbols
|
Returns every symbol
|
59,560 |
def extract_alphabet ( alphabet , inputdata , fixed_start = False ) :
    """Return PositionTokens for every slice of *inputdata* that lexes to a
    single token of *alphabet*.

    :param fixed_start: when True, only slices starting at position 0 are tried.
    """
    if not inputdata :
        return [ ]
    base_alphabet = alphabet . alphabet
    lexer = lexer_factory ( alphabet , base_alphabet )
    totallen = len ( inputdata )
    # Slice length bounds: everything from 1 char up to the whole input.
    maxl = totallen
    minl = 1
    if fixed_start :
        max_start = 1
    else :
        max_start = totallen
    result = [ ]
    for i in range ( max_start ) :
        for j in range ( i + minl , min ( i + maxl , totallen ) + 1 ) :
            try :
                lexed = lexer ( inputdata [ i : j ] )
                if lexed and len ( lexed ) == 1 :
                    result . append ( ( i , j , inputdata [ i : j ] , lexed [ 0 ] . gd ) )
                elif lexed :
                    raise Exception
            # NOTE(review): bare except treats any lexer failure as "no match";
            # it also hides unexpected errors — confirm this is intentional.
            except :
                continue
    # Drop matches fully contained in a larger match.
    result = filter_subsets ( result )
    return [ PositionToken ( content , gd , left , right ) for ( left , right , content , gd ) in result ]
|
Receives a sequence and an alphabet returns a list of PositionTokens with all of the parts of the sequence that are a subset of the alphabet
|
59,561 |
def extract(grammar, inputdata, fixed_start=False, return_first=False):
    """Return PositionTokens for every slice of *inputdata* recognized by *grammar*.

    :param fixed_start: when True, only slices starting at position 0 are tried.
    :param return_first: when True, return the first matching PositionToken
        instead of a list.
    """
    if not inputdata:
        return []
    checker = checker_factory(grammar)
    totallen = len(inputdata)
    # Removed dead code: an unused `from pydsl.grammar.PEG import Choice` and a
    # no-op try/except that assigned minl = 1 on both paths.
    try:
        maxl = grammar.maxsize or totallen
    except NotImplementedError:
        maxl = totallen
    minl = 1
    max_start = 1 if fixed_start else totallen
    result = []
    for start in range(max_start):
        for end in range(start + minl, min(start + maxl, totallen) + 1):
            fragment = inputdata[start:end]  # renamed from `slice`, which shadowed the builtin
            if checker.check(fragment):
                token = PositionToken(fragment, grammar, start, end)
                if return_first:
                    return token
                result.append(token)
    return result
|
Receives a sequence and a grammar returns a list of PositionTokens with all of the parts of the sequence that are recognized by the grammar
|
59,562 |
def append_position_to_token_list(token_list):
    """Convert a list of Token into a list of PositionToken, assuming every
    token occupies exactly one position (size == 1)."""
    result = []
    for position, token in enumerate(token_list):
        result.append(PositionToken(token.content, token.gd, position, position + 1))
    return result
|
Converts a list of Token into a list of PositionToken, assuming each token has size == 1.
|
59,563 |
def load_python_file ( moduleobject ) :
    """Try to create an indexable grammar/translator instance from a module.

    *moduleobject* may be a module or a module name (loaded via load_module).
    The module must define `iclass`, which selects the wrapper class built
    from the module's public attributes.
    """
    if isinstance ( moduleobject , str ) :
        moduleobject = load_module ( moduleobject )
    if not hasattr ( moduleobject , "iclass" ) :
        raise KeyError ( "Element" + str ( moduleobject ) )
    iclass = getattr ( moduleobject , "iclass" )
    # Public attribute names: __all__ when present, else non-underscore names.
    mylist = getattr ( moduleobject , "__all__" , None ) or list ( filter ( lambda x : x [ : 1 ] != "_" , ( dir ( moduleobject ) ) ) )
    mylist . remove ( 'iclass' )
    resultdic = { }
    for x in mylist :
        resultdic [ x ] = getattr ( moduleobject , x )
    # Dispatch on the declared instance class.
    if iclass == "SymbolGrammar" :
        from pydsl . grammar . BNF import BNFGrammar
        return BNFGrammar ( ** resultdic )
    elif iclass == "PLY" :
        from pydsl . grammar . definition import PLYGrammar
        return PLYGrammar ( moduleobject )
    elif iclass in [ "PythonGrammar" ] :
        from pydsl . grammar . definition import PythonGrammar
        return PythonGrammar ( resultdic )
    elif iclass == "PythonTranslator" :
        # Translators are returned as a plain attribute dict.
        return resultdic
    elif iclass == "parsley" :
        from pydsl . grammar . parsley import ParsleyGrammar
        return ParsleyGrammar ( ** resultdic )
    elif iclass == "pyparsing" :
        return resultdic [ 'root_symbol' ]
    else :
        raise ValueError ( str ( moduleobject ) )
|
Try to create an indexable instance from a module
|
59,564 |
def load_bnf_file(filepath, repository=None):
    """Convert a bnf file into a BNFGrammar (production set) instance."""
    with open(filepath, 'r') as mlfile:
        linelist = list(mlfile)
    return strlist_to_production_set(linelist, repository)
|
Converts a bnf file into a BNFGrammar instance
|
59,565 |
def load_re_from_file ( filepath ) :
    """Convert a .re file into a RegularExpression grammar instance.

    File format: optional `//` comments, blank lines ignored, an optional
    `#flags` line (e.g. `#i` for IGNORECASE), and exactly one pattern line.
    """
    regexp = None
    with open ( filepath , 'r' ) as mlfile :
        flagstr = ""
        for line in mlfile :
            # Strip trailing // comments.
            cleanline = re . sub ( "//.*$" , "" , line )
            if re . search ( "^\s*$" , cleanline ) :
                continue
            if re . search ( "^#.*$" , cleanline ) :
                # Flag line: everything after the leading '#'.
                flagstr = cleanline [ 1 : ]
                continue
            if regexp is not None :
                # A second pattern line is an error.
                raise Exception ( "Regular expression file format error" )
            else :
                regexp = cleanline . rstrip ( '\n' )
    flags = 0
    if "i" in flagstr :
        flags |= re . I
    from pydsl . grammar . definition import RegularExpression
    return RegularExpression ( regexp , flags )
|
Converts a re file to Regular Grammar instance
|
59,566 |
def url_for(context, __route_name, **parts):
    """Filter for generating urls.

    Route parts must be str or int; a `query_` part, when present, is applied
    as the query string of the resulting URL.
    """
    app = context['app']
    query = parts.pop('query_', None)
    for key, val in list(parts.items()):
        if isinstance(val, str) or type(val) is int:
            parts[key] = str(val)
        else:
            raise TypeError("argument value should be str or int, got {} -> [{}] {!r}".format(key, type(val), val))
    url = app.router[__route_name].url_for(**parts)
    if query:
        url = url.with_query(query)
    return url
|
Filter for generating urls .
|
59,567 |
def static_url(context, static_file_path):
    """Filter for generating urls for static files.

    Requires app['static_root_url'] to be configured; raises RuntimeError otherwise.
    """
    app = context['app']
    try:
        root = app['static_root_url']
    except KeyError:
        raise RuntimeError(
            "app does not define a static root url "
            "'static_root_url', you need to set the url root "
            "with app['static_root_url'] = '<static root>'.") from None
    return '/'.join((root.rstrip('/'), static_file_path.lstrip('/')))
|
Filter for generating urls for static files .
|
59,568 |
def init_gl ( self ) : "allocate OpenGL resources" self . vr_system = openvr . init ( openvr . VRApplication_Scene ) w , h = self . vr_system . getRecommendedRenderTargetSize ( ) self . left_fb = OpenVrFramebuffer ( w , h , multisample = self . multisample ) self . right_fb = OpenVrFramebuffer ( w , h , multisample = self . multisample ) self . compositor = openvr . VRCompositor ( ) if self . compositor is None : raise Exception ( "Unable to create compositor" ) self . left_fb . init_gl ( ) self . right_fb . init_gl ( ) zNear = 0.2 zFar = 500.0 self . projection_left = numpy . asarray ( matrixForOpenVrMatrix ( self . vr_system . getProjectionMatrix ( openvr . Eye_Left , zNear , zFar ) ) ) self . projection_right = numpy . asarray ( matrixForOpenVrMatrix ( self . vr_system . getProjectionMatrix ( openvr . Eye_Right , zNear , zFar ) ) ) self . view_left = matrixForOpenVrMatrix ( self . vr_system . getEyeToHeadTransform ( openvr . Eye_Left ) ) . I self . view_right = matrixForOpenVrMatrix ( self . vr_system . getEyeToHeadTransform ( openvr . Eye_Right ) ) . I for actor in self : actor . init_gl ( )
|
allocate OpenGL resources
|
59,569 |
def display ( self ) : "Renders the scene once every refresh" self . compositor . waitGetPoses ( self . poses , openvr . k_unMaxTrackedDeviceCount , None , 0 ) hmd_pose0 = self . poses [ openvr . k_unTrackedDeviceIndex_Hmd ] if not hmd_pose0 . bPoseIsValid : return if True : glClearColor ( 0.8 , 0.4 , 0.4 , 0 ) glClear ( GL_COLOR_BUFFER_BIT ) glFlush ( ) glBindFramebuffer ( GL_FRAMEBUFFER , self . fb ) glClearColor ( 0.8 , 0.4 , 0.4 , 0 ) glClear ( GL_COLOR_BUFFER_BIT ) glBindFramebuffer ( GL_FRAMEBUFFER , 0 ) self . compositor . submit ( openvr . Eye_Left , self . texture ) self . compositor . submit ( openvr . Eye_Right , self . texture ) glBindFramebuffer ( GL_FRAMEBUFFER , 0 )
|
Renders the scene once every refresh
|
59,570 |
def key_press(self, key, x, y):
    """Close the application when the player presses ESCAPE."""
    if ord(key) != 27:
        return
    if bool(glutLeaveMainLoop):
        glutLeaveMainLoop()
    else:
        raise Exception("Application quit")
|
Close the application when the player presses ESCAPE
|
59,571 |
def key_callback(self, window, key, scancode, action, mods):
    """Press ESCAPE to quit the application."""
    escape_pressed = key == glfw.KEY_ESCAPE and action == glfw.PRESS
    if escape_pressed:
        glfw.SetWindowShouldClose(self.window, True)
|
Press ESCAPE to quit the application.
|
59,572 |
def run_loop ( self ) : "keep rendering until the user says quit" self . running = True event = SDL_Event ( ) try : while self . running : while SDL_PollEvent ( ctypes . byref ( event ) ) != 0 : f = self . _sdl_event_handlers . get ( event . type ) if f is not None : f ( event ) self . render_scene ( ) except SdlAppQuit as e : pass
|
keep rendering until the user says quit
|
59,573 |
def scale(self, x, y=None, z=None):
    """Scale the first three rows of this 4x4 matrix in place.

    Uniform scale when only the first argument is given; returns self.
    """
    y = x if y is None else y
    z = x if z is None else z
    for axis, factor in enumerate((x, y, z)):
        for col in range(4):
            self[axis, col] *= factor
    return self
|
Uniform scale if only sx argument is specified
|
59,574 |
def _check_devices ( self ) : "Enumerate OpenVR tracked devices and check whether any need to be initialized" for i in range ( 1 , len ( self . poses ) ) : pose = self . poses [ i ] if not pose . bDeviceIsConnected : continue if not pose . bPoseIsValid : continue if self . show_controllers_only : device_class = openvr . VRSystem ( ) . getTrackedDeviceClass ( i ) if not device_class == openvr . TrackedDeviceClass_Controller : continue model_name = openvr . VRSystem ( ) . getStringTrackedDeviceProperty ( i , openvr . Prop_RenderModelName_String ) if model_name not in self . meshes : self . meshes [ model_name ] = TrackedDeviceMesh ( model_name )
|
Enumerate OpenVR tracked devices and check whether any need to be initialized
|
59,575 |
def getGenericInterface(interfaceVersion):
    """Returns the interface of the specified version.

    Must be called after VR_Init; the returned pointer is valid until
    VR_Shutdown is called.
    """
    error = EVRInitError()
    pointer = _openvr.VR_GetGenericInterface(interfaceVersion, byref(error))
    _checkInitError(error.value)
    return pointer
|
Returns the interface of the specified version . This method must be called after VR_Init . The pointer returned is valid until VR_Shutdown is called .
|
59,576 |
def getRecommendedRenderTargetSize(self):
    """Suggested (width, height) for the intermediate render target that the
    distortion pulls from."""
    width = c_uint32()
    height = c_uint32()
    self.function_table.getRecommendedRenderTargetSize(byref(width), byref(height))
    return width.value, height.value
|
Suggested size for the intermediate render target that the distortion pulls from .
|
59,577 |
def getProjectionMatrix(self, eEye, fNearZ, fFarZ):
    """The projection matrix for the specified eye."""
    return self.function_table.getProjectionMatrix(eEye, fNearZ, fFarZ)
|
The projection matrix for the specified eye
|
59,578 |
def getProjectionRaw(self, eEye):
    """The raw projection components (left, right, top, bottom) for building a
    custom projection matrix."""
    left = c_float()
    right = c_float()
    top = c_float()
    bottom = c_float()
    self.function_table.getProjectionRaw(eEye, byref(left), byref(right), byref(top), byref(bottom))
    return left.value, right.value, top.value, bottom.value
|
The components necessary to build your own projection matrix in case your application is doing something fancy like infinite Z
|
59,579 |
def computeDistortion(self, eEye, fU, fV):
    """Distortion result for the given eye and input UVs (0,0 = upper left,
    1,1 = lower right of the eye viewport). Returns (success, coordinates)."""
    coordinates = DistortionCoordinates_t()
    success = self.function_table.computeDistortion(eEye, fU, fV, byref(coordinates))
    return success, coordinates
|
Gets the result of the distortion function for the specified eye and input UVs . UVs go from 0 0 in the upper left of that eye s viewport and 1 1 in the lower right of that eye s viewport . Returns true for success . Otherwise returns false and distortion coordinates are not suitable .
|
59,580 |
def getTimeSinceLastVsync(self):
    """Seconds since the last recorded vsync event plus the frame counter;
    returns (success, seconds, frame_counter), zeros when unavailable."""
    seconds = c_float()
    frame_counter = c_uint64()
    success = self.function_table.getTimeSinceLastVsync(byref(seconds), byref(frame_counter))
    return success, seconds.value, frame_counter.value
|
Returns the number of elapsed seconds since the last recorded vsync event . This will come from a vsync timer event in the timer if possible or from the application - reported time if that is not available . If no vsync times are available the function will return zero for vsync time and frame counter and return false from the method .
|
59,581 |
def getTrackedDeviceActivityLevel(self, unDeviceId):
    """Returns the level of activity on the device."""
    return self.function_table.getTrackedDeviceActivityLevel(unDeviceId)
|
Returns the level of activity on the device .
|
59,582 |
def applyTransform(self):
    """Apply a transform to a pose (transforms all pose components, including
    velocity and angular velocity).

    NOTE(review): all three structs are freshly zero-initialized here and the
    method accepts no caller-supplied pose/transform — confirm this binding
    signature matches the intended C API usage.
    """
    output_pose = TrackedDevicePose_t()
    input_pose = TrackedDevicePose_t()
    transform = HmdMatrix34_t()
    self.function_table.applyTransform(byref(output_pose), byref(input_pose), byref(transform))
    return output_pose, input_pose, transform
|
Convenience utility to apply the specified transform to the specified pose . This properly transforms all pose components including velocity and angular velocity
|
59,583 |
def getTrackedDeviceIndexForControllerRole(self, unDeviceType):
    """Device index associated with a specific role (e.g. left or right hand).
    Deprecated in favor of the IVRInput system."""
    return self.function_table.getTrackedDeviceIndexForControllerRole(unDeviceType)
|
Returns the device index associated with a specific role for example the left hand or the right hand . This function is deprecated in favor of the new IVRInput system .
|
59,584 |
def getControllerRoleForTrackedDeviceIndex(self, unDeviceIndex):
    """Controller type associated with a device index.
    Deprecated in favor of the IVRInput system."""
    return self.function_table.getControllerRoleForTrackedDeviceIndex(unDeviceIndex)
|
Returns the controller type associated with a device index . This function is deprecated in favor of the new IVRInput system .
|
59,585 |
def isTrackedDeviceConnected(self, unDeviceIndex):
    """True if there is a device connected in this slot."""
    return self.function_table.isTrackedDeviceConnected(unDeviceIndex)
|
Returns true if there is a device connected in this slot .
|
59,586 |
def getBoolTrackedDeviceProperty(self, unDeviceIndex, prop):
    """Bool property for a device; returns (value, error). The value is False
    when the device index is invalid or the property is not a bool type."""
    error = ETrackedPropertyError()
    value = self.function_table.getBoolTrackedDeviceProperty(unDeviceIndex, prop, byref(error))
    return value, error
|
Returns a bool property . If the device index is not valid or the property is not a bool type this function will return false .
|
59,587 |
def getArrayTrackedDeviceProperty(self, unDeviceIndex, prop, propType, pBuffer, unBufferSize):
    """Array property for a device; returns (byte_count, error). When
    unBufferSize is large enough and pBuffer is non-NULL, pBuffer is filled."""
    error = ETrackedPropertyError()
    byte_count = self.function_table.getArrayTrackedDeviceProperty(unDeviceIndex, prop, propType, pBuffer, unBufferSize, byref(error))
    return byte_count, error
|
Returns an array of one type of property . If the device index is not valid or the property is not a single value or an array of the specified type this function will return 0 . Otherwise it returns the number of bytes necessary to hold the array of properties . If unBufferSize is greater than the returned size and pBuffer is non - NULL pBuffer is filled with the contents of array of properties .
|
59,588 |
def getStringTrackedDeviceProperty ( self , unDeviceIndex , prop ) :
    """Return a string property as bytes (b"" when the index/property is invalid).

    :raises OpenVRError: when the second fetch reports a property error.
    """
    fn = self . function_table . getStringTrackedDeviceProperty
    pError = ETrackedPropertyError ( )
    # First call with a NULL buffer queries the required buffer length.
    unRequiredBufferLen = fn ( unDeviceIndex , prop , None , 0 , byref ( pError ) )
    if unRequiredBufferLen == 0 :
        return b""
    pchBuffer = ctypes . create_string_buffer ( unRequiredBufferLen )
    # Second call fills the buffer with the NUL-terminated string.
    fn ( unDeviceIndex , prop , pchBuffer , unRequiredBufferLen , byref ( pError ) )
    if pError . value != TrackedProp_Success :
        raise OpenVRError ( str ( pError ) )
    sResult = bytes ( pchBuffer . value )
    return sResult
|
Returns a string property . If the device index is not valid or the property is not a string type this function will return 0 . Otherwise it returns the length of the number of bytes necessary to hold this string including the trailing null . Strings will always fit in buffers of k_unMaxPropertyStringSize characters .
|
59,589 |
def getPropErrorNameFromEnum(self, error):
    """Name of the property-error enum value for any valid error code."""
    return self.function_table.getPropErrorNameFromEnum(error)
|
returns a string that corresponds with the specified property error . The string will be the name of the error enum value for all valid error codes
|
59,590 |
def pollNextEvent(self, pEvent):
    """Fill *pEvent* with the next queued event and return True, or return
    False when the queue is empty."""
    fetched = self.function_table.pollNextEvent(byref(pEvent), sizeof(VREvent_t))
    return fetched != 0
|
Returns true and fills the event with the next event on the queue if there is one . If there are no events this method returns false . uncbVREvent should be the size in bytes of the VREvent_t struct
|
59,591 |
def pollNextEventWithPose(self, eOrigin, uncbVREvent):
    """Next queued event plus the associated device pose (older than this
    call — not for rendering). Returns (success, event, pose)."""
    event = VREvent_t()
    pose = TrackedDevicePose_t()
    success = self.function_table.pollNextEventWithPose(eOrigin, byref(event), uncbVREvent, byref(pose))
    return success, event, pose
|
Returns true and fills the event with the next event on the queue if there is one . If there are no events this method returns false . Fills in the pose of the associated tracked device in the provided pose struct . This pose will always be older than the call to this function and should not be used to render the device . uncbVREvent should be the size in bytes of the VREvent_t struct
|
59,592 |
def getEventTypeNameFromEnum(self, eType):
    """Name of an EVREvent enum value."""
    return self.function_table.getEventTypeNameFromEnum(eType)
|
returns the name of an EVREvent enum value
|
59,593 |
def getControllerState(self, unControllerDeviceIndex, unControllerStateSize=sizeof(VRControllerState_t)):
    """Current controller state; returns (success, state). success is False
    for an invalid controller index. Deprecated in favor of IVRInput."""
    state = VRControllerState_t()
    success = self.function_table.getControllerState(unControllerDeviceIndex, byref(state), unControllerStateSize)
    return success, state
|
Fills the supplied struct with the current state of the controller . Returns false if the controller index is invalid . This function is deprecated in favor of the new IVRInput system .
|
59,594 |
def getControllerStateWithPose(self, eOrigin, unControllerDeviceIndex, unControllerStateSize=sizeof(VRControllerState_t)):
    """Controller state plus its pose at the most recent state update —
    use for precise button press/release poses. Returns (success, state, pose).
    Deprecated in favor of IVRInput."""
    state = VRControllerState_t()
    pose = TrackedDevicePose_t()
    success = self.function_table.getControllerStateWithPose(eOrigin, unControllerDeviceIndex, byref(state), unControllerStateSize, byref(pose))
    return success, state, pose
|
fills the supplied struct with the current state of the controller and the provided pose with the pose of the controller when the controller state was updated most recently . Use this form if you need a precise controller pose as input to your application when the user presses or releases a button . This function is deprecated in favor of the new IVRInput system .
|
59,595 |
def triggerHapticPulse(self, unControllerDeviceIndex, unAxisId, usDurationMicroSec):
    """Trigger one haptic pulse on a controller axis (min 5ms between pulses
    on the same controller/axis). Deprecated in favor of IVRInput."""
    self.function_table.triggerHapticPulse(unControllerDeviceIndex, unAxisId, usDurationMicroSec)
|
Trigger a single haptic pulse on a controller . After this call the application may not trigger another haptic pulse on this controller and axis combination for 5ms . This function is deprecated in favor of the new IVRInput system .
|
59,596 |
def getButtonIdNameFromEnum(self, eButtonId):
    """Name of an EVRButtonId enum value. Deprecated in favor of IVRInput."""
    return self.function_table.getButtonIdNameFromEnum(eButtonId)
|
returns the name of an EVRButtonId enum value . This function is deprecated in favor of the new IVRInput system .
|
59,597 |
def getControllerAxisTypeNameFromEnum(self, eAxisType):
    """Name of an EVRControllerAxisType enum value. Deprecated in favor of IVRInput."""
    return self.function_table.getControllerAxisTypeNameFromEnum(eAxisType)
|
returns the name of an EVRControllerAxisType enum value . This function is deprecated in favor of the new IVRInput system .
|
59,598 |
def driverDebugRequest(self, unDeviceIndex, pchRequest, pchResponseBuffer, unResponseBufferSize):
    """Send a debug request to the driver for a device; the response
    (truncated to the buffer, max 32k) is written to pchResponseBuffer and the
    returned value is the full response size including the trailing NUL."""
    return self.function_table.driverDebugRequest(unDeviceIndex, pchRequest, pchResponseBuffer, unResponseBufferSize)
|
Sends a request to the driver for the specified device and returns the response . The maximum response size is 32k but this method can be called with a smaller buffer . If the response exceeds the size of the buffer it is truncated . The size of the response including its terminating null is returned .
|
59,599 |
def getWindowBounds(self):
    """Size and position (x, y, width, height) that the window needs to be on
    the VR display."""
    x = c_int32()
    y = c_int32()
    width = c_uint32()
    height = c_uint32()
    self.function_table.getWindowBounds(byref(x), byref(y), byref(width), byref(height))
    return x.value, y.value, width.value, height.value
|
Size and position that the window needs to be on the VR display .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.