Columns: idx (int64, 0 to 63k) · question (string, lengths 61 to 4.03k) · target (string, lengths 6 to 1.23k)
1,600
def i2c_bus_timeout ( self ) : ret = api . py_aa_i2c_bus_timeout ( self . handle , 0 ) _raise_error_if_negative ( ret ) return ret
I2C bus lock timeout in ms .
1,601
def i2c_master_write ( self , i2c_address , data , flags = I2C_NO_FLAGS ) : data = array . array ( 'B' , data ) status , _ = api . py_aa_i2c_write_ext ( self . handle , i2c_address , flags , len ( data ) , data ) _raise_i2c_status_code_error_if_failure ( status )
Make an I2C write access .
1,602
def i2c_master_read ( self , addr , length , flags = I2C_NO_FLAGS ) : data = array . array ( 'B' , ( 0 , ) * length ) status , rx_len = api . py_aa_i2c_read_ext ( self . handle , addr , flags , length , data ) _raise_i2c_status_code_error_if_failure ( status ) del data [ rx_len : ] return bytes ( data )
Make an I2C read access .
1,603
def poll ( self , timeout = None ) : if timeout is None : timeout = - 1 ret = api . py_aa_async_poll ( self . handle , timeout ) _raise_error_if_negative ( ret ) events = list ( ) for event in ( POLL_I2C_READ , POLL_I2C_WRITE , POLL_SPI , POLL_I2C_MONITOR ) : if ret & event : events . append ( event ) return events
Wait for an event to occur .
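The value returned by py_aa_async_poll is a bitmask that the poll method above unpacks into a list of event constants. A minimal standalone sketch of that decoding; the POLL_* values below are assumed single-bit flags for illustration, not verified against the real API:

POLL_I2C_READ = 0x01     # assumed flag values, for illustration only
POLL_I2C_WRITE = 0x02
POLL_SPI = 0x04
POLL_I2C_MONITOR = 0x08

def decode_poll_events(ret):
    # Collect every flag whose bit is set in the integer returned by the poll call.
    return [event for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI, POLL_I2C_MONITOR)
            if ret & event]

print(decode_poll_events(0x05))  # [1, 4]: an I2C read and an SPI event are pending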
1,604
def enable_i2c_slave ( self , slave_address ) : ret = api . py_aa_i2c_slave_enable ( self . handle , slave_address , self . BUFFER_SIZE , self . BUFFER_SIZE ) _raise_error_if_negative ( ret )
Enable I2C slave mode .
1,605
def i2c_slave_read ( self ) : data = array . array ( 'B' , ( 0 , ) * self . BUFFER_SIZE ) status , addr , rx_len = api . py_aa_i2c_slave_read_ext ( self . handle , self . BUFFER_SIZE , data ) _raise_i2c_status_code_error_if_failure ( status ) if addr == 0x80 : addr = 0x00 del data [ rx_len : ] return ( addr , bytes ( data ) )
Read the bytes from an I2C slave reception .
1,606
def i2c_slave_last_transmit_size ( self ) : ret = api . py_aa_i2c_slave_write_stats ( self . handle ) _raise_error_if_negative ( ret ) return ret
Returns the number of bytes transmitted by the slave .
1,607
def i2c_monitor_read ( self ) : data = array . array ( 'H' , ( 0 , ) * self . BUFFER_SIZE ) ret = api . py_aa_i2c_monitor_read ( self . handle , self . BUFFER_SIZE , data ) _raise_error_if_negative ( ret ) del data [ ret : ] return data . tolist ( )
Retrieve any data fetched by the monitor.
1,608
def spi_bitrate ( self ) : ret = api . py_aa_spi_bitrate ( self . handle , 0 ) _raise_error_if_negative ( ret ) return ret
SPI bitrate in kHz . Not every bitrate is supported by the host adapter . Therefore the actual bitrate may be less than the value which is set . The slowest bitrate supported is 125kHz . Any smaller value will be rounded up to 125kHz .
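A hedged usage sketch of the bitrate behaviour described above. The Aardvark device object and the existence of a property setter are assumptions based on the surrounding code, and the read-back value is only the expected rounding, not verified output:

dev = Aardvark()        # hypothetical device handle from this module
dev.spi_bitrate = 100   # request 100 kHz, below the supported minimum
print(dev.spi_bitrate)  # expected to read back 125, the slowest supported rate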
1,609
def spi_configure ( self , polarity , phase , bitorder ) : ret = api . py_aa_spi_configure ( self . handle , polarity , phase , bitorder ) _raise_error_if_negative ( ret )
Configure the SPI interface .
1,610
def spi_configure_mode ( self , spi_mode ) : if spi_mode == SPI_MODE_0 : self . spi_configure ( SPI_POL_RISING_FALLING , SPI_PHASE_SAMPLE_SETUP , SPI_BITORDER_MSB ) elif spi_mode == SPI_MODE_3 : self . spi_configure ( SPI_POL_FALLING_RISING , SPI_PHASE_SETUP_SAMPLE , SPI_BITORDER_MSB ) else : raise RuntimeError ( 'SPI Mode not supported' )
Configure the SPI interface using the well-known SPI modes.
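For reference, a sketch of how the mode shortcut above maps onto the explicit polarity/phase call; the constants are the ones used in the function body and the device object is hypothetical:

dev = Aardvark()                    # hypothetical device handle
dev.spi_configure_mode(SPI_MODE_0)  # equivalent to the explicit call below
dev.spi_configure(SPI_POL_RISING_FALLING, SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)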
1,611
def spi_write ( self , data ) : data_out = array . array ( 'B' , data ) data_in = array . array ( 'B' , ( 0 , ) * len ( data_out ) ) ret = api . py_aa_spi_write ( self . handle , len ( data_out ) , data_out , len ( data_in ) , data_in ) _raise_error_if_negative ( ret ) return bytes ( data_in )
Write a stream of bytes to a SPI device .
1,612
def spi_ss_polarity ( self , polarity ) : ret = api . py_aa_spi_master_ss_polarity ( self . handle , polarity ) _raise_error_if_negative ( ret )
Change the output polarity on the SS line.
1,613
def edit_form ( self , obj ) : form = super ( OAISetModelView , self ) . edit_form ( obj ) del form . spec return form
Customize edit form .
1,614
def _schema_from_verb ( verb , partial = False ) : from . verbs import Verbs return getattr ( Verbs , verb ) ( partial = partial )
Return an instance of schema for given verb .
1,615
def serialize ( pagination , ** kwargs ) : if not pagination . has_next : return token_builder = URLSafeTimedSerializer ( current_app . config [ 'SECRET_KEY' ] , salt = kwargs [ 'verb' ] , ) schema = _schema_from_verb ( kwargs [ 'verb' ] , partial = False ) data = dict ( seed = random . random ( ) , page = pagination . next_num , kwargs = schema . dump ( kwargs ) . data ) scroll_id = getattr ( pagination , '_scroll_id' , None ) if scroll_id : data [ 'scroll_id' ] = scroll_id return token_builder . dumps ( data )
Serialize the resumption token.
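The token built above is an itsdangerous URLSafeTimedSerializer payload. A minimal round trip with a placeholder secret and salt, showing the shape of the data that _deserialize later reads back:

from itsdangerous import URLSafeTimedSerializer

builder = URLSafeTimedSerializer('placeholder-secret', salt='ListRecords')
token = builder.dumps({'seed': 0.42, 'page': 2, 'kwargs': {'metadataPrefix': 'oai_dc'}})
print(builder.loads(token, max_age=3600))
# {'seed': 0.42, 'page': 2, 'kwargs': {'metadataPrefix': 'oai_dc'}}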
1,616
def _deserialize ( self , value , attr , data ) : token_builder = URLSafeTimedSerializer ( current_app . config [ 'SECRET_KEY' ] , salt = data [ 'verb' ] , ) result = token_builder . loads ( value , max_age = current_app . config [ 'OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME' ] ) result [ 'token' ] = value result [ 'kwargs' ] = self . root . load ( result [ 'kwargs' ] , partial = True ) . data return result
Deserialize the resumption token.
1,617
def load ( self , data , many = None , partial = None ) : result = super ( ResumptionTokenSchema , self ) . load ( data , many = many , partial = partial ) result . data . update ( result . data . get ( 'resumptionToken' , { } ) . get ( 'kwargs' , { } ) ) return result
Deserialize a data structure to an object .
1,618
def make_request_validator ( request ) : verb = request . values . get ( 'verb' , '' , type = str ) resumption_token = request . values . get ( 'resumptionToken' , None ) schema = Verbs if resumption_token is None else ResumptionVerbs return getattr ( schema , verb , OAISchema ) ( partial = False )
Validate arguments in the incoming request.
1,619
def from_iso_permissive ( datestring , use_dateutil = True ) : dateutil_available = False try : from dateutil import parser dateutil_available = True except ImportError : dateutil_available = False import datetime if dateutil_available and use_dateutil : return parser . parse ( datestring ) else : return datetime . datetime . strptime ( datestring [ : 19 ] , '%Y-%m-%dT%H:%M:%S' )
Parse an ISO8601 - formatted datetime and return a datetime object .
1,620
def validate ( self , data ) : if 'verb' in data and data [ 'verb' ] != self . __class__ . __name__ : raise ValidationError ( 'This is not a valid OAI-PMH verb:{0}' . format ( data [ 'verb' ] ) , field_names = [ 'verb' ] , ) if 'from_' in data and 'until' in data and data [ 'from_' ] > data [ 'until' ] : raise ValidationError ( 'Date "from" must be before "until".' ) extra = set ( request . values . keys ( ) ) - set ( [ f . load_from or f . name for f in self . fields . values ( ) ] ) if extra : raise ValidationError ( 'You have passed too many arguments.' )
Check range between dates under keys from_ and until .
1,621
def sets ( self ) : if self . cache : return self . cache . get ( self . app . config [ 'OAISERVER_CACHE_KEY' ] )
Get list of sets .
1,622
def sets ( self , values ) : if self . cache : self . cache . set ( self . app . config [ 'OAISERVER_CACHE_KEY' ] , values )
Set list of sets .
1,623
def register_signals_oaiset ( self ) : from . models import OAISet from . receivers import after_insert_oai_set , after_update_oai_set , after_delete_oai_set listen ( OAISet , 'after_insert' , after_insert_oai_set ) listen ( OAISet , 'after_update' , after_update_oai_set ) listen ( OAISet , 'after_delete' , after_delete_oai_set )
Register OAISet signals to update records .
1,624
def unregister_signals_oaiset ( self ) : from . models import OAISet from . receivers import after_insert_oai_set , after_update_oai_set , after_delete_oai_set if contains ( OAISet , 'after_insert' , after_insert_oai_set ) : remove ( OAISet , 'after_insert' , after_insert_oai_set ) remove ( OAISet , 'after_update' , after_update_oai_set ) remove ( OAISet , 'after_delete' , after_delete_oai_set )
Unregister OAISet signals.
1,625
def extract_params ( params ) : values = [ ] if isinstance ( params , dict ) : for key , value in params . items ( ) : values . extend ( extract_params ( value ) ) elif isinstance ( params , list ) : for value in params : values . extend ( extract_params ( value ) ) else : values . append ( params ) return values
Recursively extract the values of a set of parameters, descending into nested dictionaries and lists.
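A self-contained restatement of the recursion above, to show the flattening order on a small nested structure:

def extract_params(params):
    values = []
    if isinstance(params, dict):
        for value in params.values():        # recurse into nested dictionaries
            values.extend(extract_params(value))
    elif isinstance(params, list):
        for value in params:                 # ...and into lists
            values.extend(extract_params(value))
    else:
        values.append(params)                # leaf value
    return values

print(extract_params({'email': 'a@example.com', 'vars': {'ids': [1, 2]}}))
# ['a@example.com', 1, 2]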
1,626
def get_list ( self , list_name , options = None ) : options = options or { } data = { 'list' : list_name } data . update ( options ) return self . api_get ( 'list' , data )
Get detailed metadata information about a list .
1,627
def import_contacts ( self , email , password , include_name = False ) : data = { 'email' : email , 'password' : password } if include_name : data [ 'names' ] = 1 return self . api_post ( 'contacts' , data )
Fetch email contacts from a user's address book on one of the major email providers. Currently supports AOL, Gmail, Hotmail, and Yahoo! Mail.
1,628
def push_content ( self , title , url , images = None , date = None , expire_date = None , description = None , location = None , price = None , tags = None , author = None , site_name = None , spider = None , vars = None ) : vars = vars or { } data = { 'title' : title , 'url' : url } if images is not None : data [ 'images' ] = images if date is not None : data [ 'date' ] = date if expire_date is not None : data [ 'expire_date' ] = expire_date if location is not None : data [ 'location' ] = location if price is not None : data [ 'price' ] = price if description is not None : data [ 'description' ] = description if site_name is not None : data [ 'site_name' ] = site_name if author is not None : data [ 'author' ] = author if spider : data [ 'spider' ] = 1 if tags is not None : data [ 'tags' ] = "," . join ( tags ) if isinstance ( tags , list ) else tags if len ( vars ) > 0 : data [ 'vars' ] = vars . copy ( ) return self . api_post ( 'content' , data )
Push a new piece of content to Sailthru .
1,629
def delete_alert ( self , email , alert_id ) : data = { 'email' : email , 'alert_id' : alert_id } return self . api_delete ( 'alert' , data )
Delete a user alert.
1,630
def get_purchase ( self , purchase_id , purchase_key = 'sid' ) : data = { 'purchase_id' : purchase_id , 'purchase_key' : purchase_key } return self . api_get ( 'purchase' , data )
Retrieve information about a purchase using the system's unique ID or a client's ID.
1,631
def receive_verify_post ( self , post_params ) : if isinstance ( post_params , dict ) : required_params = [ 'action' , 'email' , 'send_id' , 'sig' ] if not self . check_for_valid_postback_actions ( required_params , post_params ) : return False else : return False if post_params [ 'action' ] != 'verify' : return False sig = post_params [ 'sig' ] post_params = post_params . copy ( ) del post_params [ 'sig' ] if sig != get_signature_hash ( post_params , self . secret ) : return False send_response = self . get_send ( post_params [ 'send_id' ] ) try : send_body = send_response . get_body ( ) send_json = json . loads ( send_body ) if 'email' not in send_body : return False if send_json [ 'email' ] != post_params [ 'email' ] : return False except ValueError : return False return True
Return True if the incoming request is an authenticated verify postback.
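A simplified sketch of the signature check performed above. The real get_signature_hash in the Sailthru client may combine the parameters differently; this only illustrates the pattern of removing 'sig', recomputing a hash over the remaining values plus the shared secret, and comparing:

import hashlib

def example_signature_hash(params, secret):
    # Illustrative only: hash the secret plus the sorted parameter values.
    values = ''.join(sorted(str(v) for v in params.values()))
    return hashlib.md5((secret + values).encode('utf-8')).hexdigest()

def looks_authentic(post_params, secret):
    params = dict(post_params)
    sig = params.pop('sig', None)
    return sig == example_signature_hash(params, secret)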
1,632
def receive_hardbounce_post ( self , post_params ) : if isinstance ( post_params , dict ) : required_params = [ 'action' , 'email' , 'sig' ] if not self . check_for_valid_postback_actions ( required_params , post_params ) : return False else : return False if post_params [ 'action' ] != 'hardbounce' : return False signature = post_params [ 'sig' ] post_params = post_params . copy ( ) del post_params [ 'sig' ] if signature != get_signature_hash ( post_params , self . secret ) : return False if 'send_id' in post_params : send_id = post_params [ 'send_id' ] send_response = self . get_send ( send_id ) if not send_response . is_ok ( ) : return False send_obj = send_response . get_body ( ) if not send_obj or 'email' not in send_obj : return False if 'blast_id' in post_params : blast_id = post_params [ 'blast_id' ] blast_response = self . get_blast ( blast_id ) if not blast_response . is_ok ( ) : return False blast_obj = blast_response . get_body ( ) if not blast_obj : return False return True
Return True if the incoming request is an authenticated hardbounce postback.
1,633
def check_for_valid_postback_actions ( self , required_keys , post_params ) : for key in required_keys : if key not in post_params : return False return True
Check whether post_params contains all the required keys.
1,634
def api_get ( self , action , data , headers = None ) : return self . _api_request ( action , data , 'GET' , headers )
Perform an HTTP GET request using the shared - secret auth hash .
1,635
def api_post ( self , action , data , binary_data_param = None ) : binary_data_param = binary_data_param or [ ] if binary_data_param : return self . api_post_multipart ( action , data , binary_data_param ) else : return self . _api_request ( action , data , 'POST' )
Perform an HTTP POST request using the shared - secret auth hash .
1,636
def api_post_multipart ( self , action , data , binary_data_param ) : binary_data = { } data = data . copy ( ) try : file_handles = [ ] for param in binary_data_param : if param in data : binary_data [ param ] = file_handle = open ( data [ param ] , 'r' ) file_handles . append ( file_handle ) del data [ param ] json_payload = self . _prepare_json_payload ( data ) return self . _http_request ( action , json_payload , "POST" , binary_data ) finally : for file_handle in file_handles : file_handle . close ( )
Perform an HTTP Multipart POST request using the shared - secret auth hash .
1,637
def _api_request ( self , action , data , request_type , headers = None ) : if 'file' in data : file_data = { 'file' : open ( data [ 'file' ] , 'rb' ) } else : file_data = None return self . _http_request ( action , self . _prepare_json_payload ( data ) , request_type , file_data , headers )
Make a request to the Sailthru API with the given data, adding the API key, format, and signature hash.
1,638
def validation_error ( exception ) : messages = getattr ( exception , 'messages' , None ) if messages is None : messages = getattr ( exception , 'data' , { 'messages' : None } ) [ 'messages' ] def extract_errors ( ) : if isinstance ( messages , dict ) : for field , message in messages . items ( ) : if field == 'verb' : yield 'badVerb' , '\n' . join ( message ) else : yield 'badArgument' , '\n' . join ( message ) else : for field in exception . field_names : if field == 'verb' : yield 'badVerb' , '\n' . join ( messages ) else : yield 'badArgument' , '\n' . join ( messages ) if not exception . field_names : yield 'badArgument' , '\n' . join ( messages ) return ( etree . tostring ( xml . error ( extract_errors ( ) ) ) , 422 , { 'Content-Type' : 'text/xml' } )
Return a formatted validation error response.
1,639
def response ( args ) : e_tree = getattr ( xml , args [ 'verb' ] . lower ( ) ) ( ** args ) response = make_response ( etree . tostring ( e_tree , pretty_print = True , xml_declaration = True , encoding = 'UTF-8' , ) ) response . headers [ 'Content-Type' ] = 'text/xml' return response
Response endpoint .
1,640
def _create_percolator_mapping ( index , doc_type ) : if ES_VERSION [ 0 ] >= 5 : current_search_client . indices . put_mapping ( index = index , doc_type = doc_type , body = PERCOLATOR_MAPPING , ignore = [ 400 , 404 ] )
Update mappings with the percolator field .
1,641
def _percolate_query ( index , doc_type , percolator_doc_type , document ) : if ES_VERSION [ 0 ] in ( 2 , 5 ) : results = current_search_client . percolate ( index = index , doc_type = doc_type , allow_no_indices = True , ignore_unavailable = True , body = { 'doc' : document } ) return results [ 'matches' ] elif ES_VERSION [ 0 ] == 6 : results = current_search_client . search ( index = index , doc_type = percolator_doc_type , allow_no_indices = True , ignore_unavailable = True , body = { 'query' : { 'percolate' : { 'field' : 'query' , 'document_type' : percolator_doc_type , 'document' : document , } } } ) return results [ 'hits' ] [ 'hits' ]
Get results for a percolate query .
1,642
def _new_percolator ( spec , search_pattern ) : if spec and search_pattern : query = query_string_parser ( search_pattern = search_pattern ) . to_dict ( ) for index in current_search . mappings . keys ( ) : percolator_doc_type = _get_percolator_doc_type ( index ) _create_percolator_mapping ( index , percolator_doc_type ) current_search_client . index ( index = index , doc_type = percolator_doc_type , id = 'oaiset-{}' . format ( spec ) , body = { 'query' : query } )
Create new percolator associated with the new set .
1,643
def _delete_percolator ( spec , search_pattern ) : if spec : for index in current_search . mappings . keys ( ) : percolator_doc_type = _get_percolator_doc_type ( index ) _create_percolator_mapping ( index , percolator_doc_type ) current_search_client . delete ( index = index , doc_type = percolator_doc_type , id = 'oaiset-{}' . format ( spec ) , ignore = [ 404 ] )
Delete the percolator associated with the given OAISet.
1,644
def _build_cache ( ) : sets = current_oaiserver . sets if sets is None : sets = current_oaiserver . sets = [ oaiset . spec for oaiset in OAISet . query . filter ( OAISet . search_pattern . is_ ( None ) ) . all ( ) ] return sets
Build sets cache .
1,645
def get_record_sets ( record ) : record_sets = set ( record . get ( '_oai' , { } ) . get ( 'sets' , [ ] ) ) for spec in _build_cache ( ) : if spec in record_sets : yield spec index , doc_type = RecordIndexer ( ) . record_to_index ( record ) document = record . dumps ( ) percolator_doc_type = _get_percolator_doc_type ( index ) _create_percolator_mapping ( index , percolator_doc_type ) results = _percolate_query ( index , doc_type , percolator_doc_type , document ) prefix = 'oaiset-' prefix_len = len ( prefix ) for match in results : set_name = match [ '_id' ] if set_name . startswith ( prefix ) : name = set_name [ prefix_len : ] yield name
Find matching sets .
1,646
def _records_commit ( record_ids ) : for record_id in record_ids : record = Record . get_record ( record_id ) record . commit ( )
Commit all records .
1,647
def update_affected_records ( spec = None , search_pattern = None ) : chunk_size = current_app . config [ 'OAISERVER_CELERY_TASK_CHUNK_SIZE' ] record_ids = get_affected_records ( spec = spec , search_pattern = search_pattern ) group ( update_records_sets . s ( list ( filter ( None , chunk ) ) ) for chunk in zip_longest ( * [ iter ( record_ids ) ] * chunk_size ) ) ( )
Update all records affected by an OAISet change.
1,648
def envelope ( ** kwargs ) : e_oaipmh = Element ( etree . QName ( NS_OAIPMH , 'OAI-PMH' ) , nsmap = NSMAP ) e_oaipmh . set ( etree . QName ( NS_XSI , 'schemaLocation' ) , '{0} {1}' . format ( NS_OAIPMH , NS_OAIPMH_XSD ) ) e_tree = ElementTree ( element = e_oaipmh ) if current_app . config [ 'OAISERVER_XSL_URL' ] : e_oaipmh . addprevious ( etree . ProcessingInstruction ( 'xml-stylesheet' , 'type="text/xsl" href="{0}"' . format ( current_app . config [ 'OAISERVER_XSL_URL' ] ) ) ) e_responseDate = SubElement ( e_oaipmh , etree . QName ( NS_OAIPMH , 'responseDate' ) ) e_responseDate . text = datetime_to_datestamp ( datetime . utcnow ( ) ) e_request = SubElement ( e_oaipmh , etree . QName ( NS_OAIPMH , 'request' ) ) for key , value in kwargs . items ( ) : if key == 'from_' or key == 'until' : value = datetime_to_datestamp ( value ) elif key == 'resumptionToken' : value = value [ 'token' ] e_request . set ( key , value ) e_request . text = url_for ( 'invenio_oaiserver.response' , _external = True ) return e_tree , e_oaipmh
Create OAI - PMH envelope for response .
1,649
def error ( errors ) : e_tree , e_oaipmh = envelope ( ) for code , message in errors : e_error = SubElement ( e_oaipmh , etree . QName ( NS_OAIPMH , 'error' ) ) e_error . set ( 'code' , code ) e_error . text = message return e_tree
Create error element .
1,650
def verb ( ** kwargs ) : e_tree , e_oaipmh = envelope ( ** kwargs ) e_element = SubElement ( e_oaipmh , etree . QName ( NS_OAIPMH , kwargs [ 'verb' ] ) ) return e_tree , e_element
Create OAI - PMH envelope for response with verb .
1,651
def resumption_token ( parent , pagination , ** kwargs ) : if pagination . page == 1 and not pagination . has_next : return token = serialize ( pagination , ** kwargs ) e_resumptionToken = SubElement ( parent , etree . QName ( NS_OAIPMH , 'resumptionToken' ) ) if pagination . total : expiration_date = datetime . utcnow ( ) + timedelta ( seconds = current_app . config [ 'OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME' ] ) e_resumptionToken . set ( 'expirationDate' , datetime_to_datestamp ( expiration_date ) ) e_resumptionToken . set ( 'cursor' , str ( ( pagination . page - 1 ) * pagination . per_page ) ) e_resumptionToken . set ( 'completeListSize' , str ( pagination . total ) ) if token : e_resumptionToken . text = token
Attach resumption token element to a parent .
1,652
def listsets ( ** kwargs ) : e_tree , e_listsets = verb ( ** kwargs ) page = kwargs . get ( 'resumptionToken' , { } ) . get ( 'page' , 1 ) size = current_app . config [ 'OAISERVER_PAGE_SIZE' ] oai_sets = OAISet . query . paginate ( page = page , per_page = size , error_out = False ) for oai_set in oai_sets . items : e_set = SubElement ( e_listsets , etree . QName ( NS_OAIPMH , 'set' ) ) e_setSpec = SubElement ( e_set , etree . QName ( NS_OAIPMH , 'setSpec' ) ) e_setSpec . text = oai_set . spec e_setName = SubElement ( e_set , etree . QName ( NS_OAIPMH , 'setName' ) ) e_setName . text = sanitize_unicode ( oai_set . name ) if oai_set . description : e_setDescription = SubElement ( e_set , etree . QName ( NS_OAIPMH , 'setDescription' ) ) e_dc = SubElement ( e_setDescription , etree . QName ( NS_OAIDC , 'dc' ) , nsmap = NSMAP_DESCRIPTION ) e_dc . set ( etree . QName ( NS_XSI , 'schemaLocation' ) , NS_OAIDC ) e_description = SubElement ( e_dc , etree . QName ( NS_DC , 'description' ) ) e_description . text = oai_set . description resumption_token ( e_listsets , oai_sets , ** kwargs ) return e_tree
Create OAI - PMH response for ListSets verb .
1,653
def listmetadataformats ( ** kwargs ) : cfg = current_app . config e_tree , e_listmetadataformats = verb ( ** kwargs ) if 'identifier' in kwargs : OAIIDProvider . get ( pid_value = kwargs [ 'identifier' ] ) for prefix , metadata in cfg . get ( 'OAISERVER_METADATA_FORMATS' , { } ) . items ( ) : e_metadataformat = SubElement ( e_listmetadataformats , etree . QName ( NS_OAIPMH , 'metadataFormat' ) ) e_metadataprefix = SubElement ( e_metadataformat , etree . QName ( NS_OAIPMH , 'metadataPrefix' ) ) e_metadataprefix . text = prefix e_schema = SubElement ( e_metadataformat , etree . QName ( NS_OAIPMH , 'schema' ) ) e_schema . text = metadata [ 'schema' ] e_metadataNamespace = SubElement ( e_metadataformat , etree . QName ( NS_OAIPMH , 'metadataNamespace' ) ) e_metadataNamespace . text = metadata [ 'namespace' ] return e_tree
Create OAI - PMH response for ListMetadataFormats verb .
1,654
def listidentifiers ( ** kwargs ) : e_tree , e_listidentifiers = verb ( ** kwargs ) result = get_records ( ** kwargs ) for record in result . items : pid = oaiid_fetcher ( record [ 'id' ] , record [ 'json' ] [ '_source' ] ) header ( e_listidentifiers , identifier = pid . pid_value , datestamp = record [ 'updated' ] , sets = record [ 'json' ] [ '_source' ] . get ( '_oai' , { } ) . get ( 'sets' , [ ] ) , ) resumption_token ( e_listidentifiers , result , ** kwargs ) return e_tree
Create OAI - PMH response for verb ListIdentifiers .
1,655
def listrecords ( ** kwargs ) : record_dumper = serializer ( kwargs [ 'metadataPrefix' ] ) e_tree , e_listrecords = verb ( ** kwargs ) result = get_records ( ** kwargs ) for record in result . items : pid = oaiid_fetcher ( record [ 'id' ] , record [ 'json' ] [ '_source' ] ) e_record = SubElement ( e_listrecords , etree . QName ( NS_OAIPMH , 'record' ) ) header ( e_record , identifier = pid . pid_value , datestamp = record [ 'updated' ] , sets = record [ 'json' ] [ '_source' ] . get ( '_oai' , { } ) . get ( 'sets' , [ ] ) , ) e_metadata = SubElement ( e_record , etree . QName ( NS_OAIPMH , 'metadata' ) ) e_metadata . append ( record_dumper ( pid , record [ 'json' ] ) ) resumption_token ( e_listrecords , result , ** kwargs ) return e_tree
Create OAI - PMH response for verb ListRecords .
1,656
def oaiid_fetcher ( record_uuid , data ) : pid_value = data . get ( '_oai' , { } ) . get ( 'id' ) if pid_value is None : raise PersistentIdentifierError ( ) return FetchedPID ( provider = OAIIDProvider , pid_type = OAIIDProvider . pid_type , pid_value = str ( pid_value ) , )
Fetch a record's identifier.
1,657
def validate_spec ( self , key , value ) : if self . spec and self . spec != value : raise OAISetSpecUpdateError ( "Updating spec is not allowed." ) return value
Forbid updates of the set identifier.
1,658
def add_record ( self , record ) : record . setdefault ( '_oai' , { } ) . setdefault ( 'sets' , [ ] ) assert not self . has_record ( record ) record [ '_oai' ] [ 'sets' ] . append ( self . spec )
Add a record to the OAISet .
1,659
def remove_record ( self , record ) : assert self . has_record ( record ) record [ '_oai' ] [ 'sets' ] = [ s for s in record [ '_oai' ] [ 'sets' ] if s != self . spec ]
Remove a record from the OAISet .
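The two helpers above only touch the record's '_oai' metadata. A plain-dict illustration of that bookkeeping, with 'physics' standing in for an OAISet spec:

record = {}
record.setdefault('_oai', {}).setdefault('sets', []).append('physics')
print(record)   # {'_oai': {'sets': ['physics']}}

record['_oai']['sets'] = [s for s in record['_oai']['sets'] if s != 'physics']
print(record)   # {'_oai': {'sets': []}}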
1,660
def oaiserver ( sets , records ) : from invenio_db import db from invenio_oaiserver . models import OAISet from invenio_records . api import Record with db . session . begin_nested ( ) : for i in range ( sets ) : db . session . add ( OAISet ( spec = 'test{0}' . format ( i ) , name = 'Test{0}' . format ( i ) , description = 'test desc {0}' . format ( i ) , search_pattern = 'title_statement.title:Test{0}' . format ( i ) , ) ) schema = { 'type' : 'object' , 'properties' : { 'title_statement' : { 'type' : 'object' , 'properties' : { 'title' : { 'type' : 'string' , } , } , } , 'field' : { 'type' : 'boolean' } , } , } with app . app_context ( ) : indexer = RecordIndexer ( ) with db . session . begin_nested ( ) : for i in range ( records ) : record_id = uuid . uuid4 ( ) data = { 'title_statement' : { 'title' : 'Test{0}' . format ( i ) } , '$schema' : schema , } recid_minter ( record_id , data ) oaiid_minter ( record_id , data ) record = Record . create ( data , id_ = record_id ) indexer . index ( record ) db . session . commit ( )
Initialize OAI - PMH server .
1,661
def serializer ( metadata_prefix ) : metadataFormats = current_app . config [ 'OAISERVER_METADATA_FORMATS' ] serializer_ = metadataFormats [ metadata_prefix ] [ 'serializer' ] if isinstance ( serializer_ , tuple ) : return partial ( import_string ( serializer_ [ 0 ] ) , ** serializer_ [ 1 ] ) return import_string ( serializer_ )
Return etree_dumper instances .
1,662
def dumps_etree ( pid , record , ** kwargs ) : from dojson . contrib . to_marc21 import to_marc21 from dojson . contrib . to_marc21 . utils import dumps_etree return dumps_etree ( to_marc21 . do ( record [ '_source' ] ) , ** kwargs )
Dump MARC21 compatible record .
1,663
def eprints_description ( metadataPolicy , dataPolicy , submissionPolicy = None , content = None ) : eprints = Element ( etree . QName ( NS_EPRINTS [ None ] , 'eprints' ) , nsmap = NS_EPRINTS ) eprints . set ( etree . QName ( ns [ 'xsi' ] , 'schemaLocation' ) , '{0} {1}' . format ( EPRINTS_SCHEMA_LOCATION , EPRINTS_SCHEMA_LOCATION_XSD ) ) if content : contentElement = etree . Element ( 'content' ) for key , value in content . items ( ) : contentElement . append ( E ( key , value ) ) eprints . append ( contentElement ) metadataPolicyElement = etree . Element ( 'metadataPolicy' ) for key , value in metadataPolicy . items ( ) : metadataPolicyElement . append ( E ( key , value ) ) eprints . append ( metadataPolicyElement ) dataPolicyElement = etree . Element ( 'dataPolicy' ) for key , value in dataPolicy . items ( ) : dataPolicyElement . append ( E ( key , value ) ) eprints . append ( dataPolicyElement ) if submissionPolicy : submissionPolicyElement = etree . Element ( 'submissionPolicy' ) for key , value in submissionPolicy . items ( ) : submissionPolicyElement . append ( E ( key , value ) ) eprints . append ( submissionPolicyElement ) return etree . tostring ( eprints , pretty_print = True )
Generate the eprints element for the identify response .
1,664
def oai_identifier_description ( scheme , repositoryIdentifier , delimiter , sampleIdentifier ) : oai_identifier = Element ( etree . QName ( NS_OAI_IDENTIFIER [ None ] , 'oai_identifier' ) , nsmap = NS_OAI_IDENTIFIER ) oai_identifier . set ( etree . QName ( ns [ 'xsi' ] , 'schemaLocation' ) , '{0} {1}' . format ( OAI_IDENTIFIER_SCHEMA_LOCATION , OAI_IDENTIFIER_SCHEMA_LOCATION_XSD ) ) oai_identifier . append ( E ( 'scheme' , scheme ) ) oai_identifier . append ( E ( 'repositoryIdentifier' , repositoryIdentifier ) ) oai_identifier . append ( E ( 'delimiter' , delimiter ) ) oai_identifier . append ( E ( 'sampleIdentifier' , sampleIdentifier ) ) return etree . tostring ( oai_identifier , pretty_print = True )
Generate the oai - identifier element for the identify response .
1,665
def friends_description ( baseURLs ) : friends = Element ( etree . QName ( NS_FRIENDS [ None ] , 'friends' ) , nsmap = NS_FRIENDS ) friends . set ( etree . QName ( ns [ 'xsi' ] , 'schemaLocation' ) , '{0} {1}' . format ( FRIENDS_SCHEMA_LOCATION , FRIENDS_SCHEMA_LOCATION_XSD ) ) for baseURL in baseURLs : friends . append ( E ( 'baseURL' , baseURL ) ) return etree . tostring ( friends , pretty_print = True )
Generate the friends element for the identify response .
1,666
def after_insert_oai_set ( mapper , connection , target ) : _new_percolator ( spec = target . spec , search_pattern = target . search_pattern ) sleep ( 2 ) update_affected_records . delay ( search_pattern = target . search_pattern )
Update records on OAISet insertion .
1,667
def after_update_oai_set ( mapper , connection , target ) : _delete_percolator ( spec = target . spec , search_pattern = target . search_pattern ) _new_percolator ( spec = target . spec , search_pattern = target . search_pattern ) sleep ( 2 ) update_affected_records . delay ( spec = target . spec , search_pattern = target . search_pattern )
Update records on OAISet update .
1,668
def after_delete_oai_set ( mapper , connection , target ) : _delete_percolator ( spec = target . spec , search_pattern = target . search_pattern ) sleep ( 2 ) update_affected_records . delay ( spec = target . spec )
Update records on OAISet deletion .
1,669
def query_string_parser ( search_pattern ) : if not hasattr ( current_oaiserver , 'query_parser' ) : query_parser = current_app . config [ 'OAISERVER_QUERY_PARSER' ] if isinstance ( query_parser , six . string_types ) : query_parser = import_string ( query_parser ) current_oaiserver . query_parser = query_parser return current_oaiserver . query_parser ( 'query_string' , query = search_pattern )
Elasticsearch query string parser .
1,670
def get_affected_records ( spec = None , search_pattern = None ) : if spec is None and search_pattern is None : return queries = [ ] if spec is not None : queries . append ( Q ( 'match' , ** { '_oai.sets' : spec } ) ) if search_pattern : queries . append ( query_string_parser ( search_pattern = search_pattern ) ) search = OAIServerSearch ( index = current_app . config [ 'OAISERVER_RECORD_INDEX' ] , ) . query ( Q ( 'bool' , should = queries ) ) for result in search . scan ( ) : yield result . meta . id
Get list of affected records .
1,671
def get_records ( ** kwargs ) : page_ = kwargs . get ( 'resumptionToken' , { } ) . get ( 'page' , 1 ) size_ = current_app . config [ 'OAISERVER_PAGE_SIZE' ] scroll = current_app . config [ 'OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME' ] scroll_id = kwargs . get ( 'resumptionToken' , { } ) . get ( 'scroll_id' ) if scroll_id is None : search = OAIServerSearch ( index = current_app . config [ 'OAISERVER_RECORD_INDEX' ] , ) . params ( scroll = '{0}s' . format ( scroll ) , ) . extra ( version = True , ) [ ( page_ - 1 ) * size_ : page_ * size_ ] if 'set' in kwargs : search = search . query ( 'match' , ** { '_oai.sets' : kwargs [ 'set' ] } ) time_range = { } if 'from_' in kwargs : time_range [ 'gte' ] = kwargs [ 'from_' ] if 'until' in kwargs : time_range [ 'lte' ] = kwargs [ 'until' ] if time_range : search = search . filter ( 'range' , ** { '_updated' : time_range } ) response = search . execute ( ) . to_dict ( ) else : response = current_search_client . scroll ( scroll_id = scroll_id , scroll = '{0}s' . format ( scroll ) , ) class Pagination ( object ) : page = page_ per_page = size_ def __init__ ( self , response ) : self . response = response self . total = response [ 'hits' ] [ 'total' ] self . _scroll_id = response . get ( '_scroll_id' ) if not self . has_next : current_search_client . clear_scroll ( scroll_id = self . _scroll_id ) self . _scroll_id = None @ cached_property def has_next ( self ) : return self . page * self . per_page <= self . total @ cached_property def next_num ( self ) : return self . page + 1 if self . has_next else None @ property def items ( self ) : from datetime import datetime for result in self . response [ 'hits' ] [ 'hits' ] : if '_oai' in result [ '_source' ] : yield { 'id' : result [ '_id' ] , 'json' : result , 'updated' : datetime . strptime ( result [ '_source' ] [ '_updated' ] [ : 19 ] , '%Y-%m-%dT%H:%M:%S' ) , } return Pagination ( response )
Get records paginated .
1,672
def get_file_path ( filename , local = True , relative_to_module = None , my_dir = my_dir ) : if relative_to_module is not None : my_dir = os . path . dirname ( relative_to_module . __file__ ) user_path = result = filename if local : user_path = os . path . expanduser ( filename ) result = os . path . abspath ( user_path ) if os . path . exists ( result ) : return result result = os . path . join ( my_dir , filename ) assert os . path . exists ( result ) , "no such file " + repr ( ( filename , result , user_path ) ) return result
Look for an existing path matching filename. Try to resolve relative to the module location if the path cannot be found using normal resolution.
1,673
def load_if_not_loaded ( widget , filenames , verbose = False , delay = 0.1 , force = False , local = True , evaluator = None ) : if evaluator is None : evaluator = EVALUATOR for filename in filenames : loaded = False if force or not filename in LOADED_JAVASCRIPT : js_text = get_text_from_file_name ( filename , local ) if verbose : print ( "loading javascript file" , filename , "with" , evaluator ) evaluator ( widget , js_text ) LOADED_JAVASCRIPT . add ( filename ) loaded = True else : if verbose : print ( "not reloading javascript file" , filename ) if loaded and delay > 0 : if verbose : print ( "delaying to allow JS interpreter to sync." ) time . sleep ( delay )
Load a JavaScript file into the Jupyter notebook context unless it was already loaded.
1,674
def _set ( self , name , value ) : "Proxy to set a property of the widget element." return self . widget ( self . widget_element . _set ( name , value ) )
Proxy to set a property of the widget element .
1,675
def base_url ( root ) : for attr , value in root . attrib . items ( ) : if attr . endswith ( 'base' ) and 'http' in value : return value return None
Determine the base url for a root element .
1,676
def clean_ns ( tag ) : if '}' in tag : split = tag . split ( '}' ) return split [ 0 ] . strip ( '{' ) , split [ - 1 ] return '' , tag
Return a tag and its namespace separately .
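Two quick calls showing what clean_ns returns for a namespaced and a plain tag:

print(clean_ns('{http://www.w3.org/2005/Atom}entry'))
# ('http://www.w3.org/2005/Atom', 'entry')
print(clean_ns('title'))
# ('', 'title')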
1,677
def xpath ( node , query , namespaces = { } ) : if namespaces and 'None' not in namespaces : return node . xpath ( query , namespaces = namespaces ) return node . xpath ( query )
A safe xpath that only uses namespaces if available .
1,678
def innertext ( node ) : if not len ( node ) : return node . text return ( node . text or '' ) + '' . join ( [ etree . tostring ( c ) for c in node ] ) + ( node . tail or '' )
Return the inner text of a node. If a node has no sub-elements this is just node.text. Otherwise it is node.text + sub-element text + node.tail.
1,679
def parse ( document , clean_html = True , unix_timestamp = False , encoding = None ) : if isinstance ( clean_html , bool ) : cleaner = default_cleaner if clean_html else fake_cleaner else : cleaner = clean_html result = feedparser . FeedParserDict ( ) result [ 'feed' ] = feedparser . FeedParserDict ( ) result [ 'entries' ] = [ ] result [ 'bozo' ] = 0 try : parser = SpeedParser ( document , cleaner , unix_timestamp , encoding ) parser . update ( result ) except Exception as e : if isinstance ( e , UnicodeDecodeError ) and encoding is True : encoding = chardet . detect ( document ) [ 'encoding' ] document = document . decode ( encoding , 'replace' ) . encode ( 'utf-8' ) return parse ( document , clean_html , unix_timestamp , encoding ) import traceback result [ 'bozo' ] = 1 result [ 'bozo_exception' ] = e result [ 'bozo_tb' ] = traceback . format_exc ( ) return result
Parse a document and return a feedparser dictionary with attribute-style key access. If clean_html is False, the HTML in the feed is not cleaned; if True, a sane version of lxml.html.clean.Cleaner is used; if a Cleaner object is passed, that cleaner is used. If unix_timestamp is True, the date information is a numerical Unix timestamp rather than a struct_time. If encoding is provided, the encoding of the document is manually set to it.
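A hedged usage sketch of parse(); the file name is a placeholder and the entry fields printed depend on the feed:

with open('feed.xml', 'rb') as fh:
    document = fh.read()

result = parse(document, clean_html=True, unix_timestamp=True)
if result['bozo']:
    print('parse failed:', result['bozo_exception'])
else:
    for entry in result['entries']:
        print(entry.get('title'), entry.get('link'))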
1,680
def changed_path ( self ) : "Find any changed path and update all changed modification times." result = None for path in self . paths_to_modification_times : lastmod = self . paths_to_modification_times [ path ] mod = os . path . getmtime ( path ) if mod > lastmod : result = "Watch file has been modified: " + repr ( path ) self . paths_to_modification_times [ path ] = mod for folder in self . folder_paths : for filename in os . listdir ( folder ) : subpath = os . path . join ( folder , filename ) if os . path . isfile ( subpath ) and subpath not in self . paths_to_modification_times : result = "New file in watched folder: " + repr ( subpath ) self . add ( subpath ) if self . check_python_modules : self . add_all_modules ( ) if self . check_javascript : self . watch_javascript ( ) return result
Find any changed path and update all changed modification times .
1,681
def _parse_date_iso8601 ( dateString ) : m = None for _iso8601_match in _iso8601_matches : m = _iso8601_match ( dateString ) if m : break if not m : return if m . span ( ) == ( 0 , 0 ) : return params = m . groupdict ( ) ordinal = params . get ( 'ordinal' , 0 ) if ordinal : ordinal = int ( ordinal ) else : ordinal = 0 year = params . get ( 'year' , '--' ) if not year or year == '--' : year = time . gmtime ( ) [ 0 ] elif len ( year ) == 2 : year = 100 * int ( time . gmtime ( ) [ 0 ] / 100 ) + int ( year ) else : year = int ( year ) month = params . get ( 'month' , '-' ) if not month or month == '-' : if ordinal : month = 1 else : month = time . gmtime ( ) [ 1 ] month = int ( month ) day = params . get ( 'day' , 0 ) if not day : if ordinal : day = ordinal elif params . get ( 'century' , 0 ) or params . get ( 'year' , 0 ) or params . get ( 'month' , 0 ) : day = 1 else : day = time . gmtime ( ) [ 2 ] else : day = int ( day ) if 'century' in params : year = ( int ( params [ 'century' ] ) - 1 ) * 100 + 1 for field in [ 'hour' , 'minute' , 'second' , 'tzhour' , 'tzmin' ] : if not params . get ( field , None ) : params [ field ] = 0 hour = int ( params . get ( 'hour' , 0 ) ) minute = int ( params . get ( 'minute' , 0 ) ) second = int ( float ( params . get ( 'second' , 0 ) ) ) weekday = 0 daylight_savings_flag = - 1 tm = [ year , month , day , hour , minute , second , weekday , ordinal , daylight_savings_flag ] tz = params . get ( 'tz' ) if tz and tz != 'Z' : if tz [ 0 ] == '-' : tm [ 3 ] += int ( params . get ( 'tzhour' , 0 ) ) tm [ 4 ] += int ( params . get ( 'tzmin' , 0 ) ) elif tz [ 0 ] == '+' : tm [ 3 ] -= int ( params . get ( 'tzhour' , 0 ) ) tm [ 4 ] -= int ( params . get ( 'tzmin' , 0 ) ) else : return None return time . localtime ( time . mktime ( tuple ( tm ) ) )
Parse a variety of ISO - 8601 - compatible formats like 20040105
1,682
def _parse_date_onblog ( dateString ) : m = _korean_onblog_date_re . match ( dateString ) if not m : return w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % { 'year' : m . group ( 1 ) , 'month' : m . group ( 2 ) , 'day' : m . group ( 3 ) , 'hour' : m . group ( 4 ) , 'minute' : m . group ( 5 ) , 'second' : m . group ( 6 ) , 'zonediff' : '+09:00' } return _parse_date_w3dtf ( w3dtfdate )
Parse a string according to the OnBlog 8 - bit date format
1,683
def _parse_date_nate ( dateString ) : m = _korean_nate_date_re . match ( dateString ) if not m : return hour = int ( m . group ( 5 ) ) ampm = m . group ( 4 ) if ( ampm == _korean_pm ) : hour += 12 hour = str ( hour ) if len ( hour ) == 1 : hour = '0' + hour w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % { 'year' : m . group ( 1 ) , 'month' : m . group ( 2 ) , 'day' : m . group ( 3 ) , 'hour' : hour , 'minute' : m . group ( 6 ) , 'second' : m . group ( 7 ) , 'zonediff' : '+09:00' } return _parse_date_w3dtf ( w3dtfdate )
Parse a string according to the Nate 8 - bit date format
1,684
def _parse_date_greek ( dateString ) : m = _greek_date_format_re . match ( dateString ) if not m : return wday = _greek_wdays [ m . group ( 1 ) ] month = _greek_months [ m . group ( 3 ) ] rfc822date = '%(wday)s, %(day)s %(month)s %(year)s %(hour)s:%(minute)s:%(second)s %(zonediff)s' % { 'wday' : wday , 'day' : m . group ( 2 ) , 'month' : month , 'year' : m . group ( 4 ) , 'hour' : m . group ( 5 ) , 'minute' : m . group ( 6 ) , 'second' : m . group ( 7 ) , 'zonediff' : m . group ( 8 ) } return _parse_date_rfc822 ( rfc822date )
Parse a string according to a Greek 8 - bit date format .
1,685
def _parse_date_hungarian ( dateString ) : m = _hungarian_date_format_re . match ( dateString ) if not m or m . group ( 2 ) not in _hungarian_months : return None month = _hungarian_months [ m . group ( 2 ) ] day = m . group ( 3 ) if len ( day ) == 1 : day = '0' + day hour = m . group ( 4 ) if len ( hour ) == 1 : hour = '0' + hour w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s%(zonediff)s' % { 'year' : m . group ( 1 ) , 'month' : month , 'day' : day , 'hour' : hour , 'minute' : m . group ( 5 ) , 'zonediff' : m . group ( 6 ) } return _parse_date_w3dtf ( w3dtfdate )
Parse a string according to a Hungarian 8 - bit date format .
1,686
def parse_date ( dateString ) : if not dateString : return None for handler in _date_handlers : try : date9tuple = handler ( dateString ) except ( KeyError , OverflowError , ValueError ) : continue if not date9tuple : continue if len ( date9tuple ) != 9 : continue return date9tuple return None
Parse a variety of date formats into a 9-tuple in GMT.
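Because each handler returns a 9-tuple compatible with time.struct_time, the result can be fed straight into the standard time utilities. A small sketch, assuming the ISO-8601 handler matches the example string:

import time

tm = parse_date('2004-01-05T12:30:00Z')
if tm is not None:
    # The ISO handler converts via mktime/localtime, so this prints the same wall-clock time.
    print(time.strftime('%Y-%m-%d %H:%M:%S', tm))   # 2004-01-05 12:30:00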
1,687
def handle_chunk_wrapper ( self , status , name , content , file_info ) : out = self . output if out is not None : with out : print ( "handling chunk " + repr ( type ( content ) ) ) self . handle_chunk ( status , name , content , file_info ) else : self . handle_chunk ( status , name , content , file_info )
Wrapper to allow output redirects for handle_chunk.
1,688
def handle_chunk ( self , status , name , content , file_info ) : "Handle one chunk of the file. Override this method for peicewise delivery or error handling." if status == "error" : msg = repr ( file_info . get ( "message" ) ) exc = JavaScriptError ( msg ) exc . file_info = file_info self . status = "Javascript sent exception " + msg self . chunk_collector = [ ] raise exc if status == "more" : self . chunk_collector . append ( content ) self . progress_callback ( self . chunk_collector , file_info ) else : assert status == "done" , "Unknown status " + repr ( status ) self . save_chunks = self . chunk_collector self . chunk_collector . append ( content ) all_content = self . combine_chunks ( self . chunk_collector ) self . chunk_collector = [ ] content_callback = self . content_callback if content_callback is None : content_callback = self . default_content_callback self . status = "calling " + repr ( content_callback ) try : content_callback ( self . widget , name , all_content ) except Exception as e : self . status += "\n" + repr ( content_callback ) + " raised " + repr ( e ) raise
Handle one chunk of the file. Override this method for piecewise delivery or error handling.
1,689
def search ( self , query , method = "lucene" , start = None , rows = None , access_token = None ) : if access_token is None : access_token = self . get_search_token_from_orcid ( ) headers = { 'Accept' : 'application/orcid+json' , 'Authorization' : 'Bearer %s' % access_token } return self . _search ( query , method , start , rows , headers , self . _endpoint )
Search the ORCID database .
1,690
def search_generator ( self , query , method = "lucene" , pagination = 10 , access_token = None ) : if access_token is None : access_token = self . get_search_token_from_orcid ( ) headers = { 'Accept' : 'application/orcid+json' , 'Authorization' : 'Bearer %s' % access_token } index = 0 while True : paginated_result = self . _search ( query , method , index , pagination , headers , self . _endpoint ) if not paginated_result [ 'result' ] : return for result in paginated_result [ 'result' ] : yield result index += pagination
Search the ORCID database with a generator .
1,691
def get_search_token_from_orcid ( self , scope = '/read-public' ) : payload = { 'client_id' : self . _key , 'client_secret' : self . _secret , 'scope' : scope , 'grant_type' : 'client_credentials' } url = "%s/oauth/token" % self . _endpoint headers = { 'Accept' : 'application/json' } response = requests . post ( url , data = payload , headers = headers , timeout = self . _timeout ) response . raise_for_status ( ) if self . do_store_raw_response : self . raw_response = response return response . json ( ) [ 'access_token' ]
Get a token for searching ORCID records .
1,692
def get_token_from_authorization_code ( self , authorization_code , redirect_uri ) : token_dict = { "client_id" : self . _key , "client_secret" : self . _secret , "grant_type" : "authorization_code" , "code" : authorization_code , "redirect_uri" : redirect_uri , } response = requests . post ( self . _token_url , data = token_dict , headers = { 'Accept' : 'application/json' } , timeout = self . _timeout ) response . raise_for_status ( ) if self . do_store_raw_response : self . raw_response = response return json . loads ( response . text )
Like get_token but using an OAuth 2 authorization code .
1,693
def read_record_public ( self , orcid_id , request_type , token , put_code = None , accept_type = 'application/orcid+json' ) : return self . _get_info ( orcid_id , self . _get_public_info , request_type , token , put_code , accept_type )
Get the public info about the researcher .
1,694
def get_user_orcid ( self , user_id , password , redirect_uri ) : response = self . _authenticate ( user_id , password , redirect_uri , '/authenticate' ) return response [ 'orcid' ]
Get the user's ORCID from the authentication process.
1,695
def read_record_member ( self , orcid_id , request_type , token , put_code = None , accept_type = 'application/orcid+json' ) : return self . _get_info ( orcid_id , self . _get_member_info , request_type , token , put_code , accept_type )
Get the member info about the researcher .
1,696
def get_access_tokens ( self , authorization_code ) : response = self . box_request . get_access_token ( authorization_code ) try : att = response . json ( ) except Exception as ex : raise BoxHttpResponseError ( ex ) if response . status_code >= 400 : raise BoxError ( response . status_code , att ) return att [ 'access_token' ] , att [ 'refresh_token' ]
From the authorization code get the access token and the refresh token from Box .
1,697
def unpack_frame ( message ) : body = [ ] returned = dict ( cmd = '' , headers = { } , body = '' ) breakdown = message . split ( '\n' ) returned [ 'cmd' ] = breakdown [ 0 ] breakdown = breakdown [ 1 : ] def headD ( field ) : index = field . find ( ':' ) if index : header = field [ : index ] . strip ( ) data = field [ index + 1 : ] . strip ( ) returned [ 'headers' ] [ header . strip ( ) ] = data . strip ( ) def bodyD ( field ) : field = field . strip ( ) if field : body . append ( field ) handler = headD for field in breakdown : if field . strip ( ) == '' : handler = bodyD continue handler ( field ) body = "" . join ( body ) returned [ 'body' ] = body . replace ( '\x00' , '' ) return returned
Called to unpack a STOMP message into a dictionary .
1,698
def ack ( messageid , transactionid = None ) : header = 'message-id: %s' % messageid if transactionid : header = 'message-id: %s\ntransaction: %s' % ( messageid , transactionid ) return "ACK\n%s\n\n\x00\n" % header
STOMP acknowledge command .
1,699
def send ( dest , msg , transactionid = None ) : transheader = '' if transactionid : transheader = 'transaction: %s\n' % transactionid return "SEND\ndestination: %s\n%s\n%s\x00\n" % ( dest , transheader , msg )
STOMP send command .
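The frames produced by ack() and send() above are plain strings. Printing their repr shows the wire format, with the terminating NUL byte rendered as \x00:

print(repr(ack('msg-1', transactionid='tx-1')))
# 'ACK\nmessage-id: msg-1\ntransaction: tx-1\n\n\x00\n'
print(repr(send('/queue/test', 'hello')))
# 'SEND\ndestination: /queue/test\n\nhello\x00\n'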