idx (int64, range 0–63k) | question (string, length 61–4.03k) | target (string, length 6–1.23k) |
---|---|---|
62,800 |
def sb_filter ( fastq , bc , cores , nedit ) : barcodes = set ( sb . strip ( ) for sb in bc ) if nedit == 0 : filter_sb = partial ( exact_sample_filter2 , barcodes = barcodes ) else : barcodehash = MutationHash ( barcodes , nedit ) filter_sb = partial ( correcting_sample_filter2 , barcodehash = barcodehash ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_sb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read )
|
Filters reads with non-matching sample barcodes. Expects formatted fastq files.
|
62,801 |
def mb_filter ( fastq , cores ) : filter_mb = partial ( umi_filter ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_mb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read )
|
Filters umis with non-ACGT bases. Expects formatted fastq files.
|
62,802 |
def kallisto ( fastq , out_dir , cb_histogram , cb_cutoff ) : parser_re = re . compile ( '(.*):CELL_(?P<CB>.*):UMI_(?P<UMI>.*)\\n(.*)\\n\\+\\n(.*)\\n' ) if fastq . endswith ( 'gz' ) : fastq_fh = gzip . GzipFile ( fileobj = open ( fastq ) ) elif fastq == "-" : fastq_fh = sys . stdin else : fastq_fh = open ( fastq ) cb_depth_set = get_cb_depth_set ( cb_histogram , cb_cutoff ) cb_set = set ( ) cb_batch = collections . defaultdict ( list ) parsed = 0 for read in stream_fastq ( fastq_fh ) : match = parser_re . search ( read ) . groupdict ( ) umi = match [ 'UMI' ] cb = match [ 'CB' ] if cb_depth_set and cb not in cb_depth_set : continue parsed += 1 cb_set . add ( cb ) cb_batch [ cb ] . append ( ( read , umi ) ) if not parsed % 10000000 : for cb , chunk in cb_batch . items ( ) : write_kallisto_chunk ( out_dir , cb , chunk ) cb_batch = collections . defaultdict ( list ) for cb , chunk in cb_batch . items ( ) : write_kallisto_chunk ( out_dir , cb , chunk ) with open ( os . path . join ( out_dir , "barcodes.batch" ) , "w" ) as out_handle : out_handle . write ( "#id umi-file file-1\n" ) batchformat = "{cb} {cb}.umi {cb}.fq\n" for cb in cb_set : out_handle . write ( batchformat . format ( ** locals ( ) ) )
|
Convert a fastqtransformed file to an output format compatible with kallisto.
|
62,803 |
def demultiplex_samples ( fastq , out_dir , nedit , barcodes ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) if barcodes : barcodes = set ( barcode . strip ( ) for barcode in barcodes ) else : barcodes = set ( ) if nedit == 0 : filter_bc = partial ( exact_sample_filter , barcodes = barcodes ) else : barcodehash = MutationHash ( barcodes , nedit ) filter_bc = partial ( correcting_sample_filter , barcodehash = barcodehash ) sample_set = set ( ) batch = collections . defaultdict ( list ) parsed = 0 safe_makedir ( out_dir ) for read in read_fastq ( fastq ) : parsed += 1 read = filter_bc ( read ) if not read : continue match = parser_re . search ( read ) . groupdict ( ) sample = match [ 'SB' ] sample_set . add ( sample ) batch [ sample ] . append ( read ) if not parsed % 10000000 : for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , sample + ".fq" ) with open ( out_file , "a" ) as out_handle : for read in reads : fixed = filter_bc ( read ) if fixed : out_handle . write ( fixed ) batch = collections . defaultdict ( list ) for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , sample + ".fq" ) with open ( out_file , "a" ) as out_handle : for read in reads : fixed = filter_bc ( read ) if fixed : out_handle . write ( read )
|
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for each sample .
|
62,804 |
def demultiplex_cells ( fastq , out_dir , readnumber , prefix , cb_histogram , cb_cutoff ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) readstring = "" if not readnumber else "_R{}" . format ( readnumber ) filestring = "{prefix}{sample}{readstring}.fq" cb_set = set ( ) if cb_histogram : cb_set = get_cb_depth_set ( cb_histogram , cb_cutoff ) sample_set = set ( ) batch = collections . defaultdict ( list ) parsed = 0 safe_makedir ( out_dir ) for read in read_fastq ( fastq ) : parsed += 1 match = parser_re . search ( read ) . groupdict ( ) sample = match [ 'CB' ] if cb_set and sample not in cb_set : continue sample_set . add ( sample ) batch [ sample ] . append ( read ) if not parsed % 10000000 : for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , filestring . format ( ** locals ( ) ) ) with open ( out_file , "a" ) as out_handle : for read in reads : out_handle . write ( read ) batch = collections . defaultdict ( list ) for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , filestring . format ( ** locals ( ) ) ) with open ( out_file , "a" ) as out_handle : for read in reads : out_handle . write ( read )
|
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for each cell .
|
62,805 |
def array_type ( data_types , field ) : from sqlalchemy . dialects import postgresql internal_type = field . base_field . get_internal_type ( ) if internal_type in data_types and internal_type != 'ArrayField' : sub_type = data_types [ internal_type ] ( field ) if not isinstance ( sub_type , ( list , tuple ) ) : sub_type = [ sub_type ] else : raise RuntimeError ( 'Unsupported array element type' ) return postgresql . ARRAY ( sub_type )
|
Allows conversion of Django ArrayField to SQLAlchemy Array. Takes care of mapping the type of the array element.
|
62,806 |
def set_verbose_logger_handlers ( ) : global _REGISTERED_LOGGER_HANDLERS formatter = logging . Formatter ( '%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d ' '%(message)s' ) formatter . default_msec_format = '%s.%03d' for handler in _REGISTERED_LOGGER_HANDLERS : handler . setFormatter ( formatter )
|
Set logger handler formatters to a more detailed format
|
62,807 |
def download ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Download ) ctx . initialize ( settings . TransferAction . Download ) specs = settings . create_download_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . Downloader ( ctx . general_options , ctx . credentials , spec ) . start ( )
|
Download blobs or files from Azure Storage
|
62,808 |
def synccopy ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Synccopy ) ctx . initialize ( settings . TransferAction . Synccopy ) specs = settings . create_synccopy_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . SyncCopy ( ctx . general_options , ctx . credentials , spec ) . start ( )
|
Synchronously copy blobs or files between Azure Storage accounts
|
62,809 |
def upload ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Upload ) ctx . initialize ( settings . TransferAction . Upload ) specs = settings . create_upload_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . Uploader ( ctx . general_options , ctx . credentials , spec ) . start ( )
|
Upload files to Azure Storage
|
62,810 |
def get_idp_sso_supported_bindings ( idp_entity_id = None , config = None ) : if config is None : from djangosaml2 . conf import get_config config = get_config ( ) meta = getattr ( config , 'metadata' , { } ) if idp_entity_id is None : try : idp_entity_id = list ( available_idps ( config ) . keys ( ) ) [ 0 ] except IndexError : raise ImproperlyConfigured ( "No IdP configured!" ) try : return meta . service ( idp_entity_id , 'idpsso_descriptor' , 'single_sign_on_service' ) . keys ( ) except UnknownSystemEntity : return [ ]
|
Returns the list of bindings supported by an IdP. This is not clear in the pysaml2 code, so it is wrapped in a utility.
|
62,811 |
def fail_acs_response ( request , * args , ** kwargs ) : failure_function = import_string ( get_custom_setting ( 'SAML_ACS_FAILURE_RESPONSE_FUNCTION' , 'djangosaml2.acs_failures.template_failure' ) ) return failure_function ( request , * args , ** kwargs )
|
Serves as a common mechanism for ending ACS in case of any SAML-related failure. Handling can be configured by setting SAML_ACS_FAILURE_RESPONSE_FUNCTION as suitable for the project.
|
62,812 |
def echo_attributes ( request , config_loader_path = None , template = 'djangosaml2/echo_attributes.html' ) : state = StateCache ( request . session ) conf = get_config ( config_loader_path , request ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) subject_id = _get_subject_id ( request . session ) try : identity = client . users . get_identity ( subject_id , check_not_on_or_after = False ) except AttributeError : return HttpResponse ( "No active SAML identity found. Are you sure you have logged in via SAML?" ) return render ( request , template , { 'attributes' : identity [ 0 ] } )
|
Example view that echoes the SAML attributes of a user
|
62,813 |
def logout ( request , config_loader_path = None ) : state = StateCache ( request . session ) conf = get_config ( config_loader_path , request ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) subject_id = _get_subject_id ( request . session ) if subject_id is None : logger . warning ( 'The session does not contain the subject id for user %s' , request . user ) result = client . global_logout ( subject_id ) state . sync ( ) if not result : logger . error ( "Looks like the user %s is not logged in any IdP/AA" , subject_id ) return HttpResponseBadRequest ( "You are not logged in any IdP/AA" ) if len ( result ) > 1 : logger . error ( 'Sorry, I do not know how to logout from several sources. I will logout just from the first one' ) for entityid , logout_info in result . items ( ) : if isinstance ( logout_info , tuple ) : binding , http_info = logout_info if binding == BINDING_HTTP_POST : logger . debug ( 'Returning form to the IdP to continue the logout process' ) body = '' . join ( http_info [ 'data' ] ) return HttpResponse ( body ) elif binding == BINDING_HTTP_REDIRECT : logger . debug ( 'Redirecting to the IdP to continue the logout process' ) return HttpResponseRedirect ( get_location ( http_info ) ) else : logger . error ( 'Unknown binding: %s' , binding ) return HttpResponseServerError ( 'Failed to log out' ) else : return finish_logout ( request , logout_info ) logger . error ( 'Could not logout because there only the HTTP_REDIRECT is supported' ) return HttpResponseServerError ( 'Logout Binding not supported' )
|
SAML Logout Request initiator
|
62,814 |
def do_logout_service ( request , data , binding , config_loader_path = None , next_page = None , logout_error_template = 'djangosaml2/logout_error.html' ) : logger . debug ( 'Logout service started' ) conf = get_config ( config_loader_path , request ) state = StateCache ( request . session ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) if 'SAMLResponse' in data : logger . debug ( 'Receiving a logout response from the IdP' ) response = client . parse_logout_request_response ( data [ 'SAMLResponse' ] , binding ) state . sync ( ) return finish_logout ( request , response , next_page = next_page ) elif 'SAMLRequest' in data : logger . debug ( 'Receiving a logout request from the IdP' ) subject_id = _get_subject_id ( request . session ) if subject_id is None : logger . warning ( 'The session does not contain the subject id for user %s. Performing local logout' , request . user ) auth . logout ( request ) return render ( request , logout_error_template , status = 403 ) else : http_info = client . handle_logout_request ( data [ 'SAMLRequest' ] , subject_id , binding , relay_state = data . get ( 'RelayState' , '' ) ) state . sync ( ) auth . logout ( request ) return HttpResponseRedirect ( get_location ( http_info ) ) else : logger . error ( 'No SAMLResponse or SAMLRequest parameter found' ) raise Http404 ( 'No SAMLResponse or SAMLRequest parameter found' )
|
SAML Logout Response endpoint
|
62,815 |
def metadata ( request , config_loader_path = None , valid_for = None ) : conf = get_config ( config_loader_path , request ) metadata = entity_descriptor ( conf ) return HttpResponse ( content = text_type ( metadata ) . encode ( 'utf-8' ) , content_type = "text/xml; charset=utf8" )
|
Returns an XML document with the SAML 2.0 metadata for this SP as configured in the settings.py file.
|
62,816 |
def configure_user ( self , user , attributes , attribute_mapping ) : user . set_unusable_password ( ) return self . update_user ( user , attributes , attribute_mapping , force_save = True )
|
Configures a user after creation and returns the updated user .
|
62,817 |
def update_user ( self , user , attributes , attribute_mapping , force_save = False ) : if not attribute_mapping : return user user_modified = False for saml_attr , django_attrs in attribute_mapping . items ( ) : attr_value_list = attributes . get ( saml_attr ) if not attr_value_list : logger . debug ( 'Could not find value for "%s", not updating fields "%s"' , saml_attr , django_attrs ) continue for attr in django_attrs : if hasattr ( user , attr ) : user_attr = getattr ( user , attr ) if callable ( user_attr ) : modified = user_attr ( attr_value_list ) else : modified = self . _set_attribute ( user , attr , attr_value_list [ 0 ] ) user_modified = user_modified or modified else : logger . debug ( 'Could not find attribute "%s" on user "%s"' , attr , user ) logger . debug ( 'Sending the pre_save signal' ) signal_modified = any ( [ response for receiver , response in pre_user_save . send_robust ( sender = user . __class__ , instance = user , attributes = attributes , user_modified = user_modified ) ] ) if user_modified or signal_modified or force_save : user . save ( ) return user
|
Updates a user with a set of attributes and returns the updated user.
|
62,818 |
def _set_attribute ( self , obj , attr , value ) : field = obj . _meta . get_field ( attr ) if field . max_length is not None and len ( value ) > field . max_length : cleaned_value = value [ : field . max_length ] logger . warn ( 'The attribute "%s" was trimmed from "%s" to "%s"' , attr , value , cleaned_value ) else : cleaned_value = value old_value = getattr ( obj , attr ) if cleaned_value != old_value : setattr ( obj , attr , cleaned_value ) return True return False
|
Set an attribute of an object to a specific value .
|
62,819 |
def config_settings_loader ( request = None ) : conf = SPConfig ( ) conf . load ( copy . deepcopy ( settings . SAML_CONFIG ) ) return conf
|
Utility function to load the pysaml2 configuration .
|
62,820 |
def mkpath ( * segments , ** query ) : segments = [ bytes_to_str ( s ) for s in segments if s is not None ] pathstring = '/' . join ( segments ) pathstring = re . sub ( '/+' , '/' , pathstring ) _query = { } for key in query : if query [ key ] in [ False , True ] : _query [ key ] = str ( query [ key ] ) . lower ( ) elif query [ key ] is not None : if PY2 and isinstance ( query [ key ] , unicode ) : _query [ key ] = query [ key ] . encode ( 'utf-8' ) else : _query [ key ] = query [ key ] if len ( _query ) > 0 : pathstring += "?" + urlencode ( _query ) if not pathstring . startswith ( '/' ) : pathstring = '/' + pathstring return pathstring
|
Constructs the path & query portion of a URI from path segments and a dict .
|
62,821 |
def search_index_path ( self , index = None , ** options ) : if not self . yz_wm_index : raise RiakError ( "Yokozuna search is unsupported by this Riak node" ) if index : quote_plus ( index ) return mkpath ( self . yz_wm_index , "index" , index , ** options )
|
Builds a Yokozuna search index URL .
|
62,822 |
def search_schema_path ( self , index , ** options ) : if not self . yz_wm_schema : raise RiakError ( "Yokozuna search is unsupported by this Riak node" ) return mkpath ( self . yz_wm_schema , "schema" , quote_plus ( index ) , ** options )
|
Builds a Yokozuna search Solr schema URL .
|
62,823 |
def to_op ( self ) : if not self . _adds : return None changes = { } if self . _adds : changes [ 'adds' ] = list ( self . _adds ) return changes
|
Extracts the modification operation from the Hll .
|
62,824 |
def add ( self , element ) : if not isinstance ( element , six . string_types ) : raise TypeError ( "Hll elements can only be strings" ) self . _adds . add ( element )
|
Adds an element to the HyperLogLog . Datatype cardinality will be updated when the object is saved .
|
62,825 |
def ping ( self ) : status , _ , body = self . _request ( 'GET' , self . ping_path ( ) ) return ( status is not None ) and ( bytes_to_str ( body ) == 'OK' )
|
Checks that the server is alive over HTTP
|
62,826 |
def stats ( self ) : status , _ , body = self . _request ( 'GET' , self . stats_path ( ) , { 'Accept' : 'application/json' } ) if status == 200 : return json . loads ( bytes_to_str ( body ) ) else : return None
|
Gets performance statistics and server information
|
62,827 |
def get_keys ( self , bucket , timeout = None ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . key_list_path ( bucket . name , bucket_type = bucket_type , timeout = timeout ) status , _ , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'keys' ] else : raise RiakError ( 'Error listing keys.' )
|
Fetch a list of keys for the bucket
|
62,828 |
def get_buckets ( self , bucket_type = None , timeout = None ) : bucket_type = self . _get_bucket_type ( bucket_type ) url = self . bucket_list_path ( bucket_type = bucket_type , timeout = timeout ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'buckets' ] else : raise RiakError ( 'Error getting buckets.' )
|
Fetch a list of all buckets
|
62,829 |
def get_bucket_props ( self , bucket ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'props' ] else : raise RiakError ( 'Error getting bucket properties.' )
|
Get properties for a bucket
|
62,830 |
def set_bucket_props ( self , bucket , props ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) headers = { 'Content-Type' : 'application/json' } content = json . dumps ( { 'props' : props } ) status , _ , body = self . _request ( 'PUT' , url , headers , content ) if status == 401 : raise SecurityError ( 'Not authorized to set bucket properties.' ) elif status != 204 : raise RiakError ( 'Error setting bucket properties.' ) return True
|
Set the properties on the given bucket object
|
62,831 |
def clear_bucket_props ( self , bucket ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) url = self . bucket_properties_path ( bucket . name ) headers = { 'Content-Type' : 'application/json' } status , _ , _ = self . _request ( 'DELETE' , url , headers , None ) if status == 204 : return True elif status == 405 : return False else : raise RiakError ( 'Error %s clearing bucket properties.' % status )
|
Reset the properties on the given bucket object
|
62,832 |
def get_bucket_type_props ( self , bucket_type ) : self . _check_bucket_types ( bucket_type ) url = self . bucket_type_properties_path ( bucket_type . name ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'props' ] else : raise RiakError ( 'Error getting bucket-type properties.' )
|
Get properties for a bucket-type
|
62,833 |
def set_bucket_type_props ( self , bucket_type , props ) : self . _check_bucket_types ( bucket_type ) url = self . bucket_type_properties_path ( bucket_type . name ) headers = { 'Content-Type' : 'application/json' } content = json . dumps ( { 'props' : props } ) status , _ , _ = self . _request ( 'PUT' , url , headers , content ) if status != 204 : raise RiakError ( 'Error setting bucket-type properties.' ) return True
|
Set the properties on the bucket-type
|
62,834 |
def mapred ( self , inputs , query , timeout = None ) : content = self . _construct_mapred_json ( inputs , query , timeout ) url = self . mapred_path ( ) headers = { 'Content-Type' : 'application/json' } status , headers , body = self . _request ( 'POST' , url , headers , content ) if status != 200 : raise RiakError ( 'Error running MapReduce operation. Headers: %s Body: %s' % ( repr ( headers ) , repr ( body ) ) ) result = json . loads ( bytes_to_str ( body ) ) return result
|
Run a MapReduce query .
|
62,835 |
def create_search_index ( self , index , schema = None , n_val = None , timeout = None ) : if not self . yz_wm_index : raise NotImplementedError ( "Search 2.0 administration is not " "supported for this version" ) url = self . search_index_path ( index ) headers = { 'Content-Type' : 'application/json' } content_dict = dict ( ) if schema : content_dict [ 'schema' ] = schema if n_val : content_dict [ 'n_val' ] = n_val if timeout : content_dict [ 'timeout' ] = timeout content = json . dumps ( content_dict ) status , _ , _ = self . _request ( 'PUT' , url , headers , content ) if status != 204 : raise RiakError ( 'Error setting Search 2.0 index.' ) return True
|
Create a Solr search index for Yokozuna .
|
62,836 |
def list_search_indexes ( self ) : if not self . yz_wm_index : raise NotImplementedError ( "Search 2.0 administration is not " "supported for this version" ) url = self . search_index_path ( ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : json_data = json . loads ( bytes_to_str ( body ) ) return json_data else : raise RiakError ( 'Error getting Search 2.0 index.' )
|
Return a list of Solr search indexes from Yokozuna .
|
62,837 |
def create_search_schema ( self , schema , content ) : if not self . yz_wm_schema : raise NotImplementedError ( "Search 2.0 administration is not " "supported for this version" ) url = self . search_schema_path ( schema ) headers = { 'Content-Type' : 'application/xml' } status , header , body = self . _request ( 'PUT' , url , headers , content ) if status != 204 : raise RiakError ( 'Error creating Search 2.0 schema.' ) return True
|
Create a new Solr schema for Yokozuna .
|
62,838 |
def get_search_schema ( self , schema ) : if not self . yz_wm_schema : raise NotImplementedError ( "Search 2.0 administration is not " "supported for this version" ) url = self . search_schema_path ( schema ) status , _ , body = self . _request ( 'GET' , url ) if status == 200 : result = { } result [ 'name' ] = schema result [ 'content' ] = bytes_to_str ( body ) return result else : raise RiakError ( 'Error getting Search 2.0 schema.' )
|
Fetch a Solr schema from Yokozuna .
|
62,839 |
def search ( self , index , query , ** params ) : if index is None : index = 'search' options = { } if 'op' in params : op = params . pop ( 'op' ) options [ 'q.op' ] = op options . update ( params ) url = self . solr_select_path ( index , query , ** options ) status , headers , data = self . _request ( 'GET' , url ) self . check_http_code ( status , [ 200 ] ) if 'json' in headers [ 'content-type' ] : results = json . loads ( bytes_to_str ( data ) ) return self . _normalize_json_search_response ( results ) elif 'xml' in headers [ 'content-type' ] : return self . _normalize_xml_search_response ( data ) else : raise ValueError ( "Could not decode search response" )
|
Performs a search query .
|
62,840 |
def fulltext_add ( self , index , docs ) : xml = Document ( ) root = xml . createElement ( 'add' ) for doc in docs : doc_element = xml . createElement ( 'doc' ) for key in doc : value = doc [ key ] field = xml . createElement ( 'field' ) field . setAttribute ( "name" , key ) text = xml . createTextNode ( value ) field . appendChild ( text ) doc_element . appendChild ( field ) root . appendChild ( doc_element ) xml . appendChild ( root ) self . _request ( 'POST' , self . solr_update_path ( index ) , { 'Content-Type' : 'text/xml' } , xml . toxml ( ) . encode ( 'utf-8' ) )
|
Adds documents to the search index .
|
62,841 |
def fulltext_delete ( self , index , docs = None , queries = None ) : xml = Document ( ) root = xml . createElement ( 'delete' ) if docs : for doc in docs : doc_element = xml . createElement ( 'id' ) text = xml . createTextNode ( doc ) doc_element . appendChild ( text ) root . appendChild ( doc_element ) if queries : for query in queries : query_element = xml . createElement ( 'query' ) text = xml . createTextNode ( query ) query_element . appendChild ( text ) root . appendChild ( query_element ) xml . appendChild ( root ) self . _request ( 'POST' , self . solr_update_path ( index ) , { 'Content-Type' : 'text/xml' } , xml . toxml ( ) . encode ( 'utf-8' ) )
|
Removes documents from the full-text index.
|
62,842 |
def release ( self ) : if self . errored : self . pool . delete_resource ( self ) else : self . pool . release ( self )
|
Releases this resource back to the pool it came from .
|
62,843 |
def delete_resource ( self , resource ) : with self . lock : self . resources . remove ( resource ) self . destroy_resource ( resource . object ) del resource
|
Deletes the resource from the pool and destroys the associated resource. Not usually needed by users of the pool, but called internally when BadResource is raised.
|
62,844 |
def encode_timeseries_put ( self , tsobj ) : if tsobj . columns : raise NotImplementedError ( 'columns are not used' ) if tsobj . rows and isinstance ( tsobj . rows , list ) : req_rows = [ ] for row in tsobj . rows : req_r = [ ] for cell in row : req_r . append ( self . encode_to_ts_cell ( cell ) ) req_rows . append ( tuple ( req_r ) ) req = tsputreq_a , tsobj . table . name , [ ] , req_rows mc = MSG_CODE_TS_TTB_MSG rc = MSG_CODE_TS_TTB_MSG return Msg ( mc , encode ( req ) , rc ) else : raise RiakError ( "TsObject requires a list of rows" )
|
Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject.
|
62,845 |
def decode_timeseries_row ( self , tsrow , tsct , convert_timestamp = False ) : row = [ ] for i , cell in enumerate ( tsrow ) : if cell is None : row . append ( None ) elif isinstance ( cell , list ) and len ( cell ) == 0 : row . append ( None ) else : if convert_timestamp and tsct [ i ] == timestamp_a : row . append ( datetime_from_unix_time_millis ( cell ) ) else : row . append ( cell ) return row
|
Decodes a TTB-encoded TsRow into a list
|
62,846 |
def to_op ( self ) : if not self . _adds and not self . _removes : return None changes = { } if self . _adds : changes [ 'adds' ] = list ( self . _adds ) if self . _removes : changes [ 'removes' ] = list ( self . _removes ) return changes
|
Extracts the modification operation from the set .
|
62,847 |
def discard ( self , element ) : _check_element ( element ) self . _require_context ( ) self . _removes . add ( element )
|
Removes an element from the set .
|
62,848 |
def getone ( self , key ) : v = self . getall ( key ) if not v : raise KeyError ( 'Key not found: %r' % key ) if len ( v ) > 1 : raise KeyError ( 'Multiple values match %r: %r' % ( key , v ) ) return v [ 0 ]
|
Get one value matching the key, raising a KeyError if multiple values were found.
|
62,849 |
def dict_of_lists ( self ) : result = { } for key , value in self . _items : if key in result : result [ key ] . append ( value ) else : result [ key ] = [ value ] return result
|
Returns a dictionary where each key is associated with a list of values .
|
62,850 |
def enq ( self , task ) : if not self . _stop . is_set ( ) : self . _inq . put ( task ) else : raise RuntimeError ( "Attempted to enqueue an operation while " "multi pool was shutdown!" )
|
Enqueues a fetch task to the pool of workers. This will raise a RuntimeError if the pool is stopped or in the process of stopping.
|
62,851 |
def start ( self ) : if not self . _started . is_set ( ) : if self . _lock . acquire ( False ) : for i in range ( self . _size ) : name = "riak.client.multi-worker-{0}-{1}" . format ( self . _name , i ) worker = Thread ( target = self . _worker_method , name = name ) worker . daemon = False worker . start ( ) self . _workers . append ( worker ) self . _started . set ( ) self . _lock . release ( ) else : self . _started . wait ( )
|
Starts the worker threads if they are not already started. This method is thread-safe and will be called automatically when executing an operation.
|
62,852 |
def stop ( self ) : if not self . stopped ( ) : self . _stop . set ( ) for worker in self . _workers : worker . join ( )
|
Signals the worker threads to exit and waits on them .
|
62,853 |
def _check_key ( self , key ) : if not len ( key ) == 2 : raise TypeError ( 'invalid key: %r' % key ) elif key [ 1 ] not in TYPES : raise TypeError ( 'invalid datatype: %s' % key [ 1 ] )
|
Ensures well-formedness of a key.
|
62,854 |
def modified ( self ) : if self . _removes : return True for v in self . _value : if self . _value [ v ] . modified : return True for v in self . _updates : if self . _updates [ v ] . modified : return True return False
|
Whether the map has staged local modifications .
|
62,855 |
def _format_python2_or_3 ( self ) : pb_files = set ( ) with open ( self . source , 'r' , buffering = 1 ) as csvfile : reader = csv . reader ( csvfile ) for row in reader : _ , _ , proto = row pb_files . add ( 'riak/pb/{0}_pb2.py' . format ( proto ) ) for im in sorted ( pb_files ) : with open ( im , 'r' , buffering = 1 ) as pbfile : contents = 'from six import *\n' + pbfile . read ( ) contents = re . sub ( r'riak_pb2' , r'riak.pb.riak_pb2' , contents ) contents = re . sub ( r'class\s+(\S+)\((\S+)\):\s*\n' '\s+__metaclass__\s+=\s+(\S+)\s*\n' , r'@add_metaclass(\3)\nclass \1(\2):\n' , contents ) with open ( im , 'w' , buffering = 1 ) as pbfile : pbfile . write ( contents )
|
Change the PB files to use full pathnames for Python 3.x and modify the metaclasses to be version-agnostic
|
62,856 |
def reload ( self , ** params ) : if not self . bucket : raise ValueError ( 'bucket property not assigned' ) if not self . key : raise ValueError ( 'key property not assigned' ) dtype , value , context = self . bucket . _client . _fetch_datatype ( self . bucket , self . key , ** params ) if not dtype == self . type_name : raise TypeError ( "Expected datatype {} but " "got datatype {}" . format ( self . __class__ , TYPES [ dtype ] ) ) self . clear ( ) self . _context = context self . _set_value ( value ) return self
|
Reloads the datatype from Riak .
|
62,857 |
def update ( self , ** params ) : if not self . modified : raise ValueError ( "No operation to perform" ) params . setdefault ( 'return_body' , True ) self . bucket . _client . update_datatype ( self , ** params ) self . clear ( ) return self
|
Sends locally staged mutations to Riak .
|
62,858 |
def encode_quorum ( self , rw ) : if rw in QUORUM_TO_PB : return QUORUM_TO_PB [ rw ] elif type ( rw ) is int and rw >= 0 : return rw else : return None
|
Converts a symbolic quorum value into its on-the-wire equivalent.
|
62,859 |
def decode_contents ( self , contents , obj ) : obj . siblings = [ self . decode_content ( c , RiakContent ( obj ) ) for c in contents ] if len ( obj . siblings ) > 1 and obj . resolver is not None : obj . resolver ( obj ) return obj
|
Decodes the list of siblings from the protobuf representation into the object .
|
62,860 |
def decode_content ( self , rpb_content , sibling ) : if rpb_content . HasField ( "deleted" ) and rpb_content . deleted : sibling . exists = False else : sibling . exists = True if rpb_content . HasField ( "content_type" ) : sibling . content_type = bytes_to_str ( rpb_content . content_type ) if rpb_content . HasField ( "charset" ) : sibling . charset = bytes_to_str ( rpb_content . charset ) if rpb_content . HasField ( "content_encoding" ) : sibling . content_encoding = bytes_to_str ( rpb_content . content_encoding ) if rpb_content . HasField ( "vtag" ) : sibling . etag = bytes_to_str ( rpb_content . vtag ) sibling . links = [ self . decode_link ( link ) for link in rpb_content . links ] if rpb_content . HasField ( "last_mod" ) : sibling . last_modified = float ( rpb_content . last_mod ) if rpb_content . HasField ( "last_mod_usecs" ) : sibling . last_modified += rpb_content . last_mod_usecs / 1000000.0 sibling . usermeta = dict ( [ ( bytes_to_str ( usermd . key ) , bytes_to_str ( usermd . value ) ) for usermd in rpb_content . usermeta ] ) sibling . indexes = set ( [ ( bytes_to_str ( index . key ) , decode_index_value ( index . key , index . value ) ) for index in rpb_content . indexes ] ) sibling . encoded_data = rpb_content . value return sibling
|
Decodes a single sibling from the protobuf representation into a RiakObject .
|
62,861 |
def encode_content ( self , robj , rpb_content ) : if robj . content_type : rpb_content . content_type = str_to_bytes ( robj . content_type ) if robj . charset : rpb_content . charset = str_to_bytes ( robj . charset ) if robj . content_encoding : rpb_content . content_encoding = str_to_bytes ( robj . content_encoding ) for uk in robj . usermeta : pair = rpb_content . usermeta . add ( ) pair . key = str_to_bytes ( uk ) pair . value = str_to_bytes ( robj . usermeta [ uk ] ) for link in robj . links : pb_link = rpb_content . links . add ( ) try : bucket , key , tag = link except ValueError : raise RiakError ( "Invalid link tuple %s" % link ) pb_link . bucket = str_to_bytes ( bucket ) pb_link . key = str_to_bytes ( key ) if tag : pb_link . tag = str_to_bytes ( tag ) else : pb_link . tag = str_to_bytes ( '' ) for field , value in robj . indexes : pair = rpb_content . indexes . add ( ) pair . key = str_to_bytes ( field ) pair . value = str_to_bytes ( str ( value ) ) if six . PY2 : rpb_content . value = str ( robj . encoded_data ) else : rpb_content . value = robj . encoded_data
|
Fills an RpbContent message with the appropriate data and metadata from a RiakObject .
|
62,862 |
def decode_link ( self , link ) : if link . HasField ( "bucket" ) : bucket = bytes_to_str ( link . bucket ) else : bucket = None if link . HasField ( "key" ) : key = bytes_to_str ( link . key ) else : key = None if link . HasField ( "tag" ) : tag = bytes_to_str ( link . tag ) else : tag = None return ( bucket , key , tag )
|
Decodes an RpbLink message into a tuple
|
62,863 |
def encode_bucket_props ( self , props , msg ) : for prop in NORMAL_PROPS : if prop in props and props [ prop ] is not None : if isinstance ( props [ prop ] , six . string_types ) : setattr ( msg . props , prop , str_to_bytes ( props [ prop ] ) ) else : setattr ( msg . props , prop , props [ prop ] ) for prop in COMMIT_HOOK_PROPS : if prop in props : setattr ( msg . props , 'has_' + prop , True ) self . encode_hooklist ( props [ prop ] , getattr ( msg . props , prop ) ) for prop in MODFUN_PROPS : if prop in props and props [ prop ] is not None : self . encode_modfun ( props [ prop ] , getattr ( msg . props , prop ) ) for prop in QUORUM_PROPS : if prop in props and props [ prop ] not in ( None , 'default' ) : value = self . encode_quorum ( props [ prop ] ) if value is not None : if isinstance ( value , six . string_types ) : setattr ( msg . props , prop , str_to_bytes ( value ) ) else : setattr ( msg . props , prop , value ) if 'repl' in props : msg . props . repl = REPL_TO_PB [ props [ 'repl' ] ] return msg
|
Encodes a dict of bucket properties into the protobuf message .
|
62,864 |
def decode_bucket_props ( self , msg ) : props = { } for prop in NORMAL_PROPS : if msg . HasField ( prop ) : props [ prop ] = getattr ( msg , prop ) if isinstance ( props [ prop ] , bytes ) : props [ prop ] = bytes_to_str ( props [ prop ] ) for prop in COMMIT_HOOK_PROPS : if getattr ( msg , 'has_' + prop ) : props [ prop ] = self . decode_hooklist ( getattr ( msg , prop ) ) for prop in MODFUN_PROPS : if msg . HasField ( prop ) : props [ prop ] = self . decode_modfun ( getattr ( msg , prop ) ) for prop in QUORUM_PROPS : if msg . HasField ( prop ) : props [ prop ] = self . decode_quorum ( getattr ( msg , prop ) ) if msg . HasField ( 'repl' ) : props [ 'repl' ] = REPL_TO_PY [ msg . repl ] return props
|
Decodes the protobuf bucket properties message into a dict .
|
62,865 |
def encode_modfun ( self , props , msg = None ) : if msg is None : msg = riak . pb . riak_pb2 . RpbModFun ( ) msg . module = str_to_bytes ( props [ 'mod' ] ) msg . function = str_to_bytes ( props [ 'fun' ] ) return msg
|
Encodes a dict with mod and fun keys into a protobuf modfun pair . Used in bucket properties .
|
62,866 |
def encode_hooklist ( self , hooklist , msg ) : for hook in hooklist : pbhook = msg . add ( ) self . encode_hook ( hook , pbhook )
|
Encodes a list of commit hooks into their protobuf equivalent . Used in bucket properties .
|
62,867 |
def decode_hook ( self , hook ) : if hook . HasField ( 'modfun' ) : return self . decode_modfun ( hook . modfun ) else : return { 'name' : bytes_to_str ( hook . name ) }
|
Decodes a protobuf commit hook message into a dict . Used in bucket properties .
|
62,868 |
def encode_hook ( self , hook , msg ) : if 'name' in hook : msg . name = str_to_bytes ( hook [ 'name' ] ) else : self . encode_modfun ( hook , msg . modfun ) return msg
|
Encodes a commit hook dict into the protobuf message . Used in bucket properties .
|
62,869 |
def encode_index_req ( self , bucket , index , startkey , endkey = None , return_terms = None , max_results = None , continuation = None , timeout = None , term_regex = None , streaming = False ) : req = riak . pb . riak_kv_pb2 . RpbIndexReq ( bucket = str_to_bytes ( bucket . name ) , index = str_to_bytes ( index ) ) self . _add_bucket_type ( req , bucket . bucket_type ) if endkey is not None : req . qtype = riak . pb . riak_kv_pb2 . RpbIndexReq . range req . range_min = str_to_bytes ( str ( startkey ) ) req . range_max = str_to_bytes ( str ( endkey ) ) else : req . qtype = riak . pb . riak_kv_pb2 . RpbIndexReq . eq req . key = str_to_bytes ( str ( startkey ) ) if return_terms is not None : req . return_terms = return_terms if max_results : req . max_results = max_results if continuation : req . continuation = str_to_bytes ( continuation ) if timeout : if timeout == 'infinity' : req . timeout = 0 else : req . timeout = timeout if term_regex : req . term_regex = str_to_bytes ( term_regex ) req . stream = streaming mc = riak . pb . messages . MSG_CODE_INDEX_REQ rc = riak . pb . messages . MSG_CODE_INDEX_RESP return Msg ( mc , req . SerializeToString ( ) , rc )
|
Encodes a secondary index request into the protobuf message .
|
62,870 |
def decode_search_index ( self , index ) : result = { } result [ 'name' ] = bytes_to_str ( index . name ) if index . HasField ( 'schema' ) : result [ 'schema' ] = bytes_to_str ( index . schema ) if index . HasField ( 'n_val' ) : result [ 'n_val' ] = index . n_val return result
|
Fills an RpbYokozunaIndex message with the appropriate data .
|
62,871 |
def encode_timeseries_put ( self , tsobj ) : req = riak . pb . riak_ts_pb2 . TsPutReq ( ) req . table = str_to_bytes ( tsobj . table . name ) if tsobj . columns : raise NotImplementedError ( "columns are not implemented yet" ) if tsobj . rows and isinstance ( tsobj . rows , list ) : for row in tsobj . rows : tsr = req . rows . add ( ) if not isinstance ( row , list ) : raise ValueError ( "TsObject row must be a list of values" ) for cell in row : tsc = tsr . cells . add ( ) self . encode_to_ts_cell ( cell , tsc ) else : raise RiakError ( "TsObject requires a list of rows" ) mc = riak . pb . messages . MSG_CODE_TS_PUT_REQ rc = riak . pb . messages . MSG_CODE_TS_PUT_RESP return Msg ( mc , req . SerializeToString ( ) , rc )
|
Fills an TsPutReq message with the appropriate data and metadata from a TsObject .
|
62,872 |
def decode_timeseries_row ( self , tsrow , tscols = None , convert_timestamp = False ) : row = [ ] for i , cell in enumerate ( tsrow . cells ) : col = None if tscols is not None : col = tscols [ i ] if cell . HasField ( 'varchar_value' ) : if col and not ( col . type == TsColumnType . Value ( 'VARCHAR' ) or col . type == TsColumnType . Value ( 'BLOB' ) ) : raise TypeError ( 'expected VARCHAR or BLOB column' ) else : row . append ( cell . varchar_value ) elif cell . HasField ( 'sint64_value' ) : if col and col . type != TsColumnType . Value ( 'SINT64' ) : raise TypeError ( 'expected SINT64 column' ) else : row . append ( cell . sint64_value ) elif cell . HasField ( 'double_value' ) : if col and col . type != TsColumnType . Value ( 'DOUBLE' ) : raise TypeError ( 'expected DOUBLE column' ) else : row . append ( cell . double_value ) elif cell . HasField ( 'timestamp_value' ) : if col and col . type != TsColumnType . Value ( 'TIMESTAMP' ) : raise TypeError ( 'expected TIMESTAMP column' ) else : dt = cell . timestamp_value if convert_timestamp : dt = datetime_from_unix_time_millis ( cell . timestamp_value ) row . append ( dt ) elif cell . HasField ( 'boolean_value' ) : if col and col . type != TsColumnType . Value ( 'BOOLEAN' ) : raise TypeError ( 'expected BOOLEAN column' ) else : row . append ( cell . boolean_value ) else : row . append ( None ) return row
|
Decodes a TsRow into a list
|
62,873 |
def decode_preflist ( self , item ) : result = { 'partition' : item . partition , 'node' : bytes_to_str ( item . node ) , 'primary' : item . primary } return result
|
Decodes a preflist response
|
62,874 |
def ping ( self ) : msg_code = riak . pb . messages . MSG_CODE_PING_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_ping ( ) resp_code , _ = self . _request ( msg , codec ) if resp_code == riak . pb . messages . MSG_CODE_PING_RESP : return True else : return False
|
Ping the remote server
|
62,875 |
def get_server_info ( self ) : codec = PbufCodec ( ) msg = Msg ( riak . pb . messages . MSG_CODE_GET_SERVER_INFO_REQ , None , riak . pb . messages . MSG_CODE_GET_SERVER_INFO_RESP ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_get_server_info ( resp )
|
Get information about the server
|
62,876 |
def get ( self , robj , r = None , pr = None , timeout = None , basic_quorum = None , notfound_ok = None , head_only = False ) : msg_code = riak . pb . messages . MSG_CODE_GET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get ( robj , r , pr , timeout , basic_quorum , notfound_ok , head_only ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_get ( robj , resp )
|
Serialize get request and deserialize response
|
62,877 |
def ts_stream_keys ( self , table , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_TS_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_timeseries_listkeysreq ( table , timeout ) self . _send_msg ( msg . msg_code , msg . data ) return PbufTsKeyStream ( self , codec , self . _ts_convert_timestamp )
|
Streams keys from a timeseries table returning an iterator that yields lists of keys .
|
62,878 |
def get_keys ( self , bucket , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) stream = self . stream_keys ( bucket , timeout = timeout ) return codec . decode_get_keys ( stream )
|
Lists all keys within a bucket .
|
62,879 |
def stream_keys ( self , bucket , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_stream_keys ( bucket , timeout ) self . _send_msg ( msg . msg_code , msg . data ) return PbufKeyStream ( self , codec )
|
Streams keys from a bucket returning an iterator that yields lists of keys .
|
62,880 |
def get_buckets ( self , bucket_type = None , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_BUCKETS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_buckets ( bucket_type , timeout , streaming = False ) resp_code , resp = self . _request ( msg , codec ) return resp . buckets
|
Serialize bucket listing request and deserialize response
|
62,881 |
def get_bucket_props ( self , bucket ) : msg_code = riak . pb . messages . MSG_CODE_GET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_bucket_props ( bucket ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_bucket_props ( resp . props )
|
Serialize bucket property request and deserialize response
|
62,882 |
def set_bucket_props ( self , bucket , props ) : if not self . pb_all_bucket_props ( ) : for key in props : if key not in ( 'n_val' , 'allow_mult' ) : raise NotImplementedError ( 'Server only supports n_val and ' 'allow_mult properties over PBC' ) msg_code = riak . pb . messages . MSG_CODE_SET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_set_bucket_props ( bucket , props ) resp_code , resp = self . _request ( msg , codec ) return True
|
Serialize set bucket property request and deserialize response
|
62,883 |
def clear_bucket_props ( self , bucket ) : if not self . pb_clear_bucket_props ( ) : return False msg_code = riak . pb . messages . MSG_CODE_RESET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_clear_bucket_props ( bucket ) self . _request ( msg , codec ) return True
|
Clear bucket properties resetting them to their defaults
|
62,884 |
def get_bucket_type_props ( self , bucket_type ) : self . _check_bucket_types ( bucket_type ) msg_code = riak . pb . messages . MSG_CODE_GET_BUCKET_TYPE_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_bucket_type_props ( bucket_type ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_bucket_props ( resp . props )
|
Fetch bucket-type properties
|
62,885 |
def set_bucket_type_props ( self , bucket_type , props ) : self . _check_bucket_types ( bucket_type ) msg_code = riak . pb . messages . MSG_CODE_SET_BUCKET_TYPE_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_set_bucket_type_props ( bucket_type , props ) resp_code , resp = self . _request ( msg , codec ) return True
|
Set bucket-type properties
|
62,886 |
def print_report ( label , user , system , real ) : print ( "{:<12s} {:12f} {:12f} ( {:12f} )" . format ( label , user , system , real ) )
|
Prints the report of one step of a benchmark .
|
62,887 |
def next ( self ) : if self . count == 0 : raise StopIteration elif self . count > 1 : print_rehearsal_header ( ) else : if self . rehearse : gc . collect ( ) print ( "-" * 59 ) print ( ) print_header ( ) self . count -= 1 return self
|
Runs the next iteration of the benchmark .
|
62,888 |
def add_object ( self , obj ) : return self . add_bucket_key_data ( obj . _bucket . _name , obj . _key , None )
|
Adds a RiakObject to the inputs .
|
62,889 |
def add_bucket ( self , bucket , bucket_type = None ) : if not riak . disable_list_exceptions : raise riak . ListError ( ) self . _input_mode = 'bucket' if isinstance ( bucket , riak . RiakBucket ) : if bucket . bucket_type . is_default ( ) : self . _inputs = { 'bucket' : bucket . name } else : self . _inputs = { 'bucket' : [ bucket . bucket_type . name , bucket . name ] } elif bucket_type is not None and bucket_type != "default" : self . _inputs = { 'bucket' : [ bucket_type , bucket ] } else : self . _inputs = { 'bucket' : bucket } return self
|
Adds all keys in a bucket to the inputs .
|
62,890 |
def add_key_filters ( self , key_filters ) : if self . _input_mode == 'query' : raise ValueError ( 'Key filters are not supported in a query.' ) self . _key_filters . extend ( key_filters ) return self
|
Adds key filters to the inputs .
|
62,891 |
def add_key_filter ( self , * args ) : if self . _input_mode == 'query' : raise ValueError ( 'Key filters are not supported in a query.' ) self . _key_filters . append ( args ) return self
|
Add a single key filter to the inputs .
|
62,892 |
def reduce_sort ( self , js_cmp = None , options = None ) : if options is None : options = dict ( ) if js_cmp : options [ 'arg' ] = js_cmp return self . reduce ( "Riak.reduceSort" , options = options )
|
Adds the Javascript built-in Riak.reduceSort to the query as a reduce phase.
|
62,893 |
def reduce_slice ( self , start , end , options = None ) : if options is None : options = dict ( ) options [ 'arg' ] = [ start , end ] return self . reduce ( "Riak.reduceSlice" , options = options )
|
Adds the Javascript built-in Riak.reduceSlice to the query as a reduce phase.
|
62,894 |
def to_array ( self ) : stepdef = { 'keep' : self . _keep , 'language' : self . _language , 'arg' : self . _arg } if self . _language == 'javascript' : if isinstance ( self . _function , list ) : stepdef [ 'bucket' ] = self . _function [ 0 ] stepdef [ 'key' ] = self . _function [ 1 ] elif isinstance ( self . _function , string_types ) : if ( "{" in self . _function ) : stepdef [ 'source' ] = self . _function else : stepdef [ 'name' ] = self . _function elif ( self . _language == 'erlang' and isinstance ( self . _function , list ) ) : stepdef [ 'module' ] = self . _function [ 0 ] stepdef [ 'function' ] = self . _function [ 1 ] elif ( self . _language == 'erlang' and isinstance ( self . _function , string_types ) ) : stepdef [ 'source' ] = self . _function return { self . _type : stepdef }
|
Convert the RiakMapReducePhase to a format that can be output into JSON . Used internally .
|
62,895 |
def to_array ( self ) : stepdef = { 'bucket' : self . _bucket , 'tag' : self . _tag , 'keep' : self . _keep } return { 'link' : stepdef }
|
Convert the RiakLinkPhase to a format that can be output into JSON . Used internally .
|
62,896 |
def last_written_resolver ( riak_object ) : riak_object . siblings = [ max ( riak_object . siblings , key = lambda x : x . last_modified ) , ]
|
A conflict-resolution function that resolves by selecting the most recently-modified sibling by timestamp.
|
62,897 |
def verify_cb ( conn , cert , errnum , depth , ok ) : if not ok : raise SecurityError ( "Could not verify CA certificate {0}" . format ( cert . get_subject ( ) ) ) return ok
|
The default OpenSSL certificate verification callback .
|
62,898 |
def next_page ( self , timeout = None , stream = None ) : if not self . continuation : raise ValueError ( "Cannot get next index page, no continuation" ) if stream is not None : self . stream = stream args = { 'bucket' : self . bucket , 'index' : self . index , 'startkey' : self . startkey , 'endkey' : self . endkey , 'return_terms' : self . return_terms , 'max_results' : self . max_results , 'continuation' : self . continuation , 'timeout' : timeout , 'term_regex' : self . term_regex } if self . stream : return self . client . stream_index ( ** args ) else : return self . client . get_index ( ** args )
|
Fetches the next page using the same parameters as the original query .
|
62,899 |
def _validate_timeout ( timeout , infinity_ok = False ) : if timeout is None : return if timeout == 'infinity' : if infinity_ok : return else : raise ValueError ( 'timeout must be a positive integer ' '("infinity" is not valid)' ) if isinstance ( timeout , six . integer_types ) and timeout > 0 : return raise ValueError ( 'timeout must be a positive integer' )
|
Raises an exception if the given timeout is an invalid value .
|
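
The rows above follow the idx / question / target schema summarised in the table header: each record pairs a tokenized Python function (question) with its docstring (target). Below is a minimal sketch of how such a code-to-docstring dataset could be consumed with the Hugging Face `datasets` library; the dataset ID used is a hypothetical placeholder, not the actual repository name.

```python
# Minimal sketch, assuming the dataset is published on the Hugging Face Hub.
# "your-org/code-docstring-pairs" is a hypothetical placeholder ID; substitute
# the real Hub ID or a local path to the data files.
from datasets import load_dataset

ds = load_dataset("your-org/code-docstring-pairs", split="train")

# Each row carries an integer index ("idx"), the function source ("question"),
# and the docstring it is paired with ("target").
for row in ds.select(range(3)):
    print(row["idx"])
    print(row["question"][:80], "...")
    print(row["target"])
    print("-" * 40)
```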