idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
61,400
def _verify_query(self, query_params):
    """Verify the redirect query parameters returned by the Uber Auth server.

    Checks (in order): the CSRF state token matches the one we issued,
    the 'code' and 'error' parameters are mutually exclusive, and that
    exactly one of them is present.

    Args:
        query_params: mapping of query-string parameters from the redirect.

    Returns:
        The authorization code string on success.

    Raises:
        UberIllegalState: on a missing/mismatched state token, on both or
            neither of code/error being set, or when an error is returned.
    """
    error_message = None
    # self.state_token is False when state verification is disabled.
    if self.state_token is not False:
        received_state_token = query_params.get('state')
        if received_state_token is None:
            error_message = 'Bad Request. Missing state parameter.'
            raise UberIllegalState(error_message)
        if self.state_token != received_state_token:
            error_message = 'CSRF Error. Expected {}, got {}'
            error_message = error_message.format(
                self.state_token,
                received_state_token,
            )
            raise UberIllegalState(error_message)
    error = query_params.get('error')
    authorization_code = query_params.get(auth.CODE_RESPONSE_TYPE)
    # The auth server must set exactly one of 'code' or 'error'.
    if error and authorization_code:
        error_message = (
            'Code and Error query params code and error '
            'can not both be set.'
        )
        raise UberIllegalState(error_message)
    if error is None and authorization_code is None:
        error_message = 'Neither query parameter code or error is set.'
        raise UberIllegalState(error_message)
    if error:
        raise UberIllegalState(error)
    return authorization_code
Verify response from the Uber Auth server .
61,401
def get_authorization_url(self):
    """Build the URL for an authorization request.

    Returns:
        The authorization request URL using the token response type and
        this client's configured redirect URL.
    """
    return self._build_authorization_request_url(
        response_type=auth.TOKEN_RESPONSE_TYPE,
        redirect_url=self.redirect_url,
    )
Build URL for authorization request .
61,402
def surge_handler(response, **kwargs):
    """Response hook that surfaces 409 Surge Conflict errors.

    Args:
        response: the HTTP response object.

    Returns:
        The response unchanged when no surge conflict is detected.

    Raises:
        SurgeError: when a 409 response carries a 'surge' error code.
    """
    if response.status_code == codes.conflict:
        payload = response.json()
        error_list = payload.get('errors', [])
        first_error = error_list[0] if error_list else payload.get('error')
        if first_error and first_error.get('code') == 'surge':
            raise SurgeError(response)
    return response
Error Handler to surface 409 Surge Conflict errors .
61,403
def get_products(self, latitude, longitude):
    """Get information about the Uber products offered at a given location.

    Args:
        latitude: latitude of the location.
        longitude: longitude of the location.

    Returns:
        The response of the GET v1.2/products API call.
    """
    query = OrderedDict((
        ('latitude', latitude),
        ('longitude', longitude),
    ))
    return self._api_call('GET', 'v1.2/products', args=query)
Get information about the Uber products offered at a given location .
61,404
def get_price_estimates(
    self,
    start_latitude,
    start_longitude,
    end_latitude,
    end_longitude,
    seat_count=None,
):
    """Get price estimates for products at a given location.

    Args:
        start_latitude: starting latitude.
        start_longitude: starting longitude.
        end_latitude: ending latitude.
        end_longitude: ending longitude.
        seat_count: optional number of seats required.

    Returns:
        The response of the GET v1.2/estimates/price API call.
    """
    query = OrderedDict((
        ('start_latitude', start_latitude),
        ('start_longitude', start_longitude),
        ('end_latitude', end_latitude),
        ('end_longitude', end_longitude),
        ('seat_count', seat_count),
    ))
    return self._api_call('GET', 'v1.2/estimates/price', args=query)
Get price estimates for products at a given location .
61,405
def get_pickup_time_estimates(
    self,
    start_latitude,
    start_longitude,
    product_id=None,
):
    """Get pickup time estimates for products at a given location.

    Args:
        start_latitude: starting latitude.
        start_longitude: starting longitude.
        product_id: optional product identifier to filter by.

    Returns:
        The response of the GET v1.2/estimates/time API call.
    """
    query = OrderedDict((
        ('start_latitude', start_latitude),
        ('start_longitude', start_longitude),
        ('product_id', product_id),
    ))
    return self._api_call('GET', 'v1.2/estimates/time', args=query)
Get pickup time estimates for products at a given location .
61,406
def get_promotions ( self , start_latitude , start_longitude , end_latitude , end_longitude , ) : args = OrderedDict ( [ ( 'start_latitude' , start_latitude ) , ( 'start_longitude' , start_longitude ) , ( 'end_latitude' , end_latitude ) , ( 'end_longitude' , end_longitude ) ] ) return self . _api_call ( 'GET' , 'v1.2/promotions' , args = args )
Get information about the promotions available to a user .
61,407
def get_user_activity(self, offset=None, limit=None):
    """Get information about the user's lifetime activity with Uber.

    Args:
        offset: optional pagination offset.
        limit: optional maximum number of results.

    Returns:
        The response of the GET v1.2/history API call.
    """
    pagination = {
        'offset': offset,
        'limit': limit,
    }
    return self._api_call('GET', 'v1.2/history', args=pagination)
Get information about the user's lifetime activity with Uber.
61,408
def estimate_ride ( self , product_id = None , start_latitude = None , start_longitude = None , start_place_id = None , end_latitude = None , end_longitude = None , end_place_id = None , seat_count = None , ) : args = { 'product_id' : product_id , 'start_latitude' : start_latitude , 'start_longitude' : start_longitude , 'start_place_id' : start_place_id , 'end_latitude' : end_latitude , 'end_longitude' : end_longitude , 'end_place_id' : end_place_id , 'seat_count' : seat_count } return self . _api_call ( 'POST' , 'v1.2/requests/estimate' , args = args )
Estimate ride details given a product start and end location .
61,409
def request_ride ( self , product_id = None , start_latitude = None , start_longitude = None , start_place_id = None , start_address = None , start_nickname = None , end_latitude = None , end_longitude = None , end_place_id = None , end_address = None , end_nickname = None , seat_count = None , fare_id = None , surge_confirmation_id = None , payment_method_id = None , ) : args = { 'product_id' : product_id , 'start_latitude' : start_latitude , 'start_longitude' : start_longitude , 'start_place_id' : start_place_id , 'start_address' : start_address , 'start_nickname' : start_nickname , 'end_latitude' : end_latitude , 'end_longitude' : end_longitude , 'end_place_id' : end_place_id , 'end_address' : end_address , 'end_nickname' : end_nickname , 'surge_confirmation_id' : surge_confirmation_id , 'payment_method_id' : payment_method_id , 'seat_count' : seat_count , 'fare_id' : fare_id } return self . _api_call ( 'POST' , 'v1.2/requests' , args = args )
Request a ride on behalf of an Uber user .
61,410
def update_ride ( self , ride_id , end_latitude = None , end_longitude = None , end_place_id = None , ) : args = { } if end_latitude is not None : args . update ( { 'end_latitude' : end_latitude } ) if end_longitude is not None : args . update ( { 'end_longitude' : end_longitude } ) if end_place_id is not None : args . update ( { 'end_place_id' : end_place_id } ) endpoint = 'v1.2/requests/{}' . format ( ride_id ) return self . _api_call ( 'PATCH' , endpoint , args = args )
Update an ongoing ride's destination.
61,411
def update_sandbox_ride ( self , ride_id , new_status ) : if new_status not in VALID_PRODUCT_STATUS : message = '{} is not a valid product status.' raise UberIllegalState ( message . format ( new_status ) ) args = { 'status' : new_status } endpoint = 'v1.2/sandbox/requests/{}' . format ( ride_id ) return self . _api_call ( 'PUT' , endpoint , args = args )
Update the status of an ongoing sandbox request .
61,412
def update_sandbox_product ( self , product_id , surge_multiplier = None , drivers_available = None , ) : args = { 'surge_multiplier' : surge_multiplier , 'drivers_available' : drivers_available , } endpoint = 'v1.2/sandbox/products/{}' . format ( product_id ) return self . _api_call ( 'PUT' , endpoint , args = args )
Update sandbox product availability .
61,413
def revoke_oauth_credential ( self ) : if self . session . token_type == auth . SERVER_TOKEN_TYPE : return credential = self . session . oauth2credential revoke_access_token ( credential )
Revoke the session's OAuth 2.0 credentials.
61,414
def get_driver_trips ( self , offset = None , limit = None , from_time = None , to_time = None ) : args = { 'offset' : offset , 'limit' : limit , 'from_time' : from_time , 'to_time' : to_time , } return self . _api_call ( 'GET' , 'v1/partners/trips' , args = args )
Get trips about the authorized Uber driver .
61,415
def get_driver_payments ( self , offset = None , limit = None , from_time = None , to_time = None ) : args = { 'offset' : offset , 'limit' : limit , 'from_time' : from_time , 'to_time' : to_time , } return self . _api_call ( 'GET' , 'v1/partners/payments' , args = args )
Get payments about the authorized Uber driver .
61,416
def validiate_webhook_signature(self, webhook, signature):
    """Validate a webhook signature against the session's client secret.

    Args:
        webhook: raw webhook request body (bytes).
        signature: hex-encoded HMAC-SHA256 signature supplied with the
            webhook.

    Returns:
        True if the signature matches the body, False otherwise.
    """
    digester = hmac.new(
        self.session.oauth2credential.client_secret,
        webhook,
        hashlib.sha256,
    )
    # Constant-time comparison: a plain '==' leaks timing information
    # an attacker could use to forge signatures byte by byte.
    return hmac.compare_digest(signature, digester.hexdigest())
Validates a webhook signature from a webhook body + client secret
61,417
def adapt_meta(self, meta):
    """Convert meta from an error response to (href, surge_id).

    Args:
        meta: mapping containing a 'surge_confirmation' sub-mapping.

    Returns:
        Tuple of the confirmation href and the surge confirmation id.
    """
    surge = meta.get('surge_confirmation')
    return surge.get('href'), surge.get('surge_confirmation_id')
Convert meta from error response to href and surge_id attributes .
61,418
def estimate_ride ( api_client ) : try : estimate = api_client . estimate_ride ( product_id = SURGE_PRODUCT_ID , start_latitude = START_LAT , start_longitude = START_LNG , end_latitude = END_LAT , end_longitude = END_LNG , seat_count = 2 ) except ( ClientError , ServerError ) as error : fail_print ( error ) else : success_print ( estimate . json )
Use an UberRidesClient to fetch a ride estimate and print the results .
61,419
def update_surge ( api_client , surge_multiplier ) : try : update_surge = api_client . update_sandbox_product ( SURGE_PRODUCT_ID , surge_multiplier = surge_multiplier , ) except ( ClientError , ServerError ) as error : fail_print ( error ) else : success_print ( update_surge . status_code )
Use an UberRidesClient to update surge and print the results .
61,420
def update_ride ( api_client , ride_status , ride_id ) : try : update_product = api_client . update_sandbox_ride ( ride_id , ride_status ) except ( ClientError , ServerError ) as error : fail_print ( error ) else : message = '{} New status: {}' message = message . format ( update_product . status_code , ride_status ) success_print ( message )
Use an UberRidesClient to update ride status and print the results .
61,421
def get_ride_details ( api_client , ride_id ) : try : ride_details = api_client . get_ride_details ( ride_id ) except ( ClientError , ServerError ) as error : fail_print ( error ) else : success_print ( ride_details . json )
Use an UberRidesClient to get ride details and print the results .
61,422
def generate_data(method, args):
    """Assign arguments to the body or the URL of an HTTP request.

    Args:
        method: HTTP method name.
        args: request arguments.

    Returns:
        Tuple of (data, params): body methods get JSON-serialized data
        and empty params; all other methods get empty data and the raw
        args as URL params.
    """
    if method in http.BODY_METHODS:
        return dumps(args), {}
    return {}, args
Assign arguments to body or URL of an HTTP request .
61,423
def generate_prepared_request ( method , url , headers , data , params , handlers ) : request = Request ( method = method , url = url , headers = headers , data = data , params = params , ) handlers . append ( error_handler ) for handler in handlers : request . register_hook ( 'response' , handler ) return request . prepare ( )
Add handlers and prepare a Request .
61,424
def build_url(host, path, params=None):
    """Build a URL from a host, a path, and optional query parameters.

    Args:
        host: host name, with or without the URL scheme prefix.
        path: path component; it is percent-quoted.
        params: optional mapping of query parameters.

    Returns:
        The assembled URL string.
    """
    quoted_path = quote(path)
    if params:
        full_path = '/{}?{}'.format(quoted_path, urlencode(params))
    else:
        full_path = '/{}'.format(quoted_path)
    # Prepend the scheme only when the caller did not supply one.
    if not host.startswith(http.URL_SCHEME):
        host = '{}{}'.format(http.URL_SCHEME, host)
    return urljoin(host, full_path)
Build a URL .
61,425
def error_handler(response, **kwargs):
    """Response hook that surfaces 4XX and 5XX errors as exceptions.

    Args:
        response: the HTTP response object.

    Returns:
        The response unchanged when the status code is not an error.

    Raises:
        ClientError: for 4XX status codes.
        ServerError: for 5XX status codes.
    """
    try:
        body = response.json()
    except ValueError:
        # Non-JSON bodies still produce a useful status-code message.
        body = {}
    status = response.status_code
    detail = str(status) + ': ' + body.get('message', '') + ' ' + str(body.get('fields', ''))
    if 400 <= status <= 499:
        raise ClientError(response, detail)
    if 500 <= status <= 599:
        raise ServerError(response, detail)
    return response
Error Handler to surface 4XX and 5XX errors .
61,426
def import_app_credentials ( filename = CREDENTIALS_FILENAME ) : with open ( filename , 'r' ) as config_file : config = safe_load ( config_file ) client_id = config [ 'client_id' ] client_secret = config [ 'client_secret' ] redirect_url = config [ 'redirect_url' ] config_values = [ client_id , client_secret , redirect_url ] for value in config_values : if value in DEFAULT_CONFIG_VALUES : exit ( 'Missing credentials in {}' . format ( filename ) ) credentials = { 'client_id' : client_id , 'client_secret' : client_secret , 'redirect_url' : redirect_url , 'scopes' : set ( config [ 'scopes' ] ) , } return credentials
Import app credentials from configuration file .
61,427
def create_uber_client ( credentials ) : oauth2credential = OAuth2Credential ( client_id = credentials . get ( 'client_id' ) , access_token = credentials . get ( 'access_token' ) , expires_in_seconds = credentials . get ( 'expires_in_seconds' ) , scopes = credentials . get ( 'scopes' ) , grant_type = credentials . get ( 'grant_type' ) , redirect_url = credentials . get ( 'redirect_url' ) , client_secret = credentials . get ( 'client_secret' ) , refresh_token = credentials . get ( 'refresh_token' ) , ) session = Session ( oauth2credential = oauth2credential ) return UberRidesClient ( session , sandbox_mode = True )
Create an UberRidesClient from OAuth 2 . 0 credentials .
61,428
def encrypt ( receiver_pubhex : str , msg : bytes ) -> bytes : disposable_key = generate_key ( ) receiver_pubkey = hex2pub ( receiver_pubhex ) aes_key = derive ( disposable_key , receiver_pubkey ) cipher_text = aes_encrypt ( aes_key , msg ) return disposable_key . public_key . format ( False ) + cipher_text
Encrypt with eth public key
61,429
def decrypt ( receiver_prvhex : str , msg : bytes ) -> bytes : pubkey = msg [ 0 : 65 ] encrypted = msg [ 65 : ] sender_public_key = hex2pub ( pubkey . hex ( ) ) private_key = hex2prv ( receiver_prvhex ) aes_key = derive ( private_key , sender_public_key ) return aes_decrypt ( aes_key , encrypted )
Decrypt with eth private key
61,430
def hex2pub ( pub_hex : str ) -> PublicKey : uncompressed = decode_hex ( pub_hex ) if len ( uncompressed ) == 64 : uncompressed = b"\x04" + uncompressed return PublicKey ( uncompressed )
Convert an Ethereum hex string to an EllipticCurvePublicKey. The key should be 65 bytes, but an Ethereum public key has only 64 bytes, so the \x04 prefix has to be prepended.
61,431
def aes_encrypt(key: bytes, plain_text: bytes) -> bytes:
    """AES-GCM encryption.

    Args:
        key: AES key bytes.
        plain_text: data to encrypt.

    Returns:
        nonce || tag || ciphertext as one bytes value — the layout the
        companion aes_decrypt expects (it slices a 16-byte nonce and a
        16-byte tag off the front).
    """
    # AES.new generates a fresh random nonce for this cipher instance.
    aes_cipher = AES.new(key, AES_CIPHER_MODE)
    encrypted, tag = aes_cipher.encrypt_and_digest(plain_text)
    cipher_text = bytearray()
    cipher_text.extend(aes_cipher.nonce)
    cipher_text.extend(tag)
    cipher_text.extend(encrypted)
    return bytes(cipher_text)
AES - GCM encryption
61,432
def aes_decrypt(key: bytes, cipher_text: bytes) -> bytes:
    """AES-GCM decryption.

    Args:
        key: AES key bytes.
        cipher_text: nonce || tag || ciphertext, as produced by
            aes_encrypt (16-byte nonce, 16-byte tag).

    Returns:
        The decrypted plaintext; decrypt_and_verify raises if the tag
        does not authenticate.
    """
    nonce = cipher_text[:16]
    tag = cipher_text[16:32]
    ciphered_data = cipher_text[32:]
    aes_cipher = AES.new(key, AES_CIPHER_MODE, nonce=nonce)
    return aes_cipher.decrypt_and_verify(ciphered_data, tag)
AES - GCM decryption
61,433
def apply_scaling(self, copy=True):
    """Scale pixel values to their true DN.

    Args:
        copy: when True, return a new scaled array and leave self.data
            untouched; when False, scale self.data in place.

    Returns:
        The scaled pixel data.
    """
    if copy:
        return self.multiplier * self.data + self.base
    # In-place path: skip no-op operations to avoid touching the array.
    scale = self.multiplier
    offset = self.base
    if scale != 1:
        self.data *= scale
    if offset != 0:
        self.data += offset
    return self.data
Scale pixel values to their true DN.
61,434
def specials_mask(self):
    """Create a boolean pixel mask from the specials 'Min'/'Max' bounds.

    Returns:
        Boolean array, True where self.data lies within
        [specials['Min'], specials['Max']].
    """
    lower_ok = self.data >= self.specials['Min']
    upper_ok = self.data <= self.specials['Max']
    return lower_ok & upper_ok
Create a pixel map for special pixels .
61,435
def get_image_array ( self ) : specials_mask = self . specials_mask ( ) data = self . data . copy ( ) data [ specials_mask ] -= data [ specials_mask ] . min ( ) data [ specials_mask ] *= 255 / data [ specials_mask ] . max ( ) data [ data == self . specials [ 'His' ] ] = 255 data [ data == self . specials [ 'Hrs' ] ] = 255 return data . astype ( numpy . uint8 )
Create an array for use in making an image .
61,436
def check_isis_version(major, minor=0, patch=0):
    """Check that the current isis version is equal to or above the supplied version.

    Args:
        major: required major version.
        minor: required minor version (default 0).
        patch: required patch version (default 0).

    Raises:
        VersionError: when no isis version is detected or the detected
            version is older than the requested one.
    """
    # NOTE(review): 'ISIS_VERISON_TUPLE' looks like a misspelling of
    # ISIS_VERSION_TUPLE — confirm the module-level constant really uses
    # this spelling before renaming anything here.
    if ISIS_VERSION and (major, minor, patch) <= ISIS_VERISON_TUPLE:
        return
    msg = 'Version %s.%s.%s of isis required (%s found).'
    raise VersionError(msg % (major, minor, patch, ISIS_VERSION))
Checks that the current isis version is equal to or above the supplied version.
61,437
def require_isis_version(major, minor=0, patch=0):
    """Decorator that ensures a function runs under a minimum isis version.

    Args:
        major: required major version.
        minor: required minor version (default 0).
        patch: required patch version (default 0).

    Returns:
        A decorator that checks the isis version before each call.
    """
    def _decorate(func):
        @wraps(func)
        def _guarded(*args, **kwargs):
            check_isis_version(major, minor, patch)
            return func(*args, **kwargs)
        return _guarded
    return _decorate
Decorator that ensures a function is called with a minimum isis version .
61,438
def write_file_list(filename, file_list=None, glob=None):
    """Write a list of files to a file, one name per line.

    Args:
        filename: path of the list file to write.
        file_list: iterable of file names to write; ignored when glob is
            supplied. Defaults to no files.
        glob: optional glob pattern; matching files are written instead
            of file_list.
    """
    if glob:
        file_list = iglob(glob)
    elif file_list is None:
        # None replaces the original mutable default ([]), which is
        # shared between calls and a classic Python pitfall.
        file_list = []
    with open(filename, 'w') as f:
        for line in file_list:
            f.write(line + '\n')
Write a list of files to a file .
61,439
def file_variations(filename, extensions):
    """Create variations of a file name, one per extension.

    Args:
        filename: base file name whose extension is replaced.
        extensions: iterable of extensions (including the leading dot).

    Returns:
        List of file names sharing filename's root with each extension.
    """
    root = splitext(filename)[0]
    return [root + extension for extension in extensions]
Create a variation of file names .
61,440
def insert(self, key, value, data=None):
    """Insert the key into a bin based on the given value.

    Args:
        key: identifier stored with the item.
        value: numeric value used to select the bin; must lie within
            [self.min_value, self.max_value].
        data: optional payload attached to the item (defaults to an
            empty dict).

    Raises:
        BoundsError: if value is outside the configured bounds.
    """
    if value < self.min_value or value > self.max_value:
        raise BoundsError('item value out of bounds')
    # Build a fresh dict per call: the original data={} default was a
    # single shared object mutated across every insert.
    item = self.Item(key, value, {} if data is None else data)
    index = self.get_bin_index(value)
    self.bins[index].append(item)
Insert the key into a bin based on the given value .
61,441
def iterkeys(self):
    """An iterator over the keys of each bin.

    Yields one generator per bin; each generator in turn yields the
    keys of that bin's items. NOTE(review): callers receive nested
    generators, not a flat stream of keys — confirm this nesting is
    intended by existing callers before flattening.
    """
    def _iterkeys(bin):
        # Inner generator: the keys of a single bin.
        for item in bin:
            yield item.key
    for bin in self.bins:
        yield _iterkeys(bin)
An iterator over the keys of each bin .
61,442
def file_request ( self ) : response = requests . get ( self . __base_url , headers = self . __headers , stream = True ) return response . raw . read ( ) , response . headers
Request that retrieve a binary file
61,443
def get_signatures(self, limit=100, offset=0, conditions=None):
    """Get all signatures.

    Args:
        limit: maximum number of results per page (default 100).
        offset: pagination offset (default 0).
        conditions: optional mapping of extra query-string filters; an
            'ids' value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.SIGNS_URL + "?limit=%s&offset=%s" % (limit, offset)
    # conditions defaults to None (not {}) to avoid a shared mutable
    # default; '==' replaces 'is', which only matched the 'ids' literal
    # through CPython string interning.
    for key, value in (conditions or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Get all signatures
61,444
def get_signature ( self , signature_id ) : connection = Connection ( self . token ) connection . set_url ( self . production , self . SIGNS_ID_URL % signature_id ) return connection . get_request ( )
Get a concrete Signature
61,445
def count_signatures(self, conditions=None):
    """Count all signatures.

    Args:
        conditions: optional mapping of query-string filters; an 'ids'
            value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.SIGNS_COUNT_URL + '?'
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (conditions or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Count all signatures
61,446
def cancel_signature ( self , signature_id ) : connection = Connection ( self . token ) connection . set_url ( self . production , self . SIGNS_CANCEL_URL % signature_id ) return connection . patch_request ( )
Cancel a concrete Signature
61,447
def send_signature_reminder ( self , signature_id ) : connection = Connection ( self . token ) connection . set_url ( self . production , self . SIGNS_SEND_REMINDER_URL % signature_id ) return connection . post_request ( )
Send a reminder email
61,448
def get_branding ( self , branding_id ) : connection = Connection ( self . token ) connection . set_url ( self . production , self . BRANDINGS_ID_URL % branding_id ) return connection . get_request ( )
Get a concrete branding
61,449
def get_brandings ( self ) : connection = Connection ( self . token ) connection . set_url ( self . production , self . BRANDINGS_URL ) return connection . get_request ( )
Get all account brandings
61,450
def create_branding ( self , params ) : connection = Connection ( self . token ) connection . add_header ( 'Content-Type' , 'application/json' ) connection . set_url ( self . production , self . BRANDINGS_URL ) connection . add_params ( params , json_format = True ) return connection . post_request ( )
Create a new branding
61,451
def update_branding ( self , branding_id , params ) : connection = Connection ( self . token ) connection . add_header ( 'Content-Type' , 'application/json' ) connection . set_url ( self . production , self . BRANDINGS_ID_URL % branding_id ) connection . add_params ( params ) return connection . patch_request ( )
Update a existing branding
61,452
def get_templates ( self , limit = 100 , offset = 0 ) : url = self . TEMPLATES_URL + "?limit=%s&offset=%s" % ( limit , offset ) connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . get_request ( )
Get all account templates
61,453
def get_emails(self, limit=100, offset=0, conditions=None):
    """Get all certified emails.

    Args:
        limit: maximum number of results per page (default 100).
        offset: pagination offset (default 0).
        conditions: optional mapping of extra query-string filters; an
            'ids' value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.EMAILS_URL + "?limit=%s&offset=%s" % (limit, offset)
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (conditions or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Get all certified emails
61,454
def count_emails(self, conditions=None):
    """Count all certified emails.

    Args:
        conditions: optional mapping of query-string filters; an 'ids'
            value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.EMAILS_COUNT_URL + "?"
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (conditions or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    # The original called set_url twice with identical arguments; once
    # is sufficient.
    connection.set_url(self.production, url)
    return connection.get_request()
Count all certified emails
61,455
def get_email ( self , email_id ) : connection = Connection ( self . token ) connection . set_url ( self . production , self . EMAILS_ID_URL % email_id ) return connection . get_request ( )
Get a specific email
61,456
def count_SMS(self, conditions=None):
    """Count all certified sms.

    Args:
        conditions: optional mapping of query-string filters; an 'ids'
            value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.SMS_COUNT_URL + "?"
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (conditions or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    # The original called set_url twice with identical arguments; once
    # is sufficient.
    connection.set_url(self.production, url)
    return connection.get_request()
Count all certified sms
61,457
def get_SMS(self, limit=100, offset=0, conditions=None):
    """Get all certified sms.

    Args:
        limit: maximum number of results per page (default 100).
        offset: pagination offset (default 0).
        conditions: optional mapping of extra query-string filters; an
            'ids' value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.SMS_URL + "?limit=%s&offset=%s" % (limit, offset)
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (conditions or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Get all certified sms
61,458
def get_single_SMS ( self , sms_id ) : connection = Connection ( self . token ) connection . set_url ( self . production , self . SMS_ID_URL % sms_id ) return connection . get_request ( )
Get a specific sms
61,459
def create_SMS(self, files, recipients, body, params=None):
    """Create a new certified sms.

    Args:
        files: file(s) to attach to the sms.
        recipients: a single recipient or a list of recipients.
        body: text body of the sms.
        params: optional mapping of extra parameters.

    Returns:
        The result of the POST request.
    """
    parameters = {}
    parser = Parser()
    documents = {}
    parser.fill_array(documents, files, 'files')
    recipients = recipients if isinstance(recipients, list) else [recipients]
    # enumerate replaces the manual index counter.
    for index, recipient in enumerate(recipients):
        parser.fill_array(parameters, recipient, 'recipients[%i]' % index)
    # params defaults to None instead of a shared mutable {}.
    parser.fill_array(parameters, params or {}, '')
    parameters['body'] = body
    connection = Connection(self.token)
    connection.set_url(self.production, self.SMS_URL)
    connection.add_params(parameters)
    connection.add_files(documents)
    return connection.post_request()
Create a new certified sms
61,460
def get_users ( self , limit = 100 , offset = 0 ) : url = self . TEAM_USERS_URL + "?limit=%s&offset=%s" % ( limit , offset ) connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . get_request ( )
Get all users from your current team
61,461
def get_seats ( self , limit = 100 , offset = 0 ) : url = self . TEAM_SEATS_URL + "?limit=%s&offset=%s" % ( limit , offset ) connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . get_request ( )
Get all seats from your current team
61,462
def get_groups ( self , limit = 100 , offset = 0 ) : url = self . TEAM_GROUPS_URL + "?limit=%s&offset=%s" % ( limit , offset ) connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . get_request ( )
Get all groups from your current team
61,463
def get_subscriptions(self, limit=100, offset=0, params=None):
    """Get all subscriptions.

    Args:
        limit: maximum number of results per page (default 100).
        offset: pagination offset (default 0).
        params: optional mapping of extra query-string filters; an
            'ids' value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.SUBSCRIPTIONS_URL + "?limit=%s&offset=%s" % (limit, offset)
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (params or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Get all subscriptions
61,464
def count_subscriptions(self, params=None):
    """Count all subscriptions.

    Args:
        params: optional mapping of query-string filters; an 'ids'
            value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.SUBSCRIPTIONS_COUNT_URL + '?'
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (params or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Count all subscriptions
61,465
def get_subscription ( self , subscription_id ) : url = self . SUBSCRIPTIONS_ID_URL % subscription_id connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . get_request ( )
Get single subscription
61,466
def delete_subscription ( self , subscription_id ) : url = self . SUBSCRIPTIONS_ID_URL % subscription_id connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . delete_request ( )
Delete single subscription
61,467
def get_contacts(self, limit=100, offset=0, params=None):
    """Get all account contacts.

    Args:
        limit: maximum number of results per page (default 100).
        offset: pagination offset (default 0).
        params: optional mapping of extra query-string filters; an
            'ids' value may be a list and is joined with commas.

    Returns:
        The result of the GET request.
    """
    url = self.CONTACTS_URL + "?limit=%s&offset=%s" % (limit, offset)
    # None default avoids a shared mutable {}; '==' replaces the
    # fragile 'is' identity comparison with a string literal.
    for key, value in (params or {}).items():
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Get all account contacts
61,468
def get_contact ( self , contact_id ) : url = self . CONTACTS_ID_URL % contact_id connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . get_request ( )
Get single contact
61,469
def delete_contact ( self , contact_id ) : url = self . CONTACTS_ID_URL % contact_id connection = Connection ( self . token ) connection . set_url ( self . production , url ) return connection . delete_request ( )
Delete single contact
61,470
def main ( ) : logging . captureWarnings ( True ) logging . basicConfig ( format = ( '%(asctime)s - %(name)s - %(levelname)s - ' + '%(message)s' ) , level = logging . INFO ) args = [ 3 , 5 , 10 , 20 ] intermediate_results = grid_map ( computeFactorial , args , quiet = False , max_processes = 4 , queue = 'all.q' ) print ( "reducing result" ) for i , ret in enumerate ( intermediate_results ) : print ( "f({0}) = {1}" . format ( args [ i ] , ret ) )
execute map example
61,471
def main ( ) : logging . captureWarnings ( True ) logging . basicConfig ( format = ( '%(asctime)s - %(name)s - %(levelname)s - ' + '%(message)s' ) , level = logging . INFO ) print ( "=====================================" ) print ( "======== Submit and Wait ========" ) print ( "=====================================" ) print ( "" ) functionJobs = make_jobs ( ) print ( "sending function jobs to cluster" ) print ( "" ) job_outputs = process_jobs ( functionJobs , max_processes = 4 ) print ( "results from each job" ) for ( i , result ) in enumerate ( job_outputs ) : print ( "Job {0}- result: {1}" . format ( i , result ) )
run a set of jobs on cluster
61,472
def execute_cmd ( cmd , ** kwargs ) : yield '$ {}\n' . format ( ' ' . join ( cmd ) ) kwargs [ 'stdout' ] = subprocess . PIPE kwargs [ 'stderr' ] = subprocess . STDOUT proc = subprocess . Popen ( cmd , ** kwargs ) buf = [ ] def flush ( ) : line = b'' . join ( buf ) . decode ( 'utf8' , 'replace' ) buf [ : ] = [ ] return line c_last = '' try : for c in iter ( partial ( proc . stdout . read , 1 ) , b'' ) : if c_last == b'\r' and buf and c != b'\n' : yield flush ( ) buf . append ( c ) if c == b'\n' : yield flush ( ) c_last = c finally : ret = proc . wait ( ) if ret != 0 : raise subprocess . CalledProcessError ( ret , cmd )
Call given command yielding output line by line
61,473
def main ( ) : logging . basicConfig ( format = '[%(asctime)s] %(levelname)s -- %(message)s' , level = logging . DEBUG ) parser = argparse . ArgumentParser ( description = 'Synchronizes a github repository with a local repository.' ) parser . add_argument ( 'git_url' , help = 'Url of the repo to sync' ) parser . add_argument ( 'branch_name' , default = 'master' , help = 'Branch of repo to sync' , nargs = '?' ) parser . add_argument ( 'repo_dir' , default = '.' , help = 'Path to clone repo under' , nargs = '?' ) args = parser . parse_args ( ) for line in GitPuller ( args . git_url , args . branch_name , args . repo_dir ) . pull ( ) : print ( line )
Synchronizes a github repository with a local repository .
61,474
def pull ( self ) : if not os . path . exists ( self . repo_dir ) : yield from self . initialize_repo ( ) else : yield from self . update ( )
Pull selected repo from a remote git repository while preserving user changes
61,475
def initialize_repo ( self ) : logging . info ( 'Repo {} doesn\'t exist. Cloning...' . format ( self . repo_dir ) ) clone_args = [ 'git' , 'clone' ] if self . depth and self . depth > 0 : clone_args . extend ( [ '--depth' , str ( self . depth ) ] ) clone_args . extend ( [ '--branch' , self . branch_name ] ) clone_args . extend ( [ self . git_url , self . repo_dir ] ) yield from execute_cmd ( clone_args ) yield from execute_cmd ( [ 'git' , 'config' , 'user.email' , '[email protected]' ] , cwd = self . repo_dir ) yield from execute_cmd ( [ 'git' , 'config' , 'user.name' , 'nbgitpuller' ] , cwd = self . repo_dir ) logging . info ( 'Repo {} initialized' . format ( self . repo_dir ) )
Clones repository & sets up usernames .
61,476
def repo_is_dirty ( self ) : try : subprocess . check_call ( [ 'git' , 'diff-files' , '--quiet' ] , cwd = self . repo_dir ) return False except subprocess . CalledProcessError : return True
Return true if repo is dirty
61,477
def find_upstream_changed ( self , kind ) : output = subprocess . check_output ( [ 'git' , 'log' , '{}..origin/{}' . format ( self . branch_name , self . branch_name ) , '--oneline' , '--name-status' ] , cwd = self . repo_dir ) . decode ( ) files = [ ] for line in output . split ( '\n' ) : if line . startswith ( kind ) : files . append ( os . path . join ( self . repo_dir , line . split ( '\t' , 1 ) [ 1 ] ) ) return files
Return list of files that have been changed upstream belonging to a particular kind of change
61,478
def rename_local_untracked ( self ) : new_upstream_files = self . find_upstream_changed ( 'A' ) for f in new_upstream_files : if os . path . exists ( f ) : ts = datetime . datetime . now ( ) . strftime ( '__%Y%m%d%H%M%S' ) path_head , path_tail = os . path . split ( f ) path_tail = ts . join ( os . path . splitext ( path_tail ) ) new_file_name = os . path . join ( path_head , path_tail ) os . rename ( f , new_file_name ) yield 'Renamed {} to {} to avoid conflict with upstream' . format ( f , new_file_name )
Rename local untracked files that would require pulls
61,479
def update ( self ) : yield from self . update_remotes ( ) yield from self . rename_local_untracked ( ) yield from self . reset_deleted_files ( ) if self . repo_is_dirty ( ) : yield from self . ensure_lock ( ) yield from execute_cmd ( [ 'git' , 'commit' , '-am' , 'WIP' , '--allow-empty' ] , cwd = self . repo_dir ) yield from self . ensure_lock ( ) yield from execute_cmd ( [ 'git' , 'merge' , '-Xours' , 'origin/{}' . format ( self . branch_name ) ] , cwd = self . repo_dir )
Do the pulling if necessary
61,480
def _get_footer_size ( file_obj ) : file_obj . seek ( - 8 , 2 ) tup = struct . unpack ( b"<i" , file_obj . read ( 4 ) ) return tup [ 0 ]
Read the footer size in bytes which is serialized as little endian .
61,481
def _read_footer ( file_obj ) : footer_size = _get_footer_size ( file_obj ) if logger . isEnabledFor ( logging . DEBUG ) : logger . debug ( "Footer size in bytes: %s" , footer_size ) file_obj . seek ( - ( 8 + footer_size ) , 2 ) tin = TFileTransport ( file_obj ) pin = TCompactProtocolFactory ( ) . get_protocol ( tin ) fmd = parquet_thrift . FileMetaData ( ) fmd . read ( pin ) return fmd
Read the footer from the given file object and returns a FileMetaData object .
61,482
def _read_page_header ( file_obj ) : tin = TFileTransport ( file_obj ) pin = TCompactProtocolFactory ( ) . get_protocol ( tin ) page_header = parquet_thrift . PageHeader ( ) page_header . read ( pin ) return page_header
Read the page_header from the given fo .
61,483
def read_footer(filename):
    """Read the footer and return the FileMetaData for the specified filename.

    Raises ParquetFormatException when the file lacks the parquet magic bytes.
    """
    with open(filename, 'rb') as file_obj:
        # Both header and footer magic must be present for a valid file.
        valid = _check_header_magic_bytes(file_obj) and _check_footer_magic_bytes(file_obj)
        if not valid:
            raise ParquetFormatException(
                "{0} is not a valid parquet file "
                "(missing magic bytes)".format(filename))
        return _read_footer(file_obj)
Read the footer and return the FileMetaData for the specified filename .
61,484
def _get_offset ( cmd ) : dict_offset = cmd . dictionary_page_offset data_offset = cmd . data_page_offset if dict_offset is None or data_offset < dict_offset : return data_offset return dict_offset
Return the offset from the column metadata, depending on whether the chunk starts with a dictionary page or a data page.
61,485
def _read_data(file_obj, fo_encoding, value_count, bit_width):
    """Read data from the file object using the given encoding.

    Only RLE is handled; BIT_PACKED raises NotImplementedError. Other
    encodings fall through and return an empty list.
    """
    vals = []
    if fo_encoding == parquet_thrift.Encoding.RLE:
        seen = 0
        while seen < value_count:
            chunk = encoding.read_rle_bit_packed_hybrid(file_obj, bit_width)
            if chunk is None:
                # Reader signalled end-of-data before value_count was reached.
                break
            vals.extend(chunk)
            seen += len(chunk)
    elif fo_encoding == parquet_thrift.Encoding.BIT_PACKED:
        raise NotImplementedError("Bit packing not yet supported")
    return vals
Read data from the file - object using the given encoding .
61,486
def _read_dictionary_page(file_obj, schema_helper, page_header, column_metadata):
    """Read a page containing dictionary data and return its decoded values."""
    raw_bytes = _read_page(file_obj, page_header, column_metadata)
    io_obj = io.BytesIO(raw_bytes)
    values = encoding.read_plain(
        io_obj,
        column_metadata.type,
        page_header.dictionary_page_header.num_values,
    )
    # The last path component names the leaf column in the schema.
    schema_element = schema_helper.schema_element(column_metadata.path_in_schema[-1])
    if schema_element.converted_type is not None:
        return convert_column(values, schema_element)
    return values
Read a page containing dictionary data .
61,487
def _dump(file_obj, options, out=sys.stdout):
    """Dump rows from the parquet file object to `out` per the given options.

    `options` is expected to provide: col (columns to read), format
    ('csv' or 'json'), no_headers (bool), and limit (-1 for unlimited).
    """
    total_count = 0
    writer = None
    keys = None
    for row in DictReader(file_obj, options.col):
        if not keys:
            keys = row.keys()
        if not writer:
            # Lazily build the writer once the first row fixes the column set;
            # an unrecognized format leaves writer as None.
            writer = csv.DictWriter(out, keys, delimiter=u'\t', quotechar=u'\'', quoting=csv.QUOTE_MINIMAL) if options.format == 'csv' else JsonWriter(out) if options.format == 'json' else None
        # Header goes out once, before the first data row.
        if total_count == 0 and options.format == "csv" and not options.no_headers:
            writer.writeheader()
        # Stop once the requested row limit has been emitted (-1 = no limit).
        if options.limit != -1 and total_count >= options.limit:
            return
        # Decode any byte values so both writers emit text.
        row_unicode = {k: v.decode("utf-8") if isinstance(v, bytes) else v for k, v in row.items()}
        writer.writerow(row_unicode)
        total_count += 1
Dump the file object's rows to the output with the given options.
61,488
def dump(filename, options, out=sys.stdout):
    """Dump the parquet file with the given filename using options to out."""
    file_obj = open(filename, 'rb')
    try:
        return _dump(file_obj, options=options, out=out)
    finally:
        file_obj.close()
Dump parquet file with given filename using options to out .
61,489
def writerow(self, row):
    """Write a single row as one JSON document followed by a newline."""
    text = json.dumps(row)
    # json.dumps may return bytes on older Pythons; normalize to text first.
    if isinstance(text, bytes):
        text = text.decode('utf-8')
    out = self._out
    out.write(text)
    out.write(u'\n')
Write a single row .
61,490
def read_plain_boolean(file_obj, count):
    """Read `count` booleans using the plain encoding (bit-packed, width 1)."""
    debug = logger.isEnabledFor(logging.DEBUG)
    # NOTE(review): (count << 1) is passed as read_bitpacked's header argument;
    # presumably its `header >> 1` recovers `count` — confirm in read_bitpacked.
    return read_bitpacked(file_obj, count << 1, 1, debug)
Read count booleans using the plain encoding .
61,491
def read_plain_int32(file_obj, count):
    """Read `count` 32-bit little-endian signed ints using the plain encoding.

    Raises EOFError when the stream ends before all ints are read.
    """
    length = 4 * count
    data = file_obj.read(length)
    if len(data) != length:
        raise EOFError("Expected {} bytes but got {} bytes".format(length, len(data)))
    fmt = "<{}i".format(count).encode("utf-8")
    return struct.unpack(fmt, data)
Read count 32 - bit ints using the plain encoding .
61,492
def read_plain_int64(file_obj, count):
    """Read `count` 64-bit little-endian signed ints using the plain encoding.

    Raises EOFError on a short read, mirroring read_plain_int32's behavior
    (previously a short read surfaced as an opaque struct.error).
    """
    length = 8 * count
    data = file_obj.read(length)
    if len(data) != length:
        raise EOFError("Expected {} bytes but got {} bytes".format(length, len(data)))
    return struct.unpack("<{}q".format(count).encode("utf-8"), data)
Read count 64 - bit ints using the plain encoding .
61,493
def read_plain_int96(file_obj, count):
    """Read `count` 96-bit ints stored as (int64, int32) little-endian pairs."""
    raw = struct.unpack(b"<" + b"qi" * count, file_obj.read(12 * count))
    # Values are interleaved q0, i0, q1, i1, ...; recombine each pair.
    return [(high << 32) | low for high, low in zip(raw[::2], raw[1::2])]
Read count 96 - bit ints using the plain encoding .
61,494
def read_plain_float(file_obj, count):
    """Read `count` 32-bit little-endian floats using the plain encoding."""
    fmt = "<{}f".format(count).encode("utf-8")
    return struct.unpack(fmt, file_obj.read(4 * count))
Read count 32 - bit floats using the plain encoding .
61,495
def read_plain_byte_array(file_obj, count):
    """Read `count` byte arrays, each prefixed with a 4-byte little-endian length."""
    arrays = []
    for _ in range(count):
        (length,) = struct.unpack(b"<i", file_obj.read(4))
        arrays.append(file_obj.read(length))
    return arrays
Read count byte arrays using the plain encoding .
61,496
def read_plain(file_obj, type_, count):
    """Read `count` items of the given parquet type using the plain encoding."""
    if count == 0:
        return []
    # Dispatch to the type-specific plain decoder.
    decoder = DECODE_PLAIN[type_]
    return decoder(file_obj, count)
Read `count` items of the given type from the file object using the plain encoding.
61,497
def read_unsigned_var_int(file_obj):
    """Read one unsigned variable-length (LEB128-style) integer."""
    result = 0
    shift = 0
    while True:
        (byte,) = struct.unpack(b"<B", file_obj.read(1))
        # Low 7 bits carry payload; the high bit flags continuation.
        result |= (byte & 0x7F) << shift
        if not byte & 0x80:
            return result
        shift += 7
Read a value using the unsigned variable int encoding .
61,498
def read_rle(file_obj, header, bit_width, debug_logging):
    """Yield the values of one run-length encoded run.

    The run length is `header >> 1`; the repeated value occupies
    ceil(bit_width / 8) bytes, zero-padded to 4 bytes before decoding.
    """
    count = header >> 1
    width = (bit_width + 7) // 8
    raw = file_obj.read(width)
    # Pad up to 4 bytes so the value decodes as a little-endian int32.
    padded = raw + b"\x00\x00\x00\x00"[len(raw):]
    (value,) = struct.unpack(b"<i", padded)
    if debug_logging:
        logger.debug("Read RLE group with value %s of byte-width %s and count %s", value, width, count)
    yield from (value for _ in range(count))
Read a run-length encoded run from the given file object using the given header and bit width.
61,499
def read_bitpacked_deprecated(file_obj, byte_count, count, width, debug_logging):
    """Read `count` values from file_obj using the deprecated bit-packing encoding.

    Values are `width` bits wide; bits are extracted from the high end of the
    accumulator word, so values are packed MSB-first within the byte stream.
    """
    raw_bytes = array.array(ARRAY_BYTE_STR, file_obj.read(byte_count)).tolist()
    mask = _mask_for_bits(width)
    index = 0
    res = []
    word = 0  # bit accumulator
    bits_in_word = 0
    while len(res) < count and index <= len(raw_bytes):
        if debug_logging:
            logger.debug("index = %d", index)
            logger.debug("bits in word = %d", bits_in_word)
            logger.debug("word = %s", bin(word))
        if bits_in_word >= width:
            # Enough bits buffered: extract the next value from the top.
            offset = (bits_in_word - width)
            value = (word & (mask << offset)) >> offset
            if debug_logging:
                logger.debug("offset = %d", offset)
                logger.debug("value = %d (%s)", value, bin(value))
            res.append(value)
            bits_in_word -= width
        else:
            # Pull the next byte into the low end of the accumulator.
            # NOTE(review): the loop condition allows index == len(raw_bytes),
            # which would raise IndexError here; presumably `count` always runs
            # out first — confirm against callers.
            word = (word << 8) | raw_bytes[index]
            index += 1
            bits_in_word += 8
    return res
Read `count` values from the file object using the deprecated bit-packing encoding.