| idx | question | target |
|---|---|---|
| int64, 0 to 63k | string, 61 to 4.03k characters | string, 6 to 1.23k characters |
1,900 |
def dns_resolve(self):
    new_addresses = []
    for address in self.addresses:
        try:
            info = getaddrinfo(address[0], address[1], 0, SOCK_STREAM, IPPROTO_TCP)
        except gaierror:
            raise AddressError("Cannot resolve address {!r}".format(address))
        else:
            for _, _, _, _, address in info:
                if len(address) == 4 and address[3] != 0:
                    continue
                new_addresses.append(address)
    self.addresses = new_addresses
|
Perform DNS resolution on the contained addresses.
|
1,901 |
def matching_line(lines, keyword):
    for line in lines:
        matching = match(line, keyword)
        if matching is not None:
            return matching
    return None
|
Returns the first matching line in a list of lines.
|
1,902 |
def request(self, method, path, data=None, files=None, json=None, params=None):
    params = deepcopy(params) or {}
    params["raw_json"] = 1
    if isinstance(data, dict):
        data = deepcopy(data)
        data["api_type"] = "json"
        data = sorted(data.items())
    url = urljoin(self._requestor.oauth_url, path)
    return self._request_with_retries(
        data=data, files=files, json=json, method=method, params=params, url=url)
|
Return the JSON content from the resource at path.
|
1,903 |
def request(self, method, url, params=None, **kwargs):
    params_key = tuple(params.items()) if params else ()
    if method.upper() == "GET":
        if (url, params_key) in self.get_cache:
            print("Returning cached response for:", method, url, params)
            return self.get_cache[(url, params_key)]
    result = super().request(method, url, params, **kwargs)
    if method.upper() == "GET":
        self.get_cache[(url, params_key)] = result
        print("Adding entry to the cache:", method, url, params)
    return result
|
Perform a request or return a cached response if available.
|
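A minimal sketch of how the cached request method above might be wired into a session class; the CachingSession name and the dict-based cache initialization are assumptions for illustration, not from the source:

import requests

class CachingSession(requests.Session):
    def __init__(self):
        super().__init__()
        self.get_cache = {}  # (url, params_key) -> Response

    def request(self, method, url, params=None, **kwargs):
        params_key = tuple(params.items()) if params else ()
        if method.upper() == "GET" and (url, params_key) in self.get_cache:
            return self.get_cache[(url, params_key)]
        result = super().request(method, url, params=params, **kwargs)
        if method.upper() == "GET":
            self.get_cache[(url, params_key)] = result
        return result

# Two identical GETs: the second is served from get_cache without a network call.
# session = CachingSession()
# session.request("GET", "https://httpbin.org/get", params={"q": "x"})
# session.request("GET", "https://httpbin.org/get", params={"q": "x"})
|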
1,904 |
def parse_routing_info(cls, records):
    if len(records) != 1:
        raise RoutingProtocolError("Expected exactly one record")
    record = records[0]
    routers = []
    readers = []
    writers = []
    try:
        servers = record["servers"]
        for server in servers:
            role = server["role"]
            addresses = []
            for address in server["addresses"]:
                addresses.append(SocketAddress.parse(address, DEFAULT_PORT))
            if role == "ROUTE":
                routers.extend(addresses)
            elif role == "READ":
                readers.extend(addresses)
            elif role == "WRITE":
                writers.extend(addresses)
        ttl = record["ttl"]
    except (KeyError, TypeError):
        raise RoutingProtocolError("Cannot parse routing info")
    else:
        return cls(routers, readers, writers, ttl)
|
Parse the records returned from a getServers call and return a new RoutingTable instance.
|
1,905 |
def is_fresh(self, access_mode):
    log_debug("[#0000] C: <ROUTING> Checking table freshness for %r", access_mode)
    expired = self.last_updated_time + self.ttl <= self.timer()
    has_server_for_mode = (bool(access_mode == READ_ACCESS and self.readers) or
                           bool(access_mode == WRITE_ACCESS and self.writers))
    log_debug("[#0000] C: <ROUTING> Table expired=%r", expired)
    log_debug("[#0000] C: <ROUTING> Table routers=%r", self.routers)
    log_debug("[#0000] C: <ROUTING> Table has_server_for_mode=%r", has_server_for_mode)
    return not expired and self.routers and has_server_for_mode
|
Indicator for whether routing information is still usable.
|
1,906 |
def update(self, new_routing_table):
    self.routers.replace(new_routing_table.routers)
    self.readers.replace(new_routing_table.readers)
    self.writers.replace(new_routing_table.writers)
    self.last_updated_time = self.timer()
    self.ttl = new_routing_table.ttl
    log_debug("[#0000] S: <ROUTING> table=%r", self)
|
Update the current routing table with new routing information from a replacement table.
|
1,907 |
def fetch_routing_info(self, address):
    metadata = {}
    records = []

    def fail(md):
        if md.get("code") == "Neo.ClientError.Procedure.ProcedureNotFound":
            raise RoutingProtocolError("Server {!r} does not support routing".format(address))
        else:
            raise RoutingProtocolError("Routing support broken on server {!r}".format(address))

    try:
        with self.acquire_direct(address) as cx:
            _, _, server_version = (cx.server.agent or "").partition("/")
            if server_version and Version.parse(server_version) >= Version((3, 2)):
                log_debug("[#%04X] C: <ROUTING> query=%r",
                          cx.local_port, self.routing_context or {})
                cx.run("CALL dbms.cluster.routing.getRoutingTable({context})",
                       {"context": self.routing_context},
                       on_success=metadata.update, on_failure=fail)
            else:
                log_debug("[#%04X] C: <ROUTING> query={}", cx.local_port)
                cx.run("CALL dbms.cluster.routing.getServers", {},
                       on_success=metadata.update, on_failure=fail)
            cx.pull_all(on_success=metadata.update, on_records=records.extend)
            cx.sync()
        routing_info = [dict(zip(metadata.get("fields", ()), values)) for values in records]
        log_debug("[#%04X] S: <ROUTING> info=%r", cx.local_port, routing_info)
        return routing_info
    except RoutingProtocolError as error:
        raise ServiceUnavailable(*error.args)
    except ServiceUnavailable:
        self.deactivate(address)
        return None
|
Fetch raw routing info from a given router address.
|
1,908 |
def fetch_routing_table(self, address):
    new_routing_info = self.fetch_routing_info(address)
    if new_routing_info is None:
        return None
    new_routing_table = RoutingTable.parse_routing_info(new_routing_info)
    num_routers = len(new_routing_table.routers)
    num_readers = len(new_routing_table.readers)
    num_writers = len(new_routing_table.writers)
    self.missing_writer = (num_writers == 0)
    if num_routers == 0:
        raise RoutingProtocolError("No routing servers returned from server %r" % (address,))
    if num_readers == 0:
        raise RoutingProtocolError("No read servers returned from server %r" % (address,))
    return new_routing_table
|
Fetch a routing table from a given router address.
|
1,909 |
def update_routing_table_from(self, *routers):
    for router in routers:
        new_routing_table = self.fetch_routing_table(router)
        if new_routing_table is not None:
            self.routing_table.update(new_routing_table)
            return True
    return False
|
Try to update routing tables with the given routers.
|
1,910 |
def update_routing_table(self):
    existing_routers = list(self.routing_table.routers)
    has_tried_initial_routers = False
    if self.missing_writer:
        has_tried_initial_routers = True
        if self.update_routing_table_from(self.initial_address):
            return
    if self.update_routing_table_from(*existing_routers):
        return
    if not has_tried_initial_routers and self.initial_address not in existing_routers:
        if self.update_routing_table_from(self.initial_address):
            return
    raise ServiceUnavailable("Unable to retrieve routing information")
|
Update the routing table from the first router able to provide valid routing information.
|
1,911 |
def ensure_routing_table_is_fresh(self, access_mode):
    if self.routing_table.is_fresh(access_mode):
        return False
    with self.refresh_lock:
        if self.routing_table.is_fresh(access_mode):
            if access_mode == READ_ACCESS:
                self.missing_writer = not self.routing_table.is_fresh(WRITE_ACCESS)
            return False
        self.update_routing_table()
        self.update_connection_pool()
        return True
|
Update the routing table if stale.
|
1,912 |
def deactivate(self, address):
    log_debug("[#0000] C: <ROUTING> Deactivating address %r", address)
    self.routing_table.routers.discard(address)
    self.routing_table.readers.discard(address)
    self.routing_table.writers.discard(address)
    log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
    super(RoutingConnectionPool, self).deactivate(address)
|
Deactivate an address from the connection pool: if present, remove it from the routing table and close all idle connections to that address.
|
1,913 |
def remove_writer(self, address):
    log_debug("[#0000] C: <ROUTING> Removing writer %r", address)
    self.routing_table.writers.discard(address)
    log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
|
Remove a writer address from the routing table, if present.
|
1,914 |
def handle(self, error, connection):
    error_class = error.__class__
    if error_class in (ConnectionExpired, ServiceUnavailable, DatabaseUnavailableError):
        self.deactivate(connection.address)
    elif error_class in (NotALeaderError, ForbiddenOnReadOnlyDatabaseError):
        self.remove_writer(connection.address)
|
Handle any cleanup or similar activity related to an error occurring on a pooled connection.
|
1,915 |
def point_type(name, fields, srid_map):
    def srid(self):
        try:
            return srid_map[len(self)]
        except KeyError:
            return None

    attributes = {"srid": property(srid)}
    for index, subclass_field in enumerate(fields):
        def accessor(self, i=index, f=subclass_field):
            try:
                return self[i]
            except IndexError:
                raise AttributeError(f)
        for field_alias in {subclass_field, "xyz"[index]}:
            attributes[field_alias] = property(accessor)
    cls = type(name, (Point,), attributes)
    with __srid_table_lock:
        for dim, srid in srid_map.items():
            __srid_table[srid] = (cls, dim)
    return cls
|
Dynamically create a Point subclass.
|
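A brief usage sketch for the factory above, assuming Point is a tuple subclass as the indexing accessors suggest; the field names and SRID numbers here are illustrative placeholders:

# Hypothetical subclass: Cartesian points, keyed by dimension -> SRID.
CartesianPoint = point_type("CartesianPoint", ["x", "y", "z"], {2: 7203, 3: 9157})
p = CartesianPoint((1.23, 4.56))
print(p.x, p.y, p.srid)  # 1.23 4.56 7203 (2D point maps to the 2-key SRID)
|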
1,916 |
def main():
    args = parse_args()
    configure_logging(args.debug)
    src_path = args.src_path
    dest_path = args.dest_path
    old_str1 = '\\"size\\":' + args.old_size
    old_str2 = '\\"size\\": ' + args.old_size
    new_str = '\\"size\\":' + args.new_size
    logging.info('Input path: %s', src_path)
    logging.info('Output path: %s', dest_path)
    logging.info('old str: %s', old_str1)
    logging.info('old str: %s', old_str2)
    logging.info('new str: %s', new_str)
    if os.path.abspath(src_path) == os.path.abspath(dest_path):
        logging.error('source and destination directories must be different')
        sys.exit(1)
    json_files = [f for f in os.listdir(src_path) if f.endswith('.json')]
    for filename in json_files:
        in_file_path = os.path.join(src_path, filename)
        out_file_path = os.path.join(dest_path, filename)
        logging.info('INPUT FILE: %s', in_file_path)
        logging.info('OUTPUT FILE: %s', out_file_path)
        pretty = utils.beautify(filename=in_file_path)
        pretty_replaced = utils.replace(pretty, old_str1, new_str)
        pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str)
        with open(out_file_path, 'w') as output_file:
            output_file.write(pretty_replaced)
    logging.info('This is the end.')
|
Read a directory containing JSON files for Kibana panels, beautify them, and replace the size value in aggregations as specified through the corresponding params.
|
1,917 |
def signal_handler(signal_name, frame):
    sys.stdout.flush()
    print("\nSIGINT signal received. Quitting...")
    sys.stdout.flush()
    sys.exit(0)
|
Quit signal handler.
|
1,918 |
def graph_format(new_mem, old_mem, is_firstiteration=True):
    if is_firstiteration:
        output = " n/a "
    elif new_mem - old_mem > 50000000:
        output = " +++++"
    elif new_mem - old_mem > 20000000:
        output = " ++++ "
    elif new_mem - old_mem > 5000000:
        output = " +++ "
    elif new_mem - old_mem > 1000000:
        output = " ++ "
    elif new_mem - old_mem > 50000:
        output = " + "
    elif old_mem - new_mem > 10000000:
        output = "--- "
    elif old_mem - new_mem > 2000000:
        output = " -- "
    elif old_mem - new_mem > 100000:
        output = " - "
    else:
        output = " "
    return output
|
Show changes in memory consumption graphically.
|
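A quick worked example of the ladder above: a 6 MB growth between samples falls in the 5 to 20 MB band, so three plus signs are shown.

print(repr(graph_format(106_000_000, 100_000_000, is_firstiteration=False)))
# ' +++ '
|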
1,919 |
def get_cur_mem_use():
    lines = open("/proc/meminfo", 'r').readlines()
    emptySpace = re.compile('[ ]+')
    for line in lines:
        if "MemTotal" in line:
            memtotal = float(emptySpace.split(line)[1])
        if "SwapFree" in line:
            swapfree = float(emptySpace.split(line)[1])
        if "SwapTotal" in line:
            swaptotal = float(emptySpace.split(line)[1])
        if "MemFree" in line:
            memfree = float(emptySpace.split(line)[1])
        if "Cached" in line and "SwapCached" not in line:
            cached = float(emptySpace.split(line)[1])
    ramoccup = 1.0 - (memfree + cached) / memtotal
    if swaptotal == 0:
        swapoccup = 0
    else:
        swapoccup = 1.0 - swapfree / swaptotal
    strramoccup = str(round(ramoccup * 100.0, 1))
    strswapoccup = str(round(swapoccup * 100.0, 1))
    return float(memtotal), strramoccup, strswapoccup
|
Return the utilization of memory.
|
1,920 |
def check_py_version():
    try:
        if sys.version_info >= (2, 7):
            return
    except Exception:
        pass
    print(" ")
    print(" ERROR - memtop needs python version at least 2.7")
    print(("Chances are that you can install a newer version from your "
           "repositories, or even that you have some newer version "
           "installed already."))
    print("(one way to find out which versions are installed is to try "
          "the following: 'which python2.7', 'which python3' and so on...)")
    print(" ")
    sys.exit(-1)
|
Check if a proper Python version is used.
|
1,921 |
def character(prompt=None, empty=False):
    s = _prompt_input(prompt)
    if empty and not s:
        return None
    elif len(s) == 1:
        return s
    else:
        return character(prompt=prompt, empty=empty)
|
Prompt a single character.
|
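The prompt functions in this block (character, email, integer, real, regex, secret, string) all re-prompt by recursing until valid input arrives, and all rely on a _prompt_input helper that is not shown. A minimal sketch of such a helper, purely an assumption about its behavior:

PROMPT = "? "  # assumed module-level default prompt

def _prompt_input(prompt=None):
    # Read one line from stdin, displaying the given (or default) prompt.
    return input(PROMPT if prompt is None else prompt)
|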
1,922 |
def email(prompt=None, empty=False, mode="simple"):
    if mode == "simple":
        s = _prompt_input(prompt)
        if empty and not s:
            return None
        else:
            if RE_EMAIL_SIMPLE.match(s):
                return s
            else:
                return email(prompt=prompt, empty=empty, mode=mode)
    else:
        raise ValueError
|
Prompt an email address.
|
1,923 |
def integer(prompt=None, empty=False):
    s = _prompt_input(prompt)
    if empty and not s:
        return None
    else:
        try:
            return int(s)
        except ValueError:
            return integer(prompt=prompt, empty=empty)
|
Prompt an integer.
|
1,924 |
def real(prompt=None, empty=False):
    s = _prompt_input(prompt)
    if empty and not s:
        return None
    else:
        try:
            return float(s)
        except ValueError:
            return real(prompt=prompt, empty=empty)
|
Prompt a real number.
|
1,925 |
def regex(pattern, prompt=None, empty=False, flags=0):
    s = _prompt_input(prompt)
    if empty and not s:
        return None
    else:
        m = re.match(pattern, s, flags=flags)
        if m:
            return m
        else:
            return regex(pattern, prompt=prompt, empty=empty, flags=flags)
|
Prompt a string that matches a regular expression.
|
1,926 |
def secret(prompt=None, empty=False):
    if prompt is None:
        prompt = PROMPT
    s = getpass.getpass(prompt=prompt)
    if empty and not s:
        return None
    else:
        if s:
            return s
        else:
            return secret(prompt=prompt, empty=empty)
|
Prompt a string without echoing.
|
1,927 |
def string(prompt=None, empty=False):
    s = _prompt_input(prompt)
    if empty and not s:
        return None
    else:
        if s:
            return s
        else:
            return string(prompt=prompt, empty=empty)
|
Prompt a string.
|
1,928 |
def _get_cache_plus_key(self):
    key = getattr(self, '_cache_key', self.key_from_query())
    return self._cache.cache, key
|
Return a cache region plus key.
|
1,929 |
def get_value(self, merge=True, createfunc=None, expiration_time=None, ignore_expiration=False):
    cache, cache_key = self._get_cache_plus_key()
    assert not ignore_expiration or not createfunc, \
        "Can't ignore expiration and also provide createfunc"
    if ignore_expiration or not createfunc:
        cached_value = cache.get(cache_key,
                                 expiration_time=expiration_time,
                                 ignore_expiration=ignore_expiration)
    else:
        cached_value = cache.get(cache_key)
        if not cached_value:
            cached_value = createfunc()
            cache.set(cache_key, cached_value, timeout=expiration_time)
    if cached_value and merge:
        cached_value = self.merge_result(cached_value, load=False)
    return cached_value
|
Return the value from the cache for this query.
|
1,930 |
def set_value(self, value):
    cache, cache_key = self._get_cache_plus_key()
    cache.set(cache_key, value)
|
Set the value in the cache for this query.
|
1,931 |
def key_from_query(self, qualifier=None):
    stmt = self.with_labels().statement
    compiled = stmt.compile()
    params = compiled.params
    values = [str(compiled)]
    for k in sorted(params):
        values.append(repr(params[k]))
    key = u" ".join(values)
    return md5(key.encode('utf8')).hexdigest()
|
Given a Query, create a cache key.
|
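A toy illustration of the keying scheme above: the key is the MD5 of the compiled SQL text followed by the sorted, repr()'d bind parameters. The statement and params here are made up for the example:

from hashlib import md5

statement = "SELECT user.id FROM user WHERE user.name = :name_1"
params = {"name_1": "alice"}
values = [statement] + [repr(params[k]) for k in sorted(params)]
print(md5(u" ".join(values).encode("utf8")).hexdigest())
|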
1,932 |
def process_query_conditionally(self, query):
    if query._current_path:
        mapper, prop = query._current_path[-2:]
        for cls in mapper.class_.__mro__:
            k = (cls, prop.key)
            relationship_option = self._relationship_options.get(k)
            if relationship_option:
                query._cache = relationship_option
                break
|
Process a Query that is used within a lazy loader.
|
1,933 |
def fit(self, t, y, dy=1, presorted=False):
    self.t, self.y, self.dy = self._validate_inputs(t, y, dy, presorted)
    self._fit(self.t, self.y, self.dy)
    return self
|
Fit the smoother.
|
1,934 |
def predict(self, t):
    t = np.asarray(t)
    return self._predict(np.ravel(t)).reshape(t.shape)
|
Predict the smoothed function value at time t.
|
1,935 |
def cv_residuals(self, cv=True):
    vals = self.cv_values(cv)
    return (self.y - vals) / self.dy
|
Return the residuals of the cross-validation for the fit data.
|
1,936 |
def cv_error(self, cv=True, skip_endpoints=True):
    resids = self.cv_residuals(cv)
    if skip_endpoints:
        resids = resids[1:-1]
    return np.mean(abs(resids))
|
Return the mean absolute cross-validation residual for the input data.
|
1,937 |
def arcfour_drop(key, n=3072):
    af = arcfour(key)
    for _ in range(n):
        next(af)
    return af
|
Return a generator for the RC4-drop pseudorandom keystream given by the key and number of bytes to drop passed as arguments. Dropped bytes default to the more conservative 3072, NOT the SCAN default of 768.
|
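A short usage sketch, assuming arcfour(key) from the entry above yields one keystream byte per iteration:

ks = arcfour_drop(b"secret-key")   # first 3072 keystream bytes already discarded
keystream = [next(ks) for _ in range(4)]
ciphertext = bytes(b ^ k for b, k in zip(b"data", keystream))
|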
1,938 |
def reconnect(self):
    with self.lock:
        if self.use_ssl:
            self.client = http.client.HTTPSConnection(self.host, self.port,
                                                      context=self.ssl_context)
        else:
            self.client = http.client.HTTPConnection(self.host, self.port)
|
Reconnect to the remote server.
|
1,939 |
def call(self, method, *args, **kwargs):
    if kwargs:
        options = self.encode(dict(args=args, kwargs=kwargs))
    else:
        options = self.encode(args)
    headers = {}
    if self.headers:
        headers.update(self.headers)
    headers['Content-Type'] = self.serializer.content_type
    headers['Content-Length'] = str(len(options))
    headers['Connection'] = 'close'
    if self.username is not None and self.password is not None:
        headers['Authorization'] = 'Basic ' + base64.b64encode(
            (self.username + ':' + self.password).encode('UTF-8')).decode('UTF-8')
    method = os.path.join(self.uri_base, method)
    self.logger.debug('calling RPC method: ' + method[1:])
    try:
        with self.lock:
            self.client.request('RPC', method, options, headers)
            resp = self.client.getresponse()
    except http.client.ImproperConnectionState:
        raise RPCConnectionError('improper connection state')
    if resp.status != 200:
        raise RPCError(resp.reason, resp.status)
    resp_data = resp.read()
    resp_data = self.decode(resp_data)
    if not ('exception_occurred' in resp_data and 'result' in resp_data):
        raise RPCError('missing response information', resp.status)
    if resp_data['exception_occurred']:
        raise RPCError('remote method incurred an exception', resp.status,
                       remote_exception=resp_data['exception'])
    return resp_data['result']
|
Issue a call to the remote end point to execute the specified procedure.
|
1,940 |
def cache_call_refresh(self, method, *options):
    options_hash = self.encode(options)
    if len(options_hash) > 20:
        options_hash = hashlib.new('sha1', options).digest()
    options_hash = sqlite3.Binary(options_hash)
    with self.cache_lock:
        cursor = self.cache_db.cursor()
        cursor.execute('DELETE FROM cache WHERE method = ? AND options_hash = ?',
                       (method, options_hash))
    return_value = self.call(method, *options)
    store_return_value = sqlite3.Binary(self.encode(return_value))
    with self.cache_lock:
        cursor = self.cache_db.cursor()
        cursor.execute('INSERT INTO cache (method, options_hash, return_value) VALUES (?, ?, ?)',
                       (method, options_hash, store_return_value))
        self.cache_db.commit()
    return return_value
|
Call a remote method and update the local cache with the result if it already existed.
|
1,941 |
def cache_clear(self):
    with self.cache_lock:
        cursor = self.cache_db.cursor()
        cursor.execute('DELETE FROM cache')
        self.cache_db.commit()
    self.logger.info('the RPC cache has been purged')
    return
|
Purge the local store of all cached function information.
|
1,942 |
def respond_file(self, file_path, attachment=False, query=None):
    del query
    file_path = os.path.abspath(file_path)
    try:
        file_obj = open(file_path, 'rb')
    except IOError:
        self.respond_not_found()
        return
    self.send_response(200)
    self.send_header('Content-Type', self.guess_mime_type(file_path))
    fs = os.fstat(file_obj.fileno())
    self.send_header('Content-Length', str(fs[6]))
    if attachment:
        file_name = os.path.basename(file_path)
        self.send_header('Content-Disposition', 'attachment; filename=' + file_name)
    self.send_header('Last-Modified', self.date_time_string(fs.st_mtime))
    self.end_headers()
    shutil.copyfileobj(file_obj, self.wfile)
    file_obj.close()
    return
|
Respond to the client by serving a file, either directly or as an attachment.
|
1,943 |
def respond_list_directory(self, dir_path, query=None):
    del query
    try:
        dir_contents = os.listdir(dir_path)
    except os.error:
        self.respond_not_found()
        return
    if os.path.normpath(dir_path) != self.__config['serve_files_root']:
        dir_contents.append('..')
    dir_contents.sort(key=lambda a: a.lower())
    displaypath = html.escape(urllib.parse.unquote(self.path), quote=True)
    f = io.BytesIO()
    encoding = sys.getfilesystemencoding()
    f.write(b'<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n')
    f.write(b'<html>\n<title>Directory listing for ' + displaypath.encode(encoding) + b'</title>\n')
    f.write(b'<body>\n<h2>Directory listing for ' + displaypath.encode(encoding) + b'</h2>\n')
    f.write(b'<hr>\n<ul>\n')
    for name in dir_contents:
        fullname = os.path.join(dir_path, name)
        displayname = linkname = name
        if os.path.isdir(fullname):
            displayname = name + "/"
            linkname = name + "/"
        if os.path.islink(fullname):
            displayname = name + "@"
        f.write(('<li><a href="' + urllib.parse.quote(linkname) + '">'
                 + html.escape(displayname, quote=True) + '</a>\n').encode(encoding))
    f.write(b'</ul>\n<hr>\n</body>\n</html>\n')
    length = f.tell()
    f.seek(0)
    self.send_response(200)
    self.send_header('Content-Type', 'text/html; charset=' + encoding)
    self.send_header('Content-Length', length)
    self.end_headers()
    shutil.copyfileobj(f, self.wfile)
    f.close()
    return
|
Respond to the client with an HTML page listing the contents of the specified directory.
|
1,944 |
def respond_redirect(self, location='/'):
    self.send_response(301)
    self.send_header('Content-Length', 0)
    self.send_header('Location', location)
    self.end_headers()
    return
|
Respond to the client with a 301 message and redirect them with a Location header.
|
1,945 |
def respond_server_error(self, status=None, status_line=None, message=None):
    (ex_type, ex_value, ex_traceback) = sys.exc_info()
    if ex_type:
        (ex_file_name, ex_line, _, _) = traceback.extract_tb(ex_traceback)[-1]
        line_info = "{0}:{1}".format(ex_file_name, ex_line)
        log_msg = "encountered {0} in {1}".format(repr(ex_value), line_info)
        self.server.logger.error(log_msg, exc_info=True)
    status = (status or 500)
    status_line = (status_line or http.client.responses.get(status, 'Internal Server Error')).strip()
    self.send_response(status, status_line)
    message = (message or status_line)
    if isinstance(message, (str, bytes)):
        self.send_header('Content-Length', len(message))
        self.end_headers()
        if isinstance(message, str):
            self.wfile.write(message.encode(sys.getdefaultencoding()))
        else:
            self.wfile.write(message)
    elif hasattr(message, 'fileno'):
        fs = os.fstat(message.fileno())
        self.send_header('Content-Length', fs[6])
        self.end_headers()
        shutil.copyfileobj(message, self.wfile)
    else:
        self.end_headers()
    return
|
Handle an internal server error, logging a traceback if executed within an exception handler.
|
1,946 |
def respond_unauthorized(self, request_authentication=False):
    headers = {}
    if request_authentication:
        headers['WWW-Authenticate'] = 'Basic realm="' + self.__config['server_version'] + '"'
    self.send_response_full(b'Unauthorized', status=401, headers=headers)
    return
|
Respond to the client that the request is unauthorized.
|
1,947 |
def dispatch_handler(self, query=None):
    query = (query or {})
    self.path = self.path.split('?', 1)[0]
    self.path = self.path.split('#', 1)[0]
    original_path = urllib.parse.unquote(self.path)
    self.path = posixpath.normpath(original_path)
    words = self.path.split('/')
    words = filter(None, words)
    tmp_path = ''
    for word in words:
        _, word = os.path.splitdrive(word)
        _, word = os.path.split(word)
        if word in (os.curdir, os.pardir):
            continue
        tmp_path = os.path.join(tmp_path, word)
    self.path = tmp_path
    if self.path == 'robots.txt' and self.__config['serve_robots_txt']:
        self.send_response_full(self.__config['robots_txt'])
        return
    self.cookies = http.cookies.SimpleCookie(self.headers.get('cookie', ''))
    handler, is_method = self.__get_handler(is_rpc=False)
    if handler is not None:
        try:
            handler(*((query,) if is_method else (self, query)))
        except Exception:
            self.respond_server_error()
        return
    if not self.__config['serve_files']:
        self.respond_not_found()
        return
    file_path = self.__config['serve_files_root']
    file_path = os.path.join(file_path, tmp_path)
    if os.path.isfile(file_path) and os.access(file_path, os.R_OK):
        self.respond_file(file_path, query=query)
        return
    elif os.path.isdir(file_path) and os.access(file_path, os.R_OK):
        if not original_path.endswith('/'):
            destination = self.path + '/'
            if self.command == 'GET' and self.query_data:
                destination += '?' + urllib.parse.urlencode(self.query_data, True)
            self.respond_redirect(destination)
            return
        for index in ['index.html', 'index.htm']:
            index = os.path.join(file_path, index)
            if os.path.isfile(index) and os.access(index, os.R_OK):
                self.respond_file(index, query=query)
                return
        if self.__config['serve_files_list_directories']:
            self.respond_list_directory(file_path, query=query)
            return
    self.respond_not_found()
    return
|
Dispatch functions based on the established handler_map. It is generally not necessary to override this function, and doing so will prevent any handlers from being executed. This function is executed automatically when requests of GET, HEAD, or POST are received.
|
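The path-scrubbing loop at the top of dispatch_handler is the security-relevant part. A standalone sketch of just that logic (the scrub helper name is made up for the example):

import os
import posixpath
import urllib.parse

def scrub(path):
    # Drop the query/fragment, unquote, normalize, then rebuild the path
    # component by component, discarding drives, directories, '.' and '..'.
    path = posixpath.normpath(urllib.parse.unquote(path.split('?', 1)[0]))
    tmp_path = ''
    for word in filter(None, path.split('/')):
        _, word = os.path.splitdrive(word)
        _, word = os.path.split(word)
        if word in (os.curdir, os.pardir):
            continue
        tmp_path = os.path.join(tmp_path, word)
    return tmp_path

print(scrub('/static/../../etc/passwd'))  # 'etc/passwd' (relative, no escapes)
|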
1,948 |
def guess_mime_type(self, path):
    _, ext = posixpath.splitext(path)
    if ext in self.extensions_map:
        return self.extensions_map[ext]
    ext = ext.lower()
    return self.extensions_map[ext if ext in self.extensions_map else '']
|
Guess an appropriate MIME type based on the extension of the provided path.
|
1,949 |
def check_authorization(self):
    try:
        store = self.__config.get('basic_auth')
        if store is None:
            return True
        auth_info = self.headers.get('Authorization')
        if not auth_info:
            return False
        auth_info = auth_info.split()
        if len(auth_info) != 2 or auth_info[0] != 'Basic':
            return False
        auth_info = base64.b64decode(auth_info[1]).decode(sys.getdefaultencoding())
        username = auth_info.split(':')[0]
        password = ':'.join(auth_info.split(':')[1:])
        password_bytes = password.encode(sys.getdefaultencoding())
        if hasattr(self, 'custom_authentication'):
            if self.custom_authentication(username, password):
                self.basic_auth_user = username
                return True
            return False
        if username not in store:
            self.server.logger.warning('received invalid username: ' + username)
            return False
        password_data = store[username]
        if password_data['type'] == 'plain':
            if password == password_data['value']:
                self.basic_auth_user = username
                return True
        elif hashlib.new(password_data['type'], password_bytes).digest() == password_data['value']:
            self.basic_auth_user = username
            return True
        self.server.logger.warning('received invalid password from user: ' + username)
    except Exception:
        pass
    return False
|
Check for the presence of a basic auth Authorization header and whether the credentials contained within it are valid.
|
1,950 |
def cookie_get(self, name):
    if not hasattr(self, 'cookies'):
        return None
    if self.cookies.get(name):
        return self.cookies.get(name).value
    return None
|
Check for a cookie value by name.
|
1,951 |
def cookie_set(self, name, value):
    if not self.headers_active:
        raise RuntimeError('headers have already been ended')
    cookie = "{0}={1}; Path=/; HttpOnly".format(name, value)
    self.send_header('Set-Cookie', cookie)
|
Set the value of a client cookie. This can only be called while headers can be sent.
|
1,952 |
def get_content_type_charset(self, default='UTF-8'):
    encoding = default
    header = self.headers.get('Content-Type', '')
    idx = header.find('charset=')
    if idx > 0:
        encoding = (header[idx + 8:].split(' ', 1)[0] or encoding)
    return encoding
|
Inspect the Content-Type header to retrieve the charset that the client has specified.
|
1,953 |
def close(self):
    if not self.connected:
        return
    self.connected = False
    if self.handler.wfile.closed:
        return
    if select.select([], [self.handler.wfile], [], 0)[1]:
        with self.lock:
            self.handler.wfile.write(b'\x88\x00')
            self.handler.wfile.flush()
    self.on_closed()
|
Close the web socket connection and stop processing results. If the connection is still open, a WebSocket close message will be sent to the peer.
|
1,954 |
def send_message(self, opcode, message):
    if not isinstance(message, bytes):
        message = message.encode('utf-8')
    length = len(message)
    if not select.select([], [self.handler.wfile], [], 0)[1]:
        self.logger.error('the socket is not ready for writing')
        self.close()
        return
    buffer = b''
    buffer += struct.pack('B', 0x80 + opcode)
    if length <= 125:
        buffer += struct.pack('B', length)
    elif 126 <= length <= 65535:
        buffer += struct.pack('>BH', 126, length)
    else:
        buffer += struct.pack('>BQ', 127, length)
    buffer += message
    self._last_sent_opcode = opcode
    self.lock.acquire()
    try:
        self.handler.wfile.write(buffer)
        self.handler.wfile.flush()
    except Exception:
        self.logger.error('an error occurred while sending a message', exc_info=True)
        self.close()
    finally:
        self.lock.release()
|
Send a message to the peer over the socket.
|
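A worked example of the frame-header packing above: a 200-byte message with the text opcode (0x1) takes the one-byte 126 marker plus a 16-bit big-endian length.

import struct

opcode, length = 0x1, 200
header = struct.pack('B', 0x80 + opcode) + struct.pack('>BH', 126, length)
print(header.hex())  # 817e00c8 -> FIN|text, extended 16-bit length of 200
|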
1,955 |
def on_message(self, opcode, message):
    self.logger.debug("processing {0} (opcode: 0x{1:02x}) message".format(
        self._opcode_names.get(opcode, 'UNKNOWN'), opcode))
    if opcode == self._opcode_close:
        self.close()
    elif opcode == self._opcode_ping:
        if len(message) > 125:
            self.close()
            return
        self.send_message(self._opcode_pong, message)
    elif opcode == self._opcode_pong:
        pass
    elif opcode == self._opcode_binary:
        self.on_message_binary(message)
    elif opcode == self._opcode_text:
        try:
            message = self._decode_string(message)
        except UnicodeDecodeError:
            self.logger.warning('closing connection due to invalid unicode within a text message')
            self.close()
        else:
            self.on_message_text(message)
    elif opcode == self._opcode_continue:
        self.close()
    else:
        self.logger.warning("received unknown opcode: {0} (0x{0:02x})".format(opcode))
        self.close()
|
The primary dispatch function to handle incoming WebSocket messages.
|
1,956 |
def from_content_type(cls, content_type):
    name = content_type
    options = {}
    if ';' in content_type:
        name, options_str = content_type.split(';', 1)
        for part in options_str.split(';'):
            part = part.strip()
            if '=' in part:
                key, value = part.split('=')
            else:
                key, value = (part, None)
            options[key] = value
    if name.endswith('+zlib'):
        options['compression'] = 'zlib'
        name = name[:-5]
    return cls(name, charset=options.get('charset', 'UTF-8'),
               compression=options.get('compression'))
|
Build a serializer object from a MIME Content-Type string.
|
1,957 |
def dumps(self, data):
    data = g_serializer_drivers[self.name]['dumps'](data)
    if sys.version_info[0] == 3 and isinstance(data, str):
        data = data.encode(self._charset)
    if self._compression == 'zlib':
        data = zlib.compress(data)
    assert isinstance(data, bytes)
    return data
|
Serialize a python data type for transmission or storage.
|
1,958 |
def loads(self, data):
    if not isinstance(data, bytes):
        raise TypeError("loads() argument 1 must be bytes, not {0}".format(type(data).__name__))
    if self._compression == 'zlib':
        data = zlib.decompress(data)
    if sys.version_info[0] == 3 and self.name.startswith('application/'):
        data = data.decode(self._charset)
    data = g_serializer_drivers[self.name]['loads'](data, (self._charset if sys.version_info[0] == 3 else None))
    if isinstance(data, list):
        data = tuple(data)
    return data
|
Deserialize the data into its original Python object.
|
1,959 |
def shutdown(self):
    self.__should_stop.set()
    if self.__server_thread == threading.current_thread():
        self.__is_shutdown.set()
        self.__is_running.clear()
    else:
        if self.__wakeup_fd is not None:
            os.write(self.__wakeup_fd.write_fd, b'\x00')
        self.__is_shutdown.wait()
    if self.__wakeup_fd is not None:
        self.__wakeup_fd.close()
        self.__wakeup_fd = None
    for server in self.sub_servers:
        server.shutdown()
|
Shutdown the server and stop responding to requests.
|
1,960 |
def auth_set(self, status):
    if not bool(status):
        self.__config['basic_auth'] = None
        self.logger.info('basic authentication has been disabled')
    else:
        self.__config['basic_auth'] = {}
        self.logger.info('basic authentication has been enabled')
|
Enable or disable requiring authentication on all incoming requests.
|
1,961 |
def auth_delete_creds(self, username=None):
    if not username:
        self.__config['basic_auth'] = {}
        self.logger.info('basic authentication database has been cleared of all entries')
        return
    del self.__config['basic_auth'][username]
|
Delete the credentials for a specific username if specified, or all stored credentials.
|
1,962 |
def setattr_context(obj, **kwargs):
    old_kwargs = dict((key, getattr(obj, key)) for key in kwargs)
    for key, val in kwargs.items():
        setattr(obj, key, val)
    try:
        yield
    finally:
        for key, val in old_kwargs.items():
            setattr(obj, key, val)
|
Context manager to temporarily change the values of object attributes while executing a function.
|
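A usage sketch: the docstring implies the generator function is decorated with contextlib.contextmanager in its original module, which is assumed here by wrapping it explicitly.

from contextlib import contextmanager

managed = contextmanager(setattr_context)  # assumed decoration

class Box:
    width = 10

box = Box()
with managed(box, width=99):
    print(box.width)  # 99 inside the block
print(box.width)      # 10 restored afterwards
|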
1,963 |
def validate_inputs(*arrays, **kwargs):
    arrays = np.broadcast_arrays(*arrays)
    sort_by = kwargs.pop('sort_by', None)
    if kwargs:
        raise ValueError("unrecognized arguments: {0}".format(kwargs.keys()))
    if arrays[0].ndim != 1:
        raise ValueError("Input arrays should be one-dimensional.")
    if sort_by is not None:
        isort = np.argsort(sort_by)
        if isort.shape != arrays[0].shape:
            raise ValueError("sort shape must equal array shape.")
        arrays = tuple([a[isort] for a in arrays])
    return arrays
|
Validate input arrays.
|
1,964 |
def _prep_smooth(t, y, dy, span, t_out, span_out, period):
    if period:
        t = t % period
        if t_out is not None:
            t_out = t_out % period
    t, y, dy = validate_inputs(t, y, dy, sort_by=t)
    if span_out is not None:
        if t_out is None:
            raise ValueError("Must specify t_out when span_out is given")
        if span is not None:
            raise ValueError("Must specify only one of span, span_out")
        span, t_out = np.broadcast_arrays(span_out, t_out)
        indices = np.searchsorted(t, t_out)
    elif span is None:
        raise ValueError("Must specify either span_out or span")
    else:
        indices = None
    return t, y, dy, span, t_out, span_out, indices
|
Private function to prepare & check variables for smooth utilities.
|
1,965 |
def moving_average_smooth(t, y, dy, span=None, cv=True, t_out=None, span_out=None, period=None):
    prep = _prep_smooth(t, y, dy, span, t_out, span_out, period)
    t, y, dy, span, t_out, span_out, indices = prep
    w = 1. / (dy ** 2)
    w, yw = windowed_sum([w, y * w], t=t, span=span, subtract_mid=cv,
                         indices=indices, period=period)
    if t_out is None or span_out is not None:
        return yw / w
    else:
        i = np.minimum(len(t) - 1, np.searchsorted(t, t_out))
        return yw[i] / w[i]
|
Perform a moving-average smooth of the data.
|
1,966 |
def linear_smooth(t, y, dy, span=None, cv=True, t_out=None, span_out=None, period=None):
    t_input = t
    prep = _prep_smooth(t, y, dy, span, t_out, span_out, period)
    t, y, dy, span, t_out, span_out, indices = prep
    if period:
        t_input = np.asarray(t_input) % period
    w = 1. / (dy ** 2)
    w, yw, tw, tyw, ttw = windowed_sum([w, y * w, w, y * w, w], t=t,
                                       tpowers=[0, 0, 1, 1, 2],
                                       span=span, indices=indices,
                                       subtract_mid=cv, period=period)
    denominator = (w * ttw - tw * tw)
    slope = (tyw * w - tw * yw)
    intercept = (ttw * yw - tyw * tw)
    if np.any(denominator == 0):
        raise ValueError("Zero denominator in linear smooth. This usually "
                         "indicates that the input contains duplicate points.")
    if t_out is None:
        return (slope * t_input + intercept) / denominator
    elif span_out is not None:
        return (slope * t_out + intercept) / denominator
    else:
        i = np.minimum(len(t) - 1, np.searchsorted(t, t_out))
        return (slope[i] * t_out + intercept[i]) / denominator[i]
|
Perform a linear smooth of the data.
|
1,967 |
def multinterp(x, y, xquery, slow=False):
    x, y, xquery = map(np.asarray, (x, y, xquery))
    assert x.ndim == 1
    assert xquery.ndim == 1
    assert y.shape == x.shape + xquery.shape
    xquery = np.clip(xquery, x.min(), x.max())
    if slow:
        from scipy.interpolate import interp1d
        return np.array([interp1d(x, y)(xq) for xq, y in zip(xquery, y.T)])
    elif len(x) == 3:
        yq_lower = y[0] + (xquery - x[0]) * (y[1] - y[0]) / (x[1] - x[0])
        yq_upper = y[1] + (xquery - x[1]) * (y[2] - y[1]) / (x[2] - x[1])
        return np.where(xquery < x[1], yq_lower, yq_upper)
    else:
        i = np.clip(np.searchsorted(x, xquery, side='right') - 1, 0, len(x) - 2)
        j = np.arange(len(xquery))
        return y[i, j] + ((xquery - x[i]) * (y[i + 1, j] - y[i, j]) / (x[i + 1] - x[i]))
|
Multiple linear interpolations.
|
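A small worked example of the function above: each query point gets its own column of y-values, so each interpolation can use a different curve. The data here is made up.

import numpy as np

x = np.array([0.0, 1.0, 2.0])
xquery = np.array([0.5, 1.5])
y = np.array([[0.0, 0.0],
              [1.0, 1.0],
              [4.0, 4.0]])  # shape = x.shape + xquery.shape
print(multinterp(x, y, xquery))  # [0.5 2.5]
|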
1,968 |
def _create_session(self, test_connection=False):
    session = consulate.Session(host=self.host, port=self.port)
    if test_connection:
        session.status.leader()
    return session
|
Create a consulate.Session object and query for its leader to ensure that the connection is made.
|
1,969 |
def apply_remote_config(self, namespace=None):
    if namespace is None:
        namespace = "config/{service}/{environment}/".format(
            service=os.environ.get('SERVICE', 'generic_service'),
            environment=os.environ.get('ENVIRONMENT', 'generic_environment'))
    for k, v in iteritems(self.session.kv.find(namespace)):
        k = k.replace(namespace, '')
        try:
            self.app.config[k] = json.loads(v)
        except (TypeError, ValueError):
            self.app.logger.warning("Couldn't de-serialize {} to json, using raw value".format(v))
            self.app.config[k] = v
        msg = "Set {k}={v} from consul kv '{ns}'".format(k=k, v=v, ns=namespace)
        self.app.logger.debug(msg)
|
Applies all config values defined in consul's kv store to self.app.
|
1,970 |
def register_service(self, **kwargs):
    kwargs.setdefault('name', self.app.name)
    self.session.agent.service.register(**kwargs)
|
Register this service with consul; kwargs are passed to Consul.agent.service.register.
|
1,971 |
def _resolve(self):
    endpoints = {}
    r = self.resolver.query(self.service, 'SRV')
    for rec in r.response.additional:
        name = rec.name.to_text()
        addr = rec.items[0].address
        endpoints[name] = {'addr': addr}
    for rec in r.response.answer[0].items:
        name = '.'.join(rec.target.labels)
        endpoints[name]['port'] = rec.port
    return ['http://{ip}:{port}'.format(ip=v['addr'], port=v['port'])
            for v in endpoints.values()]
|
Query the consul DNS server for the service IP and port.
|
1,972 |
def crop(gens, seconds=5, cropper=None):
    if hasattr(gens, "__next__"):
        gens = (gens,)
    if cropper is None:
        cropper = lambda gen: itertools.islice(gen, 0, seconds * sampler.FRAME_RATE)
    cropped = [cropper(gen) for gen in gens]
    return cropped[0] if len(cropped) == 1 else cropped
|
Crop the generator to a finite number of frames.
|
1,973 |
def crop_at_zero_crossing(gen, seconds=5, error=0.1):
    source = iter(gen)
    buffer_length = int(2 * error * sampler.FRAME_RATE)
    start = itertools.islice(source, 0, int((seconds - error) * sampler.FRAME_RATE))
    end = itertools.islice(source, 0, buffer_length)
    for sample in start:
        yield sample
    end = list(end)
    best = sorted(enumerate(end),
                  key=lambda x: (math.fabs(x[1]), abs((buffer_length / 2) - x[0])))
    print(best[:10])
    print(best[0][0])
    for sample in end[:best[0][0] + 1]:
        yield sample
|
Crop the generator, ending at a zero-crossing.
|
1,974 |
def volume(gen, dB=0):
    if not hasattr(dB, '__next__'):
        scale = 10 ** (dB / 20.)
    else:
        def scale_gen():
            while True:
                yield 10 ** (next(dB) / 20.)
        scale = scale_gen()
    return envelope(gen, scale)
|
Change the volume of gen by dB decibels.
|
1,975 |
def mixer(inputs, mix=None):
    if mix is None:
        mix = ([constant(1.0 / len(inputs))] * len(inputs),)
    duped_inputs = zip(*[itertools.tee(i, len(mix)) for i in inputs])
    return [sum(*[multiply(m, i) for m, i in zip(channel_mix, channel_inputs)])
            for channel_mix, channel_inputs in zip(mix, duped_inputs)]
|
Mix inputs together based on the mix tuple.
|
1,976 |
def channelize(gen, channels):
    def pick(g, channel):
        for samples in g:
            yield samples[channel]
    return [pick(gen_copy, channel)
            for channel, gen_copy in enumerate(itertools.tee(gen, channels))]
|
Break a multi-channel generator into one sub-generator per channel.
|
1,977 |
def file_is_seekable(f):
    try:
        f.tell()
        logger.info("File is seekable!")
    except IOError as e:
        if e.errno == errno.ESPIPE:
            return False
        else:
            raise
    return True
|
Returns True if file f is seekable and False if not. Useful to determine, for example, if f is STDOUT to a pipe.
|
1,978 |
def sample(generator, min=-1, max=1, width=SAMPLE_WIDTH):
    fmt = {1: '<B', 2: '<h', 4: '<i'}[width]
    return (struct.pack(fmt, int(sample))
            for sample in normalize(hard_clip(generator, min, max),
                                    min, max,
                                    -2 ** (width * 8 - 1),
                                    2 ** (width * 8 - 1) - 1))
|
Convert an audio waveform generator into a packed sample generator.
|
1,979 |
def sample_all(generators, *args, **kwargs):
    return [sample(gen, *args, **kwargs) for gen in generators]
|
Convert a list of audio waveform generators into a list of packed sample generators.
|
1,980 |
def buffer(stream, buffer_size=BUFFER_SIZE):
    i = iter(stream)
    return iter(lambda: "".join(itertools.islice(i, buffer_size)), "")
|
Buffer the generator into byte strings of buffer_size samples.
|
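A small usage sketch with plain strings (the real stream yields packed sample bytes): the two-argument iter() keeps joining slices of the stream until an empty join signals exhaustion.

chunks = buffer(iter("abcdefghij"), buffer_size=4)
print(list(chunks))  # ['abcd', 'efgh', 'ij']
|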
1,981 |
def wave_module_patched():
    f = StringIO()
    w = wave.open(f, "wb")
    w.setparams((1, 2, 44100, 0, "NONE", "no compression"))
    patched = True
    try:
        w.setnframes((0xFFFFFFFF - 36) / w.getnchannels() / w.getsampwidth())
        w._ensure_header_written(0)
    except struct.error:
        patched = False
        logger.info("Error setting wave data size to 0xFFFFFFFF; wave module unpatched, "
                    "setting data size to 0x7FFFFFFF")
        w.setnframes((0x7FFFFFFF - 36) / w.getnchannels() / w.getsampwidth())
        w._ensure_header_written(0)
    return patched
|
True if the wave module can write a data size of 0xFFFFFFFF, False otherwise.
|
1,982 |
def cache_finite_samples(f):
    cache = {}
    def wrap(*args):
        key = FRAME_RATE, args
        if key not in cache:
            cache[key] = [sample for sample in f(*args)]
        return (sample for sample in cache[key])
    return wrap
|
Decorator to cache audio samples produced by the wrapped generator.
|
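A usage sketch, assuming a module-level FRAME_RATE as the decorator above requires; the ramp generator is made up for the example:

FRAME_RATE = 44100

@cache_finite_samples
def ramp(n):
    for i in range(n):
        yield i / n

list(ramp(4))         # generator fully consumed and memoized on first call
print(list(ramp(4)))  # [0.0, 0.25, 0.5, 0.75] served from the cache
|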
1,983 |
def play(channels, blocking=True, raw_samples=False):
    if not pyaudio_loaded:
        raise Exception("Soundcard playback requires PyAudio. Install with `pip install pyaudio`.")
    channel_count = 1 if hasattr(channels, "__next__") else len(channels)
    wavgen = wav_samples(channels, raw_samples=raw_samples)
    p = pyaudio.PyAudio()
    stream = p.open(format=p.get_format_from_width(SAMPLE_WIDTH),
                    channels=channel_count,
                    rate=FRAME_RATE,
                    output=True,
                    stream_callback=_pyaudio_callback(wavgen) if not blocking else None)
    if blocking:
        try:
            for chunk in buffer(wavgen, 1024):
                stream.write(chunk)
        except Exception:
            raise
        finally:
            if not stream.is_stopped():
                stream.stop_stream()
            try:
                stream.close()
            except Exception:
                pass
    else:
        return stream
|
Play the contents of the generator using PyAudio.
|
1,984 |
def _pad_arrays(t, arrays, indices, span, period):
    N = len(t)
    if indices is None:
        indices = np.arange(N)
    pad_left = max(0, 0 - np.min(indices - span // 2))
    pad_right = max(0, np.max(indices + span - span // 2) - (N - 1))
    if pad_left + pad_right > 0:
        Nright, pad_right = divmod(pad_right, N)
        Nleft, pad_left = divmod(pad_left, N)
        t = np.concatenate([t[N - pad_left:] - (Nleft + 1) * period]
                           + [t + i * period for i in range(-Nleft, Nright + 1)]
                           + [t[:pad_right] + (Nright + 1) * period])
        arrays = [np.concatenate([a[N - pad_left:]]
                                 + (Nleft + Nright + 1) * [a]
                                 + [a[:pad_right]])
                  for a in arrays]
        pad_left = pad_left % N
        Nright = pad_right / N
        pad_right = pad_right % N
        return (t, arrays, slice(pad_left + Nleft * N, pad_left + (Nleft + 1) * N))
    else:
        return (t, arrays, slice(None))
|
Internal routine to pad arrays for periodic models.
|
1,985 |
def get_i2c_bus_numbers(glober=glob.glob):
    res = []
    for device in glober("/dev/i2c-*"):
        # (\d{1,2}) captures the full bus number; the original ([\d]){1,2}
        # kept only the last digit for two-digit buses.
        r = re.match(r"/dev/i2c-(\d{1,2})", device)
        res.append(int(r.group(1)))
    return res
|
Search all the available I2C devices in the system.
|
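A usage sketch that exercises the parser without hardware by injecting a fake glob; the injected function is made up for the example:

fake_glob = lambda pattern: ["/dev/i2c-0", "/dev/i2c-1", "/dev/i2c-10"]
print(get_i2c_bus_numbers(glober=fake_glob))  # [0, 1, 10]
|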
1,986 |
def get_led_register_from_name(self, name):
    res = re.match('^led_([0-9]{1,2})$', name)
    if res is None:
        raise AttributeError("Unknown attribute: '%s'" % name)
    led_num = int(res.group(1))
    if led_num < 0 or led_num > 15:
        raise AttributeError("Unknown attribute: '%s'" % name)
    return self.calc_led_register(led_num)
|
Parse the name for the LED number.
|
1,987 |
def set_pwm(self, led_num, value):
    self.__check_range('led_number', led_num)
    self.__check_range('led_value', value)
    register_low = self.calc_led_register(led_num)
    self.write(register_low, value_low(value))
    self.write(register_low + 1, value_high(value))
|
Set the PWM value for the specified LED.
|
1,988 |
def get_pwm(self, led_num):
    self.__check_range('led_number', led_num)
    register_low = self.calc_led_register(led_num)
    return self.__get_led_value(register_low)
|
Generic getter for an LED's PWM value.
|
1,989 |
def sleep(self):
    logger.debug("Sleep the controller")
    self.write(Registers.MODE_1, self.mode_1 | (1 << Mode1.SLEEP))
|
Send the controller to sleep.
|
1,990 |
def write(self, reg, value):
    self.__check_range('register_value', value)
    logger.debug("Write '%s' to register '%s'" % (value, reg))
    self.__bus.write_byte_data(self.__address, reg, value)
|
Write a raw byte value to the specified register.
|
1,991 |
def set_pwm_frequency(self, value):
    self.__check_range('pwm_frequency', value)
    reg_val = self.calc_pre_scale(value)
    logger.debug("Calculated prescale value is %s" % reg_val)
    self.sleep()
    self.write(Registers.PRE_SCALE, reg_val)
    self.wake()
|
Set the frequency for all PWM output.
|
1,992 |
def check_valid_color(color):
    if color in list(mcolors.CSS4_COLORS.keys()) + ["#4CB391"]:
        logging.info("Nanoplotter: Valid color {}.".format(color))
        return color
    else:
        logging.info("Nanoplotter: Invalid color {}, using default.".format(color))
        sys.stderr.write("Invalid color {}, using default.\n".format(color))
        return "#4CB391"
|
Check if the color provided by the user is valid.
|
1,993 |
def check_valid_format(figformat):
    fig = plt.figure()
    if figformat in list(fig.canvas.get_supported_filetypes().keys()):
        logging.info("Nanoplotter: valid output format {}".format(figformat))
        return figformat
    else:
        logging.info("Nanoplotter: invalid output format {}".format(figformat))
        sys.stderr.write("Invalid format {}, using default.\n".format(figformat))
        return "png"
|
Check if the specified figure format is valid.
|
1,994 |
def spatial_heatmap(array, path, title=None, color="Greens", figformat="png"):
    logging.info("Nanoplotter: Creating heatmap of reads per channel using {} reads.".format(array.size))
    activity_map = Plot(path=path + "." + figformat,
                        title="Number of reads generated per channel")
    layout = make_layout(maxval=np.amax(array))
    valueCounts = pd.value_counts(pd.Series(array))
    for entry in valueCounts.keys():
        layout.template[np.where(layout.structure == entry)] = valueCounts[entry]
    plt.figure()
    ax = sns.heatmap(data=pd.DataFrame(layout.template, index=layout.yticks, columns=layout.xticks),
                     xticklabels="auto",
                     yticklabels="auto",
                     square=True,
                     cbar_kws={"orientation": "horizontal"},
                     cmap=color,
                     linewidths=0.20)
    ax.set_title(title or activity_map.title)
    activity_map.fig = ax.get_figure()
    activity_map.save(format=figformat)
    plt.close("all")
    return [activity_map]
|
Taking channel information and creating post-run channel activity plots.
|
1,995 |
def check_valid_time_and_sort(df, timescol, days=5, warning=True):
    timediff = (df[timescol].max() - df[timescol].min()).days
    if timediff < days:
        return df.sort_values(timescol).reset_index(drop=True).reset_index()
    else:
        if warning:
            sys.stderr.write("\nWarning: data generated is from more than {} days.\n".format(str(days)))
            sys.stderr.write("Likely this indicates you are combining multiple runs.\n")
            sys.stderr.write("Plots based on time are invalid and therefore truncated to first {} days.\n\n".format(str(days)))
            logging.warning("Time plots truncated to first {} days: invalid timespan: {} days".format(str(days), str(timediff)))
        return df[df[timescol] < timedelta(days=days)].sort_values(timescol).reset_index(drop=True).reset_index()
|
Check if the data contains reads created within the same days-long timeframe.
|
1,996 |
def time_plots(df, path, title=None, color="#4CB391", figformat="png", log_length=False, plot_settings=None):
    dfs = check_valid_time_and_sort(df, "start_time")
    logging.info("Nanoplotter: Creating timeplots using {} reads.".format(len(dfs)))
    cumyields = cumulative_yield(dfs=dfs.set_index("start_time"),
                                 path=path,
                                 figformat=figformat,
                                 title=title,
                                 color=color)
    reads_pores_over_time = plot_over_time(dfs=dfs.set_index("start_time"),
                                           path=path,
                                           figformat=figformat,
                                           title=title,
                                           color=color)
    violins = violin_plots_over_time(dfs=dfs,
                                     path=path,
                                     figformat=figformat,
                                     title=title,
                                     log_length=log_length,
                                     plot_settings=plot_settings)
    return cumyields + reads_pores_over_time + violins
|
Making plots of time vs read length, time vs quality, and cumulative yield.
|
1,997 |
def violin_or_box_plot(df, y, figformat, path, y_name, title=None, plot="violin", log=False, palette=None):
    comp = Plot(path=path + "NanoComp_" + y.replace(' ', '_') + '.' + figformat,
                title="Comparing {}".format(y))
    if y == "quals":
        comp.title = "Comparing base call quality scores"
    if plot == 'violin':
        logging.info("Nanoplotter: Creating violin plot for {}.".format(y))
        process_violin_and_box(
            ax=sns.violinplot(x="dataset", y=y, data=df, inner=None,
                              cut=0, palette=palette, linewidth=0),
            log=log, plot_obj=comp, title=title, y_name=y_name,
            figformat=figformat, ymax=np.amax(df[y]))
    elif plot == 'box':
        logging.info("Nanoplotter: Creating box plot for {}.".format(y))
        process_violin_and_box(
            ax=sns.boxplot(x="dataset", y=y, data=df, palette=palette),
            log=log, plot_obj=comp, title=title, y_name=y_name,
            figformat=figformat, ymax=np.amax(df[y]))
    elif plot == 'ridge':
        logging.info("Nanoplotter: Creating ridges plot for {}.".format(y))
        comp.fig, axes = joypy.joyplot(df, by="dataset", column=y,
                                       title=title or comp.title,
                                       x_range=[-0.05, np.amax(df[y])])
        if log:
            xticks = [float(i.get_text()) for i in axes[-1].get_xticklabels()]
            axes[-1].set_xticklabels([10 ** i for i in xticks])
        axes[-1].set_xticklabels(axes[-1].get_xticklabels(), rotation=30, ha='center')
        comp.save(format=figformat)
    else:
        logging.error("Unknown comp plot type {}".format(plot))
        sys.exit("Unknown comp plot type {}".format(plot))
    plt.close("all")
    return [comp]
|
Create a violin or box plot from the received DataFrame.
|
1,998 |
def output_barplot(df, figformat, path, title=None, palette=None):
    logging.info("Nanoplotter: Creating barplots for number of reads and total throughput.")
    read_count = Plot(path=path + "NanoComp_number_of_reads." + figformat,
                      title="Comparing number of reads")
    ax = sns.countplot(x="dataset", data=df, palette=palette)
    ax.set(ylabel='Number of reads', title=title or read_count.title)
    plt.xticks(rotation=30, ha='center')
    read_count.fig = ax.get_figure()
    read_count.save(format=figformat)
    plt.close("all")
    throughput_bases = Plot(path=path + "NanoComp_total_throughput." + figformat,
                            title="Comparing throughput in gigabases")
    if "aligned_lengths" in df:
        throughput = df.groupby('dataset')['aligned_lengths'].sum()
        ylabel = 'Total gigabase aligned'
    else:
        throughput = df.groupby('dataset')['lengths'].sum()
        ylabel = 'Total gigabase sequenced'
    ax = sns.barplot(x=list(throughput.index),
                     y=throughput / 1e9,
                     palette=palette,
                     order=df["dataset"].unique())
    ax.set(ylabel=ylabel, title=title or throughput_bases.title)
    plt.xticks(rotation=30, ha='center')
    throughput_bases.fig = ax.get_figure()
    throughput_bases.save(format=figformat)
    plt.close("all")
    return read_count, throughput_bases
|
Create barplots based on number of reads and total sum of nucleotides sequenced.
|
1,999 |
def overlay_histogram(df, path, palette=None):
    if palette is None:
        palette = plotly.colors.DEFAULT_PLOTLY_COLORS * 5
    hist = Plot(path=path + "NanoComp_OverlayHistogram.html",
                title="Histogram of read lengths")
    hist.html, hist.fig = plot_overlay_histogram(df, palette, title=hist.title)
    hist.save()
    hist_norm = Plot(path=path + "NanoComp_OverlayHistogram_Normalized.html",
                     title="Normalized histogram of read lengths")
    hist_norm.html, hist_norm.fig = plot_overlay_histogram(df, palette,
                                                           title=hist_norm.title,
                                                           histnorm="probability")
    hist_norm.save()
    log_hist = Plot(path=path + "NanoComp_OverlayLogHistogram.html",
                    title="Histogram of log transformed read lengths")
    log_hist.html, log_hist.fig = plot_log_histogram(df, palette, title=log_hist.title)
    log_hist.save()
    log_hist_norm = Plot(path=path + "NanoComp_OverlayLogHistogram_Normalized.html",
                         title="Normalized histogram of log transformed read lengths")
    log_hist_norm.html, log_hist_norm.fig = plot_log_histogram(df, palette,
                                                               title=log_hist_norm.title,
                                                               histnorm="probability")
    log_hist_norm.save()
    return [hist, hist_norm, log_hist, log_hist_norm]
|
Use plotly to create an overlay of length histograms. Return html code, but also save as png.
|