| idx (int64, 0 to 63k) | question (string, lengths 61 to 4.03k) | target (string, lengths 6 to 1.23k) |
|---|---|---|
2,200 |
def list_dynamodb(region, filter_by_kwargs):
    conn = boto.dynamodb.connect_to_region(region)
    tables = conn.list_tables()
    return lookup(tables, filter_by=filter_by_kwargs)
|
List all DynamoDB tables.
|
2,201 |
def committoapi(api):
    global USED_API
    assert USED_API is None, "committoapi called again!"
    check = ["PyQt4", "PyQt5", "PySide", "PySide2"]
    assert api in [QT_API_PYQT5, QT_API_PYQT4, QT_API_PYSIDE, QT_API_PYSIDE2]
    for name in check:
        if name.lower() != api and name in sys.modules:
            raise RuntimeError(
                "{} was already imported. Cannot commit to {}!".format(name, api))
    else:
        api = _intern(api)
        USED_API = api
        AnyQt.__SELECTED_API = api
        AnyQt.USED_API = api
|
Commit to the use of the specified Qt api.
|
2,202 |
def get_metadata(dist):
    if not dist.has_metadata('PKG-INFO'):
        return
    msg = email.message_from_string(dist.get_metadata('PKG-INFO'))
    metadata = {}
    for header in [l for l in msg._headers]:
        metadata[header[0]] = header[1]
    return metadata
|
Return a dictionary of metadata for the given dist.
|
2,203 |
def add_options(self, parser):
    parser.add_option("--with-%s" % self.name,
                      action="store_true",
                      dest=self.enable_opt,
                      help="Enable plugin %s: %s" % (self.__class__.__name__, self.help()))
|
Add command-line options for this plugin.
|
2,204 |
def configure(self, options, conf):
    self.conf = conf
    if hasattr(options, self.enable_opt):
        self.enabled = getattr(options, self.enable_opt)
|
Configure the plugin and system, based on selected options.
|
2,205 |
def raise_for_status(status: int, headers: MutableMapping, data: MutableMapping) -> None:
    if status != 200:
        if status == 429:
            if isinstance(data, str):
                error = data
            else:
                error = data.get("error", "ratelimited")
            try:
                retry_after = int(headers.get("Retry-After", 1))
            except ValueError:
                retry_after = 1
            raise exceptions.RateLimited(retry_after, error, status, headers, data)
        else:
            raise exceptions.HTTPException(status, headers, data)
|
Check request response status.
|
2,206 |
def raise_for_api_error(headers: MutableMapping, data: MutableMapping) -> None:
    if not data["ok"]:
        raise exceptions.SlackAPIError(data.get("error", "unknow_error"), headers, data)
    if "warning" in data:
        LOG.warning("Slack API WARNING: %s", data["warning"])
|
Check request response for a Slack API error.
|
2,207 |
def decode_body(headers: MutableMapping, body: bytes) -> dict:
    type_, encoding = parse_content_type(headers)
    decoded_body = body.decode(encoding)
    if type_ == "application/json":
        payload = json.loads(decoded_body)
    else:
        if decoded_body == "ok":
            payload = {"ok": True}
        else:
            payload = {"ok": False, "data": decoded_body}
    return payload
|
Decode the response body.
|
2,208 |
def parse_content_type(headers: MutableMapping) -> Tuple[Optional[str], str]:
    content_type = headers.get("content-type")
    if not content_type:
        return None, "utf-8"
    else:
        type_, parameters = cgi.parse_header(content_type)
        encoding = parameters.get("charset", "utf-8")
        return type_, encoding
|
Find the content-type and encoding of the response.
|
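The two helpers above compose: decode_body relies on parse_content_type to pick a charset before deserializing. A minimal standalone sketch of the same header parsing, using only the standard library (the header value is made up for illustration; note cgi was removed in Python 3.13):

```python
import cgi

# A hypothetical response header, for illustration only.
headers = {"content-type": "application/json; charset=utf-8"}

# The same parsing step the parse_content_type row performs.
type_, parameters = cgi.parse_header(headers["content-type"])
encoding = parameters.get("charset", "utf-8")
print(type_, encoding)  # -> application/json utf-8
```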
2,209 |
def prepare_request(
    url: Union[str, methods],
    data: Optional[MutableMapping],
    headers: Optional[MutableMapping],
    global_headers: MutableMapping,
    token: str,
    as_json: Optional[bool] = None,
) -> Tuple[str, Union[str, MutableMapping], MutableMapping]:
    if isinstance(url, methods):
        as_json = as_json or url.value[3]
        real_url = url.value[0]
    else:
        real_url = url
        as_json = False
    if not headers:
        headers = {**global_headers}
    else:
        headers = {**global_headers, **headers}
    payload: Optional[Union[str, MutableMapping]] = None
    if real_url.startswith(HOOK_URL) or (real_url.startswith(ROOT_URL) and as_json):
        payload, headers = _prepare_json_request(data, token, headers)
    elif real_url.startswith(ROOT_URL) and not as_json:
        payload = _prepare_form_encoded_request(data, token)
    else:
        real_url = ROOT_URL + real_url
        payload = _prepare_form_encoded_request(data, token)
    return real_url, payload, headers
|
Prepare an outgoing request.
|
2,210 |
def decode_response(status: int, headers: MutableMapping, body: bytes) -> dict:
    data = decode_body(headers, body)
    raise_for_status(status, headers, data)
    raise_for_api_error(headers, data)
    return data
|
Decode an incoming response.
|
2,211 |
def prepare_iter_request(
    url: Union[methods, str],
    data: MutableMapping,
    *,
    iterkey: Optional[str] = None,
    itermode: Optional[str] = None,
    limit: int = 200,
    itervalue: Optional[Union[str, int]] = None,
) -> Tuple[MutableMapping, str, str]:
    itermode, iterkey = find_iteration(url, itermode, iterkey)
    if itermode == "cursor":
        data["limit"] = limit
        if itervalue:
            data["cursor"] = itervalue
    elif itermode == "page":
        data["count"] = limit
        if itervalue:
            data["page"] = itervalue
    elif itermode == "timeline":
        data["count"] = limit
        if itervalue:
            data["latest"] = itervalue
    return data, iterkey, itermode
|
Prepare an outgoing iteration request.
|
2,212 |
def decode_iter_request(data: dict) -> Optional[Union[str, int]]:
    if "response_metadata" in data:
        return data["response_metadata"].get("next_cursor")
    elif "paging" in data:
        current_page = int(data["paging"].get("page", 1))
        max_page = int(data["paging"].get("pages", 1))
        if current_page < max_page:
            return current_page + 1
    elif "has_more" in data and data["has_more"] and "latest" in data:
        return data["messages"][-1]["ts"]
    return None
|
Decode an incoming response from an iteration request.
|
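The three branches above mirror the three pagination styles prepared by prepare_iter_request: cursor, page, and timeline. A quick illustration of the cursor case, assuming the decode_iter_request definition above and a fabricated response payload:

```python
# Cursor-style response: next_cursor drives the following call.
page = {"response_metadata": {"next_cursor": "dXNlcjpVMEc5V0ZYTlo="}, "members": []}
print(decode_iter_request(page))  # -> dXNlcjpVMEc5V0ZYTlo=

# Page-style response: the next page number is returned until pages run out.
page = {"paging": {"page": 2, "pages": 5}}
print(decode_iter_request(page))  # -> 3
```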
2,213 |
def discard_event(event: events.Event, bot_id: str = None) -> bool:
    if event["type"] in SKIP_EVENTS:
        return True
    elif bot_id and isinstance(event, events.Message):
        if event.get("bot_id") == bot_id:
            LOG.debug("Ignoring event: %s", event)
            return True
        elif "message" in event and event["message"].get("bot_id") == bot_id:
            LOG.debug("Ignoring event: %s", event)
            return True
    return False
|
Check if the incoming event needs to be discarded.
|
2,214 |
def validate_request_signature(body: str, headers: MutableMapping, signing_secret: str) -> None:
    request_timestamp = int(headers["X-Slack-Request-Timestamp"])
    if (int(time.time()) - request_timestamp) > (60 * 5):
        raise exceptions.InvalidTimestamp(timestamp=request_timestamp)
    slack_signature = headers["X-Slack-Signature"]
    calculated_signature = (
        "v0="
        + hmac.new(
            signing_secret.encode("utf-8"),
            f"v0:{request_timestamp}:{body}".encode("utf-8"),
            digestmod=hashlib.sha256,
        ).hexdigest()
    )
    if not hmac.compare_digest(slack_signature, calculated_signature):
        raise exceptions.InvalidSlackSignature(slack_signature, calculated_signature)
|
Validate the incoming request signature using the application signing secret.
|
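The f-string in the row above was swallowed by tokenization and has been restored from Slack's documented v2 signing scheme: the base string is "v0:<timestamp>:<body>", HMAC-SHA256'd with the signing secret. A standalone sketch of the same computation (the secret and body are dummy values):

```python
import hashlib
import hmac
import time

signing_secret = "8f742231b10e8888abcd99yyyzzz85a5"  # dummy secret
body = "token=xyz&team_id=T1DC2JH3J"                 # dummy request body
timestamp = int(time.time())

basestring = f"v0:{timestamp}:{body}".encode("utf-8")
signature = "v0=" + hmac.new(
    signing_secret.encode("utf-8"), basestring, digestmod=hashlib.sha256
).hexdigest()
print(signature)  # compare against X-Slack-Signature with hmac.compare_digest
```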
2,215 |
def get_backup_file_time_tag(file_name, custom_prefix="backup"):
    name_string = file_name[len(custom_prefix):]
    time_tag = name_string.split(".", 1)[0]
    return datetime.strptime(time_tag, DATETIME_FORMAT)
|
Returns a datetime object computed from a file name string, with formatting based on DATETIME_FORMAT.
|
2,216 |
def get_download_uri(package_name, version, source, index_url=None):
    tmpdir = None
    force_scan = True
    develop_ok = False
    if not index_url:
        index_url = 'http://cheeseshop.python.org/pypi'
    if version:
        pkg_spec = "%s==%s" % (package_name, version)
    else:
        pkg_spec = package_name
    req = pkg_resources.Requirement.parse(pkg_spec)
    pkg_index = MyPackageIndex(index_url)
    try:
        pkg_index.fetch_distribution(req, tmpdir, force_scan, source, develop_ok)
    except DownloadURI as url:
        clean_url = url.value.split("#")[0]
        if not source and not clean_url.endswith(".egg") and not clean_url.endswith(".EGG"):
            return
        else:
            return clean_url
|
Use setuptools to search for a package's URI.
|
2,217 |
def get_pkglist():
    dists = Distributions()
    projects = []
    for (dist, _active) in dists.get_distributions("all"):
        if dist.project_name not in projects:
            projects.append(dist.project_name)
    return projects
|
Return a list of all installed packages.
|
2,218 |
def register(self, command: str, handler: Any):
    if not command.startswith("/"):
        command = f"/{command}"
    LOG.info("Registering %s to %s", command, handler)
    self._routes[command].append(handler)
|
Register a new handler for a specific slash command.
|
2,219 |
def setpreferredapi(api):
    global __PREFERRED_API
    if __SELECTED_API is not None:
        raise RuntimeError("A Qt api {} was already selected".format(__SELECTED_API))
    if api.lower() not in {"pyqt4", "pyqt5", "pyside", "pyside2"}:
        raise ValueError(api)
    __PREFERRED_API = api.lower()
|
Set the preferred Qt API.
|
2,220 |
def selectapi(api):
    global __SELECTED_API, USED_API
    if api.lower() not in {"pyqt4", "pyqt5", "pyside", "pyside2"}:
        raise ValueError(api)
    if __SELECTED_API is not None and __SELECTED_API.lower() != api.lower():
        raise RuntimeError("A Qt API {} was already selected".format(__SELECTED_API))
    elif __SELECTED_API is None:
        __SELECTED_API = api.lower()
        from . import _api
        USED_API = _api.USED_API
|
Select a Qt API to use.
|
2,221 |
def get_highest_version(versions):
    sorted_versions = []
    for ver in versions:
        sorted_versions.append((pkg_resources.parse_version(ver), ver))
    sorted_versions = sorted(sorted_versions)
    sorted_versions.reverse()
    return sorted_versions[0][1]
|
Returns the highest available version for a package from a list of versions. Uses pkg_resources to parse the versions.
|
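The decorate-sort-undecorate above leans on pkg_resources.parse_version so pre-releases and multi-digit components order correctly where plain string comparison would not. A quick illustration (pkg_resources ships with setuptools):

```python
import pkg_resources

versions = ["0.9", "0.10", "0.10rc1", "1.0"]
# A string sort would rank "0.9" above "0.10"; parse_version gets it right,
# and places the release candidate before the final 0.10.
print(sorted(versions, key=pkg_resources.parse_version))
# -> ['0.9', '0.10rc1', '0.10', '1.0']
print(max(versions, key=pkg_resources.parse_version))  # -> 1.0
```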
2,222 |
def get_distributions(self, show, pkg_name="", version=""):
    for name, dist in self.get_alpha(show, pkg_name, version):
        ver = dist.version
        for package in self.environment[dist.project_name]:
            if ver == package.version:
                if show == "nonactive" and dist not in self.working_set:
                    yield (dist, self.query_activated(dist))
                elif show == "active" and dist in self.working_set:
                    yield (dist, self.query_activated(dist))
                elif show == "all":
                    yield (dist, self.query_activated(dist))
|
Yield installed packages.
|
2,223 |
def get_alpha(self, show, pkg_name="", version=""):
    alpha_list = []
    for dist in self.get_packages(show):
        if pkg_name and dist.project_name != pkg_name:
            pass
        elif version and dist.version != version:
            pass
        else:
            alpha_list.append((dist.project_name + dist.version, dist))
    alpha_list.sort()
    return alpha_list
|
Return a list of alphabetized packages.
|
2,224 |
def get_packages(self, show):
    if show == 'nonactive' or show == "all":
        all_packages = []
        for package in self.environment:
            for i in range(len(self.environment[package])):
                if self.environment[package][i]:
                    all_packages.append(self.environment[package][i])
        return all_packages
    else:
        return self.working_set
|
Return a list of Distributions, filtered by active status or all.
|
2,225 |
def case_sensitive_name(self, package_name):
    if len(self.environment[package_name]):
        return self.environment[package_name][0].project_name
|
Return the case-sensitive package name given an any-case package name.
|
2,226 |
def cache_incr(self, key):
    cache.set(key, cache.get(key, 0) + 1, self.expire_after())
|
Non-atomic cache increment operation. Not optimal, but consistent across different cache backends.
|
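As the docstring admits, the get-then-set above can lose updates under concurrency: two requests may read the same value before either writes. Where the backend supports it, Django's cache.incr is atomic (for example on Memcached). A hedged alternative sketch, assuming a configured Django cache and that missing keys should start at zero:

```python
from django.core.cache import cache

def cache_incr_atomic(key, timeout):
    """Increment `key`, seeding it first when it does not exist yet.

    cache.add only writes when the key is absent, which closes most of
    the race left open by a plain get/set pair on atomic backends.
    """
    cache.add(key, 0, timeout)
    try:
        return cache.incr(key)
    except ValueError:
        # Key expired between add() and incr(); start over.
        cache.set(key, 1, timeout)
        return 1
```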
2,227 |
def call_plugins(plugins, method, *arg, **kw):
    for plug in plugins:
        func = getattr(plug, method, None)
        if func is None:
            continue
        result = func(*arg, **kw)
        if result is not None:
            return result
    return None
|
Call a method on each plugin in the list that defines it, with the provided arguments. The first response that is not None is returned.
|
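A minimal illustration of the first-non-None dispatch above, assuming the call_plugins definition from this row; the two plugin classes are invented for the example:

```python
class PluginA:
    def greet(self, who):
        return None  # declines to handle the call

class PluginB:
    def greet(self, who):
        return f"hello, {who}"

# PluginA answers None, so PluginB's response wins.
print(call_plugins([PluginA(), PluginB()], "greet", "world"))  # -> hello, world
```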
2,228 |
def load_plugins(builtin=True, others=True):
    for entry_point in pkg_resources.iter_entry_points('yolk.plugins'):
        try:
            plugin = entry_point.load()
        except KeyboardInterrupt:
            raise
        except Exception as err_msg:
            warn("Unable to load plugin %s: %s" % (entry_point, err_msg), RuntimeWarning)
            continue
        if plugin.__module__.startswith('yolk.plugins'):
            if builtin:
                yield plugin
        elif others:
            yield plugin
|
Load plugins: either builtin, others, or both.
|
2,229 |
def s3_connect(bucket_name, s3_access_key_id, s3_secret_key):
    conn = connect_s3(s3_access_key_id, s3_secret_key)
    try:
        return conn.get_bucket(bucket_name)
    except S3ResponseError as e:
        if e.status == 403:
            raise Exception("Bad Amazon S3 credentials.")
        raise
|
Returns a Boto connection to the provided S3 bucket.
|
2,230 |
def s3_list(s3_bucket, s3_access_key_id, s3_secret_key, prefix=None):
    bucket = s3_connect(s3_bucket, s3_access_key_id, s3_secret_key)
    return sorted([key.name for key in bucket.list()
                   if key.name.endswith(".tbz")
                   and (prefix is None or key.name.startswith(prefix))])
|
Lists the contents of the S3 bucket that end in .tbz and match the passed prefix, if any.
|
2,231 |
def s3_download(output_file_path, s3_bucket, s3_access_key_id, s3_secret_key,
                s3_file_key=None, prefix=None):
    bucket = s3_connect(s3_bucket, s3_access_key_id, s3_secret_key)
    if not s3_file_key:
        keys = s3_list(s3_bucket, s3_access_key_id, s3_secret_key, prefix)
        if not keys:
            raise Exception("Target S3 bucket is empty")
        s3_file_key = keys[-1]
    key = Key(bucket, s3_file_key)
    with open(output_file_path, "w+") as f:
        f.write(key.read())
|
Downloads the file matching the provided key, in the provided bucket, from Amazon S3. If s3_file_key is None, it downloads the last file from the provided bucket with the .tbz extension, filtering by prefix if it is provided.
|
2,232 |
def s3_upload(source_file_path, bucket_name, s3_access_key_id, s3_secret_key):
    key = s3_key(bucket_name, s3_access_key_id, s3_secret_key)
    file_name = source_file_path.split("/")[-1]
    key.key = file_name
    if key.exists():
        raise Exception("s3 key %s already exists for current period." % (file_name))
    key.set_contents_from_filename(source_file_path)
|
Uploads to Amazon S3 the contents of the provided file, keyed with the name of the file.
|
2,233 |
def validate_pypi_opts(opt_parser):
    (options, remaining_args) = opt_parser.parse_args()
    options_pkg_specs = [
        options.versions_available,
        options.query_metadata_pypi,
        options.show_download_links,
        options.browse_website,
        options.fetch,
        options.show_deps,
    ]
    for pkg_spec in options_pkg_specs:
        if pkg_spec:
            return pkg_spec
|
Check parsed options that require a pkg_spec.
|
2,234 |
def write(self, inline):
    frame = inspect.currentframe().f_back
    if frame:
        mod = frame.f_globals.get('__name__')
    else:
        mod = sys._getframe(0).f_globals.get('__name__')
    if not mod in self.modulenames:
        self.stdout.write(inline)
|
Write a line to stdout if it isn't in a blacklist.
|
2,235 |
def get_plugin(self, method):
    all_plugins = []
    for entry_point in pkg_resources.iter_entry_points('yolk.plugins'):
        plugin_obj = entry_point.load()
        plugin = plugin_obj()
        plugin.configure(self.options, None)
        if plugin.enabled:
            if not hasattr(plugin, method):
                self.logger.warn("Error: plugin has no method: %s" % method)
                plugin = None
            else:
                all_plugins.append(plugin)
    return all_plugins
|
Return plugin objects if the CLI option is activated and the method exists.
|
2,236 |
def set_log_level(self):
    if self.options.debug:
        self.logger.setLevel(logging.DEBUG)
    elif self.options.quiet:
        self.logger.setLevel(logging.ERROR)
    else:
        self.logger.setLevel(logging.INFO)
    self.logger.addHandler(logging.StreamHandler())
    return self.logger
|
Set the log level according to command-line options.
|
2,237 |
def run(self):
    opt_parser = setup_opt_parser()
    (self.options, remaining_args) = opt_parser.parse_args()
    logger = self.set_log_level()
    pkg_spec = validate_pypi_opts(opt_parser)
    if not pkg_spec:
        pkg_spec = remaining_args
    self.pkg_spec = pkg_spec
    if not self.options.pypi_search and (len(sys.argv) == 1 or len(remaining_args) > 2):
        opt_parser.print_help()
        return 2
    if self.options.show_deps or self.options.show_all or self.options.show_active \
            or self.options.show_non_active or (self.options.show_updates and pkg_spec):
        want_installed = True
    else:
        want_installed = False
    if not want_installed or self.options.show_updates:
        self.pypi = CheeseShop(self.options.debug)
    if pkg_spec:
        (self.project_name, self.version, self.all_versions) = self.parse_pkg_ver(want_installed)
        if want_installed and not self.project_name:
            logger.error("%s is not installed." % pkg_spec[0])
            return 1
    commands = ['show_deps', 'query_metadata_pypi', 'fetch', 'versions_available',
                'show_updates', 'browse_website', 'show_download_links', 'pypi_search',
                'show_pypi_changelog', 'show_pypi_releases', 'yolk_version', 'show_all',
                'show_active', 'show_non_active', 'show_entry_map', 'show_entry_points']
    for action in commands:
        if getattr(self.options, action):
            return getattr(self, action)()
    opt_parser.print_help()
|
Perform actions based on CLI options.
|
2,238 |
def show_updates(self):
    dists = Distributions()
    if self.project_name:
        pkg_list = [self.project_name]
    else:
        pkg_list = get_pkglist()
    found = None
    for pkg in pkg_list:
        for (dist, active) in dists.get_distributions("all", pkg, dists.get_highest_installed(pkg)):
            (project_name, versions) = self.pypi.query_versions_pypi(dist.project_name)
            if versions:
                newest = get_highest_version(versions)
                if newest != dist.version:
                    if pkg_resources.parse_version(dist.version) < pkg_resources.parse_version(newest):
                        found = True
                        print(" %s %s (%s)" % (project_name, dist.version, newest))
    if not found and self.project_name:
        self.logger.info("You have the latest version installed.")
    elif not found:
        self.logger.info("No newer packages found at The Cheese Shop")
    return 0
|
Check installed packages for available updates on PyPI.
|
2,239 |
def show_distributions(self, show):
    show_metadata = self.options.metadata
    plugins = self.get_plugin("add_column")
    ignores = ["/UNIONFS", "/KNOPPIX.IMG"]
    workingenv = os.environ.get('WORKING_ENV')
    if workingenv:
        ignores.append(workingenv)
    dists = Distributions()
    results = None
    for (dist, active) in dists.get_distributions(show, self.project_name, self.version):
        metadata = get_metadata(dist)
        for prefix in ignores:
            if dist.location.startswith(prefix):
                dist.location = dist.location.replace(prefix, "")
        if dist.location.lower().startswith(get_python_lib().lower()):
            develop = ""
        else:
            develop = dist.location
        if metadata:
            add_column_text = ""
            for my_plugin in plugins:
                add_column_text += my_plugin.add_column(dist) + " "
            self.print_metadata(metadata, develop, active, add_column_text)
        else:
            print(str(dist) + " has no metadata")
        results = True
    if not results and self.project_name:
        if self.version:
            pkg_spec = "%s==%s" % (self.project_name, self.version)
        else:
            pkg_spec = "%s" % self.project_name
        if show == "all":
            self.logger.error("There are no versions of %s installed." % pkg_spec)
        else:
            self.logger.error("There are no %s versions of %s installed." % (show, pkg_spec))
        return 2
    elif show == "all" and results and self.options.fields:
        print("Versions with '*' are non-active.")
        print("Versions with '!' are deployed in development mode.")
|
Show a list of installed, activated OR non-activated packages.
|
2,240 |
def show_pypi_changelog(self):
    hours = self.options.show_pypi_changelog
    if not hours.isdigit():
        self.logger.error("Error: You must supply an integer.")
        return 1
    try:
        changelog = self.pypi.changelog(int(hours))
    except XMLRPCFault as err_msg:
        self.logger.error(err_msg)
        self.logger.error("ERROR: Couldn't retrieve changelog.")
        return 1
    last_pkg = ''
    for entry in changelog:
        pkg = entry[0]
        if pkg != last_pkg:
            print("%s %s\n\t%s" % (entry[0], entry[1], entry[3]))
            last_pkg = pkg
        else:
            print("\t%s" % entry[3])
    return 0
|
Show a detailed PyPI ChangeLog for the last N hours.
|
2,241 |
def show_pypi_releases(self):
    try:
        hours = int(self.options.show_pypi_releases)
    except ValueError:
        self.logger.error("ERROR: You must supply an integer.")
        return 1
    try:
        latest_releases = self.pypi.updated_releases(hours)
    except XMLRPCFault as err_msg:
        self.logger.error(err_msg)
        self.logger.error("ERROR: Couldn't retrieve latest releases.")
        return 1
    for release in latest_releases:
        print("%s %s" % (release[0], release[1]))
    return 0
|
Show PyPI releases for the last number of hours.
|
2,242 |
def fetch(self):
    source = True
    directory = "."
    if self.options.file_type == "svn":
        version = "dev"
        svn_uri = get_download_uri(self.project_name, "dev", True)
        if svn_uri:
            directory = self.project_name + "_svn"
            return self.fetch_svn(svn_uri, directory)
        else:
            self.logger.error("ERROR: No subversion repository found for %s" % self.project_name)
            return 1
    elif self.options.file_type == "source":
        source = True
    elif self.options.file_type == "egg":
        source = False
    uri = get_download_uri(self.project_name, self.version, source)
    if uri:
        return self.fetch_uri(directory, uri)
    else:
        self.logger.error("No %s URI found for package: %s " % (self.options.file_type, self.project_name))
        return 1
|
Download a package.
|
2,243 |
def fetch_uri(self, directory, uri):
    filename = os.path.basename(urlparse(uri)[2])
    if os.path.exists(filename):
        self.logger.error("ERROR: File exists: " + filename)
        return 1
    try:
        downloaded_filename, headers = urlretrieve(uri, filename)
        self.logger.info("Downloaded ./" + filename)
    except IOError as err_msg:
        self.logger.error("Error downloading package %s from URL %s" % (filename, uri))
        self.logger.error(str(err_msg))
        return 1
    if headers.gettype() in ["text/html"]:
        dfile = open(downloaded_filename)
        if re.search("404 Not Found", "".join(dfile.readlines())):
            dfile.close()
            self.logger.error("'404 Not Found' error")
            return 1
        dfile.close()
    return 0
|
Use urllib.urlretrieve to download a package to a file in the sandbox dir.
|
2,244 |
def fetch_svn(self, svn_uri, directory):
    if not command_successful("svn --version"):
        self.logger.error("ERROR: Do you have subversion installed?")
        return 1
    if os.path.exists(directory):
        self.logger.error("ERROR: Checkout directory exists - %s" % directory)
        return 1
    try:
        os.mkdir(directory)
    except OSError as err_msg:
        self.logger.error("ERROR: " + str(err_msg))
        return 1
    cwd = os.path.realpath(os.curdir)
    os.chdir(directory)
    self.logger.info("Doing subversion checkout for %s" % svn_uri)
    status, output = run_command("/usr/bin/svn co %s" % svn_uri)
    self.logger.info(output)
    os.chdir(cwd)
    self.logger.info("subversion checkout is in directory './%s'" % directory)
    return 0
|
Fetch a subversion repository.
|
2,245 |
def browse_website(self, browser=None):
    if len(self.all_versions):
        metadata = self.pypi.release_data(self.project_name, self.all_versions[0])
        self.logger.debug("DEBUG: browser: %s" % browser)
        if metadata.has_key("home_page"):
            self.logger.info("Launching browser: %s" % metadata["home_page"])
            if browser == 'konqueror':
                browser = webbrowser.Konqueror()
            else:
                browser = webbrowser.get()
            browser.open(metadata["home_page"], 2)
            return 0
    self.logger.error("No homepage URL found.")
    return 1
|
Launch a web browser at the project's homepage.
|
2,246 |
def query_metadata_pypi(self):
    if self.version and self.version in self.all_versions:
        metadata = self.pypi.release_data(self.project_name, self.version)
    else:
        metadata = self.pypi.release_data(self.project_name, self.all_versions[0])
    if metadata:
        for key in metadata.keys():
            if not self.options.fields or (self.options.fields and self.options.fields == key):
                print("%s: %s" % (key, metadata[key]))
    return 0
|
Show pkg metadata queried from PyPI.
|
2,247 |
def versions_available(self):
    if self.version:
        spec = "%s==%s" % (self.project_name, self.version)
    else:
        spec = self.project_name
    if self.all_versions and self.version in self.all_versions:
        print_pkg_versions(self.project_name, [self.version])
    elif not self.version and self.all_versions:
        print_pkg_versions(self.project_name, self.all_versions)
    else:
        if self.version:
            self.logger.error("No package found for version %s" % self.version)
        else:
            self.logger.error("No package found for %s" % self.project_name)
        return 1
    return 0
|
Query PyPI for a particular version or all versions of a package.
|
2,248 |
def show_entry_map(self):
    pprinter = pprint.PrettyPrinter()
    try:
        entry_map = pkg_resources.get_entry_map(self.options.show_entry_map)
        if entry_map:
            pprinter.pprint(entry_map)
    except pkg_resources.DistributionNotFound:
        self.logger.error("Distribution not found: %s" % self.options.show_entry_map)
        return 1
    return 0
|
Show the entry map for a package.
|
2,249 |
def show_entry_points(self):
    found = False
    for entry_point in pkg_resources.iter_entry_points(self.options.show_entry_points):
        found = True
        try:
            plugin = entry_point.load()
            print(plugin.__module__)
            print(" %s" % entry_point)
            if plugin.__doc__:
                print(plugin.__doc__)
            print()
        except ImportError:
            pass
    if not found:
        self.logger.error("No entry points found for %s" % self.options.show_entry_points)
        return 1
    return 0
|
Show entry points for a module.
|
2,250 |
def parse_pkg_ver(self, want_installed):
    all_versions = []
    arg_str = "".join(self.pkg_spec)
    if "==" not in arg_str:
        project_name = arg_str
        version = None
    else:
        (project_name, version) = arg_str.split("==")
        project_name = project_name.strip()
        version = version.strip()
    if want_installed:
        dists = Distributions()
        project_name = dists.case_sensitive_name(project_name)
    else:
        (project_name, all_versions) = self.pypi.query_versions_pypi(project_name)
        if not len(all_versions):
            msg = "I'm afraid we have no '%s' at " % project_name
            msg += "The Cheese Shop. A little Red Leicester, perhaps?"
            self.logger.error(msg)
            sys.exit(2)
    return (project_name, version, all_versions)
|
Return a tuple with project_name and version from the CLI args. If the user gave the wrong case for the project name, this corrects it.
|
2,251 |
def install_backport_hook(api):
    if api == USED_API:
        raise ValueError
    sys.meta_path.insert(0, ImportHookBackport(api))
|
Install a backport import hook for the Qt4 api.
|
2,252 |
def install_deny_hook(api):
    if api == USED_API:
        raise ValueError
    sys.meta_path.insert(0, ImportHookDeny(api))
|
Install a deny import hook for a Qt api.
|
2,253 |
def run_command(cmd, env=None, max_timeout=None):
    arglist = cmd.split()
    output = os.tmpfile()
    try:
        pipe = Popen(arglist, stdout=output, stderr=STDOUT, env=env)
    except Exception as errmsg:
        return 1, errmsg
    if max_timeout:
        start = time.time()
        while pipe.poll() is None:
            time.sleep(0.1)
            if time.time() - start > max_timeout:
                os.kill(pipe.pid, signal.SIGINT)
                pipe.wait()
                return 1, "Time exceeded"
    pipe.wait()
    output.seek(0)
    return pipe.returncode, output.read()
|
Run a command and return its return status code and its output.
|
2,254 |
async def iter(
    self,
    url: Union[str, methods],
    data: Optional[MutableMapping] = None,
    headers: Optional[MutableMapping] = None,
    *,
    limit: int = 200,
    iterkey: Optional[str] = None,
    itermode: Optional[str] = None,
    minimum_time: Optional[int] = None,
    as_json: Optional[bool] = None
) -> AsyncIterator[dict]:
    itervalue = None
    if not data:
        data = {}
    last_request_time = None
    while True:
        current_time = time.time()
        if (minimum_time and last_request_time
                and last_request_time + minimum_time > current_time):
            await self.sleep(last_request_time + minimum_time - current_time)
        data, iterkey, itermode = sansio.prepare_iter_request(
            url, data,
            iterkey=iterkey,
            itermode=itermode,
            limit=limit,
            itervalue=itervalue,
        )
        last_request_time = time.time()
        response_data = await self.query(url, data, headers, as_json)
        itervalue = sansio.decode_iter_request(response_data)
        for item in response_data[iterkey]:
            yield item
        if not itervalue:
            break
|
Iterate over a Slack API method supporting pagination.
|
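Calling the paginated iterator would look roughly like the sketch below. The client construction is omitted and the method constant follows the slack-sansio style of passing a methods enum member; treat both as assumptions for illustration:

```python
import asyncio

async def list_all_members(client):
    # Collect every user across however many pages the API returns;
    # iter() handles cursors/pages internally and just yields items.
    users = []
    async for user in client.iter(methods.USERS_LIST):  # assumes a slack-sansio client
        users.append(user["name"])
    return users

# asyncio.run(list_all_members(client))  # client setup omitted
```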
2,255 |
async def _incoming_from_rtm(self, url: str, bot_id: str) -> AsyncIterator[events.Event]:
    async for data in self._rtm(url):
        event = events.Event.from_rtm(json.loads(data))
        if sansio.need_reconnect(event):
            break
        elif sansio.discard_event(event, bot_id):
            continue
        else:
            yield event
|
Connect to the RTM API and discard incoming events when necessary.
|
2,256 |
def package_manager_owns(self, dist):
    if dist.location.lower() == get_python_lib().lower():
        filename = os.path.join(dist.location, dist.egg_name() + ".egg-info")
    else:
        filename = dist.location
    status, output = getstatusoutput("/usr/bin/acmefile -q %s" % filename)
    if status == 0:
        return self.name
    else:
        return ""
|
Returns True if the package manager owns the file; returns False if it does not.
|
2,257 |
def filter_url(pkg_type, url):
    bad_stuff = ["?modtime", "#md5="]
    for junk in bad_stuff:
        if junk in url:
            url = url.split(junk)[0]
            break
    if url.endswith("-dev"):
        url = url.split("#egg=")[0]
    if pkg_type == "all":
        return url
    elif pkg_type == "source":
        valid_source_types = [".tgz", ".tar.gz", ".zip", ".tbz2", ".tar.bz2"]
        for extension in valid_source_types:
            if url.lower().endswith(extension):
                return url
    elif pkg_type == "egg":
        if url.lower().endswith(".egg"):
            return url
|
Returns the URL of the specified file type: source, egg, or all.
|
2,258 |
def request(self, host, handler, request_body, verbose):
    self.verbose = verbose
    url = 'http://' + host + handler
    request = urllib2.Request(url)
    request.add_data(request_body)
    request.add_header('User-Agent', self.user_agent)
    request.add_header('Content-Type', 'text/xml')
    proxy_handler = urllib2.ProxyHandler()
    opener = urllib2.build_opener(proxy_handler)
    fhandle = opener.open(request)
    return self.parse_response(fhandle)
|
Send an xml-rpc request using a proxy.
|
2,259 |
def get_cache(self):
    if self.no_cache:
        self.pkg_list = self.list_packages()
        return
    if not os.path.exists(self.yolk_dir):
        os.mkdir(self.yolk_dir)
    if os.path.exists(self.pkg_cache_file):
        self.pkg_list = self.query_cached_package_list()
    else:
        self.logger.debug("DEBUG: Fetching package list cache from PyPi...")
        self.fetch_pkg_list()
|
Get a package name list from the disk cache or PyPI.
|
2,260 |
def get_xmlrpc_server(self):
    check_proxy_setting()
    if os.environ.has_key('XMLRPC_DEBUG'):
        debug = 1
    else:
        debug = 0
    try:
        return xmlrpclib.Server(XML_RPC_SERVER, transport=ProxyTransport(), verbose=debug)
    except IOError:
        self.logger("ERROR: Can't connect to XML-RPC server: %s" % XML_RPC_SERVER)
|
Returns PyPI's XML-RPC server instance.
|
2,261 |
def query_versions_pypi(self, package_name):
    if not package_name in self.pkg_list:
        self.logger.debug("Package %s not in cache, querying PyPI..." % package_name)
        self.fetch_pkg_list()
    versions = []
    for pypi_pkg in self.pkg_list:
        if pypi_pkg.lower() == package_name.lower():
            if self.debug:
                self.logger.debug("DEBUG: %s" % package_name)
            versions = self.package_releases(pypi_pkg)
            package_name = pypi_pkg
            break
    return (package_name, versions)
|
Fetch the list of available versions for a package from The CheeseShop.
|
2,262 |
def query_cached_package_list(self):
    if self.debug:
        self.logger.debug("DEBUG: reading pickled cache file")
    return cPickle.load(open(self.pkg_cache_file, "r"))
|
Return the list of pickled package names from PyPI.
|
2,263 |
def fetch_pkg_list(self):
    self.logger.debug("DEBUG: Fetching package name list from PyPI")
    package_list = self.list_packages()
    cPickle.dump(package_list, open(self.pkg_cache_file, "w"))
    self.pkg_list = package_list
|
Fetch and cache the master list of package names from PyPI.
|
2,264 |
def search(self, spec, operator):
    return self.xmlrpc.search(spec, operator.lower())
|
Query PyPI via the XML-RPC interface using a search spec.
|
2,265 |
def release_data(self, package_name, version):
    try:
        return self.xmlrpc.release_data(package_name, version)
    except xmlrpclib.Fault:
        return
|
Query PyPI via the XML-RPC interface for a pkg's metadata.
|
2,266 |
def package_releases(self, package_name):
    if self.debug:
        self.logger.debug("DEBUG: querying PyPI for versions of " + package_name)
    return self.xmlrpc.package_releases(package_name)
|
Query PyPI via the XML-RPC interface for a pkg's available versions.
|
2,267 |
def clone(self) -> "Event":
    return self.__class__(copy.deepcopy(self.event), copy.deepcopy(self.metadata))
|
Clone the event.
|
2,268 |
def from_rtm(cls, raw_event: MutableMapping) -> "Event":
    if raw_event["type"].startswith("message"):
        return Message(raw_event)
    else:
        return Event(raw_event)
|
Create an event with data coming from the RTM API.
|
2,269 |
def from_http(
    cls,
    raw_body: MutableMapping,
    verification_token: Optional[str] = None,
    team_id: Optional[str] = None,
) -> "Event":
    if verification_token and raw_body["token"] != verification_token:
        raise exceptions.FailedVerification(raw_body["token"], raw_body["team_id"])
    if team_id and raw_body["team_id"] != team_id:
        raise exceptions.FailedVerification(raw_body["token"], raw_body["team_id"])
    if raw_body["event"]["type"].startswith("message"):
        return Message(raw_body["event"], metadata=raw_body)
    else:
        return Event(raw_body["event"], metadata=raw_body)
|
Create an event with data coming from the HTTP Events API.
|
2,270 |
def response(self, in_thread: Optional[bool] = None) -> "Message":
    data = {"channel": self["channel"]}
    if in_thread:
        if "message" in self:
            data["thread_ts"] = self["message"].get("thread_ts") or self["message"]["ts"]
        else:
            data["thread_ts"] = self.get("thread_ts") or self["ts"]
    elif in_thread is None:
        if "message" in self and "thread_ts" in self["message"]:
            data["thread_ts"] = self["message"]["thread_ts"]
        elif "thread_ts" in self:
            data["thread_ts"] = self["thread_ts"]
    return Message(data)
|
Create a response message.
|
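A rough usage sketch for response: given an incoming Message, build a reply and force it into the originating thread. The incoming event dict is fabricated for the example, and this assumes the Message class from the surrounding rows behaves as a mutable mapping:

```python
incoming = Message({"channel": "C012AB3CD", "ts": "1564067471.000200", "text": "hi"})

reply = incoming.response(in_thread=True)
reply["text"] = "hello back"
# reply now carries channel and thread_ts, ready for chat.postMessage.
print(reply["channel"], reply["thread_ts"])
```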
2,271 |
def serialize(self) -> dict:
    data = {**self}
    if "attachments" in self:
        data["attachments"] = json.dumps(self["attachments"])
    return data
|
Serialize the message for sending to the Slack API.
|
2,272 |
def query(
    self,
    url: Union[str, methods],
    data: Optional[MutableMapping] = None,
    headers: Optional[MutableMapping] = None,
    as_json: Optional[bool] = None,
) -> dict:
    url, body, headers = sansio.prepare_request(
        url=url,
        data=data,
        headers=headers,
        global_headers=self._headers,
        token=self._token,
    )
    return self._make_query(url, body, headers)
|
Query the Slack API.
|
2,273 |
def rtm(self, url: Optional[str] = None, bot_id: Optional[str] = None) -> Iterator[events.Event]:
    while True:
        bot_id = bot_id or self._find_bot_id()
        url = url or self._find_rtm_url()
        for event in self._incoming_from_rtm(url, bot_id):
            yield event
        url = None
|
Iterate over events from the RTM API.
|
2,274 |
def get_config(config_file):
    def load(fp):
        try:
            return yaml.safe_load(fp)
        except yaml.YAMLError as e:
            sys.stderr.write(text_type(e))
            sys.exit(1)
    if config_file == '-':
        return load(sys.stdin)
    if not os.path.exists(config_file):
        sys.stderr.write('ERROR: Must either run next to config.yaml or'
                         ' specify a config file.\n' + __doc__)
        sys.exit(2)
    with open(config_file) as fp:
        return load(fp)
|
Get configuration from a file.
|
2,275 |
def get_options(config_options, local_options, cli_options):
    options = DEFAULT_OPTIONS.copy()
    if config_options is not None:
        options.update(config_options)
    if local_options is not None:
        options.update(local_options)
    if cli_options is not None:
        options.update(cli_options)
    return options
|
Figure out which options to use, based on the four places they can come from.
|
2,276 |
def output_results(results, metric, options):
    formatter = options['Formatter']
    context = metric.copy()
    try:
        context['dimension'] = list(metric['Dimensions'].values())[0]
    except AttributeError:
        context['dimension'] = ''
    for result in results:
        stat_keys = metric['Statistics']
        if not isinstance(stat_keys, list):
            stat_keys = [stat_keys]
        for statistic in stat_keys:
            context['statistic'] = statistic
            context['Unit'] = result['Unit']
            metric_name = (formatter % context).replace('/', '.').lower()
            line = '{0} {1} {2}\n'.format(
                metric_name,
                result[statistic],
                timegm(result['Timestamp'].timetuple()),
            )
            sys.stdout.write(line)
|
Output the results to stdout.
|
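The emitted lines follow Graphite's plaintext protocol: "metric value unix_timestamp". A small self-contained sketch of one such line, with a fabricated CloudWatch-style datapoint shaped like boto's GetMetricStatistics output:

```python
from calendar import timegm
from datetime import datetime

# One fabricated datapoint, for illustration only.
result = {"Unit": "Count", "Average": 42.0, "Timestamp": datetime(2019, 1, 1)}
metric_name = "cloudwatch.ec2.cpu.average"  # hypothetical formatted name

print("{0} {1} {2}".format(
    metric_name, result["Average"], timegm(result["Timestamp"].timetuple())))
# -> cloudwatch.ec2.cpu.average 42.0 1546300800
```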
2,277 |
def download_to_path(self, gsuri, localpath, binary_mode=False, tmpdir=None):
    bucket_name, gs_rel_path = self.parse_uri(gsuri)
    bucket = self._client.get_bucket(bucket_name)
    ablob = bucket.get_blob(gs_rel_path)
    if not ablob:
        raise GoogleStorageIOError("No such file on Google Storage: '{}'".format(gs_rel_path))
    tmp_fid, tmp_file_path = tempfile.mkstemp(text=(not binary_mode), dir=tmpdir)
    ablob.chunk_size = 1 << 30
    ablob.download_to_filename(client=self._client, filename=tmp_file_path)
    return os.rename(tmp_file_path, localpath)
|
This method is analogous to gsutil cp gsuri localpath, but in a programmatically accessible way. The only difference is that we have to make a guess about the encoding of the file to not upset downstream file operations. If you are downloading a VCF, then False is great. If this is a BAM file you are asking for, you should enable binary_mode to make sure the file doesn't get corrupted.
|
2,278 |
def full_name(self):
    formatted_user = []
    if self.user.first_name is not None:
        formatted_user.append(self.user.first_name)
    if self.user.last_name is not None:
        formatted_user.append(self.user.last_name)
    return " ".join(formatted_user)
|
Returns the first and last name of the user, separated by a space.
|
2,279 |
def use_defaults(func):
    @wraps(func)
    def wrapper(row, cohort, filter_fn=None, normalized_per_mb=None, **kwargs):
        filter_fn = first_not_none_param([filter_fn, cohort.filter_fn], no_filter)
        normalized_per_mb = first_not_none_param(
            [normalized_per_mb, cohort.normalized_per_mb], False)
        return func(row=row, cohort=cohort, filter_fn=filter_fn,
                    normalized_per_mb=normalized_per_mb, **kwargs)
    return wrapper
|
Decorator for functions that should automatically fall back to the Cohort-default filter_fn and normalized_per_mb if not specified.
|
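The decorator above wraps per-row cohort functions so callers may omit filter_fn and normalized_per_mb. A hypothetical decorated function might look like the sketch below; the names mirror the dataset code, but missense_snv_count itself is invented for illustration:

```python
@use_defaults
def missense_snv_count(row, cohort, filter_fn=None, normalized_per_mb=None, **kwargs):
    # By the time this body runs, filter_fn and normalized_per_mb have been
    # filled in from the cohort defaults whenever the caller left them as None.
    variants = cohort.load_variants(
        patients=[cohort.patient_from_id(row["patient_id"])],
        filter_fn=filter_fn,
        **kwargs)
    return sum(len(v) for v in variants.values())
```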
2,280 |
def count_variants_function_builder(function_name, filterable_variant_function=None):
    @count_function
    def count(row, cohort, filter_fn, normalized_per_mb, **kwargs):
        def count_filter_fn(filterable_variant, **kwargs):
            assert filter_fn is not None, "filter_fn should never be None, but it is."
            return ((filterable_variant_function(filterable_variant)
                     if filterable_variant_function is not None else True)
                    and filter_fn(filterable_variant, **kwargs))
        patient_id = row["patient_id"]
        return cohort.load_variants(
            patients=[cohort.patient_from_id(patient_id)],
            filter_fn=count_filter_fn,
            **kwargs)
    count.__name__ = function_name
    count.__doc__ = (str("".join(inspect.getsourcelines(filterable_variant_function)[0]))
                     if filterable_variant_function is not None else "")
    return count
|
Creates a function that counts variants that are filtered by the provided filterable_variant_function. The filterable_variant_function is a function that takes a filterable_variant and returns True or False.
|
2,281 |
def count_effects_function_builder(function_name, only_nonsynonymous, filterable_effect_function=None):
    @count_function
    def count(row, cohort, filter_fn, normalized_per_mb, **kwargs):
        def count_filter_fn(filterable_effect, **kwargs):
            assert filter_fn is not None, "filter_fn should never be None, but it is."
            return ((filterable_effect_function(filterable_effect)
                     if filterable_effect_function is not None else True)
                    and filter_fn(filterable_effect, **kwargs))
        patient_id = row["patient_id"]
        return cohort.load_effects(
            only_nonsynonymous=only_nonsynonymous,
            patients=[cohort.patient_from_id(patient_id)],
            filter_fn=count_filter_fn,
            **kwargs)
    count.__name__ = function_name
    count.__doc__ = ((("only_nonsynonymous=%s\n" % only_nonsynonymous)
                      + str("".join(inspect.getsourcelines(filterable_effect_function)[0])))
                     if filterable_effect_function is not None else "")
    count.only_nonsynonymous = only_nonsynonymous
    count.filterable_effect_function = filterable_effect_function
    return count
|
Create a function that counts effects that are filtered by the provided filterable_effect_function. The filterable_effect_function is a function that takes a filterable_effect and returns True or False.
|
2,282 |
def bootstrap_auc(df, col, pred_col, n_bootstrap=1000):
    scores = np.zeros(n_bootstrap)
    old_len = len(df)
    df.dropna(subset=[col], inplace=True)
    new_len = len(df)
    if new_len < old_len:
        logger.info("Dropping NaN values in %s to go from %d to %d rows" % (col, old_len, new_len))
    preds = df[pred_col].astype(int)
    for i in range(n_bootstrap):
        sampled_counts, sampled_pred = resample(df[col], preds)
        if is_single_class(sampled_pred, col=pred_col):
            continue
        scores[i] = roc_auc_score(sampled_pred, sampled_counts)
    return scores
|
Calculate the bootstrapped AUC for a given col trying to predict a pred_col.
|
2,283 |
def new_worker(self, name: str):
    if not self.running:
        return self.immediate_worker
    worker = self._new_worker(name)
    self._start_worker(worker)
    return worker
|
Creates a new Worker and starts a new Thread with it. Returns the Worker.
|
2,284 |
def new_worker_pool(self, name: str,
                    min_workers: int = 0,
                    max_workers: int = 1,
                    max_seconds_idle: int = DEFAULT_WORKER_POOL_MAX_SECONDS_IDLE):
    if not self.running:
        return self.immediate_worker
    worker = self._new_worker_pool(name, min_workers, max_workers, max_seconds_idle)
    self._start_worker_pool(worker)
    return worker
|
Creates a new worker pool and starts it. Returns the Worker that schedules work to the pool.
|
2,285 |
def as_dataframe(self, on=None, join_with=None, join_how=None,
                 return_cols=False, rename_cols=False,
                 keep_paren_contents=True, **kwargs):
    df = self._as_dataframe_unmodified(join_with=join_with, join_how=join_how)
    if on is None:
        return DataFrameHolder.return_obj(None, df, return_cols)
    if type(on) == str:
        return DataFrameHolder.return_obj(on, df, return_cols)

    def apply_func(on, col, df):
        on_argnames = on.__code__.co_varnames
        if "cohort" not in on_argnames:
            func = lambda row: on(row=row, **kwargs)
        else:
            func = lambda row: on(row=row, cohort=self, **kwargs)
        if self.show_progress:
            tqdm.pandas(desc=col)
            df[col] = df.progress_apply(func, axis=1)
        else:
            df[col] = df.apply(func, axis=1)
        return DataFrameHolder(col, df)

    def func_name(func, num=0):
        return func.__name__ if not is_lambda(func) else "column_%d" % num

    def is_lambda(func):
        return func.__name__ == (lambda: None).__name__

    if type(on) == FunctionType:
        return apply_func(on, func_name(on), df).return_self(return_cols)
    if len(kwargs) > 0:
        logger.warning("Note: kwargs used with multiple functions; passing them to all functions")
    if type(on) == dict:
        cols = []
        for key, value in on.items():
            if type(value) == str:
                df[key] = df[value]
                col = key
            elif type(value) == FunctionType:
                col, df = apply_func(on=value, col=key, df=df)
            else:
                raise ValueError("A value of `on`, %s, is not a str or function" % str(value))
            cols.append(col)
    if type(on) == list:
        cols = []
        for i, elem in enumerate(on):
            if type(elem) == str:
                col = elem
            elif type(elem) == FunctionType:
                col = func_name(elem, i)
                col, df = apply_func(on=elem, col=col, df=df)
            cols.append(col)
    if rename_cols:
        rename_dict = _strip_column_names(df.columns, keep_paren_contents=keep_paren_contents)
        df.rename(columns=rename_dict, inplace=True)
        cols = [rename_dict[col] for col in cols]
    return DataFrameHolder(cols, df).return_self(return_cols)
|
Return this Cohort as a DataFrame, and optionally include additional columns using on.
|
2,286 |
def load_dataframe(self, df_loader_name):
    logger.debug("loading dataframe: {}".format(df_loader_name))
    df_loaders = [df_loader for df_loader in self.df_loaders
                  if df_loader.name == df_loader_name]
    if len(df_loaders) == 0:
        raise ValueError("No DataFrameLoader with name %s" % df_loader_name)
    if len(df_loaders) > 1:
        raise ValueError("Multiple DataFrameLoaders with name %s" % df_loader_name)
    return df_loaders[0].load_dataframe()
|
Instead of joining a DataFrameJoiner with the Cohort in as_dataframe, sometimes we may want to just directly load a particular DataFrame.
|
2,287 |
def _get_function_name(self, fn, default="None"):
    if fn is None:
        fn_name = default
    else:
        fn_name = fn.__name__
    return fn_name
|
Return the name of a function, using a default value if the function is not defined.
|
2,288 |
def load_variants(self, patients=None, filter_fn=None, **kwargs):
    filter_fn = first_not_none_param([filter_fn, self.filter_fn], no_filter)
    filter_fn_name = self._get_function_name(filter_fn)
    logger.debug("loading variants with filter_fn: {}".format(filter_fn_name))
    patient_variants = {}
    for patient in self.iter_patients(patients):
        variants = self._load_single_patient_variants(patient, filter_fn, **kwargs)
        if variants is not None:
            patient_variants[patient.id] = variants
    return patient_variants
|
Load a dictionary of patient_id to varcode.VariantCollection.
|
2,289 |
def _hash_filter_fn(self, filter_fn, **kwargs):
    filter_fn_name = self._get_function_name(filter_fn, default="filter-none")
    logger.debug("Computing hash for filter_fn: {} with kwargs {}".format(
        filter_fn_name, str(dict(**kwargs))))
    fn_source = str(dill.source.getsource(filter_fn))
    pickled_fn_source = pickle.dumps(fn_source)
    hashed_fn_source = int(hashlib.sha1(pickled_fn_source).hexdigest(), 16) % (10 ** 11)
    kw_dict = dict(**kwargs)
    kw_hash = list()
    if not kw_dict:
        kw_hash = ["default"]
    else:
        [kw_hash.append("{}-{}".format(key, h)) for (key, h) in sorted(kw_dict.items())]
    closure = []
    nonlocals = inspect.getclosurevars(filter_fn).nonlocals
    for (key, val) in nonlocals.items():
        if inspect.isfunction(val):
            closure.append(self._hash_filter_fn(val))
    closure.sort()
    closure_str = "null" if len(closure) == 0 else "-".join(closure)
    hashed_fn = ".".join(["-".join([filter_fn_name, str(hashed_fn_source)]),
                          ".".join(kw_hash),
                          closure_str])
    return hashed_fn
|
Construct a string representing the state of filter_fn. Used to cache filtered variants or effects uniquely, depending on filter fn values.
|
2,290 |
def _load_single_patient_variants(self, patient, filter_fn, use_cache=True, **kwargs):
    if filter_fn is None:
        use_filtered_cache = False
    else:
        filter_fn_name = self._get_function_name(filter_fn)
        logger.debug("loading variants for patient {} with filter_fn {}".format(
            patient.id, filter_fn_name))
        use_filtered_cache = use_cache
    if use_filtered_cache:
        logger.debug("... identifying filtered-cache file name")
        try:
            filtered_cache_file_name = "%s-variants.%s.pkl" % (
                self.merge_type, self._hash_filter_fn(filter_fn, **kwargs))
        except:
            logger.warning("... error identifying filtered-cache file name for patient {}: {}".format(
                patient.id, filter_fn_name))
            use_filtered_cache = False
        else:
            logger.debug("... trying to load filtered variants from cache: {}".format(
                filtered_cache_file_name))
            try:
                cached = self.load_from_cache(
                    self.cache_names["variant"], patient.id, filtered_cache_file_name)
                if cached is not None:
                    return cached
            except:
                logger.warning("Error loading variants from cache for patient: {}".format(patient.id))
                pass
    logger.debug("... getting merged variants for: {}".format(patient.id))
    merged_variants = self._load_single_patient_merged_variants(patient, use_cache=use_cache)
    if merged_variants is None:
        logger.info("Variants did not exist for patient %s" % patient.id)
        return None
    logger.debug("... applying filters to variants for: {}".format(patient.id))
    filtered_variants = filter_variants(
        variant_collection=merged_variants,
        patient=patient,
        filter_fn=filter_fn,
        **kwargs)
    if use_filtered_cache:
        logger.debug("... saving filtered variants to cache: {}".format(filtered_cache_file_name))
        self.save_to_cache(
            filtered_variants, self.cache_names["variant"], patient.id, filtered_cache_file_name)
    return filtered_variants
|
Load filtered, merged variants for a single patient, optionally using the cache.
|
2,291 |
def _load_single_patient_merged_variants(self, patient, use_cache=True):
    logger.debug("loading merged variants for patient {}".format(patient.id))
    no_variants = False
    try:
        if use_cache:
            variant_cache_file_name = "%s-variants.pkl" % (self.merge_type)
            merged_variants = self.load_from_cache(
                self.cache_names["variant"], patient.id, variant_cache_file_name)
            if merged_variants is not None:
                return merged_variants
        variant_collections = []
        optional_maf_cols = ["t_ref_count", "t_alt_count", "n_ref_count", "n_alt_count"]
        if self.additional_maf_cols is not None:
            optional_maf_cols.extend(self.additional_maf_cols)
        for patient_variants in patient.variants_list:
            if type(patient_variants) == str:
                if ".vcf" in patient_variants:
                    try:
                        variant_collections.append(varcode.load_vcf_fast(patient_variants))
                    except StopIteration as e:
                        logger.warning("Empty VCF (or possibly a VCF error) for patient {}: {}".format(
                            patient.id, str(e)))
                elif ".maf" in patient_variants:
                    variant_collections.append(
                        varcode.load_maf(patient_variants,
                                         optional_cols=optional_maf_cols,
                                         encoding="latin-1"))
                else:
                    raise ValueError("Don't know how to read %s" % patient_variants)
            elif type(patient_variants) == VariantCollection:
                variant_collections.append(patient_variants)
            else:
                raise ValueError("Don't know how to read %s" % patient_variants)
        if len(variant_collections) == 0:
            no_variants = True
        elif len(variant_collections) == 1:
            variants = variant_collections[0]
            merged_variants = variants
        else:
            merged_variants = self._merge_variant_collections(variant_collections, self.merge_type)
    except IOError:
        no_variants = True
    if no_variants:
        print("Variants did not exist for patient %s" % patient.id)
        merged_variants = None
    if use_cache:
        self.save_to_cache(
            merged_variants, self.cache_names["variant"], patient.id, variant_cache_file_name)
    return merged_variants
|
Load merged variants for a single patient, optionally using the cache.
|
2,292 |
def load_polyphen_annotations(self, as_dataframe=False, filter_fn=None):
    filter_fn = first_not_none_param([filter_fn, self.filter_fn], no_filter)
    patient_annotations = {}
    for patient in self:
        annotations = self._load_single_patient_polyphen(patient, filter_fn=filter_fn)
        if annotations is not None:
            annotations["patient_id"] = patient.id
            patient_annotations[patient.id] = annotations
    if as_dataframe:
        return pd.concat(patient_annotations.values())
    return patient_annotations
|
Load a dataframe containing polyphen2 annotations for all variants.
|
2,293 |
def load_effects(self, patients=None, only_nonsynonymous=False,
                 all_effects=False, filter_fn=None, **kwargs):
    filter_fn = first_not_none_param([filter_fn, self.filter_fn], no_filter)
    filter_fn_name = self._get_function_name(filter_fn)
    logger.debug("loading effects with filter_fn {}".format(filter_fn_name))
    patient_effects = {}
    for patient in self.iter_patients(patients):
        effects = self._load_single_patient_effects(
            patient, only_nonsynonymous, all_effects, filter_fn, **kwargs)
        if effects is not None:
            patient_effects[patient.id] = effects
    return patient_effects
|
Load a dictionary of patient_id to varcode.EffectCollection.
|
2,294 |
def load_kallisto(self):
    kallisto_data = pd.concat(
        [self._load_single_patient_kallisto(patient) for patient in self],
        copy=False)
    if self.kallisto_ensembl_version is None:
        raise ValueError("Required a kallisto_ensembl_version but none was specified")
    ensembl_release = cached_release(self.kallisto_ensembl_version)
    kallisto_data["gene_name"] = kallisto_data["target_id"].map(
        lambda t: ensembl_release.gene_name_of_transcript_id(t))
    kallisto_data = kallisto_data.groupby(
        ["patient_id", "gene_name"])[["est_counts"]].sum().reset_index()
    return kallisto_data
|
Load Kallisto transcript quantification data for a cohort.
|
2,295 |
def _load_single_patient_kallisto(self, patient):
    data = pd.read_csv(patient.tumor_sample.kallisto_path, sep="\t")
    data["patient_id"] = patient.id
    return data
|
Load Kallisto gene quantification given a patient.
|
2,296 |
def load_cufflinks(self, filter_ok=True):
    return pd.concat(
        [self._load_single_patient_cufflinks(patient, filter_ok) for patient in self],
        copy=False)
|
Load Cufflinks gene expression data for a cohort.
|
2,297 |
def _load_single_patient_cufflinks(self, patient, filter_ok):
    data = pd.read_csv(patient.tumor_sample.cufflinks_path, sep="\t")
    data["patient_id"] = patient.id
    if filter_ok:
        data = data[data["FPKM_status"] == "OK"]
    return data
|
Load Cufflinks gene quantification given a patient.
|
2,298 |
def get_filtered_isovar_epitopes(self, epitopes, ic50_cutoff):
    mutant_binding_predictions = []
    for binding_prediction in epitopes:
        peptide = binding_prediction.peptide
        peptide_offset = binding_prediction.offset
        isovar_row = dict(binding_prediction.source_sequence_key)
        is_mutant = contains_mutant_residues(
            peptide_start_in_protein=peptide_offset,
            peptide_length=len(peptide),
            mutation_start_in_protein=isovar_row["variant_aa_interval_start"],
            mutation_end_in_protein=isovar_row["variant_aa_interval_end"])
        if is_mutant and binding_prediction.value <= ic50_cutoff:
            mutant_binding_predictions.append(binding_prediction)
    return EpitopeCollection(mutant_binding_predictions)
|
Mostly replicates topiary.build_epitope_collection_from_binding_predictions.
|
2,299 |
def plot_roc_curve(self, on, bootstrap_samples=100, ax=None, **kwargs):
    plot_col, df = self.as_dataframe(on, return_cols=True, **kwargs)
    df = filter_not_null(df, "benefit")
    df = filter_not_null(df, plot_col)
    df.benefit = df.benefit.astype(bool)
    return roc_curve_plot(df, plot_col, "benefit", bootstrap_samples, ax=ax)
|
Plot an ROC curve for benefit and a given variable.
|