idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars)
---|---|---|
61,000 |
def get_by_symbol(self, symbol: str) -> Commodity:
    assert isinstance(symbol, str)
    query = self.currencies_query.filter(Commodity.mnemonic == symbol)
    return query.one()
|
Loads currency by symbol
|
61,001 |
def import_fx_rates ( self , rates : List [ PriceModel ] ) : have_new_rates = False base_currency = self . get_default_currency ( ) for rate in rates : assert isinstance ( rate , PriceModel ) currency = self . get_by_symbol ( rate . symbol ) amount = rate . value has_rate = currency . prices . filter ( Price . date == rate . datetime . date ( ) ) . first ( ) if not has_rate : log ( INFO , "Creating entry for %s, %s, %s, %s" , base_currency . mnemonic , currency . mnemonic , rate . datetime . date ( ) , amount ) inverted_rate = 1 / amount inverted_rate = inverted_rate . quantize ( Decimal ( '.00000000' ) ) price = Price ( commodity = currency , currency = base_currency , date = rate . datetime . date ( ) , value = str ( inverted_rate ) ) have_new_rates = True if have_new_rates : log ( INFO , "Saving new prices..." ) self . book . flush ( ) self . book . save ( ) else : log ( INFO , "No prices imported." )
|
Imports the given prices into the database. Write operation!
|
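The import above stores the inverse of each quoted rate, quantized to eight decimal places, before saving it as a Price. A minimal standalone sketch of that inversion step using only the standard-library decimal module (the rate value is made up):

```python
from decimal import Decimal

rate = Decimal("1.25")  # hypothetical quoted rate
inverted = (1 / rate).quantize(Decimal(".00000000"))
print(inverted)  # Decimal('0.80000000')
```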
61,002 |
def __get_default_currency ( self ) : if sys . platform == "win32" : def_curr = self . book [ "default-currency" ] = self . __get_default_currency_windows ( ) else : def_curr = self . book [ "default-currency" ] = self . __get_locale_currency ( ) return def_curr
|
Read the default currency from GnuCash preferences
|
61,003 |
def __get_registry_key ( self , key ) : import winreg root = winreg . OpenKey ( winreg . HKEY_CURRENT_USER , r'SOFTWARE\GSettings\org\gnucash\general' , 0 , winreg . KEY_READ ) [ pathname , regtype ] = ( winreg . QueryValueEx ( root , key ) ) winreg . CloseKey ( root ) return pathname
|
Read currency from the Windows registry
|
61,004 |
def get_for_accounts ( self , accounts : List [ Account ] ) : account_ids = [ acc . guid for acc in accounts ] query = ( self . query . filter ( Split . account_guid . in_ ( account_ids ) ) ) splits = query . all ( ) return splits
|
Get all splits for the given accounts
|
61,005 |
def __get_model_for_portfolio_value ( input_model : PortfolioValueInputModel ) -> PortfolioValueViewModel : result = PortfolioValueViewModel ( ) result . filter = input_model ref_datum = Datum ( ) ref_datum . from_datetime ( input_model . as_of_date ) ref_date = ref_datum . end_of_day ( ) result . stock_rows = [ ] with BookAggregate ( ) as svc : book = svc . book stocks_svc = svc . securities if input_model . stock : symbols = input_model . stock . split ( "," ) stocks = stocks_svc . get_stocks ( symbols ) else : stocks = stocks_svc . get_all ( ) for stock in stocks : row : StockViewModel = portfoliovalue . get_stock_model_from ( book , stock , as_of_date = ref_date ) if row and row . balance > 0 : result . stock_rows . append ( row ) return result
|
Loads the data for portfolio value
|
61,006 |
def __load_settings ( self ) : file_path = self . file_path try : self . data = json . load ( open ( file_path ) ) except FileNotFoundError : print ( "Could not load" , file_path )
|
Load settings from the .json file
|
61,007 |
def file_exists(self) -> bool:
    cfg_path = self.file_path
    assert cfg_path
    return path.isfile(cfg_path)
|
Check if the settings file exists or not
|
61,008 |
def save ( self ) : content = self . dumps ( ) fileutils . save_text_to_file ( content , self . file_path )
|
Saves the settings contents
|
61,009 |
def database_path ( self ) : filename = self . database_filename db_path = ":memory:" if filename == ":memory:" else ( path . abspath ( path . join ( __file__ , "../.." , ".." , "data" , filename ) ) ) return db_path
|
Full database path. Includes the default location + the database filename.
|
61,010 |
def file_path ( self ) -> str : user_dir = self . __get_user_path ( ) file_path = path . abspath ( path . join ( user_dir , self . FILENAME ) ) return file_path
|
Settings file absolute path
|
61,011 |
def dumps ( self ) -> str : return json . dumps ( self . data , sort_keys = True , indent = 4 )
|
Dumps the json content as a string
|
61,012 |
def __copy_template ( self ) : import shutil template_filename = "settings.json.template" template_path = path . abspath ( path . join ( __file__ , ".." , ".." , "config" , template_filename ) ) settings_path = self . file_path shutil . copyfile ( template_path , settings_path ) self . __ensure_file_exists ( )
|
Copy the settings template into the user's directory
|
61,013 |
def is_not_empty ( self , value , strict = False ) : value = stringify ( value ) if value is not None : return self . shout ( 'Value %r is empty' , strict , value )
|
Check that the value is not empty
|
61,014 |
def is_numeric ( self , value , strict = False ) : value = stringify ( value ) if value is not None : if value . isnumeric ( ) : return self . shout ( 'value %r is not numeric' , strict , value )
|
Check that the value is numeric
|
61,015 |
def is_integer ( self , value , strict = False ) : if value is not None : if isinstance ( value , numbers . Number ) : return value = stringify ( value ) if value is not None and value . isnumeric ( ) : return self . shout ( 'value %r is not an integer' , strict , value )
|
Check that the value is an integer
|
61,016 |
def match_date ( self , value , strict = False ) : value = stringify ( value ) try : parse ( value ) except Exception : self . shout ( 'Value %r is not a valid date' , strict , value )
|
Check that the value is a valid date
|
61,017 |
def match_regexp ( self , value , q , strict = False ) : value = stringify ( value ) mr = re . compile ( q ) if value is not None : if mr . match ( value ) : return self . shout ( '%r not matching the regexp %r' , strict , value , q )
|
Check that the value matches the regexp q
|
61,018 |
def has_length ( self , value , q , strict = False ) : value = stringify ( value ) if value is not None : if len ( value ) == q : return self . shout ( 'Value %r not matching length %r' , strict , value , q )
|
Check that the value has a length of q
|
61,019 |
def must_contain ( self , value , q , strict = False ) : if value is not None : if value . find ( q ) != - 1 : return self . shout ( 'Value %r does not contain %r' , strict , value , q )
|
Check that the value contains q
|
61,020 |
def extract ( context , data ) : with context . http . rehash ( data ) as result : file_path = result . file_path content_type = result . content_type extract_dir = random_filename ( context . work_path ) if content_type in ZIP_MIME_TYPES : extracted_files = extract_zip ( file_path , extract_dir ) elif content_type in TAR_MIME_TYPES : extracted_files = extract_tar ( file_path , extract_dir , context ) elif content_type in SEVENZIP_MIME_TYPES : extracted_files = extract_7zip ( file_path , extract_dir , context ) else : context . log . warning ( "Unsupported archive content type: %s" , content_type ) return extracted_content_hashes = { } for path in extracted_files : relative_path = os . path . relpath ( path , extract_dir ) content_hash = context . store_file ( path ) extracted_content_hashes [ relative_path ] = content_hash data [ 'content_hash' ] = content_hash data [ 'file_name' ] = relative_path context . emit ( data = data . copy ( ) )
|
Extract a compressed file
|
61,021 |
def size ( cls , crawler ) : key = make_key ( 'queue_pending' , crawler ) return unpack_int ( conn . get ( key ) )
|
Total operations pending for this crawler
|
61,022 |
def read_word ( image , whitelist = None , chars = None , spaces = False ) : from tesserocr import PyTessBaseAPI api = PyTessBaseAPI ( ) api . SetPageSegMode ( 8 ) if whitelist is not None : api . SetVariable ( "tessedit_char_whitelist" , whitelist ) api . SetImage ( image ) api . Recognize ( ) guess = api . GetUTF8Text ( ) if not spaces : guess = '' . join ( [ c for c in guess if c != " " ] ) guess = guess . strip ( ) if chars is not None and len ( guess ) != chars : return guess , None return guess , api . MeanTextConf ( )
|
OCR a single word from an image. Useful for captchas. The image should be pre-processed to remove noise, etc.
|
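A hypothetical usage sketch for read_word above, assuming Pillow and tesserocr are installed and that a pre-processed captcha image exists at the (illustrative) path shown:

```python
from PIL import Image  # Pillow, assumed to be available

# Illustrative file name; binarize/denoise the image before OCR for best results.
image = Image.open("captcha_word.png")

# Restrict recognition to digits and require a five-character result.
guess, confidence = read_word(image, whitelist="0123456789", chars=5)
print(guess, confidence)  # confidence is None when the length check fails
```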
61,023 |
def read_char ( image , whitelist = None ) : from tesserocr import PyTessBaseAPI api = PyTessBaseAPI ( ) api . SetPageSegMode ( 10 ) if whitelist is not None : api . SetVariable ( "tessedit_char_whitelist" , whitelist ) api . SetImage ( image ) api . Recognize ( ) return api . GetUTF8Text ( ) . strip ( )
|
OCR a single character from an image. Useful for captchas.
|
61,024 |
def get ( self , name , default = None ) : value = self . params . get ( name , default ) if isinstance ( value , str ) : value = os . path . expandvars ( value ) return value
|
Get a configuration value and expand environment variables.
|
61,025 |
def emit ( self , rule = 'pass' , stage = None , data = { } , delay = None , optional = False ) : if stage is None : stage = self . stage . handlers . get ( rule ) if optional and stage is None : return if stage is None or stage not in self . crawler . stages : self . log . info ( "No next stage: %s (%s)" % ( stage , rule ) ) return state = self . dump_state ( ) delay = delay or self . crawler . delay Queue . queue ( stage , state , data , delay )
|
Invoke the next stage either based on a handling rule or by calling the pass rule by default.
|
61,026 |
def recurse ( self , data = { } , delay = None ) : return self . emit ( stage = self . stage . name , data = data , delay = delay )
|
Have a stage invoke itself with a modified set of arguments.
|
61,027 |
def execute ( self , data ) : if Crawl . is_aborted ( self . crawler , self . run_id ) : return try : Crawl . operation_start ( self . crawler , self . stage , self . run_id ) self . log . info ( '[%s->%s(%s)]: %s' , self . crawler . name , self . stage . name , self . stage . method_name , self . run_id ) return self . stage . method ( self , data ) except Exception as exc : self . emit_exception ( exc ) finally : Crawl . operation_end ( self . crawler , self . run_id ) shutil . rmtree ( self . work_path )
|
Execute the crawler and create a database record of having done so.
|
61,028 |
def skip_incremental ( self , * criteria ) : if not self . incremental : return False key = make_key ( * criteria ) if key is None : return False if self . check_tag ( key ) : return True self . set_tag ( key , None ) return False
|
Perform an incremental check on a set of criteria.
|
61,029 |
def store_data ( self , data , encoding = 'utf-8' ) : path = random_filename ( self . work_path ) try : with open ( path , 'wb' ) as fh : if isinstance ( data , str ) : data = data . encode ( encoding ) if data is not None : fh . write ( data ) return self . store_file ( path ) finally : try : os . unlink ( path ) except OSError : pass
|
Put the given content into a file, possibly encoding it as UTF-8 in the process.
|
61,030 |
def check_due ( self ) : if self . disabled : return False if self . is_running : return False if self . delta is None : return False last_run = self . last_run if last_run is None : return True now = datetime . utcnow ( ) if now > last_run + self . delta : return True return False
|
Check if the last execution of this crawler is older than the scheduled interval.
|
61,031 |
def flush ( self ) : Queue . flush ( self ) Event . delete ( self ) Crawl . flush ( self )
|
Delete all run-time data generated by this crawler.
|
61,032 |
def run ( self , incremental = None , run_id = None ) : state = { 'crawler' : self . name , 'run_id' : run_id , 'incremental' : settings . INCREMENTAL } if incremental is not None : state [ 'incremental' ] = incremental self . cancel ( ) Event . delete ( self ) Queue . queue ( self . init_stage , state , { } )
|
Queue the execution of a particular crawler.
|
61,033 |
def fetch ( context , data ) : url = data . get ( 'url' ) attempt = data . pop ( 'retry_attempt' , 1 ) try : result = context . http . get ( url , lazy = True ) rules = context . get ( 'rules' , { 'match_all' : { } } ) if not Rule . get_rule ( rules ) . apply ( result ) : context . log . info ( 'Fetch skip: %r' , result . url ) return if not result . ok : err = ( result . url , result . status_code ) context . emit_warning ( "Fetch fail [%s]: HTTP %s" % err ) if not context . params . get ( 'emit_errors' , False ) : return else : context . log . info ( "Fetched [%s]: %r" , result . status_code , result . url ) data . update ( result . serialize ( ) ) if url != result . url : tag = make_key ( context . run_id , url ) context . set_tag ( tag , None ) context . emit ( data = data ) except RequestException as ce : retries = int ( context . get ( 'retry' , 3 ) ) if retries >= attempt : context . log . warn ( "Retry: %s (error: %s)" , url , ce ) data [ 'retry_attempt' ] = attempt + 1 context . recurse ( data = data , delay = 2 ** attempt ) else : context . emit_warning ( "Fetch fail [%s]: %s" % ( url , ce ) )
|
Do an HTTP GET on the URL specified in the inbound data.
|
61,034 |
def dav_index ( context , data ) : url = data . get ( 'url' ) result = context . http . request ( 'PROPFIND' , url ) for resp in result . xml . findall ( './{DAV:}response' ) : href = resp . findtext ( './{DAV:}href' ) if href is None : continue rurl = urljoin ( url , href ) rdata = data . copy ( ) rdata [ 'url' ] = rurl rdata [ 'foreign_id' ] = rurl if rdata [ 'url' ] == url : continue if resp . find ( './/{DAV:}collection' ) is not None : rdata [ 'parent_foreign_id' ] = rurl context . log . info ( "Fetching contents of folder: %s" % rurl ) context . recurse ( data = rdata ) else : rdata [ 'parent_foreign_id' ] = url fetch ( context , rdata )
|
List files in a WebDAV directory.
|
61,035 |
def session ( context , data ) : context . http . reset ( ) user = context . get ( 'user' ) password = context . get ( 'password' ) if user is not None and password is not None : context . http . session . auth = ( user , password ) user_agent = context . get ( 'user_agent' ) if user_agent is not None : context . http . session . headers [ 'User-Agent' ] = user_agent referer = context . get ( 'url' ) if referer is not None : context . http . session . headers [ 'Referer' ] = referer proxy = context . get ( 'proxy' ) if proxy is not None : proxies = { 'http' : proxy , 'https' : proxy } context . http . session . proxies = proxies context . http . save ( ) context . emit ( data = data )
|
Set some HTTP parameters for all subsequent requests.
|
61,036 |
def save ( cls , crawler , stage , level , run_id , error = None , message = None ) : event = { 'stage' : stage . name , 'level' : level , 'timestamp' : pack_now ( ) , 'error' : error , 'message' : message } data = dump_json ( event ) conn . lpush ( make_key ( crawler , "events" ) , data ) conn . lpush ( make_key ( crawler , "events" , level ) , data ) conn . lpush ( make_key ( crawler , "events" , stage ) , data ) conn . lpush ( make_key ( crawler , "events" , stage , level ) , data ) conn . lpush ( make_key ( crawler , "events" , run_id ) , data ) conn . lpush ( make_key ( crawler , "events" , run_id , level ) , data ) return event
|
Create an event, possibly based on an exception.
|
61,037 |
def get_stage_events ( cls , crawler , stage_name , start , end , level = None ) : key = make_key ( crawler , "events" , stage_name , level ) return cls . event_list ( key , start , end )
|
Events from a particular stage
|
61,038 |
def get_run_events ( cls , crawler , run_id , start , end , level = None ) : key = make_key ( crawler , "events" , run_id , level ) return cls . event_list ( key , start , end )
|
Events from a particular run
|
61,039 |
def soviet_checksum ( code ) : def sum_digits ( code , offset = 1 ) : total = 0 for digit , index in zip ( code [ : 7 ] , count ( offset ) ) : total += int ( digit ) * index summed = ( total / 11 * 11 ) return total - summed check = sum_digits ( code , 1 ) if check == 10 : check = sum_digits ( code , 3 ) if check == 10 : return code + '0' return code + str ( check )
|
Courtesy of Sir Vlad Lavrov.
|
61,040 |
def search_results_total ( html , xpath , check , delimiter ) : for container in html . findall ( xpath ) : if check in container . findtext ( '.' ) : text = container . findtext ( '.' ) . split ( delimiter ) total = int ( text [ - 1 ] . strip ( ) ) return total
|
Get the total number of results from the DOM of a search index.
|
61,041 |
def search_results_last_url ( html , xpath , label ) : for container in html . findall ( xpath ) : if container . text_content ( ) . strip ( ) == label : return container . find ( './/a' ) . get ( 'href' )
|
Get the URL of the last button in a search results listing.
|
61,042 |
def op_count ( cls , crawler , stage = None ) : if stage : total_ops = conn . get ( make_key ( crawler , stage ) ) else : total_ops = conn . get ( make_key ( crawler , "total_ops" ) ) return unpack_int ( total_ops )
|
Total operations performed for this crawler
|
61,043 |
def index ( ) : crawlers = [ ] for crawler in manager : data = Event . get_counts ( crawler ) data [ 'last_active' ] = crawler . last_run data [ 'total_ops' ] = crawler . op_count data [ 'running' ] = crawler . is_running data [ 'crawler' ] = crawler crawlers . append ( data ) return render_template ( 'index.html' , crawlers = crawlers )
|
Generate a list of all crawlers alphabetically with op counts.
|
61,044 |
def clean_html ( context , data ) : doc = _get_html_document ( context , data ) if doc is None : context . emit ( data = data ) return remove_paths = context . params . get ( 'remove_paths' ) for path in ensure_list ( remove_paths ) : for el in doc . findall ( path ) : el . drop_tree ( ) html_text = html . tostring ( doc , pretty_print = True ) content_hash = context . store_data ( html_text ) data [ 'content_hash' ] = content_hash context . emit ( data = data )
|
Clean an HTML DOM and store the changed version.
|
61,045 |
def execute ( cls , stage , state , data , next_allowed_exec_time = None ) : try : context = Context . from_state ( state , stage ) now = datetime . utcnow ( ) if next_allowed_exec_time and now < next_allowed_exec_time : Queue . queue ( stage , state , data , delay = next_allowed_exec_time ) elif context . crawler . disabled : pass elif context . stage . rate_limit : try : with rate_limiter ( context ) : context . execute ( data ) except RateLimitException : delay = max ( 1 , 1.0 / context . stage . rate_limit ) delay = random . randint ( 1 , int ( delay ) ) context . log . info ( "Rate limit exceeded, delaying %d sec." , delay ) Queue . queue ( stage , state , data , delay = delay ) else : context . execute ( data ) except Exception : log . exception ( "Task failed to execute:" ) finally : Queue . decr_pending ( context . crawler ) if not context . crawler . is_running : context . crawler . aggregate ( context )
|
Execute the operation, allowing for rate limiting.
|
61,046 |
def _recursive_upsert ( context , params , data ) : children = params . get ( "children" , { } ) nested_calls = [ ] for child_params in children : key = child_params . get ( "key" ) child_data_list = ensure_list ( data . pop ( key ) ) if isinstance ( child_data_list , dict ) : child_data_list = [ child_data_list ] if not ( isinstance ( child_data_list , list ) and all ( isinstance ( i , dict ) for i in child_data_list ) ) : context . log . warn ( "Expecting a dict or a lost of dicts as children for key" , key ) continue if child_data_list : table_suffix = child_params . get ( "table_suffix" , key ) child_params [ "table" ] = params . get ( "table" ) + "_" + table_suffix inherit = child_params . get ( "inherit" , { } ) for child_data in child_data_list : for dest , src in inherit . items ( ) : child_data [ dest ] = data . get ( src ) nested_calls . append ( ( child_params , child_data ) ) _upsert ( context , params , data ) for child_params , child_data in nested_calls : _recursive_upsert ( context , child_params , child_data )
|
Insert or update nested dicts recursively into db tables
|
61,047 |
def db ( context , data ) : table = context . params . get ( "table" , context . crawler . name ) params = context . params params [ "table" ] = table _recursive_upsert ( context , params , data )
|
Insert or update data as a row into specified db table
|
61,048 |
def cli ( debug , cache , incremental ) : settings . HTTP_CACHE = cache settings . INCREMENTAL = incremental settings . DEBUG = debug if settings . DEBUG : logging . basicConfig ( level = logging . DEBUG ) else : logging . basicConfig ( level = logging . INFO ) init_memorious ( )
|
Crawler framework for documents and structured scrapers.
|
61,049 |
def run ( crawler ) : crawler = get_crawler ( crawler ) crawler . run ( ) if is_sync_mode ( ) : TaskRunner . run_sync ( )
|
Run a specified crawler.
|
61,050 |
def index ( ) : crawler_list = [ ] for crawler in manager : is_due = 'yes' if crawler . check_due ( ) else 'no' if crawler . disabled : is_due = 'off' crawler_list . append ( [ crawler . name , crawler . description , crawler . schedule , is_due , Queue . size ( crawler ) ] ) headers = [ 'Name' , 'Description' , 'Schedule' , 'Due' , 'Pending' ] print ( tabulate ( crawler_list , headers = headers ) )
|
List the available crawlers.
|
61,051 |
def scheduled ( wait = False ) : manager . run_scheduled ( ) while wait : manager . run_scheduled ( ) time . sleep ( settings . SCHEDULER_INTERVAL )
|
Run crawlers that are due.
|
61,052 |
def _get_directory_path ( context ) : path = os . path . join ( settings . BASE_PATH , 'store' ) path = context . params . get ( 'path' , path ) path = os . path . join ( path , context . crawler . name ) path = os . path . abspath ( os . path . expandvars ( path ) ) try : os . makedirs ( path ) except Exception : pass return path
|
Get the storage path for the output.
|
61,053 |
def directory ( context , data ) : with context . http . rehash ( data ) as result : if not result . ok : return content_hash = data . get ( 'content_hash' ) if content_hash is None : context . emit_warning ( "No content hash in data." ) return path = _get_directory_path ( context ) file_name = data . get ( 'file_name' , result . file_name ) file_name = safe_filename ( file_name , default = 'raw' ) file_name = '%s.%s' % ( content_hash , file_name ) data [ '_file_name' ] = file_name file_path = os . path . join ( path , file_name ) if not os . path . exists ( file_path ) : shutil . copyfile ( result . file_path , file_path ) context . log . info ( "Store [directory]: %s" , file_name ) meta_path = os . path . join ( path , '%s.json' % content_hash ) with open ( meta_path , 'w' ) as fh : json . dump ( data , fh )
|
Store the collected files in a given directory.
|
61,054 |
def seed(context, data):
    for key in ('url', 'urls'):
        for url in ensure_list(context.params.get(key)):
            url = url % data
            context.emit(data={'url': url})
|
Initialize a crawler with a set of seed URLs.
|
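The seed stage above fills each configured URL template with the current data dict via old-style % interpolation (url % data). A minimal illustration of that substitution with a made-up template:

```python
template = "https://example.com/page/%(page)s"  # hypothetical seed URL
data = {"page": 4}
print(template % data)  # https://example.com/page/4
```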
61,055 |
def enumerate ( context , data ) : items = ensure_list ( context . params . get ( 'items' ) ) for item in items : data [ 'item' ] = item context . emit ( data = data )
|
Iterate through a set of items and emit each one of them.
|
61,056 |
def sequence ( context , data ) : number = data . get ( 'number' , context . params . get ( 'start' , 1 ) ) stop = context . params . get ( 'stop' ) step = context . params . get ( 'step' , 1 ) delay = context . params . get ( 'delay' ) prefix = context . params . get ( 'tag' ) while True : tag = None if prefix is None else '%s:%s' % ( prefix , number ) if tag is None or not context . check_tag ( tag ) : context . emit ( data = { 'number' : number } ) if tag is not None : context . set_tag ( tag , True ) number = number + step if step > 0 and number >= stop : break if step < 0 and number <= stop : break if delay is not None : context . recurse ( data = { 'number' : number } , delay = delay ) break
|
Generate a sequence of numbers.
|
61,057 |
def fetch ( self ) : if self . _file_path is not None : return self . _file_path temp_path = self . context . work_path if self . _content_hash is not None : self . _file_path = storage . load_file ( self . _content_hash , temp_path = temp_path ) return self . _file_path if self . response is not None : self . _file_path = random_filename ( temp_path ) content_hash = sha1 ( ) with open ( self . _file_path , 'wb' ) as fh : for chunk in self . response . iter_content ( chunk_size = 8192 ) : content_hash . update ( chunk ) fh . write ( chunk ) self . _remove_file = True chash = content_hash . hexdigest ( ) self . _content_hash = storage . archive_file ( self . _file_path , content_hash = chash ) if self . http . cache and self . ok : self . context . set_tag ( self . request_id , self . serialize ( ) ) self . retrieved_at = datetime . utcnow ( ) . isoformat ( ) return self . _file_path
|
Lazily trigger download of the data when requested.
|
61,058 |
def make_key(*criteria):
    criteria = [stringify(c) for c in criteria]
    criteria = [c for c in criteria if c is not None]
    if len(criteria):
        return ':'.join(criteria)
|
Make a string key out of many criteria.
|
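make_key drops None criteria and joins the rest with colons, returning None when nothing remains. A small example, assuming stringify passes plain strings through unchanged:

```python
print(make_key("crawler", "events", None, "error"))  # crawler:events:error
print(make_key(None, None))                          # None
```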
61,059 |
def random_filename(path=None):
    filename = uuid4().hex
    if path is not None:
        filename = os.path.join(path, filename)
    return filename
|
Make a UUID-based file name which is extremely unlikely to exist already.
|
61,060 |
def sample_vMF(mu, kappa, num_samples):
    dim = len(mu)
    result = np.zeros((num_samples, dim))
    for nn in range(num_samples):
        w = _sample_weight(kappa, dim)
        v = _sample_orthonormal_to(mu)
        result[nn, :] = v * np.sqrt(1. - w ** 2) + w * mu
    return result
|
Generate num_samples N-dimensional samples from the von Mises-Fisher distribution around center mu in R^N with concentration kappa.
|
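A short usage sketch for sample_vMF above: draw points on the unit sphere in R^3 concentrated around a chosen mean direction (values are illustrative). Each returned sample has unit norm.

```python
import numpy as np

mu = np.array([1.0, 0.0, 0.0])       # unit-norm mean direction
samples = sample_vMF(mu, 50.0, 500)  # kappa = 50, 500 samples

print(samples.shape)                                       # (500, 3)
print(np.allclose(np.linalg.norm(samples, axis=1), 1.0))   # True
```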
61,061 |
def _sample_weight ( kappa , dim ) : dim = dim - 1 b = dim / ( np . sqrt ( 4. * kappa ** 2 + dim ** 2 ) + 2 * kappa ) x = ( 1. - b ) / ( 1. + b ) c = kappa * x + dim * np . log ( 1 - x ** 2 ) while True : z = np . random . beta ( dim / 2. , dim / 2. ) w = ( 1. - ( 1. + b ) * z ) / ( 1. - ( 1. - b ) * z ) u = np . random . uniform ( low = 0 , high = 1 ) if kappa * w + dim * np . log ( 1. - x * w ) - c >= np . log ( u ) : return w
|
Rejection sampling scheme for sampling the distance from the center on the surface of the sphere.
|
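Transcribing the rejection sampler above into formulas, with $p = \mathrm{dim} - 1$ as used in the code, the envelope constants are

$$b = \frac{p}{\sqrt{4\kappa^2 + p^2} + 2\kappa}, \qquad x = \frac{1 - b}{1 + b}, \qquad c = \kappa x + p \log(1 - x^2),$$

and a candidate $w = \dfrac{1 - (1 + b)z}{1 - (1 - b)z}$ with $z \sim \mathrm{Beta}(p/2,\, p/2)$ is accepted when $\kappa w + p \log(1 - x w) - c \ge \log u$ for $u \sim \mathrm{Uniform}(0, 1)$.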
61,062 |
def _sample_orthonormal_to ( mu ) : v = np . random . randn ( mu . shape [ 0 ] ) proj_mu_v = mu * np . dot ( mu , v ) / np . linalg . norm ( mu ) orthto = v - proj_mu_v return orthto / np . linalg . norm ( orthto )
|
Sample a point on the sphere orthogonal to mu.
|
61,063 |
def _spherical_kmeans_single_lloyd ( X , n_clusters , sample_weight = None , max_iter = 300 , init = "k-means++" , verbose = False , x_squared_norms = None , random_state = None , tol = 1e-4 , precompute_distances = True , ) : random_state = check_random_state ( random_state ) sample_weight = _check_sample_weight ( X , sample_weight ) best_labels , best_inertia , best_centers = None , None , None centers = _init_centroids ( X , n_clusters , init , random_state = random_state , x_squared_norms = x_squared_norms ) if verbose : print ( "Initialization complete" ) distances = np . zeros ( shape = ( X . shape [ 0 ] , ) , dtype = X . dtype ) for i in range ( max_iter ) : centers_old = centers . copy ( ) labels , inertia = _labels_inertia ( X , sample_weight , x_squared_norms , centers , precompute_distances = precompute_distances , distances = distances , ) if sp . issparse ( X ) : centers = _k_means . _centers_sparse ( X , sample_weight , labels , n_clusters , distances ) else : centers = _k_means . _centers_dense ( X , sample_weight , labels , n_clusters , distances ) centers = normalize ( centers ) if verbose : print ( "Iteration %2d, inertia %.3f" % ( i , inertia ) ) if best_inertia is None or inertia < best_inertia : best_labels = labels . copy ( ) best_centers = centers . copy ( ) best_inertia = inertia center_shift_total = squared_norm ( centers_old - centers ) if center_shift_total <= tol : if verbose : print ( "Converged at iteration %d: " "center shift %e within tolerance %e" % ( i , center_shift_total , tol ) ) break if center_shift_total > 0 : best_labels , best_inertia = _labels_inertia ( X , sample_weight , x_squared_norms , best_centers , precompute_distances = precompute_distances , distances = distances , ) return best_labels , best_inertia , best_centers , i + 1
|
Modified from sklearn.cluster.k_means_.k_means_single_lloyd.
|
61,064 |
def spherical_k_means ( X , n_clusters , sample_weight = None , init = "k-means++" , n_init = 10 , max_iter = 300 , verbose = False , tol = 1e-4 , random_state = None , copy_x = True , n_jobs = 1 , algorithm = "auto" , return_n_iter = False , ) : if n_init <= 0 : raise ValueError ( "Invalid number of initializations." " n_init=%d must be bigger than zero." % n_init ) random_state = check_random_state ( random_state ) if max_iter <= 0 : raise ValueError ( "Number of iterations should be a positive number," " got %d instead" % max_iter ) best_inertia = np . infty order = "C" if copy_x else None X = check_array ( X , accept_sparse = "csr" , dtype = [ np . float64 , np . float32 ] , order = order , copy = copy_x ) if _num_samples ( X ) < n_clusters : raise ValueError ( "n_samples=%d should be >= n_clusters=%d" % ( _num_samples ( X ) , n_clusters ) ) tol = _tolerance ( X , tol ) if hasattr ( init , "__array__" ) : init = check_array ( init , dtype = X . dtype . type , order = "C" , copy = True ) _validate_center_shape ( X , n_clusters , init ) if n_init != 1 : warnings . warn ( "Explicit initial center position passed: " "performing only one init in k-means instead of n_init=%d" % n_init , RuntimeWarning , stacklevel = 2 , ) n_init = 1 x_squared_norms = row_norms ( X , squared = True ) if n_jobs == 1 : for it in range ( n_init ) : labels , inertia , centers , n_iter_ = _spherical_kmeans_single_lloyd ( X , n_clusters , sample_weight , max_iter = max_iter , init = init , verbose = verbose , tol = tol , x_squared_norms = x_squared_norms , random_state = random_state , ) if best_inertia is None or inertia < best_inertia : best_labels = labels . copy ( ) best_centers = centers . copy ( ) best_inertia = inertia best_n_iter = n_iter_ else : seeds = random_state . randint ( np . iinfo ( np . int32 ) . max , size = n_init ) results = Parallel ( n_jobs = n_jobs , verbose = 0 ) ( delayed ( _spherical_kmeans_single_lloyd ) ( X , n_clusters , sample_weight , max_iter = max_iter , init = init , verbose = verbose , tol = tol , x_squared_norms = x_squared_norms , random_state = seed , ) for seed in seeds ) labels , inertia , centers , n_iters = zip ( * results ) best = np . argmin ( inertia ) best_labels = labels [ best ] best_inertia = inertia [ best ] best_centers = centers [ best ] best_n_iter = n_iters [ best ] if return_n_iter : return best_centers , best_labels , best_inertia , best_n_iter else : return best_centers , best_labels , best_inertia
|
Modified from sklearn.cluster.k_means_.k_means.
|
61,065 |
def fit ( self , X , y = None , sample_weight = None ) : if self . normalize : X = normalize ( X ) random_state = check_random_state ( self . random_state ) self . cluster_centers_ , self . labels_ , self . inertia_ , self . n_iter_ = spherical_k_means ( X , n_clusters = self . n_clusters , sample_weight = sample_weight , init = self . init , n_init = self . n_init , max_iter = self . max_iter , verbose = self . verbose , tol = self . tol , random_state = random_state , copy_x = self . copy_x , n_jobs = self . n_jobs , return_n_iter = True , ) return self
|
Compute k-means clustering.
|
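A hedged usage sketch for the estimator whose fit method is shown above, assuming it is the SphericalKMeans class from the spherecluster package (an assumption; adjust the import to wherever the class actually lives):

```python
import numpy as np
from sklearn.preprocessing import normalize
from spherecluster import SphericalKMeans  # assumed package/location

X = normalize(np.random.randn(200, 20))  # unit-norm rows, as the model expects
skm = SphericalKMeans(n_clusters=5, random_state=0)
skm.fit(X)
print(skm.labels_.shape, skm.cluster_centers_.shape)  # (200,) (5, 20)
```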
61,066 |
def _inertia_from_labels(X, centers, labels):
    n_examples, n_features = X.shape
    inertia = np.zeros((n_examples,))
    for ee in range(n_examples):
        inertia[ee] = 1 - X[ee, :].dot(centers[int(labels[ee]), :].T)
    return np.sum(inertia)
|
Compute inertia with cosine distance using known labels.
|
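The cosine-distance inertia computed above, with $l_i$ the label of example $i$, is

$$\text{inertia} = \sum_{i=1}^{n} \left(1 - x_i \cdot c_{l_i}\right).$$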
61,067 |
def _labels_inertia ( X , centers ) : n_examples , n_features = X . shape n_clusters , n_features = centers . shape labels = np . zeros ( ( n_examples , ) ) inertia = np . zeros ( ( n_examples , ) ) for ee in range ( n_examples ) : dists = np . zeros ( ( n_clusters , ) ) for cc in range ( n_clusters ) : dists [ cc ] = 1 - X [ ee , : ] . dot ( centers [ cc , : ] . T ) labels [ ee ] = np . argmin ( dists ) inertia [ ee ] = dists [ int ( labels [ ee ] ) ] return labels , np . sum ( inertia )
|
Compute labels and inertia with cosine distance.
|
61,068 |
def _S ( kappa , alpha , beta ) : kappa = 1. * np . abs ( kappa ) alpha = 1. * alpha beta = 1. * np . abs ( beta ) a_plus_b = alpha + beta u = np . sqrt ( kappa ** 2 + beta ** 2 ) if alpha == 0 : alpha_scale = 0 else : alpha_scale = alpha * np . log ( ( alpha + u ) / a_plus_b ) return u - beta - alpha_scale
|
Compute the antiderivative of the Amos-type bound G on the modified Bessel function ratio.
|
61,069 |
def _init_unit_centers ( X , n_clusters , random_state , init ) : n_examples , n_features = np . shape ( X ) if isinstance ( init , np . ndarray ) : n_init_clusters , n_init_features = init . shape assert n_init_clusters == n_clusters assert n_init_features == n_features centers = init for cc in range ( n_clusters ) : centers [ cc , : ] = centers [ cc , : ] / np . linalg . norm ( centers [ cc , : ] ) return centers elif init == "spherical-k-means" : labels , inertia , centers , iters = spherical_kmeans . _spherical_kmeans_single_lloyd ( X , n_clusters , x_squared_norms = np . ones ( ( n_examples , ) ) , init = "k-means++" ) return centers elif init == "random" : centers = np . random . randn ( n_clusters , n_features ) for cc in range ( n_clusters ) : centers [ cc , : ] = centers [ cc , : ] / np . linalg . norm ( centers [ cc , : ] ) return centers elif init == "k-means++" : centers = _init_centroids ( X , n_clusters , "k-means++" , random_state = random_state , x_squared_norms = np . ones ( ( n_examples , ) ) , ) for cc in range ( n_clusters ) : centers [ cc , : ] = centers [ cc , : ] / np . linalg . norm ( centers [ cc , : ] ) return centers elif init == "random-orthonormal" : centers = np . random . randn ( n_clusters , n_features ) q , r = np . linalg . qr ( centers . T , mode = "reduced" ) return q . T elif init == "random-class" : centers = np . zeros ( ( n_clusters , n_features ) ) for cc in range ( n_clusters ) : while np . linalg . norm ( centers [ cc , : ] ) == 0 : labels = np . random . randint ( 0 , n_clusters , n_examples ) centers [ cc , : ] = X [ labels == cc , : ] . sum ( axis = 0 ) for cc in range ( n_clusters ) : centers [ cc , : ] = centers [ cc , : ] / np . linalg . norm ( centers [ cc , : ] ) return centers
|
Initializes unit-norm centers.
|
61,070 |
def _expectation ( X , centers , weights , concentrations , posterior_type = "soft" ) : n_examples , n_features = np . shape ( X ) n_clusters , _ = centers . shape if n_features <= 50 : vmf_f = _vmf_log else : vmf_f = _vmf_log_asymptotic f_log = np . zeros ( ( n_clusters , n_examples ) ) for cc in range ( n_clusters ) : f_log [ cc , : ] = vmf_f ( X , concentrations [ cc ] , centers [ cc , : ] ) posterior = np . zeros ( ( n_clusters , n_examples ) ) if posterior_type == "soft" : weights_log = np . log ( weights ) posterior = np . tile ( weights_log . T , ( n_examples , 1 ) ) . T + f_log for ee in range ( n_examples ) : posterior [ : , ee ] = np . exp ( posterior [ : , ee ] - logsumexp ( posterior [ : , ee ] ) ) elif posterior_type == "hard" : weights_log = np . log ( weights ) weighted_f_log = np . tile ( weights_log . T , ( n_examples , 1 ) ) . T + f_log for ee in range ( n_examples ) : posterior [ np . argmax ( weighted_f_log [ : , ee ] ) , ee ] = 1.0 return posterior
|
Compute the log-likelihood of each datapoint being in each cluster.
|
61,071 |
def _maximization ( X , posterior , force_weights = None ) : n_examples , n_features = X . shape n_clusters , n_examples = posterior . shape concentrations = np . zeros ( ( n_clusters , ) ) centers = np . zeros ( ( n_clusters , n_features ) ) if force_weights is None : weights = np . zeros ( ( n_clusters , ) ) for cc in range ( n_clusters ) : if force_weights is None : weights [ cc ] = np . mean ( posterior [ cc , : ] ) else : weights = force_weights X_scaled = X . copy ( ) if sp . issparse ( X ) : X_scaled . data *= posterior [ cc , : ] . repeat ( np . diff ( X_scaled . indptr ) ) else : for ee in range ( n_examples ) : X_scaled [ ee , : ] *= posterior [ cc , ee ] centers [ cc , : ] = X_scaled . sum ( axis = 0 ) center_norm = np . linalg . norm ( centers [ cc , : ] ) if center_norm > 1e-8 : centers [ cc , : ] = centers [ cc , : ] / center_norm rbar = center_norm / ( n_examples * weights [ cc ] ) concentrations [ cc ] = rbar * n_features - np . power ( rbar , 3. ) if np . abs ( rbar - 1.0 ) < 1e-10 : concentrations [ cc ] = MAX_CONTENTRATION else : concentrations [ cc ] /= 1. - np . power ( rbar , 2. ) del X_scaled return centers , weights , concentrations
|
Estimate new centers, weights, and concentrations from the posterior.
|
61,072 |
def _movMF ( X , n_clusters , posterior_type = "soft" , force_weights = None , max_iter = 300 , verbose = False , init = "random-class" , random_state = None , tol = 1e-6 , ) : random_state = check_random_state ( random_state ) n_examples , n_features = np . shape ( X ) centers = _init_unit_centers ( X , n_clusters , random_state , init ) if force_weights is None : weights = np . ones ( ( n_clusters , ) ) weights = weights / np . sum ( weights ) else : weights = force_weights concentrations = np . ones ( ( n_clusters , ) ) if verbose : print ( "Initialization complete" ) for iter in range ( max_iter ) : centers_prev = centers . copy ( ) posterior = _expectation ( X , centers , weights , concentrations , posterior_type = posterior_type ) centers , weights , concentrations = _maximization ( X , posterior , force_weights = force_weights ) tolcheck = squared_norm ( centers_prev - centers ) if tolcheck <= tol : if verbose : print ( "Converged at iteration %d: " "center shift %e within tolerance %e" % ( iter , tolcheck , tol ) ) break labels = np . zeros ( ( n_examples , ) ) for ee in range ( n_examples ) : labels [ ee ] = np . argmax ( posterior [ : , ee ] ) inertia = _inertia_from_labels ( X , centers , labels ) return centers , weights , concentrations , posterior , labels , inertia
|
Mixture of von Mises-Fisher clustering.
|
61,073 |
def movMF ( X , n_clusters , posterior_type = "soft" , force_weights = None , n_init = 10 , n_jobs = 1 , max_iter = 300 , verbose = False , init = "random-class" , random_state = None , tol = 1e-6 , copy_x = True , ) : if n_init <= 0 : raise ValueError ( "Invalid number of initializations." " n_init=%d must be bigger than zero." % n_init ) random_state = check_random_state ( random_state ) if max_iter <= 0 : raise ValueError ( "Number of iterations should be a positive number," " got %d instead" % max_iter ) best_inertia = np . infty X = as_float_array ( X , copy = copy_x ) tol = _tolerance ( X , tol ) if hasattr ( init , "__array__" ) : init = check_array ( init , dtype = X . dtype . type , copy = True ) _validate_center_shape ( X , n_clusters , init ) if n_init != 1 : warnings . warn ( "Explicit initial center position passed: " "performing only one init in k-means instead of n_init=%d" % n_init , RuntimeWarning , stacklevel = 2 , ) n_init = 1 best_centers = None best_labels = None best_weights = None best_concentrations = None best_posterior = None best_inertia = None if n_jobs == 1 : for it in range ( n_init ) : ( centers , weights , concentrations , posterior , labels , inertia ) = _movMF ( X , n_clusters , posterior_type = posterior_type , force_weights = force_weights , max_iter = max_iter , verbose = verbose , init = init , random_state = random_state , tol = tol , ) if best_inertia is None or inertia < best_inertia : best_centers = centers . copy ( ) best_labels = labels . copy ( ) best_weights = weights . copy ( ) best_concentrations = concentrations . copy ( ) best_posterior = posterior . copy ( ) best_inertia = inertia else : seeds = random_state . randint ( np . iinfo ( np . int32 ) . max , size = n_init ) results = Parallel ( n_jobs = n_jobs , verbose = 0 ) ( delayed ( _movMF ) ( X , n_clusters , posterior_type = posterior_type , force_weights = force_weights , max_iter = max_iter , verbose = verbose , init = init , random_state = random_state , tol = tol , ) for seed in seeds ) centers , weights , concentrations , posteriors , labels , inertia = zip ( * results ) best = np . argmin ( inertia ) best_labels = labels [ best ] best_inertia = inertia [ best ] best_centers = centers [ best ] best_concentrations = concentrations [ best ] best_posterior = posteriors [ best ] best_weights = weights [ best ] return ( best_centers , best_labels , best_inertia , best_weights , best_concentrations , best_posterior , )
|
Wrapper for parallelization of _movMF and running n_init times.
|
61,074 |
def _check_fit_data ( self , X ) : X = check_array ( X , accept_sparse = "csr" , dtype = [ np . float64 , np . float32 ] ) n_samples , n_features = X . shape if X . shape [ 0 ] < self . n_clusters : raise ValueError ( "n_samples=%d should be >= n_clusters=%d" % ( X . shape [ 0 ] , self . n_clusters ) ) for ee in range ( n_samples ) : if sp . issparse ( X ) : n = sp . linalg . norm ( X [ ee , : ] ) else : n = np . linalg . norm ( X [ ee , : ] ) if np . abs ( n - 1. ) > 1e-4 : raise ValueError ( "Data l2-norm must be 1, found {}" . format ( n ) ) return X
|
Verify that the number of samples given is larger than k
|
61,075 |
def fit ( self , X , y = None ) : if self . normalize : X = normalize ( X ) self . _check_force_weights ( ) random_state = check_random_state ( self . random_state ) X = self . _check_fit_data ( X ) ( self . cluster_centers_ , self . labels_ , self . inertia_ , self . weights_ , self . concentrations_ , self . posterior_ , ) = movMF ( X , self . n_clusters , posterior_type = self . posterior_type , force_weights = self . force_weights , n_init = self . n_init , n_jobs = self . n_jobs , max_iter = self . max_iter , verbose = self . verbose , init = self . init , random_state = random_state , tol = self . tol , copy_x = self . copy_x , ) return self
|
Compute mixture of von Mises-Fisher clustering.
|
61,076 |
def transform ( self , X , y = None ) : if self . normalize : X = normalize ( X ) check_is_fitted ( self , "cluster_centers_" ) X = self . _check_test_data ( X ) return self . _transform ( X )
|
Transform X to a cluster-distance space. In the new space, each dimension is the cosine distance to the cluster centers. Note that even if X is sparse, the array returned by transform will typically be dense.
|
61,077 |
def log_likelihood(covariance, precision):
    assert covariance.shape == precision.shape
    dim, _ = precision.shape
    log_likelihood_ = (
        -np.sum(covariance * precision)
        + fast_logdet(precision)
        - dim * np.log(2 * np.pi)
    )
    log_likelihood_ /= 2.
    return log_likelihood_
|
Computes the log-likelihood between the covariance and precision estimate.
|
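With $S$ the covariance, $\Theta$ the precision estimate, and $p$ the number of features, the quantity returned above is

$$\ell(S, \Theta) = \tfrac{1}{2}\left(\log\det\Theta - \operatorname{tr}(S\Theta) - p\log 2\pi\right).$$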
61,078 |
def kl_loss(covariance, precision):
    assert covariance.shape == precision.shape
    dim, _ = precision.shape
    logdet_p_dot_c = fast_logdet(np.dot(precision, covariance))
    return 0.5 * (np.sum(precision * covariance) - logdet_p_dot_c - dim)
|
Computes the KL divergence between precision estimate and reference covariance.
|
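The value returned above is the Gaussian KL divergence between the precision estimate $\Theta$ and the reference covariance $\Sigma$ over $p$ features:

$$\mathrm{KL} = \tfrac{1}{2}\left(\operatorname{tr}(\Theta\Sigma) - \log\det(\Theta\Sigma) - p\right).$$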
61,079 |
def ebic ( covariance , precision , n_samples , n_features , gamma = 0 ) : l_theta = - np . sum ( covariance * precision ) + fast_logdet ( precision ) l_theta *= n_features / 2. if np . isinf ( l_theta ) or np . isnan ( l_theta ) : return 1e10 mask = np . abs ( precision . flat ) > np . finfo ( precision . dtype ) . eps precision_nnz = ( np . sum ( mask ) - n_features ) / 2.0 return ( - 2.0 * l_theta + precision_nnz * np . log ( n_samples ) + 4.0 * precision_nnz * np . log ( n_features ) * gamma )
|
Extended Bayesian Information Criteria for model selection.
|
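Writing $\ell$ for the scaled log-likelihood term computed in the function, $n$ for n_samples, $p$ for n_features, and $k$ for the number of nonzero off-diagonal precision entries (counted once per pair), the score returned above is

$$\mathrm{EBIC} = -2\ell + k\log n + 4k\gamma\log p.$$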
61,080 |
def lattice ( prng , n_features , alpha , random_sign = False , low = 0.3 , high = 0.7 ) : degree = int ( 1 + np . round ( alpha * n_features / 2. ) ) if random_sign : sign_row = - 1.0 * np . ones ( degree ) + 2 * ( prng . uniform ( low = 0 , high = 1 , size = degree ) > .5 ) else : sign_row = - 1.0 * np . ones ( degree ) MAX_ATTEMPTS = 5 attempt = 0 row = np . zeros ( ( n_features , ) ) while np . sum ( row ) == 0 and attempt < MAX_ATTEMPTS : row = np . zeros ( ( n_features , ) ) row [ 1 : 1 + degree ] = sign_row * prng . uniform ( low = low , high = high , size = degree ) attempt += 1 if np . sum ( row ) == 0 : raise Exception ( "InvalidLattice" , "Rows sum to 0." ) return row /= np . abs ( np . sum ( row ) ) return sp . linalg . toeplitz ( c = row , r = row )
|
Returns the adjacency matrix for a lattice network.
|
61,081 |
def _to_diagonally_dominant ( mat ) : mat += np . diag ( np . sum ( mat != 0 , axis = 1 ) + 0.01 ) return mat
|
Make matrix unweighted diagonally dominant using the Laplacian.
|
61,082 |
def _to_diagonally_dominant_weighted ( mat ) : mat += np . diag ( np . sum ( np . abs ( mat ) , axis = 1 ) + 0.01 ) return mat
|
Make matrix weighted diagonally dominant using the Laplacian.
|
61,083 |
def _rescale_to_unit_diagonals(mat):
    d = np.sqrt(np.diag(mat))
    mat /= d
    mat /= d[:, np.newaxis]
    return mat
|
Rescale matrix to have unit diagonals.
|
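The rescaling above maps a matrix $M$ with positive diagonal to one with unit diagonal, exactly as a covariance is turned into a correlation matrix:

$$M'_{ij} = \frac{M_{ij}}{\sqrt{M_{ii}\,M_{jj}}}.$$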
61,084 |
def create ( self , n_features , alpha ) : n_block_features = int ( np . floor ( 1. * n_features / self . n_blocks ) ) if n_block_features * self . n_blocks != n_features : raise ValueError ( ( "Error: n_features {} not divisible by n_blocks {}." "Use n_features = n_blocks * int" ) . format ( n_features , self . n_blocks ) ) return block_adj = self . prototype_adjacency ( n_block_features , alpha ) adjacency = blocks ( self . prng , block_adj , n_blocks = self . n_blocks , chain_blocks = self . chain_blocks ) precision = self . to_precision ( adjacency ) covariance = self . to_covariance ( precision ) return covariance , precision , adjacency
|
Build a new graph with block structure.
|
61,085 |
def _sample_mvn ( n_samples , cov , prng ) : n_features , _ = cov . shape return prng . multivariate_normal ( np . zeros ( n_features ) , cov , size = n_samples )
|
Draw a multivariate normal sample from the graph defined by cov.
|
61,086 |
def _fully_random_weights ( n_features , lam_scale , prng ) : weights = np . zeros ( ( n_features , n_features ) ) n_off_diag = int ( ( n_features ** 2 - n_features ) / 2 ) weights [ np . triu_indices ( n_features , k = 1 ) ] = 0.1 * lam_scale * prng . randn ( n_off_diag ) + ( 0.25 * lam_scale ) weights [ weights < 0 ] = 0 weights = weights + weights . T return weights
|
Generate a symmetric random matrix with zeros along the diagonal.
|
61,087 |
def _fix_weights ( weight_fun , * args ) : weights = weight_fun ( * args ) return weights if _check_psd ( weights ) : return weights off_diag_sums = np . sum ( weights , axis = 1 ) mod_mat = np . linalg . inv ( np . sqrt ( np . diag ( off_diag_sums ) ) ) return np . dot ( mod_mat , weights , mod_mat )
|
Ensure random weight matrix is valid.
|
61,088 |
def _fit ( indexed_params , penalization , lam , lam_perturb , lam_scale_ , estimator , penalty_name , subsample , bootstrap , prng , X = None , ) : index = indexed_params if isinstance ( X , np . ndarray ) : local_X = X else : local_X = X . value n_samples , n_features = local_X . shape prec_is_real = False while not prec_is_real : boot_lam = None if penalization == "subsampling" : pass elif penalization == "random" : boot_lam = _fix_weights ( _random_weights , n_features , lam , lam_perturb , prng ) elif penalization == "fully-random" : boot_lam = _fix_weights ( _fully_random_weights , n_features , lam_scale_ , prng ) else : raise NotImplementedError ( ( "Only penalization = 'subsampling', " "'random', and 'fully-random' have " "been implemented. Found {}." . format ( penalization ) ) ) new_estimator = clone ( estimator ) if boot_lam is not None : new_estimator . set_params ( ** { penalty_name : boot_lam } ) num_subsamples = int ( subsample * n_samples ) rp = bootstrap ( n_samples , num_subsamples , prng ) new_estimator . fit ( local_X [ rp , : ] ) if isinstance ( new_estimator . precision_ , list ) : prec_real_bools = [ ] for prec in new_estimator . precision_ : prec_real_bools . append ( np . all ( np . isreal ( prec ) ) ) prec_is_real = np . all ( np . array ( prec_real_bools ) is True ) elif isinstance ( new_estimator . precision_ , np . ndarray ) : prec_is_real = np . all ( np . isreal ( new_estimator . precision_ ) ) else : raise ValueError ( "Estimator returned invalid precision_." ) return index , ( boot_lam , rp , new_estimator )
|
Wrapper function, outside of the instance, for fitting a single model-averaging trial.
|
61,089 |
def _spark_map ( fun , indexed_param_grid , sc , seed , X_bc ) : def _wrap_random_state ( split_index , partition ) : prng = np . random . RandomState ( seed + split_index ) yield map ( partial ( fun , prng = prng , X = X_bc ) , partition ) par_param_grid = sc . parallelize ( indexed_param_grid ) indexed_results = par_param_grid . mapPartitionsWithIndex ( _wrap_random_state ) . collect ( ) return [ item for sublist in indexed_results for item in sublist ]
|
We cannot pass a RandomState instance to each Spark worker, since it will behave identically across partitions. Instead, we explicitly handle the partitions with a newly seeded instance.
|
61,090 |
def quic_graph_lasso_ebic_manual ( X , gamma = 0 ) : print ( "QuicGraphicalLasso (manual EBIC) with:" ) print ( " mode: path" ) print ( " gamma: {}" . format ( gamma ) ) model = QuicGraphicalLasso ( lam = 1.0 , mode = "path" , init_method = "cov" , path = np . logspace ( np . log10 ( 0.01 ) , np . log10 ( 1.0 ) , num = 100 , endpoint = True ) , ) model . fit ( X ) ebic_index = model . ebic_select ( gamma = gamma ) covariance_ = model . covariance_ [ ebic_index ] precision_ = model . precision_ [ ebic_index ] lam_ = model . lam_at_index ( ebic_index ) print ( " len(path lams): {}" . format ( len ( model . path_ ) ) ) print ( " lam_scale_: {}" . format ( model . lam_scale_ ) ) print ( " lam_: {}" . format ( lam_ ) ) print ( " ebic_index: {}" . format ( ebic_index ) ) return covariance_ , precision_ , lam_
|
Run QuicGraphicalLasso with mode='path' and gamma; use the EBIC criterion for model selection.
|
61,091 |
def quic_graph_lasso_ebic ( X , gamma = 0 ) : print ( "QuicGraphicalLassoEBIC with:" ) print ( " mode: path" ) print ( " gamma: {}" . format ( gamma ) ) model = QuicGraphicalLassoEBIC ( lam = 1.0 , init_method = "cov" , gamma = gamma ) model . fit ( X ) print ( " len(path lams): {}" . format ( len ( model . path_ ) ) ) print ( " lam_scale_: {}" . format ( model . lam_scale_ ) ) print ( " lam_: {}" . format ( model . lam_ ) ) return model . covariance_ , model . precision_ , model . lam_
|
Run QuicGraphicalLassoEBIC with gamma.
|
61,092 |
def empirical ( X ) : print ( "Empirical" ) cov = np . dot ( X . T , X ) / n_samples return cov , np . linalg . inv ( cov )
|
Compute empirical covariance as baseline estimator.
|
61,093 |
def sk_ledoit_wolf ( X ) : print ( "Ledoit-Wolf (sklearn)" ) lw_cov_ , _ = ledoit_wolf ( X ) lw_prec_ = np . linalg . inv ( lw_cov_ ) return lw_cov_ , lw_prec_
|
Estimate inverse covariance via the scikit-learn ledoit_wolf function.
|
61,094 |
def _nonzero_intersection ( m , m_hat ) : n_features , _ = m . shape m_no_diag = m . copy ( ) m_no_diag [ np . diag_indices ( n_features ) ] = 0 m_hat_no_diag = m_hat . copy ( ) m_hat_no_diag [ np . diag_indices ( n_features ) ] = 0 m_hat_nnz = len ( np . nonzero ( m_hat_no_diag . flat ) [ 0 ] ) m_nnz = len ( np . nonzero ( m_no_diag . flat ) [ 0 ] ) intersection_nnz = len ( np . intersect1d ( np . nonzero ( m_no_diag . flat ) [ 0 ] , np . nonzero ( m_hat_no_diag . flat ) [ 0 ] ) ) return m_nnz , m_hat_nnz , intersection_nnz
|
Count the number of nonzeros in and between m and m_hat.
|
61,095 |
def support_false_positive_count ( m , m_hat ) : m_nnz , m_hat_nnz , intersection_nnz = _nonzero_intersection ( m , m_hat ) return int ( ( m_hat_nnz - intersection_nnz ) / 2.0 )
|
Count the number of false-positive support elements in m_hat, in one triangle, not including the diagonal.
|
61,096 |
def support_false_negative_count ( m , m_hat ) : m_nnz , m_hat_nnz , intersection_nnz = _nonzero_intersection ( m , m_hat ) return int ( ( m_nnz - intersection_nnz ) / 2.0 )
|
Count the number of false-negative support elements in m_hat, in one triangle, not including the diagonal.
|
61,097 |
def support_difference_count ( m , m_hat ) : m_nnz , m_hat_nnz , intersection_nnz = _nonzero_intersection ( m , m_hat ) return int ( ( m_nnz + m_hat_nnz - ( 2 * intersection_nnz ) ) / 2.0 )
|
Count the number of different elements in the support, in one triangle, not including the diagonal.
|
61,098 |
def has_exact_support ( m , m_hat ) : m_nnz , m_hat_nnz , intersection_nnz = _nonzero_intersection ( m , m_hat ) return int ( ( m_nnz + m_hat_nnz - ( 2 * intersection_nnz ) ) == 0 )
|
Returns 1 if support_difference_count is zero, 0 otherwise.
|
61,099 |
def has_approx_support ( m , m_hat , prob = 0.01 ) : m_nz = np . flatnonzero ( np . triu ( m , 1 ) ) m_hat_nz = np . flatnonzero ( np . triu ( m_hat , 1 ) ) upper_diagonal_mask = np . flatnonzero ( np . triu ( np . ones ( m . shape ) , 1 ) ) not_m_nz = np . setdiff1d ( upper_diagonal_mask , m_nz ) intersection = np . in1d ( m_hat_nz , m_nz ) not_intersection = np . in1d ( m_hat_nz , not_m_nz ) true_positive_rate = 0.0 if len ( m_nz ) : true_positive_rate = 1. * np . sum ( intersection ) / len ( m_nz ) true_negative_rate = 1. - true_positive_rate false_positive_rate = 0.0 if len ( not_m_nz ) : false_positive_rate = 1. * np . sum ( not_intersection ) / len ( not_m_nz ) return int ( np . less_equal ( true_negative_rate + false_positive_rate , prob ) )
|
Returns 1 if the model selection error is less than or equal to the prob rate, 0 otherwise.
|