idx: int64 (0 – 63k)
question: string (lengths 61 – 4.03k)
target: string (lengths 6 – 1.23k)
2,700
def accept_operator(self, precedence):
    match = grammar.infix(self.tokens)
    if not match:
        return
    if match.operator.precedence < precedence:
        return
    return self.tokens.accept(grammar.infix)
Accept the next binary operator only if it's of higher precedence.
2,701
def operator(self, lhs, min_precedence):
    while self.accept_operator(precedence=min_precedence):
        operator = self.tokens.matched.operator
        if operator.suffix:
            rhs = self.expression()
            self.tokens.expect(common_grammar.match_tokens(operator.suffix))
            rhs.end = self.tokens.matched.end
        elif operator.name == ".":
            rhs = self.dot_rhs()
        else:
            rhs = self.atom()
        next_min_precedence = operator.precedence
        if operator.assoc == "left":
            next_min_precedence += 1
        while self.tokens.match(grammar.infix):
            if (self.tokens.matched.operator.precedence
                    < next_min_precedence):
                break
            rhs = self.operator(rhs,
                                self.tokens.matched.operator.precedence)
        lhs = operator.handler(lhs, rhs, start=lhs.start, end=rhs.end,
                               source=self.original)
    return lhs
Climb operator precedence as long as there are operators.
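The method above is an instance of the classic precedence-climbing technique. For reference, a minimal standalone sketch of the same idea; the token format, operator table, and names are illustrative, not part of this parser:

OPS = {"+": (1, "left"), "*": (2, "left"), "^": (3, "right")}

def climb(tokens, pos=0, min_prec=1):
    """Parse a flat token list like [2, '+', 3, '*', 4] into a tuple AST."""
    lhs = tokens[pos]
    pos += 1
    while pos < len(tokens) and tokens[pos] in OPS:
        op = tokens[pos]
        prec, assoc = OPS[op]
        if prec < min_prec:
            break
        # Left-associative operators raise the bar for the right-hand side.
        next_min = prec + 1 if assoc == "left" else prec
        rhs, pos = climb(tokens, pos + 1, next_min)
        lhs = (op, lhs, rhs)
    return lhs, pos

# climb([2, '+', 3, '*', 4])[0] == ('+', 2, ('*', 3, 4))
# climb([2, '^', 3, '^', 2])[0] == ('^', 2, ('^', 3, 2))   # right-assoc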
2,702
def select(self):
    if self.tokens.accept(grammar.select_any):
        return self.select_any()
    if self.tokens.accept(grammar.select_all):
        self.tokens.expect(grammar.select_from)
        return self.select_from()
    return self.select_what()
First part of an SQL query.
2,703
def _guess_name_of(self, expr):
    if isinstance(expr, ast.Var):
        return expr.value
    if isinstance(expr, ast.Resolve):
        return expr.rhs.value
    if isinstance(expr, ast.Select) and isinstance(expr.rhs, ast.Literal):
        name = self._guess_name_of(expr.lhs)
        if name is not None:
            return "%s_%s" % (name, expr.rhs.value)
    if isinstance(expr, ast.Apply) and isinstance(expr.func, ast.Var):
        return expr.func.value
Tries to guess what variable name expr ends in.
2,704
def builtin(self, keyword):
    keyword_start = self.tokens.matched.first.start
    keyword_end = self.tokens.matched.first.end
    self.tokens.expect(common_grammar.lparen)
    if self.tokens.matched.start != keyword_end:
        return self.error(
            "No whitespace allowed between function and lparen.",
            start_token=self.tokens.matched.first)
    expr_type = grammar.BUILTINS[keyword.lower()]
    arguments = [self.expression()]
    while self.tokens.accept(common_grammar.comma):
        arguments.append(self.expression())
    self.tokens.expect(common_grammar.rparen)
    if expr_type.arity and expr_type.arity != len(arguments):
        return self.error(
            "%s expects %d arguments, but was passed %d." %
            (keyword, expr_type.arity, len(arguments)),
            start_token=self.tokens.matched.first)
    return expr_type(*arguments, start=keyword_start,
                     end=self.tokens.matched.end, source=self.original)
Parse the pseudo-function application subgrammar.
2,705
def application(self, func):
    start = self.tokens.matched.start
    if self.tokens.accept(common_grammar.rparen):
        return ast.Apply(func, start=start, end=self.tokens.matched.end,
                         source=self.original)
    arguments = [self.expression()]
    while self.tokens.accept(common_grammar.comma):
        arguments.append(self.expression())
    self.tokens.expect(common_grammar.rparen)
    return ast.Apply(func, *arguments, start=start,
                     end=self.tokens.matched.end, source=self.original)
Parse the function application subgrammar.
2,706
def get_singleton(self):
    only_value = None
    for value in six.itervalues(self.ordered_dict):
        if only_value is not None:
            raise ValueError("%r is not a singleton." % self)
        only_value = value
    if only_value is self.__UnsetSentinel or only_value is None:
        raise ValueError("%r is empty." % self)
    return only_value
If the row only has one column, return that value; otherwise raise.
2,707
def _cpu(self):
    value = int(psutil.cpu_percent())
    set_metric("cpu", value, category=self.category)
    gauge("cpu", value)
Record CPU usage.
2,708
def _mem(self):
    value = int(psutil.virtual_memory().percent)
    set_metric("memory", value, category=self.category)
    gauge("memory", value)
Record Memory usage.
2,709
def _disk(self):
    mountpoints = [p.mountpoint for p in psutil.disk_partitions()
                   if p.device.endswith(self.device)]
    if len(mountpoints) != 1:
        raise CommandError("Unknown device: {0}".format(self.device))
    value = int(psutil.disk_usage(mountpoints[0]).percent)
    set_metric("disk-{0}".format(self.device), value, category=self.category)
    gauge("disk-{0}".format(self.device), value)
Record Disk usage.
2,710
def _net(self):
    data = psutil.network_io_counters(pernic=True)
    if self.device not in data:
        raise CommandError("Unknown device: {0}".format(self.device))
    value = data[self.device].bytes_sent
    metric("net-{0}-sent".format(self.device), value, category=self.category)
    gauge("net-{0}-sent".format(self.device), value)
    value = data[self.device].bytes_recv
    metric("net-{0}-recv".format(self.device), value, category=self.category)
    # Record the received-bytes gauge as well, mirroring the sent pair above.
    gauge("net-{0}-recv".format(self.device), value)
Record Network usage.
2,711
def implements(obj, protocol):
    if isinstance(obj, type):
        raise TypeError("First argument to implements must be an instance. "
                        "Got %r." % obj)
    return isinstance(obj, protocol) or issubclass(AnyType, protocol)
Does the object obj implement the protocol?
2,712
def isa(cls, protocol):
    if not isinstance(cls, type):
        raise TypeError("First argument to isa must be a type. Got %s." %
                        repr(cls))
    if not isinstance(protocol, type):
        raise TypeError(("Second argument to isa must be a type or a "
                         "Protocol. Got an instance of %r.") % type(protocol))
    return issubclass(cls, protocol) or issubclass(AnyType, protocol)
Does the type cls participate in the protocol?
2,713
def implemented(cls, for_type):
    for function in cls.required():
        if not function.implemented_for_type(for_type):
            raise TypeError(
                "%r doesn't implement %r so it cannot participate in "
                "the protocol %r." %
                (for_type, function.func.__name__, cls))
    cls.register(for_type)
Assert that protocol cls is implemented for type for_type.
2,714
def implicit_static(cls, for_type=None, for_types=None):
    for type_ in cls.__get_type_args(for_type, for_types):
        implementations = {}
        for function in cls.required():
            method = getattr(type_, function.__name__, None)
            if not callable(method):
                raise TypeError(
                    "%s.implicit invocation on type %r is missing instance "
                    "method %r." % (cls.__name__, type_, function.__name__))
            implementations[function] = method
        for function in cls.optional():
            method = getattr(type_, function.__name__, None)
            if callable(method):
                implementations[function] = method
        return cls.implement(for_type=type_,
                             implementations=implementations)
Automatically generate implementations for a type.
2,715
def _build_late_dispatcher(func_name):
    def _late_dynamic_dispatcher(obj, *args):
        method = getattr(obj, func_name, None)
        if not callable(method):
            raise NotImplementedError(
                "Instance method %r is not implemented by %r." %
                (func_name, obj))
        return method(*args)
    return _late_dynamic_dispatcher
Return a function that calls method func_name on objects.
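A hedged usage sketch of the factory above; the call sites are hypothetical:

# The returned dispatcher defers method lookup until call time:
dispatch_upper = _build_late_dispatcher("upper")
dispatch_upper("hello")   # resolves to "hello".upper() -> 'HELLO'
dispatch_upper(42)        # raises NotImplementedError: int has no 'upper'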
2,716
def implicit_dynamic(cls, for_type=None, for_types=None):
    for type_ in cls.__get_type_args(for_type, for_types):
        implementations = {}
        for function in cls.functions():
            implementations[function] = cls._build_late_dispatcher(
                func_name=function.__name__)
        cls.implement(for_type=type_, implementations=implementations)
Automatically generate late dynamic dispatchers to type.
2,717
def implement(cls, implementations, for_type=None, for_types=None):
    for type_ in cls.__get_type_args(for_type, for_types):
        cls._implement_for_type(for_type=type_,
                                implementations=implementations)
Provide protocol implementation for a type.
2,718
def _parse_query(self, source):
    if self.OBJECTFILTER_WORDS.search(source):
        syntax_ = "objectfilter"
    else:
        syntax_ = None
    return query.Query(source, syntax=syntax_)
Parse one of the rules as either objectfilter or dottysql.
2,719
def _parse_tagfile(self):
    rules = None
    tag = None
    for line in self.original:
        match = self.TAG_DECL_LINE.match(line)
        if match:
            if tag and rules:
                yield tag, rules
            rules = []
            tag = match.group(1)
            continue
        match = self.TAG_RULE_LINE.match(line)
        if match:
            source = match.group(1)
            rules.append(self._parse_query(source))
    # Yield the final tag's rules once input is exhausted.
    if tag and rules:
        yield tag, rules
Parse the tagfile and yield (tag_name, list of rule ASTs) tuples.
2,720
def normalize(expr):
    lhs = normalize(expr.lhs)
    rhs = normalize(expr.rhs)
    return type(expr)(lhs, rhs, start=lhs.start, end=rhs.end)
Normalize both sides but don't eliminate the expression.
2,721
def normalize(expr):
    args = [normalize(arg) for arg in expr.args]
    return type(expr)(expr.func, *args, start=expr.start, end=expr.end)
No elimination, but normalize arguments.
2,722
def normalize(expr):
    children = []
    for child in expr.children:
        branch = normalize(child)
        if branch is None:
            continue
        if type(branch) is type(expr):
            children.extend(branch.children)
        else:
            children.append(branch)
    if len(children) == 0:
        return None
    if len(children) == 1:
        return children[0]
    return type(expr)(*children, start=children[0].start,
                      end=children[-1].end)
Pass through n-ary expressions and eliminate empty branches.
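A standalone sketch of the same flattening idea on plain tuples, where ('and', ...) stands in for an n-ary AST node; names are illustrative:

def flatten(node):
    if not isinstance(node, tuple):
        return node
    op, children = node[0], []
    for child in (flatten(c) for c in node[1:]):
        if child is None:
            continue  # eliminate empty branches
        if isinstance(child, tuple) and child[0] == op:
            children.extend(child[1:])  # merge same-typed n-ary children
        else:
            children.append(child)
    if not children:
        return None
    if len(children) == 1:
        return children[0]
    return (op,) + tuple(children)

# flatten(('and', ('and', 'a', 'b'), None, 'c')) == ('and', 'a', 'b', 'c')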
2,723
def _category_slugs(self, category):
    key = self._category_key(category)
    slugs = self.r.smembers(key)
    return slugs
Returns a set of the metric slugs for the given category.
2,724
def _granularities(self):
    keep = False
    for g in GRANULARITIES:
        if g == app_settings.MIN_GRANULARITY and not keep:
            keep = True
        elif g == app_settings.MAX_GRANULARITY and keep:
            keep = False
            yield g
        if keep:
            yield g
Returns a generator of all possible granularities based on the MIN_GRANULARITY and MAX_GRANULARITY settings.
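To illustrate the generator's windowing behavior, a self-contained sketch with the settings inlined; the min/max values are assumed for the example:

GRANULARITIES = ['seconds', 'minutes', 'hours', 'day', 'week', 'month', 'year']

def granularities(min_g='hours', max_g='month'):
    keep = False
    for g in GRANULARITIES:
        if g == min_g and not keep:
            keep = True       # start yielding at the minimum granularity
        elif g == max_g and keep:
            keep = False      # yield the maximum, then stop
            yield g
        if keep:
            yield g

print(list(granularities()))  # ['hours', 'day', 'week', 'month']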
2,725
def _build_key_patterns(self, slug, date):
    patts = OrderedDict()
    metric_key_patterns = self._metric_key_patterns()
    for g in self._granularities():
        date_string = date.strftime(metric_key_patterns[g]["date_format"])
        patts[g] = metric_key_patterns[g]["key"].format(slug, date_string)
    return patts
Builds an OrderedDict of metric keys and patterns for the given slug and date.
2,726
def _build_keys(self, slug, date=None, granularity='all'):
    slug = slugify(slug)
    if date is None:
        date = datetime.utcnow()
    patts = self._build_key_patterns(slug, date)
    if granularity == "all":
        return list(patts.values())
    return [patts[granularity]]
Builds redis keys used to store metrics.
2,727
def delete_metric(self, slug):
    prefix = "m:{0}:*".format(slug)
    keys = self.r.keys(prefix)
    self.r.delete(*keys)
    self.r.srem(self._metric_slugs_key, slug)
Removes all keys for the given slug.
2,728
def metric(self, slug, num=1, category=None, expire=None, date=None):
    self.r.sadd(self._metric_slugs_key, slug)
    if category:
        self._categorize(slug, category)
    keys = self._build_keys(slug, date=date)
    pipe = self.r.pipeline()
    for key in keys:
        pipe.incr(key, num)
        if expire:
            pipe.expire(key, expire)
    pipe.execute()
Records a metric, creating it if it doesn't exist or incrementing it if it does. All metrics are prefixed with 'm' and automatically aggregate for Seconds, Minutes, Hours, Day, Week, Month, and Year.
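A usage sketch, assuming `r` is an instance of the client class these methods belong to (the instance name is hypothetical):

r.metric('new-user-signup')                       # create or increment by 1
r.metric('downloads', num=5, category='files')    # increment by 5, categorized
r.metric('burst', num=1, expire=3600)             # keys expire after an hour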
2,729
def get_metric(self, slug):
    results = OrderedDict()
    granularities = self._granularities()
    keys = self._build_keys(slug)
    for granularity, key in zip(granularities, keys):
        results[granularity] = self.r.get(key)
    return results
Get the current values for a metric.
2,730
def get_metrics(self, slug_list):
    keys = ['seconds', 'minutes', 'hours', 'day', 'week', 'month', 'year']
    key_mapping = {gran: key for gran, key in zip(GRANULARITIES, keys)}
    keys = [key_mapping[gran] for gran in self._granularities()]
    results = []
    for slug in slug_list:
        metrics = self.r.mget(*self._build_keys(slug))
        if any(metrics):
            results.append((slug, dict(zip(keys, metrics))))
    return results
Get the metrics for multiple slugs.
2,731
def get_category_metrics(self, category):
    slug_list = self._category_slugs(category)
    return self.get_metrics(slug_list)
Get metrics belonging to the given category.
2,732
def delete_category(self, category):
    category_key = self._category_key(category)
    self.r.delete(category_key)
    self.r.srem(self._categories_key, category)
Removes the category from Redis. This doesn't touch the metrics; they simply become uncategorized.
2,733
def get_metric_history(self, slugs, since=None, to=None, granularity='daily'):
    if not isinstance(slugs, list):
        slugs = [slugs]
    keys = []
    for slug in slugs:
        for date in self._date_range(granularity, since, to):
            keys += self._build_keys(slug, date, granularity)
    keys = list(dedupe(keys))
    results = [0 if v is None else v for v in self.r.mget(keys)]
    results = zip(keys, results)
    return sorted(results, key=lambda t: t[0])
Get history for one or more metrics.
2,734
def gauge(self, slug, current_value):
    k = self._gauge_key(slug)
    self.r.sadd(self._gauge_slugs_key, slug)
    self.r.set(k, current_value)
Set the value for a Gauge.
2,735
def delete_gauge(self, slug):
    key = self._gauge_key(slug)
    self.r.delete(key)
    self.r.srem(self._gauge_slugs_key, slug)
Removes all gauges with the given slug.
2,736
def gauge(slug, maximum=9000, size=200, coerce='float'):
    coerce_options = {'float': float, 'int': int, 'str': str}
    coerce = coerce_options.get(coerce, float)
    redis = get_r()
    value = coerce(redis.get_gauge(slug))
    if value < maximum and coerce == float:
        diff = round(maximum - value, 2)
    elif value < maximum:
        diff = maximum - value
    else:
        diff = 0
    return {
        'slug': slug,
        'current_value': value,
        'max_value': maximum,
        'size': size,
        'diff': diff,
    }
Include a Donut Chart for the specified Gauge.
2,737
def metric_history(slug, granularity="daily", since=None, to=None,
                   with_data_table=False):
    r = get_r()
    try:
        if since and len(since) == 10:
            since = datetime.strptime(since, "%Y-%m-%d")
        elif since and len(since) == 19:
            since = datetime.strptime(since, "%Y-%m-%d %H:%M:%S")
        if to and len(to) == 10:
            to = datetime.strptime(to, "%Y-%m-%d")
        elif to and len(to) == 19:
            to = datetime.strptime(to, "%Y-%m-%d %H:%M:%S")
    except (TypeError, ValueError):
        pass
    metric_history = r.get_metric_history(slugs=slug, since=since, to=to,
                                          granularity=granularity)
    return {
        'since': since,
        'to': to,
        'slug': slug,
        'granularity': granularity,
        'metric_history': metric_history,
        'with_data_table': with_data_table,
    }
Template Tag to display a metric's history.
2,738
def aggregate_detail(slug_list, with_data_table=False):
    r = get_r()
    metrics_data = []
    granularities = r._granularities()
    keys = ['seconds', 'minutes', 'hours', 'day', 'week', 'month', 'year']
    key_mapping = {gran: key for gran, key in zip(GRANULARITIES, keys)}
    keys = [key_mapping[gran] for gran in granularities]
    for slug, data in r.get_metrics(slug_list):
        values = [data[t] for t in keys]
        metrics_data.append((slug, values))
    return {
        'chart_id': "metric-aggregate-{0}".format("-".join(slug_list)),
        'slugs': slug_list,
        'metrics': metrics_data,
        'with_data_table': with_data_table,
        'granularities': [g.title() for g in keys],
    }
Template Tag to display multiple metrics.
2,739
def aggregate_history(slugs, granularity="daily", since=None,
                      with_data_table=False):
    r = get_r()
    slugs = list(slugs)
    try:
        if since and len(since) == 10:
            since = datetime.strptime(since, "%Y-%m-%d")
        elif since and len(since) == 19:
            since = datetime.strptime(since, "%Y-%m-%d %H:%M:%S")
    except (TypeError, ValueError):
        pass
    history = r.get_metric_history_chart_data(slugs=slugs, since=since,
                                              granularity=granularity)
    return {
        'chart_id': "metric-aggregate-history-{0}".format("-".join(slugs)),
        'slugs': slugs,
        'since': since,
        'granularity': granularity,
        'metric_history': history,
        'with_data_table': with_data_table,
    }
Template Tag to display history for multiple metrics.
2,740
def user_func(func, arg_types=None, return_type=None):
    class UserFunction(std_core.TypedFunction):
        name = func.__name__

        def __call__(self, *args, **kwargs):
            return func(*args, **kwargs)

        @classmethod
        def reflect_static_args(cls):
            return arg_types

        @classmethod
        def reflect_static_return(cls):
            return return_type

    return UserFunction()
Create an EFILTER-callable version of function func.
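A small usage sketch of user_func; the wrapped function is arbitrary:

def double(x):
    return x * 2

efilter_double = user_func(double)
print(efilter_double(21))     # => 42, callable from EFILTER queries
print(efilter_double.name)    # => 'double'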
2,741
def infer(query, replacements=None, root_type=None,
          libs=("stdcore", "stdmath")):
    if root_type:
        type_scope = scope.ScopeStack(std_core.MODULE, root_type)
    else:
        type_scope = scope.ScopeStack(std_core.MODULE)
    stdcore_included = False
    for lib in libs:
        if lib == "stdcore":
            stdcore_included = True
            continue
        module = std_core.LibraryModule.ALL_MODULES.get(lib)
        if not module:
            raise TypeError("No standard library module %r." % lib)
        type_scope = scope.ScopeStack(module, type_scope)
    if not stdcore_included:
        raise TypeError("'stdcore' must always be included.")
    query = q.Query(query, params=replacements)
    return infer_type.infer_type(query, type_scope)
Determine the type of the query's output without actually running it.
2,742
def search(query, data, replacements=None):
    query = q.Query(query, params=replacements)
    for entry in data:
        if solve.solve(query, entry).value:
            yield entry
Yield objects from data that match the query.
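A hedged usage sketch, assuming EFILTER can solve queries against plain dicts; the data and query are illustrative:

hosts = [{"name": "web1", "cpu": 93}, {"name": "db1", "cpu": 12}]
for row in search("cpu > 50", hosts):
    print(row["name"])    # -> web1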
2,743
def peek(self, steps=1):
    try:
        tokens = iter(self)
        for _ in six.moves.range(steps):
            next(tokens)
        return next(tokens)
    except StopIteration:
        return None
Look ahead; doesn't affect current_token and next_token.
2,744
def skip(self, steps=1):
    for _ in six.moves.range(steps):
        self.next_token()
Skip ahead by steps tokens.
2,745
def next_token(self):
    if self.lookahead:
        self.current_token = self.lookahead.popleft()
        return self.current_token
    self.current_token = self._parse_next_token()
    return self.current_token
Returns the next logical token, advancing the tokenizer.
2,746
def _parse_next_token(self):
    while self._position < self.limit:
        token = self._next_pattern()
        if token:
            return token
    return None
Will parse patterns until it gets to the next token or EOF.
2,747
def _next_pattern(self):
    current_state = self.state_stack[-1]
    position = self._position
    for pattern in self.patterns:
        if current_state not in pattern.states:
            continue
        m = pattern.regex.match(self.source, position)
        if not m:
            continue
        position = m.end()
        token = None
        if pattern.next_state:
            self.state_stack.append(pattern.next_state)
        if pattern.action:
            callback = getattr(self, pattern.action, None)
            if callback is None:
                raise RuntimeError(
                    "No method defined for pattern action %s!" %
                    pattern.action)
            # Check named groups, not group values, for a 'token' group.
            if "token" in m.groupdict():
                value = m.group("token")
            else:
                value = m.group(0)
            token = callback(string=value, match=m, pattern=pattern)
        self._position = position
        return token
    self._error("Don't know how to match next. Did you forget quotes?",
                start=self._position, end=self._position + 1)
Parses the next pattern by matching each in turn.
2,748
def _error(self, message, start, end=None):
    raise errors.EfilterParseError(
        source=self.source, start=start, end=end, message=message)
Raise a nice error with the token highlighted.
2,749
def emit(self, string, match, pattern, **_):
    return grammar.Token(name=pattern.name, value=string,
                         start=match.start(), end=match.end())
Emits a token using the current pattern match and pattern label.
2,750
def get_pkg_version():
    try:
        with open("PKG-INFO", "r") as fp:
            rgx = re.compile(r"Version: (\d+)")
            for line in fp.readlines():
                match = rgx.match(line)
                if match:
                    return match.group(1)
    except IOError:
        return None
Get version string by parsing PKG-INFO.
2,751
def get_version(dev_version=False):
    if dev_version:
        version = git_dev_version()
        if not version:
            raise RuntimeError("Could not generate dev version from git.")
        return version
    return "1!%d.%d" % (MAJOR, MINOR)
Generates a version string.
2,752
def getvalues(self):
    idx = 0
    generator = self._generator_func()
    first_value = next(generator)
    self._value_type = type(first_value)
    yield first_value
    for idx, value in enumerate(generator):
        if not isinstance(value, self._value_type):
            raise TypeError(
                "All values of a repeated var must be of the same type."
                " First argument was of type %r, but argument %r is of"
                " type %r." %
                (self._value_type, value, repeated.value_type(value)))
        self._watermark = max(self._watermark, idx + 1)
        yield value
    if idx + 1 < self._watermark:
        raise ValueError(
            "LazyRepetition %r was previously able to iterate its"
            " generator up to idx %d, but this time iteration stopped after"
            " idx %d! Generator function %r is not stable." %
            (self, self._watermark, idx + 1, self._generator_func))
    if self._count is not None and self._watermark >= self._count:
        raise ValueError(
            "LazyRepetition %r previously iterated only up to idx %d but"
            " was now able to reach idx %d! Generator function %r is not"
            " stable." %
            (self, self._count - 1, idx + 1, self._generator_func))
    self._count = self._watermark + 1
Yields all the values from generator_func and type-checks.
2,753
def value_eq(self, other):
    self_sorted = ordered.ordered(self.getvalues())
    other_sorted = ordered.ordered(repeated.getvalues(other))
    return self_sorted == other_sorted
Sorted comparison of values.
2,754
def call_audit(func):
    def audited_func(*args, **kwargs):
        import traceback
        stack = traceback.extract_stack()
        r = func(*args, **kwargs)
        func_name = func.__name__
        print("@depth %d, trace %s -> %s(*%r, **%r) => %r" % (
            len(stack),
            " -> ".join("%s:%d:%s" % x[0:3] for x in stack[-5:-2]),
            func_name, args, kwargs, r))
        return r
    return audited_func
Print a detailed audit of all calls to this function.
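Since call_audit returns a wrapper, it can be used as a decorator; a usage sketch:

@call_audit
def add(a, b):
    return a + b

add(1, 2)   # prints the stack depth, a trimmed trace, the args, and => 3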
2,755
def _class_dispatch(args, kwargs):
    _ = kwargs
    if not args:
        raise ValueError(
            "Multimethods must be passed at least one positional arg.")
    if not isinstance(args[0], type):
        raise TypeError(
            "class_multimethod must be called with a type, not instance.")
    return args[0]
See class_multimethod.
2,756
def prefer_type(self, prefer, over):
    self._write_lock.acquire()
    try:
        if self._preferred(preferred=over, over=prefer):
            raise ValueError("Type %r is already preferred over %r." %
                             (over, prefer))
        prefs = self._prefer_table.setdefault(prefer, set())
        prefs.add(over)
    finally:
        self._write_lock.release()
Prefer one type over another type, all else being equivalent.
2,757
def _find_and_cache_best_function(self, dispatch_type):
    result = self._dispatch_table.get(dispatch_type)
    if result:
        return result
    with self._write_lock:
        try:
            dispatch_mro = dispatch_type.mro()
        except TypeError:
            dispatch_mro = ()
        best_match = None
        result_type = None
        for candidate_type, candidate_func in self.implementations:
            if not issubclass(dispatch_type, candidate_type):
                continue
            try:
                match = dispatch_mro.index(candidate_type)
            except ValueError:
                match = None
            if best_match is None:
                if result and match is None:
                    if self._preferred(candidate_type, over=result_type):
                        result = candidate_func
                        result_type = candidate_type
                    elif self._preferred(result_type, over=candidate_type):
                        pass
                    else:
                        raise TypeError(
                            "Two candidate implementations found for "
                            "multimethod function %s (dispatch type %s) "
                            "and neither is preferred." %
                            (self.func_name, dispatch_type))
                else:
                    result = candidate_func
                    result_type = candidate_type
                    best_match = match
            if (match or 0) < (best_match or 0):
                result = candidate_func
                result_type = candidate_type
                best_match = match
        self._dispatch_table[dispatch_type] = result
        return result
Finds the best implementation of this function given a type.
2,758
def implementation(self, for_type=None, for_types=None):
    for_types = self.__get_types(for_type, for_types)

    def _decorator(implementation):
        self.implement(implementation, for_types=for_types)
        return self

    return _decorator
Return a decorator that will register the implementation.
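A usage sketch of the decorator form, assuming `length` is an existing multimethod instance (names hypothetical). Because _decorator returns self, the decorated name is rebound to the multimethod itself:

@length.implementation(for_type=list)
def length(x):
    return len(x)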
2,759
def implement(self, implementation, for_type=None, for_types=None):
    unbound_implementation = self.__get_unbound_function(implementation)
    for_types = self.__get_types(for_type, for_types)
    for t in for_types:
        self._write_lock.acquire()
        try:
            self.implementations.append((t, unbound_implementation))
        finally:
            self._write_lock.release()
Registers an implementing function for for_type.
2,760
def get_context_data(self, **kwargs):
    data = super(GaugesView, self).get_context_data(**kwargs)
    data.update({'gauges': get_r().gauge_slugs()})
    return data
Includes the Gauge slugs and data in the context.
2,761
def get_success_url(self):
    slugs = '+'.join(self.metric_slugs)
    url = reverse('redis_metric_aggregate_detail', args=[slugs])
    return url.replace("%2B", "+")
Reverses the redis_metric_aggregate_detail URL using self.metric_slugs as an argument.
2,762
def form_valid(self, form):
    self.metric_slugs = [k.strip() for k in form.cleaned_data['metrics']]
    return super(AggregateFormView, self).form_valid(form)
Pull the metrics from the submitted form and store them as a list of strings in self.metric_slugs.
2,763
def get(self, *args, **kwargs):
    self.initial = {"category_name": kwargs.get('category_name', None)}
    return super(CategoryFormView, self).get(*args, **kwargs)
See if this view was called with a specified category.
2,764
def rerun(self):
    # Python 2 code: print is a statement here, as elsewhere in this module.
    self._state = states.SCHEDULING
    self._completed_flag = threading.Event()
    print 'Pipeline %s in %s state' % (self._uid, self._state)
Rerun sets the state of the Pipeline to SCHEDULING so that the Pipeline can be checked for new stages.
2,765
def from_dict(self, d):
    # Note: ValueError and TypeError here are the package's custom exception
    # classes, which accept keyword arguments.
    if 'uid' in d:
        if d['uid']:
            self._uid = d['uid']
    if 'name' in d:
        if d['name']:
            self._name = d['name']
    if 'state' in d:
        if isinstance(d['state'], str) or isinstance(d['state'], unicode):
            if d['state'] in states._pipeline_state_values.keys():
                self._state = d['state']
            else:
                raise ValueError(
                    obj=self._uid,
                    attribute='state',
                    expected_value=states._pipeline_state_values.keys(),
                    actual_value=d['state'])
        else:
            raise TypeError(entity='state', expected_type=str,
                            actual_type=type(d['state']))
    else:
        self._state = states.INITIAL
    if 'state_history' in d:
        if isinstance(d['state_history'], list):
            self._state_history = d['state_history']
        else:
            raise TypeError(entity='state_history', expected_type=list,
                            actual_type=type(d['state_history']))
    if 'completed' in d:
        if isinstance(d['completed'], bool):
            if d['completed']:
                self._completed_flag.set()
        else:
            raise TypeError(entity='completed', expected_type=bool,
                            actual_type=type(d['completed']))
Create a Pipeline from a dictionary. The change is made in place.
2,766
def auto_retry(fun):
    @functools.wraps(fun)
    def decorated(instance, *args, **kwargs):
        cfg = instance._retry_config
        remaining_tries = cfg.retry_attempts
        current_wait = cfg.retry_wait
        retry_backoff = cfg.retry_backoff
        last_error = None
        while remaining_tries >= 0:
            try:
                return fun(instance, *args, **kwargs)
            except socket.error as e:
                last_error = e
                instance._retry_logger.warning('Connection failed: %s', e)
            remaining_tries -= 1
            if remaining_tries == 0:
                break
            time.sleep(current_wait)
            current_wait *= retry_backoff
        raise last_error
    return decorated
Decorator for retrying method calls, based on instance parameters.
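A usage sketch of auto_retry; the client class and its config values are hypothetical, but they match the attributes the decorator reads (_retry_config, _retry_logger):

import logging
import socket

class RetryConfig(object):
    retry_attempts = 3
    retry_wait = 0.5
    retry_backoff = 2   # waits 0.5s, then 1s, between attempts

class Client(object):
    _retry_config = RetryConfig()
    _retry_logger = logging.getLogger("client")

    @auto_retry
    def fetch(self):
        # Always fails, to demonstrate the retry loop.
        raise socket.error("connection refused")

Client().fetch()   # logs three warnings, then re-raises socket.error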
2,767
def iso_mesh_line(vertices, tris, vertex_data, levels):
    lines = None
    connects = None
    vertex_level = None
    level_index = None
    if not all([isinstance(x, np.ndarray) for x in
                (vertices, tris, vertex_data, levels)]):
        raise ValueError('all inputs must be numpy arrays')
    if vertices.shape[1] <= 3:
        verts = vertices
    elif vertices.shape[1] == 4:
        verts = vertices[:, :-1]
    else:
        verts = None
    if (verts is not None and tris.shape[1] == 3 and
            vertex_data.shape[0] == verts.shape[0]):
        edges = np.vstack((tris.reshape((-1)),
                           np.roll(tris, -1, axis=1).reshape((-1)))).T
        edge_datas = vertex_data[edges]
        edge_coors = verts[edges].reshape(tris.shape[0] * 3, 2, 3)
        for lev in levels:
            # Select the edges that cross the current level.
            index = (edge_datas >= lev)
            index = index[:, 0] ^ index[:, 1]
            edge_datas_Ok = edge_datas[index, :]
            xyz = edge_coors[index]
            # Interpolate the crossing point along each selected edge.
            ratio = np.array([(lev - edge_datas_Ok[:, 0]) /
                              (edge_datas_Ok[:, 1] - edge_datas_Ok[:, 0])])
            point = xyz[:, 0, :] + ratio.T * (xyz[:, 1, :] - xyz[:, 0, :])
            nbr = point.shape[0] // 2
            if connects is not None:
                connect = np.arange(0, nbr * 2).reshape((nbr, 2)) + \
                    len(lines)
                connects = np.append(connects, connect, axis=0)
                lines = np.append(lines, point, axis=0)
                vertex_level = np.append(vertex_level,
                                         np.zeros(len(point)) + lev)
                level_index = np.append(level_index, np.array(len(point)))
            else:
                lines = point
                connects = np.arange(0, nbr * 2).reshape((nbr, 2))
                vertex_level = np.zeros(len(point)) + lev
                level_index = np.array(len(point))
        vertex_level = vertex_level.reshape((vertex_level.size, 1))
    return lines, connects, vertex_level, level_index
Generate an isocurve from vertex data in a surface mesh.
2,768
def set_color(self, color):
    if color is not None:
        self._color_lev = color
        self._need_color_update = True
        self.update()
Set the color.
2,769
def _compute_iso_color(self):
    level_color = []
    colors = self._lc
    for i, index in enumerate(self._li):
        level_color.append(np.zeros((index, 4)) + colors[i])
    self._cl = np.vstack(level_color)
Compute LineVisual color from level index and corresponding level color.
2,770
def remove(self):
    self._multivol.deallocate(self.id)
    ARRAY_CACHE.pop(self.id, None)
    PIXEL_CACHE.pop(self.id, None)
Remove the layer artist for good.
2,771
def _inject():
    NS = globals()
    GLNS = _GL.__dict__
    used_names = []
    used_names.extend([names[0] for names in
                       _pyopengl2._functions_to_import])
    used_names.extend([name for name in _pyopengl2._used_functions])
    NS['_used_names'] = used_names
    used_constants = set(_constants.__dict__)
    injected_constants = 0
    injected_functions = 0
    for name in dir(_GL):
        if name.startswith('GL_'):
            if name not in used_constants:
                NS[name] = GLNS[name]
                injected_constants += 1
        elif name.startswith('gl'):
            if (name + ',') in _deprecated_functions:
                pass
            elif name in used_names:
                pass
            else:
                NS[name] = GLNS[name]
                injected_functions += 1
Inject functions and constants from PyOpenGL, but leave out the names that are deprecated or that we provide in our API.
2,772
def _find_module(name, path=None):
    parts = name.split('.')
    for part in parts:
        if path is not None:
            path = [path]
        fh, path, descr = imp.find_module(part, path)
        if fh is not None and part != parts[-1]:
            fh.close()
    return fh, path, descr
Alternative to imp.find_module that can also search in subpackages.
2,773
def triangulate(vertices):
    n = len(vertices)
    vertices = np.asarray(vertices)
    zmean = vertices[:, 2].mean()
    vertices_2d = vertices[:, :2]
    segments = np.repeat(np.arange(n + 1), 2)[1:-1]
    segments[-2:] = n - 1, 0
    if _TRIANGLE_AVAILABLE:
        vertices_2d, triangles = _triangulate_cpp(vertices_2d, segments)
    else:
        vertices_2d, triangles = _triangulate_python(vertices_2d, segments)
    vertices = np.empty((len(vertices_2d), 3))
    vertices[:, :2] = vertices_2d
    vertices[:, 2] = zmean
    return vertices, triangles
Triangulate a set of vertices.
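A usage sketch of the polygon helper above, with an illustrative square in the z=0 plane:

import numpy as np
square = np.array([[0., 0., 0.], [1., 0., 0.], [1., 1., 0.], [0., 1., 0.]])
verts, tris = triangulate(square)
# verts is (n, 3) with z set to the input's mean z; tris is an (m, 3)
# array of vertex indices describing the triangulation.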
2,774
def triangulate(self):
    self._initialize()
    pts = self.pts
    front = self._front
    for i in range(3, pts.shape[0]):
        pi = pts[i]
        l = 0
        while pts[front[l + 1], 0] <= pi[0]:
            l += 1
        pl = pts[front[l]]
        if pi[0] > pl[0]:
            self._add_tri(front[l], front[l + 1], i)
            front.insert(l + 1, i)
        else:
            self._add_tri(front[l], front[l + 1], i)
            self._add_tri(front[l - 1], front[l], i)
            front[l] = i
        for direction in -1, 1:
            while True:
                ind0 = front.index(i)
                ind1 = ind0 + direction
                ind2 = ind1 + direction
                if ind2 < 0 or ind2 >= len(front):
                    break
                p1 = pts[front[ind1]]
                p2 = pts[front[ind2]]
                err = np.geterr()
                np.seterr(invalid='ignore')
                try:
                    angle = np.arccos(self._cosine(pi, p1, p2))
                finally:
                    np.seterr(**err)
                if angle > np.pi / 2. or np.isnan(angle):
                    break
                assert (i != front[ind1] and
                        front[ind1] != front[ind2] and
                        front[ind2] != i)
                self._add_tri(i, front[ind1], front[ind2],
                              source='smooth1')
                front.pop(ind1)
        if i in self._tops:
            for j in self._bottoms[self._tops == i]:
                self._edge_event(i, j)
                front = self._front
    self._finalize()
    self.tris = np.array(list(self.tris.keys()), dtype=int)
Do the triangulation.
2,775
def _edge_opposite_point(self, tri, i):
    ind = tri.index(i)
    return (tri[(ind + 1) % 3], tri[(ind + 2) % 3])
Given a triangle, return the edge that is opposite point i. Vertices are returned in the same orientation as in tri.
2,776
def _find_edge_intersections(self):
    edges = self.pts[self.edges]
    cuts = {}
    for i in range(edges.shape[0] - 1):
        int1 = self._intersect_edge_arrays(edges[i:i + 1], edges[i + 1:])
        int2 = self._intersect_edge_arrays(edges[i + 1:], edges[i:i + 1])
        err = np.geterr()
        np.seterr(divide='ignore', invalid='ignore')
        try:
            mask1 = (int1 >= 0) & (int1 <= 1)
            mask2 = (int2 >= 0) & (int2 <= 1)
            mask3 = mask1 & mask2
        finally:
            np.seterr(**err)
        inds = np.argwhere(mask3)[:, 0]
        if len(inds) == 0:
            continue
        h = int2[inds][:, np.newaxis]
        pts = (edges[i, 0][np.newaxis, :] * (1.0 - h) +
               edges[i, 1][np.newaxis, :] * h)
        edge_cuts = cuts.setdefault(i, [])
        for j, ind in enumerate(inds):
            if 0 < int2[ind] < 1:
                edge_cuts.append((int2[ind], pts[j]))
            if 0 < int1[ind] < 1:
                other_cuts = cuts.setdefault(ind + i + 1, [])
                other_cuts.append((int1[ind], pts[j]))
    for k, v in cuts.items():
        v.sort(key=lambda x: x[0])
        for i in range(len(v) - 2, -1, -1):
            if v[i][0] == v[i + 1][0]:
                v.pop(i + 1)
    return cuts
Return a dictionary containing, for each edge in self.edges, a list of the positions at which the edge should be split.
2,777
def load_ipython_extension(ipython):
    import IPython
    ipy_version = LooseVersion(IPython.__version__)
    if ipy_version < LooseVersion("3.0.0"):
        ipython.write_err("Your IPython version is older than "
                          "version 3.0.0, the minimum for Vispy's "
                          "IPython backend. Please upgrade your IPython "
                          "version.")
        return
    _load_webgl_backend(ipython)
Entry point of the IPython extension.
2,778
def _load_webgl_backend(ipython):
    from .. import app
    app_instance = app.use_app("ipynb_webgl")
    if app_instance.backend_name == "ipynb_webgl":
        ipython.write("Vispy IPython module has loaded successfully")
    else:
        ipython.write_err("Unable to load webgl backend of Vispy")
Load the webgl backend for the IPython notebook.
2,779
def scale(s, dtype=None):
    assert len(s) == 3
    return np.array(np.diag(np.concatenate([s, (1.,)])), dtype)
Non-uniform scaling along the x, y and z axes.
2,780
def rotate(angle, axis, dtype=None):
    angle = np.radians(angle)
    assert len(axis) == 3
    x, y, z = axis / np.linalg.norm(axis)
    c, s = math.cos(angle), math.sin(angle)
    cx, cy, cz = (1 - c) * x, (1 - c) * y, (1 - c) * z
    M = np.array([[cx * x + c, cy * x - z * s, cz * x + y * s, .0],
                  [cx * y + z * s, cy * y + c, cz * y - x * s, 0.],
                  [cx * z - y * s, cy * z + x * s, cz * z + c, 0.],
                  [0., 0., 0., 1.]], dtype).T
    return M
The 4x4 rotation matrix for rotation about a vector.
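A quick check of the convention: the matrix is transposed on return, so it multiplies row vectors from the left. A usage sketch:

import numpy as np
M = rotate(90, (0, 0, 1))                    # 90 degrees about +z
v = np.dot(np.array([1., 0., 0., 1.]), M)    # row-vector convention
print(np.round(v[:3], 6))                    # -> [0. 1. 0.]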
2,781
def perspective(fovy, aspect, znear, zfar):
    assert znear != zfar
    h = math.tan(fovy / 360.0 * math.pi) * znear
    w = h * aspect
    return frustum(-w, w, -h, h, znear, zfar)
Create perspective projection matrix.
2,782
def affine_map(points1, points2):
    A = np.ones((4, 4))
    A[:, :3] = points1
    B = np.ones((4, 4))
    B[:, :3] = points2
    matrix = np.eye(4)
    for i in range(3):
        matrix[i] = np.linalg.solve(A, B[:, i])
    return matrix
Find a 3D transformation matrix that maps points1 onto points2.
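A usage sketch; with the row layout used above, the returned matrix maps a homogeneous point p = [x, y, z, 1] as matrix · p:

import numpy as np
pts1 = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
pts2 = pts1 * 2.0 + np.array([5., 0., 0.])   # scale by 2, then shift in x
M = affine_map(pts1, pts2)
p = np.append(pts1[1], 1.0)                  # homogeneous [1, 0, 0, 1]
print(np.dot(M, p)[:3])                      # -> [7. 0. 0.]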
2,783
def finish(self, msg=None):
    if self._finished or self.disable:
        return
    self._finished = True
    if msg is not None:
        self(msg)
    self._new_msg("< Exiting %s, total time: %0.4f ms",
                  self._name, (ptime.time() - self._firstTime) * 1000)
    type(self)._depth -= 1
    if self._depth < 1:
        self.flush()
Add a final message; flush the message list if no parent profiler.
2,784
def _init():
    global config, _data_path, _allowed_config_keys
    app_dir = _get_vispy_app_dir()
    if app_dir is not None:
        _data_path = op.join(app_dir, 'data')
        _test_data_path = op.join(app_dir, 'test_data')
    else:
        _data_path = _test_data_path = None
    _allowed_config_keys = {
        'data_path': string_types,
        'default_backend': string_types,
        'gl_backend': string_types,
        'gl_debug': (bool,),
        'glir_file': string_types + file_types,
        'include_path': list,
        'logging_level': string_types,
        'qt_lib': string_types,
        'dpi': (int, type(None)),
        'profile': string_types + (type(None),),
        'audit_tests': (bool,),
        'test_data_path': string_types + (type(None),),
    }
    default_config_options = {
        'data_path': _data_path,
        'default_backend': '',
        'gl_backend': 'gl2',
        'gl_debug': False,
        'glir_file': '',
        'include_path': [],
        'logging_level': 'info',
        'qt_lib': 'any',
        'dpi': None,
        'profile': None,
        'audit_tests': False,
        'test_data_path': _test_data_path,
    }
    config = Config(**default_config_options)
    try:
        config.update(**_load_config())
    except Exception as err:
        raise Exception('Error while reading vispy config file "%s":\n  %s' %
                        (_get_config_fname(), err.message))
    set_log_level(config['logging_level'])
    _parse_command_line_arguments()
Create global Config object, parse command flags.
2,785
def _parse_command_line_arguments():
    global config
    argnames = ['vispy-backend=', 'vispy-gl-debug', 'vispy-glir-file=',
                'vispy-log=', 'vispy-help', 'vispy-profile=',
                'vispy-cprofile', 'vispy-dpi=', 'vispy-audit-tests']
    try:
        opts, args = getopt.getopt(sys.argv[1:], '', argnames)
    except getopt.GetoptError:
        opts = []
    for o, a in opts:
        if o.startswith('--vispy'):
            if o == '--vispy-backend':
                config['default_backend'] = a
                logger.info('vispy backend: %s', a)
            elif o == '--vispy-gl-debug':
                config['gl_debug'] = True
            elif o == '--vispy-glir-file':
                config['glir_file'] = a
            elif o == '--vispy-log':
                if ',' in a:
                    verbose, match = a.split(',')
                else:
                    verbose = a
                    match = None
                config['logging_level'] = a
                set_log_level(verbose, match)
            elif o == '--vispy-profile':
                config['profile'] = a
            elif o == '--vispy-cprofile':
                _enable_profiling()
            elif o == '--vispy-help':
                print(VISPY_HELP)
            elif o == '--vispy-dpi':
                config['dpi'] = int(a)
            elif o == '--vispy-audit-tests':
                config['audit_tests'] = True
            else:
                logger.warning("Unsupported vispy flag: %s" % o)
Transform vispy-specific command line args to vispy config. Put into a function so that variables don't leak into the vispy namespace.
2,786
def _get_vispy_app_dir():
    user_dir = os.path.expanduser('~')
    path = None
    if sys.platform.startswith('win'):
        path1, path2 = os.getenv('LOCALAPPDATA'), os.getenv('APPDATA')
        path = path1 or path2
    elif sys.platform.startswith('darwin'):
        path = os.path.join(user_dir, 'Library', 'Application Support')
    if not (path and os.path.isdir(path)):
        path = user_dir
    prefix = sys.prefix
    if getattr(sys, 'frozen', None):
        prefix = os.path.abspath(os.path.dirname(sys.path[0]))
    for reldir in ('settings', '../settings'):
        localpath = os.path.abspath(os.path.join(prefix, reldir))
        if os.path.isdir(localpath):
            try:
                open(os.path.join(localpath, 'test.write'), 'wb').close()
                os.remove(os.path.join(localpath, 'test.write'))
            except IOError:
                pass
            else:
                path = localpath
                break
    appname = '.vispy' if path == user_dir else 'vispy'
    path = os.path.join(path, appname)
    return path
Helper to get the default directory for storing vispy data.
2,787
def _get_config_fname():
    directory = _get_vispy_app_dir()
    if directory is None:
        return None
    fname = op.join(directory, 'vispy.json')
    if os.environ.get('_VISPY_CONFIG_TESTING', None) is not None:
        fname = op.join(_TempDir(), 'vispy.json')
    return fname
Helper for the vispy config file.
2,788
def save_config(**kwargs):
    if kwargs == {}:
        kwargs = config._config
    current_config = _load_config()
    current_config.update(**kwargs)
    fname = _get_config_fname()
    if fname is None:
        raise RuntimeError('config filename could not be determined')
    if not op.isdir(op.dirname(fname)):
        os.mkdir(op.dirname(fname))
    with open(fname, 'w') as fid:
        json.dump(current_config, fid, sort_keys=True, indent=0)
Save configuration keys to vispy config file.
2,789
def set_data_dir(directory=None, create=False, save=False):
    if directory is None:
        directory = _data_path
        if _data_path is None:
            raise IOError('default path cannot be determined, please '
                          'set it manually (directory != None)')
    if not op.isdir(directory):
        if not create:
            raise IOError('directory "%s" does not exist, perhaps try '
                          'create=True to create it?' % directory)
        os.mkdir(directory)
    config.update(data_path=directory)
    if save:
        save_config(data_path=directory)
Set vispy data download directory.
2,790
def _enable_profiling():
    import cProfile
    import atexit
    global _profiler
    _profiler = cProfile.Profile()
    _profiler.enable()
    atexit.register(_profile_atexit)
Start profiling and register callback to print stats when the program exits.
2,791
def sys_info(fname=None, overwrite=False):
    if fname is not None and op.isfile(fname) and not overwrite:
        raise IOError('file exists, use overwrite=True to overwrite')
    out = ''
    try:
        from ..app import use_app, Canvas
        from ..app.backends import BACKEND_NAMES
        from ..gloo import gl
        from ..testing import has_backend
        with use_log_level('warning'):
            app = use_app(call_reuse=False)
        out += 'Platform: %s\n' % platform.platform()
        out += 'Python: %s\n' % str(sys.version).replace('\n', ' ')
        out += 'Backend: %s\n' % app.backend_name
        for backend in BACKEND_NAMES:
            if backend.startswith('ipynb_'):
                continue
            with use_log_level('warning', print_msg=False):
                which = has_backend(backend, out=['which'])[1]
            out += '{0:<9} {1}\n'.format(backend + ':', which)
        out += '\n'
        canvas = Canvas('Test', (10, 10), show=False, app=app)
        canvas._backend._vispy_set_current()
        out += 'GL version: %r\n' % (gl.glGetParameter(gl.GL_VERSION),)
        x_ = gl.GL_MAX_TEXTURE_SIZE
        out += 'MAX_TEXTURE_SIZE: %r\n' % (gl.glGetParameter(x_),)
        out += 'Extensions: %r\n' % (gl.glGetParameter(gl.GL_EXTENSIONS),)
        canvas.close()
    except Exception:
        out += '\nInfo-gathering error:\n%s' % traceback.format_exc()
    if fname is not None:
        with open(fname, 'w') as fid:
            fid.write(out)
    return out
Get relevant system and debugging information.
2,792
def compact(vertices, indices, tolerance=1e-3):
    n = len(vertices)
    V = np.zeros(n, dtype=[("pos", np.float32, 3)])
    V["pos"][:, 0] = vertices[:, 0]
    V["pos"][:, 1] = vertices[:, 1]
    V["pos"][:, 2] = vertices[:, 2]
    epsilon = 1e-3
    decimals = int(np.log(epsilon) / np.log(1 / 10.))
    V_ = np.zeros_like(V)
    X = V["pos"][:, 0].round(decimals=decimals)
    X[np.where(abs(X) < epsilon)] = 0
    V_["pos"][:, 0] = X
    Y = V["pos"][:, 1].round(decimals=decimals)
    Y[np.where(abs(Y) < epsilon)] = 0
    V_["pos"][:, 1] = Y
    Z = V["pos"][:, 2].round(decimals=decimals)
    Z[np.where(abs(Z) < epsilon)] = 0
    V_["pos"][:, 2] = Z
    U, RI = np.unique(V_, return_inverse=True)
    indices = indices.ravel()
    I_ = indices.copy().ravel()
    for i in range(len(indices)):
        I_[i] = RI[indices[i]]
    # Integer division: reshape requires an int on Python 3.
    I_ = I_.reshape(len(indices) // 3, 3)
    return U.view(np.float32).reshape(len(U), 3), I_, RI
Compact vertices and indices within given tolerance.
2,793
def normals(vertices, indices):
    vertices, indices, mapping = compact(vertices, indices)
    T = vertices[indices]
    N = np.cross(T[:, 1] - T[:, 0], T[:, 2] - T[:, 0])
    L = np.sqrt(np.sum(N * N, axis=1))
    L[L == 0] = 1.0
    N /= L[:, np.newaxis]
    normals = np.zeros_like(vertices)
    normals[indices[:, 0]] += N
    normals[indices[:, 1]] += N
    normals[indices[:, 2]] += N
    L = np.sqrt(np.sum(normals * normals, axis=1))
    L[L == 0] = 1.0
    normals /= L[:, np.newaxis]
    return normals[mapping]
Compute normals over a triangulated surface.
2,794
def create_native(self):
    if self._backend is not None:
        return
    assert self._app.native
    self._app.backend_module.CanvasBackend(self, **self._backend_kwargs)
    self._backend_kwargs = None
    self.events.draw.connect(self.context.flush_commands, position='last')
    if self._autoswap:
        self.events.draw.connect((self, 'swap_buffers'),
                                 ref=True, position='last')
Create the native widget if not already done so. If the widget is already created, this function does nothing.
2,795
def connect(self, fun):
    name = fun.__name__
    if not name.startswith('on_'):
        raise ValueError('When connecting a function based on its name, '
                         'the name should start with "on_"')
    eventname = name[3:]
    try:
        emitter = self.events[eventname]
    except KeyError:
        raise ValueError('Event "%s" not available on this canvas.' %
                         eventname)
    emitter.connect(fun)
Connect a function to an event.
2,796
def show(self, visible=True, run=False):
    self._backend._vispy_set_visible(visible)
    if run:
        self.app.run()
Show or hide the canvas.
2,797
def close(self):
    if self._backend is not None and not self._closed:
        self._closed = True
        self.events.close()
        self._backend._vispy_close()
    forget_canvas(self)
Close the canvas.
2,798
def _update_fps(self, event):
    self._frame_count += 1
    diff = time() - self._basetime
    if diff > self._fps_window:
        self._fps = self._frame_count / diff
        self._basetime = time()
        self._frame_count = 0
        self._fps_callback(self.fps)
Update the FPS estimate after each measurement window elapses.
2,799
def measure_fps(self, window=1, callback='%1.1f FPS'):
    self.events.draw.disconnect(self._update_fps)
    if callback:
        if isinstance(callback, string_types):
            callback_str = callback

            def callback(x):
                print(callback_str % x)

        self._fps_window = window
        self.events.draw.connect(self._update_fps)
        self._fps_callback = callback
    else:
        self._fps_callback = None
Measure the current FPS.