idx (int64, 0–63k) | question (stringlengths 61–4.03k) | target (stringlengths 6–1.23k)
---|---|---|
1,200 |
def _init_draw(self):
    if self.original is not None:
        self.original.set_data(np.random.random((10, 10, 3)))
    self.processed.set_data(np.random.random((10, 10, 3)))
|
Initializes the drawing of the frames by setting the images to random colors.
|
1,201 |
def read_frame(self):
    ret, frame = self.capture.read()
    if not ret:
        self.event_source.stop()
        try:
            self.capture.release()
        except AttributeError:
            pass
        return None
    if self.convert_color != -1 and is_color_image(frame):
        return cv2.cvtColor(frame, self.convert_color)
    return frame
|
Reads a frame and converts the color if needed.
|
1,202 |
def annotate(self, framedata):
    for artist in self.annotation_artists:
        artist.remove()
    self.annotation_artists = []
    for annotation in self.annotations:
        if annotation[2] > framedata:
            return
        if annotation[2] == framedata:
            pos = annotation[0:2]
            shape = self.annotations_default['shape']
            color = self.annotations_default['color']
            size = self.annotations_default['size']
            line = self.annotations_default['line']
            if len(annotation) > 3:
                shape = annotation[3].get('shape', shape)
                color = annotation[3].get('color', color)
                size = annotation[3].get('size', size)
                line = annotation[3].get('line', line)
            if shape == 'CIRC' and hasattr(size, '__len__'):
                size = 30
            if not hasattr(color, '__len__'):
                color = (color,) * 3
            if shape == 'RECT':
                patch = patches.Rectangle(
                    (pos[0] - size[0] // 2, pos[1] - size[1] // 2),
                    size[0], size[1], fill=False, lw=line, fc='none', ec=color)
            elif shape == 'CIRC':
                patch = patches.CirclePolygon(pos, radius=size, fc='none',
                                              ec=color, lw=line)
            self.annotation_artists.append(patch)
            self.axes_processed.add_artist(self.annotation_artists[-1])
|
Annotates the processed axes with the given annotations for the provided framedata.
|
1,203 |
def _draw_frame(self, framedata):
    original = self.read_frame()
    if original is None:
        self.update_info(self.info_string(message='Finished.', frame=framedata))
        return
    if self.original is not None:
        processed = self.process_frame(original.copy())
        if self.cmap_original is not None:
            original = to_gray(original)
        elif not is_color_image(original):
            self.original.set_cmap('gray')
        self.original.set_data(original)
    else:
        processed = self.process_frame(original)
    if self.cmap_processed is not None:
        processed = to_gray(processed)
    elif not is_color_image(processed):
        self.processed.set_cmap('gray')
    if self.annotations:
        self.annotate(framedata)
    self.processed.set_data(processed)
    self.update_info(self.info_string(frame=framedata))
|
Reads, processes, and draws the frames.
|
1,204 |
def update_info(self, custom=None):
    self.figure.suptitle(self.info_string() if custom is None else custom)
|
Updates the figure's suptitle.
|
1,205 |
def info_string(self, size=None, message='', frame=-1):
    info = []
    if size is not None:
        info.append('Size: {1}x{0}'.format(*size))
    elif self.size is not None:
        info.append('Size: {1}x{0}'.format(*self.size))
    if frame >= 0:
        info.append('Frame: {}'.format(frame))
    if message != '':
        info.append('{}'.format(message))
    return ' '.join(info)
|
Returns information about the stream.
|
1,206 |
def _slice_required_len(slice_obj):
    if slice_obj.step and slice_obj.step != 1:
        return None
    if slice_obj.start is None and slice_obj.stop is None:
        return None
    if slice_obj.start and slice_obj.start < 0:
        return None
    if slice_obj.stop and slice_obj.stop < 0:
        return None
    if slice_obj.stop:
        if slice_obj.start and slice_obj.start > slice_obj.stop:
            return 0
        return slice_obj.stop
    return slice_obj.start + 1
|
Calculate how many items must be in the collection to satisfy this slice.
|
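As a quick illustration of the branch logic above, a minimal sketch (assuming `_slice_required_len` is importable as defined):

```python
assert _slice_required_len(slice(None, 5)) == 5    # [:5] needs 5 items
assert _slice_required_len(slice(2, None)) == 3    # [2:] needs 3 items
assert _slice_required_len(slice(7, 3)) == 0       # empty slice, nothing needed
assert _slice_required_len(slice(None, None, 2)) is None  # stepped: unbounded
assert _slice_required_len(slice(-2, None)) is None       # negative: unbounded
```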
1,207 |
def stylize(text, styles, reset=True):
    terminator = attr("reset") if reset else ""
    return "{}{}{}".format("".join(styles), text, terminator)
|
Conveniently styles your text and resets ANSI codes at its end.
|
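A usage sketch, assuming this `stylize` comes from the `colored` package, whose `fg` and `attr` helpers return ANSI escape strings:

```python
from colored import attr, fg

print(stylize("warning!", [fg("red"), attr("bold")]))  # styled, then reset
print(stylize("still red afterwards", [fg("red")], reset=False))
```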
1,208 |
def attribute(self):
    paint = {
        "bold": self.ESC + "1" + self.END, 1: self.ESC + "1" + self.END,
        "dim": self.ESC + "2" + self.END, 2: self.ESC + "2" + self.END,
        "underlined": self.ESC + "4" + self.END, 4: self.ESC + "4" + self.END,
        "blink": self.ESC + "5" + self.END, 5: self.ESC + "5" + self.END,
        "reverse": self.ESC + "7" + self.END, 7: self.ESC + "7" + self.END,
        "hidden": self.ESC + "8" + self.END, 8: self.ESC + "8" + self.END,
        "reset": self.ESC + "0" + self.END, 0: self.ESC + "0" + self.END,
        "res_bold": self.ESC + "21" + self.END, 21: self.ESC + "21" + self.END,
        "res_dim": self.ESC + "22" + self.END, 22: self.ESC + "22" + self.END,
        "res_underlined": self.ESC + "24" + self.END, 24: self.ESC + "24" + self.END,
        "res_blink": self.ESC + "25" + self.END, 25: self.ESC + "25" + self.END,
        "res_reverse": self.ESC + "27" + self.END, 27: self.ESC + "27" + self.END,
        "res_hidden": self.ESC + "28" + self.END, 28: self.ESC + "28" + self.END,
    }
    return paint[self.color]
|
Set or reset attributes.
|
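For illustration, the `paint` table above encodes standard ANSI SGR sequences; assuming `ESC = "\x1b["` and `END = "m"`, the entries reduce to escape codes you can print directly:

```python
# "bold" maps to "\x1b[1m" and "reset" to "\x1b[0m" (standard ANSI SGR codes)
print("\x1b[1m" + "bold text" + "\x1b[0m")
print("\x1b[4m" + "underlined" + "\x1b[0m")
```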
1,209 |
def foreground(self):
    code = self.ESC + "38;5;"
    if str(self.color).isdigit():
        self.reverse_dict()
        color = self.reserve_paint[str(self.color)]
        return code + self.paint[color] + self.END
    elif self.color.startswith("#"):
        return code + str(self.HEX) + self.END
    else:
        return code + self.paint[self.color] + self.END
|
Return the escape code for one of the 256 foreground colors.
|
1,210 |
def reset(self, required=False):
    reset = self._ow.reset()
    if required and reset:
        raise OneWireError("No presence pulse found. Check devices and wiring.")
    return not reset
|
Perform a reset and check for a presence pulse.
|
1,211 |
def scan(self):
    devices = []
    diff = 65
    rom = False
    count = 0
    for _ in range(0xff):
        rom, diff = self._search_rom(rom, diff)
        if rom:
            count += 1
            if count > self.maximum_devices:
                raise RuntimeError(
                    "Maximum device count of {} exceeded.".format(
                        self.maximum_devices))
            devices.append(OneWireAddress(rom))
        if diff == 0:
            break
    return devices
|
Scan for devices on the bus and return a list of addresses.
|
1,212 |
def crc8(data):
    crc = 0
    for byte in data:
        crc ^= byte
        for _ in range(8):
            if crc & 0x01:
                crc = (crc >> 1) ^ 0x8C
            else:
                crc >>= 1
            crc &= 0xFF
    return crc
|
Perform the 1-Wire CRC check on the provided data.
|
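A quick self-check sketch: this is the Dallas/Maxim CRC-8 (polynomial 0x8C, reflected), so appending a message's CRC byte drives the checksum to zero, which is exactly how 1-Wire ROM codes are validated. The ROM bytes below are made up for illustration:

```python
rom = bytes([0x28, 0xFF, 0x1C, 0x62, 0x91, 0x16, 0x04])  # hypothetical ROM
check = crc8(rom)
assert crc8(rom + bytes([check])) == 0  # appending the CRC zeroes it out
```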
1,213 |
def preferences_class_prepared(sender, *args, **kwargs):
    cls = sender
    if issubclass(cls, Preferences):
        cls.add_to_class('singleton', SingletonManager())
        setattr(preferences.Preferences, cls._meta.object_name,
                property(lambda x: cls.singleton.get()))
|
Adds various preferences members to preferences.preferences, thus enabling easy access from code.
|
1,214 |
def site_cleanup(sender, action, instance, **kwargs):
    if action == 'post_add':
        if isinstance(instance, Preferences) and \
                hasattr(instance.__class__, 'objects'):
            site_conflicts = instance.__class__.objects.filter(
                sites__in=instance.sites.all()).only('id').distinct()
            for conflict in site_conflicts:
                if conflict.id != instance.id:
                    for site in instance.sites.all():
                        conflict.sites.remove(site)
|
Make sure there is only a single preferences object per site, i.e. remove sites from pre-existing preferences objects.
|
1,215 |
def get_queryset(self):
    queryset = super(SingletonManager, self).get_queryset()
    current_site = None
    if getattr(settings, 'SITE_ID', None) is not None:
        current_site = Site.objects.get_current()
    if current_site is not None:
        queryset = queryset.filter(sites=settings.SITE_ID)
    if not queryset.exists():
        obj = self.model.objects.create()
        if current_site is not None:
            obj.sites.add(current_site)
    return queryset
|
Return the first preferences object for the current site. If no preferences object exists, create it.
|
1,216 |
def load_iterable(self, iterable, session=None):
    data = []
    load = self.loads
    for v in iterable:
        data.append(load(v))
    return data
|
Load an iterable.
|
1,217 |
def _search(self, words, include=None, exclude=None, lookup=None):
    lookup = lookup or 'contains'
    query = self.router.worditem.query()
    if include:
        query = query.filter(model_type__in=include)
    if exclude:
        # note: the original passed ``include`` here, which looks like a bug
        query = query.exclude(model_type__in=exclude)
    if not words:
        return [query]
    qs = []
    if lookup == 'in':
        qs.append(query.filter(word__in=words))
    elif lookup == 'contains':
        for word in words:
            qs.append(query.filter(word=word))
    else:
        raise ValueError('Unknown lookup "{0}"'.format(lookup))
    return qs
|
Full-text search. Return a list of queries to intersect.
|
1,218 |
def redis_client(address=None, connection_pool=None, timeout=None,
                 parser=None, **kwargs):
    if not connection_pool:
        if timeout == 0:
            # NOTE: ``async`` here is a module reference in the original code
            # base; it predates Python 3.7, where ``async`` became a keyword.
            if not async:
                raise ImportError('Asynchronous connection requires async '
                                  'bindings installed.')
            return async.pool.redis(address, **kwargs)
        else:
            kwargs['socket_timeout'] = timeout
            return Redis(address[0], address[1], **kwargs)
    else:
        return Redis(connection_pool=connection_pool)
|
Get a new Redis client.
|
1,219 |
def dict_flat_generator(value, attname=None, splitter=JSPLITTER, dumps=None,
                        prefix=None, error=ValueError, recursive=True):
    if not isinstance(value, dict) or not recursive:
        if not prefix:
            raise error('Cannot assign a non dictionary to a JSON field')
        else:
            name = '%s%s%s' % (attname, splitter, prefix) if attname else prefix
            yield name, (dumps(value) if dumps else value)
    else:
        for field in value:
            val = value[field]
            key = prefix
            if field:
                key = '%s%s%s' % (prefix, splitter, field) if prefix else field
            for k, v2 in dict_flat_generator(val, attname, splitter, dumps,
                                             key, error, field):
                yield k, v2
|
Convert a nested dictionary into a flat dictionary representation.
|
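A worked sketch of the flattening (values chosen for illustration; the splitter is passed explicitly so no module constant is assumed):

```python
nested = {"a": {"b": 1, "c": 2}, "d": 3}
flat = dict(dict_flat_generator(nested, attname="data", splitter="__"))
assert flat == {"data__a__b": 1, "data__a__c": 2, "data__d": 3}
```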
1,220 |
def addmul_number_dicts(series):
    if not series:
        return
    vtype = value_type((s[1] for s in series))
    if vtype == 1:
        return sum((weight * float(d) for weight, d in series))
    elif vtype == 3:
        keys = set(series[0][1])
        for serie in series[1:]:
            keys.intersection_update(serie[1])
        results = {}
        for key in keys:
            key_series = tuple((weight, d[key]) for weight, d in series)
            result = addmul_number_dicts(key_series)
            if result is not None:
                results[key] = result
        return results
|
Multiply dictionaries by numeric weights and add them together.
|
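A worked sketch, assuming the module's `value_type` helper classifies plain numbers as 1 and dicts of numbers as 3, as the branches above suggest:

```python
series = [(0.5, {"a": 2.0, "b": 4.0}), (2.0, {"a": 1.0})]
# only "a" is common to all dicts: 0.5 * 2.0 + 2.0 * 1.0 == 3.0
assert addmul_number_dicts(series) == {"a": 3.0}
```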
1,221 |
def Download(campaign=0, queue='build', email=None, walltime=8, **kwargs):
    if type(campaign) is int:
        subcampaign = -1
    elif type(campaign) is float:
        x, y = divmod(campaign, 1)
        campaign = int(x)
        subcampaign = round(y * 10)
    pbsfile = os.path.join(EVEREST_SRC, 'missions', 'k2', 'download.pbs')
    str_w = 'walltime=%d:00:00' % walltime
    str_v = 'EVEREST_DAT=%s,CAMPAIGN=%d,SUBCAMPAIGN=%d' % (
        EVEREST_DAT, campaign, subcampaign)
    if subcampaign == -1:
        str_name = 'download_c%02d' % campaign
    else:
        str_name = 'download_c%02d.%d' % (campaign, subcampaign)
    str_out = os.path.join(EVEREST_DAT, 'k2', str_name + '.log')
    qsub_args = ['qsub', pbsfile, '-q', queue, '-v', str_v, '-o', str_out,
                 '-j', 'oe', '-N', str_name, '-l', str_w]
    if email is not None:
        # extend, not append: qsub expects flat arguments
        qsub_args.extend(['-M', email, '-m', 'ae'])
    print("Submitting the job...")
    subprocess.call(qsub_args)
|
Submits a cluster job to the build queue to download all TPFs for a given campaign.
|
1,222 |
def Run(campaign=0, EPIC=None, nodes=5, ppn=12, walltime=100, mpn=None,
        email=None, queue=None, **kwargs):
    if type(campaign) is int:
        subcampaign = -1
    elif type(campaign) is float:
        x, y = divmod(campaign, 1)
        campaign = int(x)
        subcampaign = round(y * 10)
    if EVEREST_DEV and (queue == 'bf'):
        walltime = min(10, walltime)
    try:
        strkwargs = pickle.dumps(kwargs, 0).decode('utf-8').replace('\n', '%%%')
    except UnicodeDecodeError:
        raise ValueError('Unable to pickle `kwargs`. Currently the `kwargs` '
                         'values may only be `int`s, `float`s, `string`s, '
                         '`bool`s, or lists of these.')
    pbsfile = os.path.join(EVEREST_SRC, 'missions', 'k2', 'run.pbs')
    if mpn is not None:
        str_n = 'nodes=%d:ppn=%d,feature=%dcore,mem=%dgb' % (
            nodes, ppn, ppn, mpn * nodes)
    else:
        str_n = 'nodes=%d:ppn=%d,feature=%dcore' % (nodes, ppn, ppn)
    str_w = 'walltime=%d:00:00' % walltime
    str_v = "EVEREST_DAT=%s,NODES=%d," % (EVEREST_DAT, nodes) + \
            "EPIC=%d," % (0 if EPIC is None else EPIC) + \
            "CAMPAIGN=%d,SUBCAMPAIGN=%d,STRKWARGS='%s'" % (
                campaign, subcampaign, strkwargs)
    if EPIC is None:
        if subcampaign == -1:
            str_name = 'c%02d' % campaign
        else:
            str_name = 'c%02d.%d' % (campaign, subcampaign)
    else:
        str_name = 'EPIC%d' % EPIC
    str_out = os.path.join(EVEREST_DAT, 'k2', str_name + '.log')
    qsub_args = ['qsub', pbsfile, '-v', str_v, '-o', str_out, '-j', 'oe',
                 '-N', str_name, '-l', str_n, '-l', str_w]
    if email is not None:
        # extend, not append: qsub expects flat arguments
        qsub_args.extend(['-M', email, '-m', 'ae'])
    if queue is not None:
        qsub_args += ['-q', queue]
    print("Submitting the job...")
    subprocess.call(qsub_args)
|
Submits a cluster job to compute and plot data for all targets in a given campaign.
|
1,223 |
def PrimaryHDU(model):
    cards = model._mission.HDUCards(model.meta, hdu=0)
    if 'KEPMAG' not in [c[0] for c in cards]:
        cards.append(('KEPMAG', model.mag, 'Kepler magnitude'))
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '* EVEREST INFO *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))
    header = pyfits.Header(cards=cards)
    hdu = pyfits.PrimaryHDU(header=header)
    return hdu
|
Construct the primary HDU file containing basic header info.
|
1,224 |
def PixelsHDU(model):
    cards = model._mission.HDUCards(model.meta, hdu=2)
    cards = []  # note: the mission cards fetched above are discarded here
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '* EVEREST INFO *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))
    header = pyfits.Header(cards=cards)
    arrays = [pyfits.Column(name='FPIX', format='%dD' % model.fpix.shape[1],
                            array=model.fpix)]
    X1N = model.X1N
    if X1N is not None:
        arrays.append(pyfits.Column(name='X1N', format='%dD' % X1N.shape[1],
                                    array=X1N))
    cols = pyfits.ColDefs(arrays)
    hdu = pyfits.BinTableHDU.from_columns(cols, header=header, name='PIXELS')
    return hdu
|
Construct the HDU containing the pixel-level light curve.
|
1,225 |
def ApertureHDU(model):
    cards = model._mission.HDUCards(model.meta, hdu=3)
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '* EVEREST INFO *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))
    header = pyfits.Header(cards=cards)
    hdu = pyfits.ImageHDU(data=model.aperture, header=header,
                          name='APERTURE MASK')
    return hdu
|
Construct the HDU containing the aperture used to de-trend.
|
1,226 |
def ImagesHDU(model):
    cards = model._mission.HDUCards(model.meta, hdu=4)
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '* EVEREST INFO *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))
    format = '%dD' % model.pixel_images[0].shape[1]
    arrays = [pyfits.Column(name='STAMP1', format=format,
                            array=model.pixel_images[0]),
              pyfits.Column(name='STAMP2', format=format,
                            array=model.pixel_images[1]),
              pyfits.Column(name='STAMP3', format=format,
                            array=model.pixel_images[2])]
    header = pyfits.Header(cards=cards)
    cols = pyfits.ColDefs(arrays)
    hdu = pyfits.BinTableHDU.from_columns(cols, header=header,
                                          name='POSTAGE STAMPS')
    return hdu
|
Construct the HDU containing sample postage stamp images of the target.
|
1,227 |
def HiResHDU(model):
    cards = model._mission.HDUCards(model.meta, hdu=5)
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '* EVEREST INFO *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))
    header = pyfits.Header(cards=cards)
    if model.hires is not None:
        hdu = pyfits.ImageHDU(data=model.hires, header=header,
                              name='HI RES IMAGE')
    else:
        hdu = pyfits.ImageHDU(data=np.empty((0, 0), dtype=float),
                              header=header, name='HI RES IMAGE')
    return hdu
|
Construct the HDU containing the hi-res image of the target.
|
1,228 |
def MaskSolveSlow(A, b, w=5, progress=True, niter=None):
    N = b.shape[0]
    if niter is None:
        niter = N - w + 1
    X = np.empty((niter, N - w))
    for n in prange(niter):
        mask = np.arange(n, n + w)
        An = np.delete(np.delete(A, mask, axis=0), mask, axis=1)
        Un = cholesky(An)
        bn = np.delete(b, mask)
        X[n] = cho_solve((Un, False), bn)
    return X
|
Identical to MaskSolve, but computes the solution the brute-force way.
|
1,229 |
def unmasked(self, depth=0.01):
    return 1 - (np.hstack(self._O2) +
                np.hstack(self._O3) / depth) / np.hstack(self._O1)
|
Return the unmasked overfitting metric for a given transit depth.
|
1,230 |
def show(self):
    try:
        if platform.system().lower().startswith('darwin'):
            subprocess.call(['open', self.pdf])
        elif os.name == 'nt':
            os.startfile(self.pdf)
        elif os.name == 'posix':
            subprocess.call(['xdg-open', self.pdf])
        else:
            raise IOError("")
    except IOError:
        log.info("Unable to open the pdf. Try opening it manually:")
        log.info(self.pdf)
|
Show the overfitting PDF summary.
|
1,231 |
def season(self):
    try:
        self._season
    except AttributeError:
        self._season = self._mission.Season(self.ID)
        if hasattr(self._season, '__len__'):
            raise AttributeError(
                "Please choose a campaign/season for this target: %s."
                % self._season)
    return self._season
|
Return the current observing season.
|
1,232 |
def fcor(self):
    if self.XCBV is None:
        return None
    else:
        return self.flux - self._mission.FitCBVs(self)
|
The CBV-corrected, de-trended flux.
|
1,233 |
def plot_info(self, dvs):
    axl, axc, axr = dvs.title()
    axc.annotate("%s %d" % (self._mission.IDSTRING, self.ID),
                 xy=(0.5, 0.5), xycoords='axes fraction',
                 ha='center', va='center', fontsize=18)
    axc.annotate(r"%.2f ppm $\rightarrow$ %.2f ppm" % (self.cdppr, self.cdpp),
                 xy=(0.5, 0.2), xycoords='axes fraction',
                 ha='center', va='center', fontsize=8,
                 color='k', fontstyle='italic')
    axl.annotate("%s %s%02d: %s" % (self.mission.upper(),
                                    self._mission.SEASONCHAR,
                                    self.season, self.name),
                 xy=(0.5, 0.5), xycoords='axes fraction',
                 ha='center', va='center', fontsize=12, color='k')
    axl.annotate(self.aperture_name if len(self.neighbors) == 0
                 else "%s, %d neighbors" % (self.aperture_name,
                                            len(self.neighbors)),
                 xy=(0.5, 0.2), xycoords='axes fraction',
                 ha='center', va='center', fontsize=8,
                 color='k', fontstyle='italic')
    axr.annotate("%s %.3f" % (self._mission.MAGSTRING, self.mag),
                 xy=(0.5, 0.5), xycoords='axes fraction',
                 ha='center', va='center', fontsize=12, color='k')
    if not np.isnan(self.cdppg) and self.cdppg > 0:
        axr.annotate(r"GP %.3f ppm" % (self.cdppg),
                     xy=(0.5, 0.2), xycoords='axes fraction',
                     ha='center', va='center', fontsize=8,
                     color='k', fontstyle='italic')
|
Plots miscellaneous de-trending information on the data validation summary figure.
|
1,234 |
def compute(self):
    if self.transit_model is not None:
        return self.compute_joint()
    log.info('Computing the model...')
    model = [None for b in self.breakpoints]
    for b, brkpt in enumerate(self.breakpoints):
        m = self.get_masked_chunk(b)
        c = self.get_chunk(b)
        mK = GetCovariance(self.kernel, self.kernel_params,
                           self.time[m], self.fraw_err[m])
        med = np.nanmedian(self.fraw[m])
        f = self.fraw[m] - med
        A = np.zeros((len(m), len(m)))
        B = np.zeros((len(c), len(m)))
        for n in range(self.pld_order):
            if (self.lam_idx >= n) and (self.lam[b][n] is not None):
                XM = self.X(n, m)
                XC = self.X(n, c)
                A += self.lam[b][n] * np.dot(XM, XM.T)
                B += self.lam[b][n] * np.dot(XC, XM.T)
                del XM, XC
        W = np.linalg.solve(mK + A, f)
        model[b] = np.dot(B, W)
        del A, B, W
    if len(model) > 1:
        self.model = model[0][:-self.bpad]
        for m in model[1:-1]:
            i = 1
            while len(self.model) - i in self.mask:
                i += 1
            offset = self.model[-i] - m[self.bpad - i]
            self.model = np.concatenate([self.model,
                                         m[self.bpad:-self.bpad] + offset])
        i = 1
        while len(self.model) - i in self.mask:
            i += 1
        offset = self.model[-i] - model[-1][self.bpad - i]
        self.model = np.concatenate([self.model,
                                     model[-1][self.bpad:] + offset])
    else:
        self.model = model[0]
    self.model -= np.nanmedian(self.model)
    self.cdpp_arr = self.get_cdpp_arr()
    self.cdpp = self.get_cdpp()
    self._weights = None
|
Compute the model for the current value of lambda.
|
1,235 |
def apply_mask(self, x=None):
    if x is None:
        return np.delete(np.arange(len(self.time)), self.mask)
    else:
        return np.delete(x, self.mask, axis=0)
|
Returns the outlier mask, an array of indices corresponding to the non-outliers.
|
1,236 |
def get_cdpp(self, flux=None):
    if flux is None:
        flux = self.flux
    return self._mission.CDPP(self.apply_mask(flux), cadence=self.cadence)
|
Returns the scalar CDPP for the light curve.
|
1,237 |
def get(ID, pipeline='everest2', campaign=None):
    log.info('Downloading %s light curve for %d...' % (pipeline, ID))
    if EVEREST_DEV:
        if pipeline.lower() == 'everest2' or pipeline.lower() == 'k2sff':
            from . import Season, TargetDirectory, FITSFile
            if campaign is None:
                campaign = Season(ID)
            fits = os.path.join(TargetDirectory(ID, campaign),
                                FITSFile(ID, campaign))
            newdir = os.path.join(KPLR_ROOT, "data", "everest", str(ID))
            if not os.path.exists(newdir):
                os.makedirs(newdir)
            if os.path.exists(fits):
                shutil.copy(fits, newdir)
    if pipeline.lower() == 'everest2':
        s = k2plr.EVEREST(ID, version=2, sci_campaign=campaign)
        time = s.time
        flux = s.flux
    elif pipeline.lower() == 'everest1':
        s = k2plr.EVEREST(ID, version=1, sci_campaign=campaign)
        time = s.time
        flux = s.flux
    elif pipeline.lower() == 'k2sff':
        s = k2plr.K2SFF(ID, sci_campaign=campaign)
        time = s.time
        flux = s.fcor
        # rescale to the EVEREST median flux level
        s = k2plr.EVEREST(ID, version=2, sci_campaign=campaign)
        flux *= np.nanmedian(s.flux)
    elif pipeline.lower() == 'k2sc':
        s = k2plr.K2SC(ID, sci_campaign=campaign)
        time = s.time
        flux = s.pdcflux
    elif pipeline.lower() == 'raw':
        s = k2plr.EVEREST(ID, version=2, raw=True, sci_campaign=campaign)
        time = s.time
        flux = s.flux
    else:
        raise ValueError('Invalid pipeline: `%s`.' % pipeline)
    return time, flux
|
Returns the time and flux for a given EPIC ID and a given pipeline name.
|
1,238 |
def plot(ID, pipeline='everest2', show=True, campaign=None):
    time, flux = get(ID, pipeline=pipeline, campaign=campaign)
    mask = np.where(np.isnan(flux))[0]
    time = np.delete(time, mask)
    flux = np.delete(flux, mask)
    fig, ax = pl.subplots(1, figsize=(10, 4))
    fig.subplots_adjust(bottom=0.15)
    ax.plot(time, flux, "k.", markersize=3, alpha=0.5)
    N = int(0.995 * len(flux))
    hi, lo = flux[np.argsort(flux)][[N, -N]]
    pad = (hi - lo) * 0.1
    ylim = (lo - pad, hi + pad)
    ax.set_ylim(ylim)
    from .k2 import CDPP
    ax.annotate('%.2f ppm' % CDPP(flux), xy=(0.98, 0.975),
                xycoords='axes fraction', ha='right', va='top',
                fontsize=12, color='r', zorder=99)
    ax.margins(0, None)
    ax.set_xlabel("Time (BJD - 2454833)", fontsize=16)
    ax.set_ylabel("%s Flux" % pipeline.upper(), fontsize=16)
    fig.canvas.set_window_title("%s: EPIC %d" % (pipeline.upper(), ID))
    if show:
        pl.show()
        pl.close()
    else:
        return fig, ax
|
Plots the de-trended flux for the given EPIC ID and for the specified pipeline.
|
1,239 |
def get_outliers(self):
    log.info("Clipping outliers...")
    log.info('Iter %d/%d: %d outliers' % (0, self.oiter, len(self.outmask)))

    def M(x):
        return np.delete(x, np.concatenate([self.nanmask, self.badmask,
                                            self.transitmask]), axis=0)

    t = M(self.time)
    outmask = [np.array([-1]), np.array(self.outmask)]
    while not np.array_equal(outmask[-2], outmask[-1]):
        if len(outmask) - 1 > self.oiter:
            log.error('Maximum number of iterations in '
                      '``get_outliers()`` exceeded. Skipping...')
            break
        if np.any([np.array_equal(outmask[-1], i) for i in outmask[:-1]]):
            log.error('Function ``get_outliers()`` '
                      'is going in circles. Skipping...')
            break
        self.compute()
        f = SavGol(M(self.flux))
        med = np.nanmedian(f)
        MAD = 1.4826 * np.nanmedian(np.abs(f - med))
        inds = np.where((f > med + self.osigma * MAD) |
                        (f < med - self.osigma * MAD))[0]
        # map the clipped indices back into the full (unmasked) time array
        inds = np.array([np.argmax(self.time == t[i]) for i in inds])
        self.outmask = np.array(inds, dtype=int)
        outmask.append(np.array(inds))
        log.info('Iter %d/%d: %d outliers' % (len(outmask) - 2, self.oiter,
                                              len(self.outmask)))
|
Performs iterative sigma clipping to get outliers.
|
1,240 |
def get_ylim(self):
    bn = np.array(list(set(np.concatenate([self.badmask, self.nanmask]))),
                  dtype=int)
    fraw = np.delete(self.fraw, bn)
    lo, hi = fraw[np.argsort(fraw)][[3, -3]]
    flux = np.delete(self.flux, bn)
    fsort = flux[np.argsort(flux)]
    if fsort[int(0.01 * len(fsort))] < lo:
        lo = fsort[int(0.01 * len(fsort))]
    if fsort[int(0.99 * len(fsort))] > hi:
        hi = fsort[int(0.99 * len(fsort))]
    pad = (hi - lo) * 0.05
    ylim = (lo - pad, hi + pad)
    return ylim
|
Computes the ideal y-axis limits for the light curve plot. Attempts to set the limits equal to those of the raw light curve, but if more than 1% of the flux lies either above or below these limits, auto-expands to include those points. At the end, adds 5% padding to both the top and the bottom.
|
1,241 |
def plot_cbv(self, ax, flux, info, show_cbv=False):
    bnmask = np.array(list(set(np.concatenate([self.badmask, self.nanmask]))),
                      dtype=int)

    def M(x):
        return np.delete(x, bnmask)

    if self.cadence == 'lc':
        ax.plot(M(self.time), M(flux), ls='none', marker='.',
                color='k', markersize=2, alpha=0.45)
    else:
        ax.plot(M(self.time), M(flux), ls='none', marker='.',
                color='k', markersize=2, alpha=0.03, zorder=-1)
        ax.set_rasterization_zorder(0)
    ax.plot(self.time[0], np.nanmedian(M(flux)), marker='.', alpha=0)
    ax.plot(self.time[-1], np.nanmedian(M(flux)), marker='.', alpha=0)
    if show_cbv:
        ax.plot(self.time, self._mission.FitCBVs(self) + np.nanmedian(flux),
                'r-', alpha=0.2)
    ax.annotate(info, xy=(0.98, 0.025), xycoords='axes fraction',
                ha='right', va='bottom', fontsize=10, alpha=0.5,
                fontweight='bold')
    ax.margins(0.01, 0.1)
    flux = np.delete(flux, bnmask)
    N = int(0.995 * len(flux))
    hi, lo = flux[np.argsort(flux)][[N, -N]]
    fsort = flux[np.argsort(flux)]
    pad = (hi - lo) * 0.2
    ylim = (lo - pad, hi + pad)
    ax.set_ylim(ylim)
    ax.get_yaxis().set_major_formatter(Formatter.Flux)
    ax.set_xlabel(r'Time (%s)' % self._mission.TIMEUNITS, fontsize=9)
    for tick in ax.get_xticklabels() + ax.get_yticklabels():
        tick.set_fontsize(7)
|
Plots the final CBV-corrected light curve.
|
1,242 |
def load_tpf(self):
    if not self.loaded:
        if self._data is not None:
            data = self._data
        else:
            data = self._mission.GetData(
                self.ID, season=self.season, cadence=self.cadence,
                clobber=self.clobber_tpf,
                aperture_name=self.aperture_name,
                saturated_aperture_name=self.saturated_aperture_name,
                max_pixels=self.max_pixels,
                saturation_tolerance=self.saturation_tolerance,
                get_hires=self.get_hires, get_nearby=self.get_nearby)
            if data is None:
                raise Exception("Unable to retrieve target data.")
        self.cadn = data.cadn
        self.time = data.time
        self.model = np.zeros_like(self.time)
        self.fpix = data.fpix
        self.fraw = np.sum(self.fpix, axis=1)
        self.fpix_err = data.fpix_err
        self.fraw_err = np.sqrt(np.sum(self.fpix_err ** 2, axis=1))
        self.nanmask = data.nanmask
        self.badmask = data.badmask
        self.transitmask = np.array([], dtype=int)
        self.outmask = np.array([], dtype=int)
        self.aperture = data.aperture
        self.aperture_name = data.aperture_name
        self.apertures = data.apertures
        self.quality = data.quality
        self.Xpos = data.Xpos
        self.Ypos = data.Ypos
        self.mag = data.mag
        self.pixel_images = data.pixel_images
        self.nearby = data.nearby
        self.hires = data.hires
        self.saturated = data.saturated
        self.meta = data.meta
        self.bkg = data.bkg
        self.breakpoints[-1] = len(self.time) - 1
        self.get_norm()
        self.loaded = True
|
Loads the target pixel file.
|
1,243 |
def load_model(self, name=None):
    if self.clobber:
        return False
    if name is None:
        name = self.name
    file = os.path.join(self.dir, '%s.npz' % name)
    if os.path.exists(file):
        if not self.is_parent:
            log.info("Loading '%s.npz'..." % name)
        try:
            data = np.load(file)
            for key in data.keys():
                try:
                    setattr(self, key, data[key][()])
                except NotImplementedError:
                    pass
            # handle old-style attribute names
            if hasattr(self, 'cdpp6'):
                self.cdpp = self.cdpp6
                del self.cdpp6
            if hasattr(self, 'cdpp6_arr'):
                self.cdpp_arr = np.array(self.cdpp6_arr)
                del self.cdpp6_arr
            if hasattr(self, 'gppp'):
                self.cdppg = self.gppp
                del self.gppp
            pl.close()
            return True
        except:
            log.warn("Error loading '%s.npz'." % name)
            exctype, value, tb = sys.exc_info()
            for line in traceback.format_exception_only(exctype, value):
                ln = line.replace('\n', '')
                log.warn(ln)
            os.rename(file, file + '.bad')
    if self.is_parent:
        raise Exception('Unable to load `%s` model for target %d.'
                        % (self.name, self.ID))
    return False
|
Loads a saved version of the model.
|
1,244 |
def save_model(self):
    log.info("Saving data to '%s.npz'..." % self.name)
    d = dict(self.__dict__)
    # drop unpicklable / rebuildable attributes before saving
    for key in ('_weights', '_A', '_B', '_f', '_mK', 'K', 'dvs', 'clobber',
                'clobber_tpf', '_mission', 'debug', 'transit_model',
                '_transit_model'):
        d.pop(key, None)
    np.savez(os.path.join(self.dir, self.name + '.npz'), **d)
    pdf = PdfPages(os.path.join(self.dir, self.name + '.pdf'))
    pdf.savefig(self.dvs.fig)
    pl.close(self.dvs.fig)
    d = pdf.infodict()
    d['Title'] = 'EVEREST: %s de-trending of %s %d' % (
        self.name, self._mission.IDSTRING, self.ID)
    d['Author'] = 'Rodrigo Luger'
    pdf.close()
|
Saves all of the de-trending information to disk in an npz file and saves the DVS as a pdf.
|
1,245 |
def exception_handler(self, pdb):
    exctype, value, tb = sys.exc_info()
    errfile = os.path.join(self.dir, self.name + '.err')
    with open(errfile, 'w') as f:
        for line in traceback.format_exception_only(exctype, value):
            ln = line.replace('\n', '')
            log.error(ln)
            print(ln, file=f)
        for line in traceback.format_tb(tb):
            ln = line.replace('\n', '')
            log.error(ln)
            print(ln, file=f)
    if pdb:
        raise
|
A custom exception handler.
|
1,246 |
def init_kernel(self):
    if self.kernel_params is None:
        X = self.apply_mask(self.fpix / self.flux.reshape(-1, 1))
        y = self.apply_mask(self.flux) - np.dot(
            X, np.linalg.solve(np.dot(X.T, X),
                               np.dot(X.T, self.apply_mask(self.flux))))
        white = np.nanmedian([np.nanstd(c) for c in Chunks(y, 13)])
        amp = self.gp_factor * np.nanstd(y)
        tau = 30.0
        if self.kernel == 'Basic':
            self.kernel_params = [white, amp, tau]
        elif self.kernel == 'QuasiPeriodic':
            self.kernel_params = [white, amp, 1., 20.]
|
Initializes the covariance matrix with a guess at the GP kernel parameters.
|
1,247 |
def run(self):
    try:
        log.info("Loading target data...")
        self.load_tpf()
        self.mask_planets()
        self.plot_aperture([self.dvs.top_right() for i in range(4)])
        self.init_kernel()
        M = self.apply_mask(np.arange(len(self.time)))
        self.cdppr_arr = self.get_cdpp_arr()
        self.cdpp_arr = np.array(self.cdppr_arr)
        self.cdppv_arr = np.array(self.cdppr_arr)
        self.cdppr = self.get_cdpp()
        self.cdpp = self.cdppr
        self.cdppv = self.cdppr
        log.info("%s (Raw): CDPP = %s" % (self.name, self.cdpps))
        self.plot_lc(self.dvs.left(), info_right='Raw', color='k')
        for n in range(self.pld_order):
            self.lam_idx += 1
            self.get_outliers()
            if n > 0 and self.optimize_gp:
                self.update_gp()
            self.cross_validate(self.dvs.right(), info='CV%d' % n)
            self.cdpp_arr = self.get_cdpp_arr()
            self.cdppv_arr *= self.cdpp_arr
            self.cdpp = self.get_cdpp()
            self.cdppv = np.nanmean(self.cdppv_arr)
            log.info("%s (%d/%d): CDPP = %s" % (self.name, n + 1,
                                                self.pld_order, self.cdpps))
            self.plot_lc(self.dvs.left(), info_right='LC%d' % (n + 1),
                         info_left='%d outliers' % len(self.outmask))
        self.finalize()
        self.plot_final(self.dvs.top_left())
        self.plot_info(self.dvs)
        self.save_model()
    except:
        self.exception_handler(self.debug)
|
Runs the de-trending step.
|
1,248 |
def publish(self, **kwargs):
    try:
        self.cbv_win = 999
        self.cbv_order = 3
        self.cbv_num = 1
        self._mission.GetTargetCBVs(self)
        cbv = CBV()
        self.plot_info(cbv)
        self.plot_cbv(cbv.body(), self.fcor, 'Corrected')
        self.plot_cbv(cbv.body(), self.flux, 'De-trended', show_cbv=True)
        self.plot_cbv(cbv.body(), self.fraw, 'Raw')
        pdf = PdfPages(os.path.join(self.dir, 'cbv.pdf'))
        pdf.savefig(cbv.fig)
        pl.close(cbv.fig)
        d = pdf.infodict()
        d['Title'] = 'EVEREST: %s de-trending of %s %d' % (
            self.name, self._mission.IDSTRING, self.ID)
        d['Author'] = 'Rodrigo Luger'
        pdf.close()
        assert os.path.exists(os.path.join(self.dir, self.name + '.pdf')), \
            "Unable to locate %s.pdf." % self.name
        output = PdfFileWriter()
        pdfOne = PdfFileReader(os.path.join(self.dir, 'cbv.pdf'))
        pdfTwo = PdfFileReader(os.path.join(self.dir, self.name + '.pdf'))
        output.addPage(pdfOne.getPage(0))
        output.addPage(pdfTwo.getPage(pdfTwo.numPages - 1))
        outputStream = open(os.path.join(
            self.dir, self._mission.DVSFile(self.ID, self.season,
                                            self.cadence)), "wb")
        output.write(outputStream)
        outputStream.close()
        os.remove(os.path.join(self.dir, 'cbv.pdf'))
        MakeFITS(self)
    except:
        self.exception_handler(self.debug)
|
Correct the light curve with the CBVs, generate a cover page for the DVS figure, and produce a FITS file for publication.
|
1,249 |
def run(self):
    try:
        self.plot_aperture([self.dvs.top_right() for i in range(4)])
        self.plot_lc(self.dvs.left(), info_right='nPLD', color='k')
        self.cross_validate(self.dvs.right())
        self.compute()
        self.cdpp_arr = self.get_cdpp_arr()
        self.cdpp = self.get_cdpp()
        self.plot_lc(self.dvs.left(), info_right='Powell', color='k')
        self.plot_final(self.dvs.top_left())
        self.plot_info(self.dvs)
        self.save_model()
    except:
        self.exception_handler(self.debug)
|
Runs the de-trending.
|
1,250 |
def validation_scatter(self, log_lam, b, masks, pre_v, gp, flux, time, med):
    self.lam[b] = 10 ** log_lam
    scatter = [None for i in range(len(masks))]
    for i in range(len(masks)):
        model = self.cv_compute(b, *pre_v[i])
        try:
            gpm, _ = gp.predict(flux - model - med, time[masks[i]])
        except ValueError:
            # prediction failed; return a large penalty value
            return 1.e30
        fdet = (flux - model)[masks[i]] - gpm
        scatter[i] = 1.e6 * (1.4826 * np.nanmedian(
            np.abs(fdet / med - np.nanmedian(fdet / med)))
            / np.sqrt(len(masks[i])))
    return np.max(scatter)
|
Computes the scatter in the validation set.
|
1,251 |
def iterdirty(self):
    return iter(chain(itervalues(self._new), itervalues(self._modified)))
|
Ordered iterator over dirty elements.
|
1,252 |
def commit(self, callback=None):
    if self.executed:
        raise InvalidTransaction('Invalid operation. '
                                 'Transaction already executed.')
    session = self.session
    self.session = None
    self.on_result = self._commit(session, callback)
    return self.on_result
|
Close the transaction and commit the session to the backend.
|
1,253 |
def load_related(self, meta, fname, data, fields, encoding):
    field = meta.dfields[fname]
    if field in meta.multifields:
        fmeta = field.structure_class()._meta
        if fmeta.name in ('hashtable', 'zset'):
            return ((native_str(id, encoding),
                     pairs_to_dict(fdata, encoding)) for id, fdata in data)
        else:
            return ((native_str(id, encoding), fdata) for id, fdata in data)
    else:
        return self.build(data, meta, fields, fields, encoding)
|
Parse data for related objects.
|
1,254 |
def _execute_query(self):
    pipe = self.pipe
    if not self.card:
        if self.meta.ordering:
            self.ismember = getattr(self.backend.client, 'zrank')
            self.card = getattr(pipe, 'zcard')
            self._check_member = self.zism
        else:
            self.ismember = getattr(self.backend.client, 'sismember')
            self.card = getattr(pipe, 'scard')
            self._check_member = self.sism
    else:
        self.ismember = None
    self.card(self.query_key)
    result = yield pipe.execute()
    yield result[-1]
|
Execute the query without fetching data. Returns the number of elements in the query.
|
1,255 |
def order(self, last):
    desc = last.desc
    field = last.name
    nested = last.nested
    nested_args = []
    while nested:
        meta = nested.model._meta
        nested_args.extend((self.backend.basekey(meta), nested.name))
        last = nested
        nested = nested.nested
    method = 'ALPHA' if last.field.internal_type == 'text' else ''
    if field == last.model._meta.pkname():
        field = ''
    return {'field': field, 'method': method,
            'desc': desc, 'nested': nested_args}
|
Perform ordering with respect to model fields.
|
1,256 |
def related_lua_args(self):
    related = self.queryelem.select_related
    if related:
        meta = self.meta
        for rel in related:
            field = meta.dfields[rel]
            relmodel = field.relmodel
            bk = self.backend.basekey(relmodel._meta) if relmodel else ''
            fields = list(related[rel])
            if meta.pkname() in fields:
                fields.remove(meta.pkname())
                if not fields:
                    fields.append('')
            ftype = field.type if field in meta.multifields else ''
            data = {'field': field.attname, 'type': ftype,
                    'bk': bk, 'fields': fields}
            yield field.name, data
|
Generator of load_related arguments.
|
1,257 |
def pop_range(self, start, stop=None, withscores=True, **options):
    return self.backend.execute(
        self.client.zpopbyscore(self.id, start, stop,
                                withscores=withscores, **options),
        partial(self._range, withscores))
|
Remove and return a range from the ordered set by score.
|
1,258 |
def execute_session(self, session_data):
    pipe = self.client.pipeline()
    for sm in session_data:
        meta = sm.meta
        if sm.structures:
            self.flush_structure(sm, pipe)
        delquery = None
        if sm.deletes is not None:
            delquery = sm.deletes.backend_query(pipe=pipe)
            self.accumulate_delete(pipe, delquery)
        if sm.dirty:
            meta_info = json.dumps(self.meta(meta))
            lua_data = [len(sm.dirty)]
            processed = []
            for instance in sm.dirty:
                state = instance.get_state()
                if not meta.is_valid(instance):
                    raise FieldValueError(
                        json.dumps(instance._dbdata['errors']))
                score = MIN_FLOAT
                if meta.ordering:
                    if meta.ordering.auto:
                        score = meta.ordering.name.incrby
                    else:
                        v = getattr(instance, meta.ordering.name, None)
                        if v is not None:
                            score = meta.ordering.field.scorefun(v)
                data = instance._dbdata['cleaned_data']
                action = state.action
                prev_id = state.iid if state.persistent else ''
                id = instance.pkvalue() or ''
                data = flat_mapping(data)
                lua_data.extend((action, prev_id, id, score, len(data)))
                lua_data.extend(data)
                processed.append(state.iid)
            self.odmrun(pipe, 'commit', meta, (), meta_info,
                        *lua_data, iids=processed)
    return pipe.execute()
|
Execute a session in Redis.
|
1,259 |
def flush(self, meta=None):
    pattern = self.basekey(meta) if meta else self.namespace
    return self.client.delpattern('%s*' % pattern)
|
Flush all model keys from the database.
|
1,260 |
def GetCovariance(kernel, kernel_params, time, errors):
    K = np.diag(errors ** 2)
    K += GP(kernel, kernel_params, white=False).get_matrix(time)
    return K
|
Returns the covariance matrix for a given light curve segment.
|
1,261 |
def NegLnLike(x, time, flux, errors, kernel):
    gp = GP(kernel, x, white=True)
    gp.compute(time, errors)
    if OLDGEORGE:
        nll = -gp.lnlikelihood(flux)
        ngr = -2 * gp.grad_lnlikelihood(flux) / np.sqrt(gp.kernel.pars)
    else:
        nll = -gp.log_likelihood(flux)
        ngr = -2 * gp.grad_log_likelihood(flux) / np.sqrt(
            np.exp(gp.get_parameter_vector()))
    return nll, ngr
|
Returns the negative log-likelihood function and its gradient.
|
1,262 |
def missing_intervals(startdate, enddate, start, end, dateconverter=None,
                      parseinterval=None, intervals=None):
    parseinterval = parseinterval or default_parse_interval
    dateconverter = dateconverter or todate
    startdate = dateconverter(parseinterval(startdate, 0))
    enddate = max(startdate, dateconverter(parseinterval(enddate, 0)))
    if intervals is not None and not isinstance(intervals, Intervals):
        intervals = Intervals(intervals)
    calc_intervals = Intervals()
    if start:
        if startdate < start:
            calc_start = startdate
            calc_end = parseinterval(start, -1)
            if calc_end >= calc_start:
                calc_intervals.append(Interval(calc_start, calc_end))
        if enddate > end:
            calc_start = parseinterval(end, 1)
            calc_end = enddate
            if calc_end >= calc_start:
                calc_intervals.append(Interval(calc_start, calc_end))
    else:
        start = startdate
        end = enddate
        calc_intervals.append(Interval(startdate, enddate))
    if calc_intervals:
        if intervals:
            calc_intervals.extend(intervals)
    elif intervals:
        calc_intervals = intervals
    return calc_intervals
|
Given a startdate and an enddate, evaluate the date intervals for which data is not available. Returns a list of two-dimensional tuples containing the start and end date of each interval. The list can contain 0, 1, or 2 tuples.
|
1,263 |
def InitLog(file_name=None, log_level=logging.DEBUG,
            screen_level=logging.CRITICAL, pdb=False):
    root = logging.getLogger()
    root.handlers = []
    root.setLevel(logging.DEBUG)
    if file_name is not None:
        if not os.path.exists(os.path.dirname(file_name)):
            os.makedirs(os.path.dirname(file_name))
        fh = logging.FileHandler(file_name)
        fh.setLevel(log_level)
        fh_formatter = logging.Formatter(
            "%(asctime)s %(levelname)-5s [%(name)s.%(funcName)s()]: %(message)s",
            datefmt="%m/%d/%y %H:%M:%S")
        fh.setFormatter(fh_formatter)
        fh.addFilter(NoPILFilter())
        root.addHandler(fh)
    sh = logging.StreamHandler(sys.stdout)
    if pdb:
        sh.setLevel(logging.DEBUG)
    else:
        sh.setLevel(screen_level)
    sh_formatter = logging.Formatter(
        "%(levelname)-5s [%(name)s.%(funcName)s()]: %(message)s")
    sh.setFormatter(sh_formatter)
    sh.addFilter(NoPILFilter())
    root.addHandler(sh)
    if pdb:
        sys.excepthook = ExceptionHookPDB
    else:
        sys.excepthook = ExceptionHook
|
A little routine to initialize the logging functionality.
|
1,264 |
def ExceptionHook(exctype, value, tb):
    for line in traceback.format_exception_only(exctype, value):
        log.error(line.replace('\n', ''))
    for line in traceback.format_tb(tb):
        log.error(line.replace('\n', ''))
    sys.__excepthook__(exctype, value, tb)
|
A custom exception handler that logs errors to file.
|
1,265 |
def prange(*x):
    try:
        root = logging.getLogger()
        if len(root.handlers):
            for h in root.handlers:
                if (type(h) is logging.StreamHandler) and \
                        (h.level != logging.CRITICAL):
                    from tqdm import tqdm
                    return tqdm(range(*x))
            return range(*x)
        else:
            from tqdm import tqdm
            return tqdm(range(*x))
    except ImportError:
        return range(*x)
|
Progress-bar range with tqdm.
|
1,266 |
def back(self, *fields):
    ts = self.irange(-1, -1, fields=fields)
    if ts:
        return ts.end(), ts[0]
|
Return the back pair of the structure.
|
1,267 |
def Search(ID, mission='k2'):
    assert mission == 'k2', "Only the K2 mission is supported for now."
    print("Searching for target %d..." % ID)
    season = missions.k2.Season(ID)
    if season in [91, 92, [91, 92]]:
        print("Campaign 9 is currently not part of the EVEREST catalog.")
        return
    elif season == 101:
        print("The first half of campaign 10 is not currently part of "
              "the EVEREST catalog.")
        return
    elif season is not None:
        print("Target is in campaign %d of the EVEREST catalog." % season)
        return
    star = k2plr_client.k2_star(ID)
    if star.objtype.lower() != "star":
        print("Target is of type %s, not STAR, " % star.objtype +
              "and is therefore not included in the EVEREST catalog.")
        return
    try:
        tpf = star.get_target_pixel_files()
    except:
        print("Unable to download the raw pixel files for this target.")
        return
    if len(tpf) == 0:
        print("Raw pixel files are not available for this target. Looks like "
              "data may not have been collected for it.")
        return
    if tpf[0].sci_campaign not in missions.k2.SEASONS:
        print("Targets for campaign %d are not yet available."
              % tpf[0].sci_campaign)
        return
    try:
        k2sff = k2plr.K2SFF(ID)
    except:
        print("Error downloading the K2SFF light curve for this target. "
              "Currently, EVEREST uses the K2SFF apertures to perform "
              "photometry. This is likely to change in the next version.")
        return
    try:
        assert np.count_nonzero(k2sff.apertures[15]), "Invalid aperture."
    except:
        print("Unable to retrieve the K2SFF aperture for this target. "
              "Currently, EVEREST uses the K2SFF apertures to perform "
              "photometry. This is likely to change in the next version.")
        return
    if star.kp < 8:
        # note: the original format string was never interpolated; fill in Kp
        print("Target has Kp = %.1f and is too saturated "
              "for proper de-trending with EVEREST." % star.kp)
        return
    print("I'm not sure why this target isn't in the EVEREST catalog. "
          "You can try de-trending it yourself:")
    print("http://faculty.washington.edu/rodluger/everest/pipeline.html")
    return
|
Why is my target not in the EVEREST database?
|
1,268 |
def _get_norm(self):
    log.info('Computing the PLD normalization...')
    mod = [None for b in self.breakpoints]
    for b, brkpt in enumerate(self.breakpoints):
        c = self.get_chunk(b)
        inds = np.array(list(set(np.concatenate(
            [self.transitmask, self.recmask]))), dtype=int)
        M = np.delete(np.arange(len(self.time)), inds, axis=0)
        if b > 0:
            m = M[(M > self.breakpoints[b - 1] - self.bpad) &
                  (M <= self.breakpoints[b] + self.bpad)]
        else:
            m = M[M <= self.breakpoints[b] + self.bpad]
        mK = GetCovariance(self.kernel, self.kernel_params,
                           self.time[m], self.fraw_err[m])
        med = np.nanmedian(self.fraw[m])
        f = self.fraw[m] - med
        A = np.zeros((len(m), len(m)))
        B = np.zeros((len(c), len(m)))
        for n in range(self.pld_order):
            XM = self.X(n, m)
            XC = self.X(n, c)
            A += self.reclam[b][n] * np.dot(XM, XM.T)
            B += self.reclam[b][n] * np.dot(XC, XM.T)
            del XM, XC
        W = np.linalg.solve(mK + A, f)
        mod[b] = np.dot(B, W)
        del A, B, W
    if len(mod) > 1:
        model = mod[0][:-self.bpad]
        for m in mod[1:-1]:
            offset = model[-1] - m[self.bpad - 1]
            model = np.concatenate([model, m[self.bpad:-self.bpad] + offset])
        offset = model[-1] - mod[-1][self.bpad - 1]
        model = np.concatenate([model, mod[-1][self.bpad:] + offset])
    else:
        model = mod[0]
    model -= np.nanmedian(model)
    self._norm = self.fraw - model
|
Computes the PLD flux normalization array.
|
1,269 |
def plot_pipeline(self, pipeline, *args, **kwargs):
    if pipeline != 'everest2':
        return getattr(missions, self.mission).pipelines.plot(
            self.ID, pipeline, *args, **kwargs)
    else:
        plot_raw = kwargs.get('plot_raw', False)
        plot_cbv = kwargs.get('plot_cbv', True)
        show = kwargs.get('show', True)
        if plot_raw:
            y = self.fraw
            ylabel = 'Raw Flux'
        elif plot_cbv:
            y = self.fcor
            ylabel = "EVEREST2 Flux"
        else:
            y = self.flux
            ylabel = "EVEREST2 Flux"
        bnmask = np.concatenate([self.nanmask, self.badmask])
        time = np.delete(self.time, bnmask)
        flux = np.delete(y, bnmask)
        fig, ax = pl.subplots(1, figsize=(10, 4))
        fig.subplots_adjust(bottom=0.15)
        ax.plot(time, flux, "k.", markersize=3, alpha=0.5)
        N = int(0.995 * len(flux))
        hi, lo = flux[np.argsort(flux)][[N, -N]]
        pad = (hi - lo) * 0.1
        ylim = (lo - pad, hi + pad)
        ax.set_ylim(ylim)
        ax.plot(self.time[self.badmask], y[self.badmask],
                "r.", markersize=3, alpha=0.2)
        ax.annotate('%.2f ppm' % self._mission.CDPP(flux),
                    xy=(0.98, 0.975), xycoords='axes fraction',
                    ha='right', va='top', fontsize=12, color='r', zorder=99)
        ax.margins(0, None)
        ax.set_xlabel("Time (%s)" % self._mission.TIMEUNITS, fontsize=16)
        ax.set_ylabel(ylabel, fontsize=16)
        fig.canvas.set_window_title("EVEREST2: EPIC %d" % (self.ID))
        if show:
            pl.show()
            pl.close()
        else:
            return fig, ax
|
Plots the light curve for the target, de-trended with a given pipeline.
|
1,270 |
def get_pipeline(self, *args, **kwargs):
    return getattr(missions, self.mission).pipelines.get(self.ID,
                                                         *args, **kwargs)
|
Returns the time and flux arrays for the target obtained by a given pipeline.
|
1,271 |
def _save_npz(self):
    d = dict(self.__dict__)
    # drop unpicklable / rebuildable attributes before saving
    for key in ('_weights', '_A', '_B', '_f', '_mK', 'K', 'dvs',
                'clobber', 'clobber_tpf', '_mission', 'debug'):
        d.pop(key, None)
    np.savez(os.path.join(self.dir, self.name + '.npz'), **d)
|
Saves all of the de-trending information to disk in an npz file.
|
1,272 |
def Interpolate(time, mask, y):
    yy = np.array(y)
    t_ = np.delete(time, mask)
    y_ = np.delete(y, mask, axis=0)
    if len(yy.shape) == 1:
        yy[mask] = np.interp(time[mask], t_, y_)
    elif len(yy.shape) == 2:
        for n in range(yy.shape[1]):
            yy[mask, n] = np.interp(time[mask], t_, y_[:, n])
    else:
        raise Exception("Array ``y`` must be either 1- or 2-d.")
    return yy
|
Masks certain elements in the array y and linearly interpolates over them, returning an array of the same length.
|
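A minimal sketch: the masked element is refilled by linear interpolation from its neighbours:

```python
import numpy as np

time = np.arange(5.0)
y = np.array([0.0, 1.0, 99.0, 3.0, 4.0])  # index 2 is a bad point
yy = Interpolate(time, [2], y)
assert np.allclose(yy, [0.0, 1.0, 2.0, 3.0, 4.0])
```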
1,273 |
def Smooth(x, window_len=100, window='hanning'):
    if window_len == 0:
        return np.zeros_like(x)
    # reflect the signal at both ends to reduce edge effects
    s = np.r_[2 * x[0] - x[window_len - 1::-1], x,
              2 * x[-1] - x[-1:-window_len:-1]]
    if window == 'flat':
        w = np.ones(window_len, 'd')
    else:
        w = getattr(np, window)(window_len)  # e.g. np.hanning; avoids eval()
    y = np.convolve(w / w.sum(), s, mode='same')
    return y[window_len:-window_len + 1]
|
Smooth data by convolving on a given timescale.
|
1,274 |
def SavGol(y, win=49):
    if len(y) >= win:
        return y - savgol_filter(y, win, 2) + np.nanmedian(y)
    else:
        return y
|
Subtracts a second-order Savitzky-Golay filter with window size win and returns the result. This acts as a high-pass filter.
|
1,275 |
def NumRegressors(npix, pld_order, cross_terms=True):
    res = 0
    for k in range(1, pld_order + 1):
        if cross_terms:
            res += comb(npix + k - 1, k)
        else:
            res += npix
    return int(res)
|
Return the number of regressors for npix pixels and PLD order pld_order.
|
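A worked example: with cross terms, order k contributes comb(npix + k - 1, k) regressors, the number of degree-k monomials in npix variables:

```python
from scipy.special import comb  # the snippet's `comb` is assumed to be this

# 5 pixels, PLD order 2: comb(5, 1) + comb(6, 2) = 5 + 15 = 20
assert NumRegressors(5, 2) == 20
assert NumRegressors(5, 2, cross_terms=False) == 10  # just npix per order
```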
1,276 |
def Downbin(x, newsize, axis=0, operation='mean'):
    assert newsize < x.shape[axis], \
        "The new size of the array must be smaller than the current size."
    oldsize = x.shape[axis]
    newshape = list(x.shape)
    newshape[axis] = newsize
    newshape.insert(axis + 1, oldsize // newsize)
    trim = oldsize % newsize
    if trim:
        xtrim = x[:-trim]
    else:
        xtrim = x
    if operation == 'mean':
        xbin = np.nanmean(xtrim.reshape(newshape), axis=axis + 1)
    elif operation == 'sum':
        xbin = np.nansum(xtrim.reshape(newshape), axis=axis + 1)
    elif operation == 'quadsum':
        xbin = np.sqrt(np.nansum(xtrim.reshape(newshape) ** 2, axis=axis + 1))
    elif operation == 'median':
        xbin = np.nanmedian(xtrim.reshape(newshape), axis=axis + 1)
    else:
        raise ValueError("`operation` must be either `mean`, "
                         "`sum`, `quadsum`, or `median`.")
    return xbin
|
Downbins an array to a smaller size.
|
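A usage sketch. The trailing oldsize % newsize samples are trimmed first; note the trim slice x[:-trim] is taken along axis 0 regardless of the axis argument:

import numpy as np

x = np.arange(12, dtype=float)
xb = Downbin(x, 3, operation='mean')   # 3 bins of 4 samples each
# xb == [1.5, 5.5, 9.5]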
1,277 |
def lookup ( var_name , contexts = ( ) , start = 0 ) : start = len ( contexts ) if start >= 0 else start for context in reversed ( contexts [ : start ] ) : try : if var_name in context : return context [ var_name ] except TypeError as te : continue return None
|
lookup the value of the var_name on the stack of contexts
|
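A sketch of the context-stack semantics: the innermost (last) context wins, and a missing name falls through to None. As written, any non-negative start is overridden to len(contexts):

contexts = ({'name': 'outer'}, {'name': 'inner'}, {'other': 1})
lookup('name', contexts)      # -> 'inner', since contexts are searched top-down
lookup('missing', contexts)   # -> None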
1,278 |
def delimiters_to_re ( delimiters ) : delimiters = tuple ( delimiters ) if delimiters in re_delimiters : re_tag = re_delimiters [ delimiters ] else : open_tag , close_tag = delimiters open_tag = '' . join ( [ c if c . isalnum ( ) else '\\' + c for c in open_tag ] ) close_tag = '' . join ( [ c if c . isalnum ( ) else '\\' + c for c in close_tag ] ) re_tag = re . compile ( open_tag + r'([#^>&{/!=]?)\s*(.*?)\s*([}=]?)' + close_tag , re . DOTALL ) re_delimiters [ delimiters ] = re_tag return re_tag
|
convert delimiters to corresponding regular expressions
|
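A sketch with mustache-style delimiters; re_delimiters is assumed to be a module-level cache dict. Non-alphanumeric delimiter characters are backslash-escaped before compiling:

tag = delimiters_to_re(('{{', '}}'))
m = tag.search('hello {{ name }} world')
m.group(2)   # -> 'name'; group(1) would hold a sigil such as '#', '^', or '/'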
1,279 |
def _escape ( self , text ) : ret = EMPTYSTRING if text is None else str ( text ) if self . escape : return html_escape ( ret ) else : return ret
|
Escape text according to self.escape
|
1,280 |
def _lookup ( self , dot_name , contexts ) : filters = [ x for x in map ( lambda x : x . strip ( ) , dot_name . split ( '|' ) ) ] dot_name = filters [ 0 ] filters = filters [ 1 : ] if not dot_name . startswith ( '.' ) : dot_name = './' + dot_name paths = dot_name . split ( '/' ) last_path = paths [ - 1 ] refer_context = last_path == '' or last_path == '.' or last_path == '..' paths = paths if refer_context else paths [ : - 1 ] level = 0 for path in paths : if path == '..' : level -= 1 elif path != '.' : level += len ( path . strip ( '.' ) . split ( '.' ) ) names = last_path . split ( '.' ) if refer_context or names [ 0 ] == '' : try : value = contexts [ level - 1 ] except : value = None else : value = lookup ( names [ 0 ] , contexts , level ) if not refer_context : for name in names [ 1 : ] : try : index = parse_int ( name ) name = parse_int ( name ) if isinstance ( value , ( list , tuple ) ) else name value = value [ name ] except : value = None break for f in filters : try : func = self . root . filters [ f ] value = func ( value ) except : continue return value
|
lookup value for names like a.b.c and handle filters as well
|
1,281 |
def _render_children ( self , contexts , partials ) : ret = [ ] for child in self . children : ret . append ( child . _render ( contexts , partials ) ) return EMPTYSTRING . join ( ret )
|
Render the children tokens
|
1,282 |
def _render ( self , contexts , partials ) : val = self . _lookup ( self . value , contexts ) if val : return EMPTYSTRING return self . _render_children ( contexts , partials )
|
render inverted section
|
1,283 |
def Setup ( ) : if not os . path . exists ( os . path . join ( EVEREST_DAT , 'k2' , 'cbv' ) ) : os . makedirs ( os . path . join ( EVEREST_DAT , 'k2' , 'cbv' ) ) GetK2Stars ( clobber = False )
|
Called when the code is installed . Sets up directories and downloads the K2 catalog .
|
1,284 |
def CDPP ( flux , mask = [ ] , cadence = 'lc' ) : rmswin = 13 svgwin = 49 if cadence == 'sc' : newsize = len ( flux ) // 30 flux = Downbin ( flux , newsize , operation = 'mean' ) flux_savgol = SavGol ( np . delete ( flux , mask ) , win = svgwin ) if len ( flux_savgol ) : return Scatter ( flux_savgol / np . nanmedian ( flux_savgol ) , remove_outliers = True , win = rmswin ) else : return np . nan
|
Compute the proxy 6-hr CDPP metric .
|
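A usage sketch for a normalized long-cadence flux array; Scatter is assumed to be the module's outlier-robust RMS helper, and the result is in ppm:

import numpy as np

flux = 1.0 + 1e-4 * np.random.randn(2000)
value = CDPP(flux, mask=[], cadence='lc')
# short-cadence inputs are first downbinned by a factor of 30:
# CDPP(flux_sc, cadence='sc')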
1,285 |
def HasShortCadence ( EPIC , season = None ) : if season is None : season = Campaign ( EPIC ) if season is None : return None stars = GetK2Campaign ( season ) i = np . where ( [ s [ 0 ] == EPIC for s in stars ] ) [ 0 ] if len ( i ) : return stars [ i [ 0 ] ] [ 3 ] else : return None
|
Returns True if short cadence data is available for this target .
|
1,286 |
def DVSFile ( ID , season , cadence = 'lc' ) : if cadence == 'sc' : strcadence = '_sc' else : strcadence = '' return 'hlsp_everest_k2_llc_%d-c%02d_kepler_v%s_dvs%s.pdf' % ( ID , season , EVEREST_MAJOR_MINOR , strcadence )
|
Returns the name of the DVS PDF for a given target .
|
1,287 |
def GetTargetCBVs ( model ) : season = model . season name = model . name if name . endswith ( '.sc' ) : name = name [ : - 3 ] model . XCBV = sysrem . GetCBVs ( season , model = name , niter = model . cbv_niter , sv_win = model . cbv_win , sv_order = model . cbv_order )
|
Computes the design matrix of CBVs for the given target and stores it in model.XCBV .
|
1,288 |
def StatsToCSV ( campaign , model = 'nPLD' ) : statsfile = os . path . join ( EVEREST_SRC , 'missions' , 'k2' , 'tables' , 'c%02d_%s.cdpp' % ( campaign , model ) ) csvfile = os . path . join ( os . path . dirname ( EVEREST_SRC ) , 'docs' , 'c%02d.csv' % campaign ) epic , kp , cdpp6r , cdpp6 , _ , _ , _ , _ , saturated = np . loadtxt ( statsfile , unpack = True , skiprows = 2 ) with open ( csvfile , 'w' ) as f : print ( 'c%02d' % campaign , file = f ) for i in range ( len ( epic ) ) : print ( '%09d,%.3f,%.3f,%.3f,%d' % ( epic [ i ] , kp [ i ] , cdpp6r [ i ] , cdpp6 [ i ] , int ( saturated [ i ] ) ) , file = f )
|
Generate the CSV file used in the search database for the documentation .
|
1,289 |
def do_pending_lookups ( event , sender , ** kwargs ) : key = ( sender . _meta . app_label , sender . _meta . name ) for callback in pending_lookups . pop ( key , [ ] ) : callback ( sender )
|
Handle any pending relations to the sending model . Sent from class_prepared .
|
1,290 |
def Many2ManyThroughModel ( field ) : from stdnet . odm import ModelType , StdModel , ForeignKey , CompositeIdField name_model = field . model . _meta . name name_relmodel = field . relmodel . _meta . name if name_model == name_relmodel : name_relmodel += '2' through = field . through if through is None : name = '{0}_{1}' . format ( name_model , name_relmodel ) class Meta : app_label = field . model . _meta . app_label through = ModelType ( name , ( StdModel , ) , { 'Meta' : Meta } ) field . through = through field1 = ForeignKey ( field . model , related_name = field . name , related_manager_class = makeMany2ManyRelatedManager ( field . relmodel , name_model , name_relmodel ) ) field1 . register_with_model ( name_model , through ) field2 = ForeignKey ( field . relmodel , related_name = field . related_name , related_manager_class = makeMany2ManyRelatedManager ( field . model , name_relmodel , name_model ) ) field2 . register_with_model ( name_relmodel , through ) pk = CompositeIdField ( name_model , name_relmodel ) pk . register_with_model ( 'id' , through )
|
Create a Many2Many through model with two foreign key fields and a CompositeIdField depending on the two foreign keys .
|
1,291 |
def makeMany2ManyRelatedManager ( formodel , name_relmodel , name_formodel ) : class _Many2ManyRelatedManager ( Many2ManyRelatedManager ) : pass _Many2ManyRelatedManager . formodel = formodel _Many2ManyRelatedManager . name_relmodel = name_relmodel _Many2ManyRelatedManager . name_formodel = name_formodel return _Many2ManyRelatedManager
|
formodel is the model to which the related manager refers .
|
1,292 |
def metaphone_processor ( words ) : for word in words : for w in double_metaphone ( word ) : if w : w = w . strip ( ) if w : yield w
|
Double metaphone word processor .
|
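A sketch; the exact codes depend on the double_metaphone implementation in use:

codes = list(metaphone_processor(['smith']))
# typically ['SM0', 'XMT'], the primary and alternate double-metaphone codes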
1,293 |
def tolerant_metaphone_processor ( words ) : for word in words : r = 0 for w in double_metaphone ( word ) : if w : w = w . strip ( ) if w : r += 1 yield w if not r : yield word
|
Double metaphone word processor , slightly modified so that the original word is returned when the algorithm yields no words .
|
1,294 |
def stemming_processor ( words ) : stem = PorterStemmer ( ) . stem for word in words : word = stem ( word , 0 , len ( word ) - 1 ) yield word
|
Porter Stemmer word processor
|
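A sketch, assuming the bundled PorterStemmer follows the classic stem(word, start, end) signature used above:

list(stemming_processor(['running', 'jumps']))   # -> ['run', 'jump']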
1,295 |
def Pool ( pool = 'AnyPool' , ** kwargs ) : if pool == 'MPIPool' : return MPIPool ( ** kwargs ) elif pool == 'MultiPool' : return MultiPool ( ** kwargs ) elif pool == 'SerialPool' : return SerialPool ( ** kwargs ) elif pool == 'AnyPool' : if MPIPool . enabled ( ) : return MPIPool ( ** kwargs ) elif MultiPool . enabled ( ) : return MultiPool ( ** kwargs ) else : return SerialPool ( ** kwargs ) else : raise ValueError ( 'Invalid pool ``%s``.' % pool )
|
Chooses between the different pools . If pool == AnyPool , the choice is based on availability .
|
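A usage sketch; task_function and tasks are placeholders, and the pools are assumed to share the usual map/close interface:

pool = Pool('AnyPool')                    # falls back MPI -> multiprocessing -> serial
results = pool.map(task_function, tasks)  # scatter the tasks, gather the results
pool.close()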
1,296 |
def wait ( self ) : if self . is_master ( ) : raise RuntimeError ( "Master node told to await jobs." ) status = MPI . Status ( ) while True : if self . debug : print ( "Worker {0} waiting for task." . format ( self . rank ) ) task = self . comm . recv ( source = 0 , tag = MPI . ANY_TAG , status = status ) if self . debug : print ( "Worker {0} got task {1} with tag {2}." . format ( self . rank , type ( task ) , status . tag ) ) if isinstance ( task , _close_pool_message ) : if self . debug : print ( "Worker {0} told to quit." . format ( self . rank ) ) break if isinstance ( task , _function_wrapper ) : self . function = task . function if self . debug : print ( "Worker {0} replaced its task function: {1}." . format ( self . rank , self . function ) ) continue result = self . function ( task ) if self . debug : print ( "Worker {0} sending answer {1} with tag {2}." . format ( self . rank , type ( result ) , status . tag ) ) self . comm . isend ( result , dest = 0 , tag = status . tag ) if self . exit_on_end : sys . exit ( )
|
If this isn't the master process, wait for instructions .
|
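wait() is the worker half of the standard master/worker pattern; a sketch of the driver side (task_function and tasks are again placeholders):

import sys

pool = MPIPool()
if not pool.is_master():
    pool.wait()   # workers loop here until the close message arrives
    sys.exit(0)
results = pool.map(task_function, tasks)
pool.close()      # sends _close_pool_message to every worker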
1,297 |
def commit_when_no_transaction ( f ) : def _ ( self , * args , ** kwargs ) : r = f ( self , * args , ** kwargs ) return self . session . add ( self ) if self . session is not None else r _ . __name__ = f . __name__ _ . __doc__ = f . __doc__ return _
|
Decorator for committing changes when the instance session is not in a transaction .
|
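A sketch of how the decorator might be applied in a model; the model and its field are hypothetical:

class Counter(StdModel):              # hypothetical stdnet model
    hits = IntegerField(default=0)    # hypothetical field

    @commit_when_no_transaction
    def increment(self):
        self.hits += 1                # queued via session.add when a session is attached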
1,298 |
def irange ( self , start = 0 , end = - 1 , callback = None , withscores = True , ** options ) : backend = self . read_backend res = backend . structure ( self ) . irange ( start , end , withscores = withscores , ** options ) if not callback : callback = self . load_data if withscores else self . load_values return backend . execute ( res , callback )
|
Return the range by rank between start and end .
|
1,299 |
def pop_front ( self ) : backend = self . backend return backend . execute ( backend . structure ( self ) . pop_front ( ) , self . value_pickler . loads )
|
Remove the first element of the list .
|