Columns: idx (int64, 0–63k) · question (stringlengths, 61–4.03k) · target (stringlengths, 6–1.23k)
1,300
def block_pop_back ( self , timeout = 10 ) : value = yield self . backend_structure ( ) . block_pop_back ( timeout ) if value is not None : yield self . value_pickler . loads ( value )
Remove the last element from the list. If no elements are available, block for at least timeout seconds.
1,301
def block_pop_front ( self , timeout = 10 ) : value = yield self . backend_structure ( ) . block_pop_front ( timeout ) if value is not None : yield self . value_pickler . loads ( value )
Remove the first element from the list. If no elements are available, block for at least timeout seconds.
1,302
def push_front ( self , value ) : self . cache . push_front ( self . value_pickler . dumps ( value ) )
Append a copy of value to the beginning of the list.
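These three list operations form a small deque-style API: values are pickled before they reach the backend structure and unpickled on the way out. A toy in-memory analogue (blocking behaviour omitted; all names here are illustrative, not the library's API):

```python
import pickle
from collections import deque

class ToyList:
    def __init__(self):
        self._data = deque()

    def push_front(self, value):
        # values are serialised before hitting the backend structure
        self._data.appendleft(pickle.dumps(value))

    def pop_back(self):
        # ...and deserialised on the way out; None when empty
        return pickle.loads(self._data.pop()) if self._data else None

l = ToyList()
l.push_front({'a': 1})
l.push_front([2, 3])
print(l.pop_back())  # {'a': 1} -- the first value pushed sits at the back
```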
1,303
def aggregate ( self , kwargs ) : meta = self . _meta fields = meta . dfields field_lookups = { } for name , value in iteritems ( kwargs ) : bits = name . split ( JSPLITTER ) field_name = bits . pop ( 0 ) if field_name not in fields : raise QuerySetError ( 'Could not filter on model "{0}".\ Field "{1}" does not exist.' . format ( meta , field_name ) ) field = fields [ field_name ] attname = field . attname lookup = None if bits : bits = [ n . lower ( ) for n in bits ] if bits [ - 1 ] == 'in' : bits . pop ( ) elif bits [ - 1 ] in range_lookups : lookup = bits . pop ( ) remaining = JSPLITTER . join ( bits ) if lookup : attname , nested = field . get_lookup ( remaining , QuerySetError ) lookups = get_lookups ( attname , field_lookups ) lookups . append ( lookup_value ( lookup , ( value , nested ) ) ) continue elif remaining : value = field . filter ( self . session , remaining , value ) lookups = get_lookups ( attname , field_lookups ) if not field . index : raise QuerySetError ( "%s %s is not an index. Cannot query." % ( field . __class__ . __name__ , field_name ) ) if not iterable ( value ) : value = ( value , ) for v in value : if isinstance ( v , Q ) : v = lookup_value ( 'set' , v . construct ( ) ) else : v = lookup_value ( 'value' , field . serialise ( v , lookup ) ) lookups . append ( v ) return [ queryset ( self , name = name , underlying = field_lookups [ name ] ) for name in sorted ( field_lookups ) ]
Aggregate lookup parameters .
1,304
def models_from_model ( model , include_related = False , exclude = None ) : if exclude is None : exclude = set ( ) if model and model not in exclude : exclude . add ( model ) if isinstance ( model , ModelType ) and not model . _meta . abstract : yield model if include_related : exclude . add ( model ) for field in model . _meta . fields : if hasattr ( field , 'relmodel' ) : through = getattr ( field , 'through' , None ) for rmodel in ( field . relmodel , field . model , through ) : for m in models_from_model ( rmodel , include_related = include_related , exclude = exclude ) : yield m for manytomany in model . _meta . manytomany : related = getattr ( model , manytomany ) for m in models_from_model ( related . model , include_related = include_related , exclude = exclude ) : yield m elif not isinstance ( model , ModelType ) and isclass ( model ) : yield model
Generator of all models reachable from model.
1,305
def unregister ( self , model = None ) : if model is not None : try : manager = self . _registered_models . pop ( model ) except KeyError : return if self . _registered_names . get ( manager . _meta . name ) == manager : self . _registered_names . pop ( manager . _meta . name ) return [ manager ] else : managers = list ( self . _registered_models . values ( ) ) self . _registered_models . clear ( ) return managers
Unregister a model if provided; otherwise unregister all registered models. Return a list of unregistered model managers, or None if no managers were removed.
1,306
def execute_script ( self , name , keys , * args , ** options ) : script = get_script ( name ) if not script : raise redis . RedisError ( 'No such script "%s"' % name ) address = self . address ( ) if address not in all_loaded_scripts : all_loaded_scripts [ address ] = set ( ) loaded = all_loaded_scripts [ address ] toload = script . required_scripts . difference ( loaded ) for name in toload : s = get_script ( name ) yield self . script_load ( s . script ) loaded . update ( toload ) yield script ( self , keys , args , options )
Execute a script .
1,307
def query ( self , model ) : session = self . router . session ( ) fields = tuple ( ( f . name for f in model . _meta . scalarfields if f . type == 'text' ) ) qs = session . query ( model ) . load_only ( * fields ) for related in self . get_related_fields ( model ) : qs = qs . load_related ( related ) return qs
Return a query for model when it needs to be indexed .
1,308
def intervals ( self , startdate , enddate , parseinterval = None ) : return missing_intervals ( startdate , enddate , self . data_start , self . data_end , dateconverter = self . todate , parseinterval = parseinterval )
Given a startdate and an enddate, evaluate the date intervals for which data is not available. Return a list of two-element tuples containing the start and end date of each interval. The list can contain 0, 1, or 2 tuples.
1,309
def merged_series ( cls , * series , ** kwargs ) : router , backend = cls . check_router ( None , * series ) if backend : target = router . register ( cls ( ) , backend ) router . session ( ) . add ( target ) target . _merge ( * series , ** kwargs ) backend = target . backend return backend . execute ( backend . structure ( target ) . irange_and_delete ( ) , target . load_data )
Merge series and return the results without storing data in the backend server .
1,310
def backend_fields ( self , fields ) : dfields = self . dfields processed = set ( ) names = [ ] atts = [ ] pkname = self . pkname ( ) for name in fields : if name == pkname or name in processed : continue elif name in dfields : processed . add ( name ) field = dfields [ name ] names . append ( field . name ) atts . append ( field . attname ) else : bname = name . split ( JSPLITTER ) [ 0 ] if bname in dfields : field = dfields [ bname ] if field . type in ( 'json object' , 'related object' ) : processed . add ( name ) names . append ( name ) atts . append ( name ) return names , atts
Return a two-element tuple containing a list of field names and a list of field attribute names.
1,311
def as_dict ( self ) : pk = self . pk id_type = 3 if pk . type == 'auto' : id_type = 1 return { 'id_name' : pk . name , 'id_type' : id_type , 'sorted' : bool ( self . ordering ) , 'autoincr' : self . ordering and self . ordering . auto , 'multi_fields' : [ field . name for field in self . multifields ] , 'indices' : dict ( ( ( idx . attname , idx . unique ) for idx in self . indices ) ) }
Model metadata in a dictionary
1,312
def loadedfields ( self ) : if self . _loadedfields is None : for field in self . _meta . scalarfields : yield field else : fields = self . _meta . dfields processed = set ( ) for name in self . _loadedfields : if name in processed : continue if name in fields : processed . add ( name ) yield fields [ name ] else : name = name . split ( JSPLITTER ) [ 0 ] if name in fields and name not in processed : field = fields [ name ] if field . type == 'json object' : processed . add ( name ) yield field
Generator of fields loaded from the database.
1,313
def clone ( self , ** data ) : meta = self . _meta session = self . session pkname = meta . pkname ( ) pkvalue = data . pop ( pkname , None ) fields = self . todict ( exclude_cache = True ) fields . update ( data ) fields . pop ( '__dbdata__' , None ) obj = self . _meta . make_object ( ( pkvalue , None , fields ) ) obj . session = session return obj
Utility method for cloning the instance as a new object .
1,314
def title ( self ) : return self . title_left ( on = False ) , self . title_center ( on = False ) , self . title_right ( on = False )
Returns the axis instances where the title will be printed.
1,315
def footer ( self ) : return self . footer_left ( on = False ) , self . footer_center ( on = False ) , self . footer_right ( on = False )
Returns the axis instances where the footer will be printed.
1,316
def top_right ( self ) : res = self . body_top_right [ self . tcount ] ( ) self . tcount += 1 return res
Returns the axis instance at the top right of the page where the postage stamp and aperture are displayed.
1,317
def left ( self ) : res = self . body_left [ self . lcount ] ( ) self . lcount += 1 return res
Returns the current axis instance on the left side of the page where each successive light curve is displayed
1,318
def right ( self ) : res = self . body_right [ self . rcount ] ( ) self . rcount += 1 return res
Returns the current axis instance on the right side of the page where cross - validation information is displayed
1,319
def body ( self ) : res = self . _body [ self . bcount ] ( ) self . bcount += 1 return res
Returns the axis instance where the light curves will be shown
1,320
def hashmodel ( model , library = None ) : library = library or 'python-stdnet' meta = model . _meta sha = hashlib . sha1 ( to_bytes ( '{0}({1})' . format ( library , meta ) ) ) hash = sha . hexdigest ( ) [ : 8 ] meta . hash = hash if hash in _model_dict : raise KeyError ( 'Model "{0}" already in hash table.\ Rename your model or the module containing the model.' . format ( meta ) ) _model_dict [ hash ] = model
Calculate the hash id of the model metaclass meta and register the model in the hash table.
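The id is an 8-character SHA-1 prefix of the string "library(meta)". A minimal, self-contained sketch of just the hashing step (the name short_hash and the string argument are illustrative):

```python
import hashlib

def short_hash(library, meta_repr):
    # 8-character SHA-1 prefix of "library(meta)", as in hashmodel above
    payload = '{0}({1})'.format(library, meta_repr).encode('utf-8')
    return hashlib.sha1(payload).hexdigest()[:8]

print(short_hash('python-stdnet', 'MyModel'))  # stable 8-char id
```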
1,321
def bind ( self , callback , sender = None ) : key = ( _make_id ( callback ) , _make_id ( sender ) ) self . callbacks . append ( ( key , callback ) )
Bind a callback for a given sender .
1,322
def fire ( self , sender = None , ** params ) : keys = ( _make_id ( None ) , _make_id ( sender ) ) results = [ ] for ( _ , key ) , callback in self . callbacks : if key in keys : results . append ( callback ( self , sender , ** params ) ) return results
Fire the callbacks bound to a sender.
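Together, bind and fire implement a minimal signal dispatcher keyed on sender identity: fire invokes callbacks bound to that sender or to None. A self-contained sketch of the same pattern, using the built-in id() as a stand-in for _make_id:

```python
class Signal:
    def __init__(self):
        self.callbacks = []

    def bind(self, callback, sender=None):
        # key callbacks by sender identity; None means "any sender"
        self.callbacks.append((id(sender), callback))

    def fire(self, sender=None, **params):
        keys = (id(None), id(sender))
        return [cb(self, sender, **params)
                for key, cb in self.callbacks if key in keys]

sig = Signal()
sig.bind(lambda s, sender, **p: p['value'] * 2)
print(sig.fire(value=21))  # [42]
```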
1,323
def Channel ( EPIC , campaign = None ) : if campaign is None : campaign = Campaign ( EPIC ) if hasattr ( campaign , '__len__' ) : raise AttributeError ( "Please choose a campaign/season for this target: %s." % campaign ) try : stars = GetK2Stars ( ) [ campaign ] except KeyError : log . warn ( "Unknown channel for target. Defaulting to channel 2." ) return 2 i = np . argmax ( [ s [ 0 ] == EPIC for s in stars ] ) return stars [ i ] [ 2 ]
Returns the channel number for a given EPIC target .
1,324
def Module ( EPIC , campaign = None ) : channel = Channel ( EPIC , campaign = campaign ) nums = { 2 : 1 , 3 : 5 , 4 : 9 , 6 : 13 , 7 : 17 , 8 : 21 , 9 : 25 , 10 : 29 , 11 : 33 , 12 : 37 , 13 : 41 , 14 : 45 , 15 : 49 , 16 : 53 , 17 : 57 , 18 : 61 , 19 : 65 , 20 : 69 , 22 : 73 , 23 : 77 , 24 : 81 } for c in [ channel , channel - 1 , channel - 2 , channel - 3 ] : if c in nums . values ( ) : for mod , chan in nums . items ( ) : if chan == c : return mod return None
Returns the module number for a given EPIC target .
1,325
def Channels ( module ) : nums = { 2 : 1 , 3 : 5 , 4 : 9 , 6 : 13 , 7 : 17 , 8 : 21 , 9 : 25 , 10 : 29 , 11 : 33 , 12 : 37 , 13 : 41 , 14 : 45 , 15 : 49 , 16 : 53 , 17 : 57 , 18 : 61 , 19 : 65 , 20 : 69 , 22 : 73 , 23 : 77 , 24 : 81 } if module in nums : return [ nums [ module ] , nums [ module ] + 1 , nums [ module ] + 2 , nums [ module ] + 3 ] else : return None
Returns the channels contained in the given K2 module .
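The nums table maps each K2 module to its first channel; every module owns four consecutive channels, so Channels is a range construction and Module is the reverse lookup. A condensed, runnable check (table truncated for brevity):

```python
nums = {2: 1, 3: 5, 4: 9, 6: 13}  # truncated copy of the table above

def channels(module):
    start = nums.get(module)
    return None if start is None else [start, start + 1, start + 2, start + 3]

print(channels(3))   # [5, 6, 7, 8]
print(channels(99))  # None -- unknown module
```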
1,326
def GetSources ( ID , darcsec = None , stars_only = False ) : client = kplr . API ( ) star = client . k2_star ( ID ) tpf = star . get_target_pixel_files ( ) [ 0 ] with tpf . open ( ) as f : crpix1 = f [ 2 ] . header [ 'CRPIX1' ] crpix2 = f [ 2 ] . header [ 'CRPIX2' ] crval1 = f [ 2 ] . header [ 'CRVAL1' ] crval2 = f [ 2 ] . header [ 'CRVAL2' ] cdelt1 = f [ 2 ] . header [ 'CDELT1' ] cdelt2 = f [ 2 ] . header [ 'CDELT2' ] pc1_1 = f [ 2 ] . header [ 'PC1_1' ] pc1_2 = f [ 2 ] . header [ 'PC1_2' ] pc2_1 = f [ 2 ] . header [ 'PC2_1' ] pc2_2 = f [ 2 ] . header [ 'PC2_2' ] pc = np . array ( [ [ pc1_1 , pc1_2 ] , [ pc2_1 , pc2_2 ] ] ) pc = np . linalg . inv ( pc ) crpix1p = f [ 2 ] . header [ 'CRPIX1P' ] crpix2p = f [ 2 ] . header [ 'CRPIX2P' ] crval1p = f [ 2 ] . header [ 'CRVAL1P' ] crval2p = f [ 2 ] . header [ 'CRVAL2P' ] cdelt1p = f [ 2 ] . header [ 'CDELT1P' ] cdelt2p = f [ 2 ] . header [ 'CDELT2P' ] if darcsec is None : darcsec = 4 * max ( f [ 2 ] . data . shape ) epicid , ra , dec , kepmag = MASTRADec ( star . k2_ra , star . k2_dec , darcsec , stars_only ) sources = [ ] for i , epic in enumerate ( epicid ) : dra = ( ra [ i ] - crval1 ) * np . cos ( np . radians ( dec [ i ] ) ) / cdelt1 ddec = ( dec [ i ] - crval2 ) / cdelt2 sx = pc [ 0 , 0 ] * dra + pc [ 0 , 1 ] * ddec + crpix1 + crval1p - 1.0 sy = pc [ 1 , 0 ] * dra + pc [ 1 , 1 ] * ddec + crpix2 + crval2p - 1.0 sources . append ( dict ( ID = epic , x = sx , y = sy , mag = kepmag [ i ] , x0 = crval1p , y0 = crval2p ) ) return sources
Grabs the EPIC coordinates from the TPF and searches MAST for other EPIC targets within the same aperture .
1,327
def SaturationFlux ( EPIC , campaign = None , ** kwargs ) : channel , well_depth = np . loadtxt ( os . path . join ( EVEREST_SRC , 'missions' , 'k2' , 'tables' , 'well_depth.tsv' ) , unpack = True ) satflx = well_depth [ channel == Channel ( EPIC , campaign = campaign ) ] [ 0 ] / 6.02 return satflx
Returns the saturation flux for the target, computed as the well depth divided by the exposure time. If any of the target's pixels have flux larger than this value, they are likely to be saturated and cause charge bleeding. The well depths were obtained from Table 13 of the Kepler instrument handbook. We assume an exposure time of 6.02 s.
1,328
def GetStars ( campaign , module , model = 'nPLD' , ** kwargs ) : channels = Channels ( module ) assert channels is not None , "No channels available on this module." all = GetK2Campaign ( campaign ) stars = np . array ( [ s [ 0 ] for s in all if s [ 2 ] in channels and os . path . exists ( os . path . join ( EVEREST_DAT , 'k2' , 'c%02d' % int ( campaign ) , ( '%09d' % s [ 0 ] ) [ : 4 ] + '00000' , ( '%09d' % s [ 0 ] ) [ 4 : ] , model + '.npz' ) ) ] , dtype = int ) N = len ( stars ) assert N > 0 , "No light curves found for campaign %d, module %d." % ( campaign , module ) fluxes = [ ] errors = [ ] kpars = [ ] for n in range ( N ) : nf = os . path . join ( EVEREST_DAT , 'k2' , 'c%02d' % int ( campaign ) , ( '%09d' % stars [ n ] ) [ : 4 ] + '00000' , ( '%09d' % stars [ n ] ) [ 4 : ] , model + '.npz' ) data = np . load ( nf ) t = data [ 'time' ] if n == 0 : time = t breakpoints = data [ 'breakpoints' ] y = data [ 'fraw' ] - data [ 'model' ] err = data [ 'fraw_err' ] m = np . array ( list ( set ( np . concatenate ( [ data [ 'outmask' ] , data [ 'badmask' ] , data [ 'nanmask' ] , data [ 'transitmask' ] ] ) ) ) , dtype = int ) y = np . interp ( t , np . delete ( t , m ) , np . delete ( y , m ) ) err = np . interp ( t , np . delete ( t , m ) , np . delete ( err , m ) ) fluxes . append ( y ) errors . append ( err ) kpars . append ( data [ 'kernel_params' ] ) return time , breakpoints , np . array ( fluxes ) , np . array ( errors ) , np . array ( kpars )
Returns de-trended light curves for all stars on a given module in a given campaign.
1,329
def parse_info ( response ) : info = { } response = response . decode ( 'utf-8' ) def get_value ( value ) : if ',' and '=' not in value : return value sub_dict = { } for item in value . split ( ',' ) : k , v = item . split ( '=' ) try : sub_dict [ k ] = int ( v ) except ValueError : sub_dict [ k ] = v return sub_dict data = info for line in response . splitlines ( ) : keyvalue = line . split ( ':' ) if len ( keyvalue ) == 2 : key , value = keyvalue try : data [ key ] = int ( value ) except ValueError : data [ key ] = get_value ( value ) else : data = { } info [ line [ 2 : ] ] = data return info
Parse the response of Redis's INFO command into a Python dict, converting byte data into unicode along the way.
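A condensed, self-contained restatement of the parsing logic, run against a fabricated INFO snippet for illustration (section headers and the comma/equals edge cases of the original are simplified away):

```python
def parse_info(response):
    info = {}
    for line in response.decode('utf-8').splitlines():
        if ':' not in line:
            continue
        key, _, value = line.partition(':')
        if '=' in value:
            # comma-separated key=value pairs become a sub-dict
            info[key] = dict(item.split('=', 1) for item in value.split(','))
        else:
            try:
                info[key] = int(value)
            except ValueError:
                info[key] = value
    return info

sample = b"redis_version:6.2.0\r\nconnected_clients:4\r\ndb0:keys=10,expires=0"
print(parse_info(sample))
# {'redis_version': '6.2.0', 'connected_clients': 4,
#  'db0': {'keys': '10', 'expires': '0'}}
```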
1,330
def zdiffstore ( self , dest , keys , withscores = False ) : keys = ( dest , ) + tuple ( keys ) wscores = 'withscores' if withscores else '' return self . execute_script ( 'zdiffstore' , keys , wscores , withscores = withscores )
Compute the difference of multiple sorted sets specified by keys and store the result in a new sorted set at dest.
1,331
def zpopbyrank ( self , name , start , stop = None , withscores = False , desc = False ) : stop = stop if stop is not None else start return self . execute_script ( 'zpop' , ( name , ) , 'rank' , start , stop , int ( desc ) , int ( withscores ) , withscores = withscores )
Pop a range by rank .
1,332
def lnprior ( x ) : per , t0 , b = x if b < - 1 or b > 1 : return - np . inf elif per < 7 or per > 10 : return - np . inf elif t0 < 1978 or t0 > 1979 : return - np . inf else : return 0.
Return the log prior given parameter vector x .
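lnprior is a uniform "box" prior: log-probability 0 inside the bounds and -inf outside, so a sampler rejects out-of-range proposals outright. A self-contained check:

```python
import numpy as np

def lnprior(x):
    per, t0, b = x
    if b < -1 or b > 1 or per < 7 or per > 10 or t0 < 1978 or t0 > 1979:
        return -np.inf   # outside the box: zero prior probability
    return 0.0           # inside the box: flat prior

print(lnprior((8.5, 1978.5, 0.1)))   # 0.0  -- inside the box
print(lnprior((11.0, 1978.5, 0.1)))  # -inf -- period out of range
```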
1,333
def lnlike ( x , star ) : ll = lnprior ( x ) if np . isinf ( ll ) : return ll , ( np . nan , np . nan ) per , t0 , b = x model = TransitModel ( 'b' , per = per , t0 = t0 , b = b , rhos = 10. ) ( star . time ) like , d , vard = star . lnlike ( model , full_output = True ) ll += like return ll , ( d , )
Return the log likelihood given parameter vector x .
1,334
def permitted_query ( self , query , group , operations ) : session = query . session models = session . router user = group . user if user . is_superuser : return query roles = group . roles . query ( ) through_model = models . role . permissions . model models [ through_model ] . filter ( role = roles , permission__model_type = query . model , permission__operations = operations ) permissions = router . permission . filter ( model_type = query . model , level = operations ) owner_query = query . filter ( user = user ) roles = models . role . filter ( model_type = query . model , level__ge = level ) groups = Role . groups . throughquery ( session ) . filter ( role = roles ) . get_field ( 'group' ) if user . groups . filter ( id = groups ) . count ( ) : permitted = models . instancerole . filter ( role = roles ) . get_field ( 'object_id' ) return owner_query . union ( model . objects . filter ( id = permitted ) ) else : return owner_query
Change the query so that only instances for which group has roles with permission on operations are returned .
1,335
def init_app ( self , app , session = None , parameters = None ) : if parameters is not None and not isinstance ( parameters , ParamsBuilder ) : raise InvalidUsage ( "parameters should be a pysnow.ParamsBuilder object, not %r" % type ( parameters ) . __name__ ) self . _session = session self . _parameters = parameters app . config . setdefault ( 'SNOW_INSTANCE' , None ) app . config . setdefault ( 'SNOW_HOST' , None ) app . config . setdefault ( 'SNOW_USER' , None ) app . config . setdefault ( 'SNOW_PASSWORD' , None ) app . config . setdefault ( 'SNOW_OAUTH_CLIENT_ID' , None ) app . config . setdefault ( 'SNOW_OAUTH_CLIENT_SECRET' , None ) app . config . setdefault ( 'SNOW_USE_SSL' , True ) if app . config [ 'SNOW_OAUTH_CLIENT_ID' ] and app . config [ 'SNOW_OAUTH_CLIENT_SECRET' ] : self . _client_type_oauth = True elif self . _session or ( app . config [ 'SNOW_USER' ] and app . config [ 'SNOW_PASSWORD' ] ) : self . _client_type_basic = True else : raise ConfigError ( "You must supply user credentials, a session or OAuth credentials to use flask-snow" )
Initialize the snow extension.
1,336
def connection ( self ) : ctx = stack . top . app if ctx is not None : if not hasattr ( ctx , 'snow' ) : if self . _client_type_oauth : if not self . _token_updater : warnings . warn ( "No token updater has been set. Token refreshes will be ignored." ) client = self . _get_oauth_client ( ) else : client = self . _get_basic_client ( ) if self . _parameters : client . parameters = self . _parameters ctx . snow = client return ctx . snow
Snow connection instance; stores a pysnow.Client instance and pysnow.Resource instances.
1,337
def usage ( ) : global options l = len ( options [ 'long' ] ) options [ 'shortlist' ] = [ s for s in options [ 'short' ] if s != ":" ] print ( "python -m behave2cucumber [-h] [-d level|--debug=level]" ) for i in range ( l ) : print ( " -{0}|--{1:20} {2}" . format ( options [ 'shortlist' ] [ i ] , options [ 'long' ] [ i ] , options [ 'descriptions' ] [ i ] ) )
Print out a usage message
1,338
def direction ( theta , phi ) : return np . array ( [ np . cos ( phi ) * np . sin ( theta ) , np . sin ( phi ) * np . sin ( theta ) , np . cos ( theta ) ] )
Return the direction vector of a cylinder defined by the spherical coordinates theta and phi .
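direction is the standard spherical-to-Cartesian parameterization, so its output is always a unit vector. A quick runnable check:

```python
import numpy as np

def direction(theta, phi):
    return np.array([np.cos(phi) * np.sin(theta),
                     np.sin(phi) * np.sin(theta),
                     np.cos(theta)])

w = direction(np.pi / 2, 0.0)
print(np.round(w, 6))     # [1. 0. 0.] -- theta = pi/2, phi = 0
print(np.linalg.norm(w))  # 1.0 -- always a unit vector
```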
1,339
def projection_matrix ( w ) : return np . identity ( 3 ) - np . dot ( np . reshape ( w , ( 3 , 1 ) ) , np . reshape ( w , ( 1 , 3 ) ) )
Return the projection matrix of a direction w .
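projection_matrix builds P = I - w wᵀ, which removes the component of a vector along w. A sketch using np.outer, equivalent to the reshape/dot form above:

```python
import numpy as np

def projection_matrix(w):
    # I - w w^T, written with np.outer instead of reshape/dot
    return np.identity(3) - np.outer(w, w)

w = np.array([0.0, 0.0, 1.0])
P = projection_matrix(w)
print(P @ w)                          # [0. 0. 0.] -- w itself is annihilated
print(P @ np.array([1.0, 2.0, 3.0]))  # [1. 2. 0.] -- z-component dropped
```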
1,340
def skew_matrix ( w ) : return np . array ( [ [ 0 , - w [ 2 ] , w [ 1 ] ] , [ w [ 2 ] , 0 , - w [ 0 ] ] , [ - w [ 1 ] , w [ 0 ] , 0 ] ] )
Return the skew matrix of a direction w .
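The skew matrix turns a cross product into a matrix product: skew_matrix(w) @ x equals np.cross(w, x). A runnable check:

```python
import numpy as np

def skew_matrix(w):
    return np.array([[0, -w[2], w[1]],
                     [w[2], 0, -w[0]],
                     [-w[1], w[0], 0]])

w = np.array([1.0, 2.0, 3.0])
x = np.array([4.0, 5.0, 6.0])
print(skew_matrix(w) @ x)  # [-3.  6. -3.]
print(np.cross(w, x))      # [-3.  6. -3.] -- identical by construction
```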
1,341
def calc_A ( Ys ) : return sum ( np . dot ( np . reshape ( Y , ( 3 , 1 ) ) , np . reshape ( Y , ( 1 , 3 ) ) ) for Y in Ys )
Return the matrix A from a list of Y vectors .
1,342
def calc_A_hat ( A , S ) : return np . dot ( S , np . dot ( A , np . transpose ( S ) ) )
Return the A_hat matrix of A given the skew matrix S
1,343
def G ( w , Xs ) : n = len ( Xs ) P = projection_matrix ( w ) Ys = [ np . dot ( P , X ) for X in Xs ] A = calc_A ( Ys ) A_hat = calc_A_hat ( A , skew_matrix ( w ) ) u = sum ( np . dot ( Y , Y ) for Y in Ys ) / n v = np . dot ( A_hat , sum ( np . dot ( Y , Y ) * Y for Y in Ys ) ) / np . trace ( np . dot ( A_hat , A ) ) return sum ( ( np . dot ( Y , Y ) - u - 2 * np . dot ( Y , v ) ) ** 2 for Y in Ys )
Calculate the G function given a cylinder direction w and a list of data points Xs to be fitted .
1,344
def C ( w , Xs ) : n = len ( Xs ) P = projection_matrix ( w ) Ys = [ np . dot ( P , X ) for X in Xs ] A = calc_A ( Ys ) A_hat = calc_A_hat ( A , skew_matrix ( w ) ) return np . dot ( A_hat , sum ( np . dot ( Y , Y ) * Y for Y in Ys ) ) / np . trace ( np . dot ( A_hat , A ) )
Calculate the cylinder center given the cylinder direction and a list of data points .
1,345
def r ( w , Xs ) : n = len ( Xs ) P = projection_matrix ( w ) c = C ( w , Xs ) return np . sqrt ( sum ( np . dot ( c - X , np . dot ( P , c - X ) ) for X in Xs ) / n )
Calculate the radius given the cylinder direction and a list of data points .
1,346
def get ( self , request , key ) : try : email_val = EmailAddressValidation . objects . get ( validation_key = key ) except EmailAddressValidation . DoesNotExist : messages . error ( request , _ ( 'The email address you are trying to ' 'verify either has already been verified' ' or does not exist.' ) ) return redirect ( '/' ) try : email = EmailAddress . objects . get ( address = email_val . address ) except EmailAddress . DoesNotExist : email = EmailAddress ( address = email_val . address ) if email . user and email . user . is_active : messages . error ( request , _ ( 'The email address you are trying to ' 'verify is already an active email ' 'address.' ) ) email_val . delete ( ) return redirect ( '/' ) email . user = email_val . user email . save ( ) email_val . delete ( ) user = User . objects . get ( username = email . user . username ) user . is_active = True user . save ( ) messages . success ( request , _ ( 'Email address verified!' ) ) return redirect ( 'user_profile' , username = email_val . user . username )
Validate an email with the given key
1,347
def delete ( self , request , key ) : request . DELETE = http . QueryDict ( request . body ) email_addr = request . DELETE . get ( 'email' ) user_id = request . DELETE . get ( 'user' ) if not email_addr : return http . HttpResponseBadRequest ( ) try : email = EmailAddressValidation . objects . get ( address = email_addr , user_id = user_id ) except EmailAddressValidation . DoesNotExist : pass else : email . delete ( ) return http . HttpResponse ( status = 204 ) try : email = EmailAddress . objects . get ( address = email_addr , user_id = user_id ) except EmailAddress . DoesNotExist : raise http . Http404 email . user = None email . save ( ) return http . HttpResponse ( status = 204 )
Remove an email address, validated or not.
1,348
def update ( self , request , key ) : request . UPDATE = http . QueryDict ( request . body ) email_addr = request . UPDATE . get ( 'email' ) user_id = request . UPDATE . get ( 'user' ) if not email_addr : return http . HttpResponseBadRequest ( ) try : email = EmailAddress . objects . get ( address = email_addr , user_id = user_id ) except EmailAddress . DoesNotExist : raise http . Http404 email . user . email = email_addr email . user . save ( ) return http . HttpResponse ( status = 204 )
Set an email address as primary address .
1,349
def get_env_setting ( setting ) : try : return os . environ [ setting ] except KeyError : error_msg = "Set the %s env variable" % setting raise ImproperlyConfigured ( error_msg )
Get the environment setting or raise an exception.
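Usage sketch; ImproperlyConfigured is Django's exception, replaced here by a stand-in class so the example runs on its own:

```python
import os

class ImproperlyConfigured(Exception):
    """Stand-in for django.core.exceptions.ImproperlyConfigured."""

def get_env_setting(setting):
    try:
        return os.environ[setting]
    except KeyError:
        raise ImproperlyConfigured("Set the %s env variable" % setting)

os.environ['DEMO_KEY'] = 'secret'     # hypothetical variable for the demo
print(get_env_setting('DEMO_KEY'))    # 'secret'
```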
1,350
def validate_social_account ( account , url ) : request = urllib2 . Request ( urlparse . urljoin ( url , account ) ) request . get_method = lambda : 'HEAD' try : response = urllib2 . urlopen ( request ) except urllib2 . HTTPError : return False return response . code == 200
Verifies if a social account is valid .
1,351
def fitting_rmsd ( w_fit , C_fit , r_fit , Xs ) : return np . sqrt ( sum ( ( geometry . point_line_distance ( p , C_fit , w_fit ) - r_fit ) ** 2 for p in Xs ) / len ( Xs ) )
Calculate the RMSD of fitting .
1,352
def basic_parse ( response , buf_size = ijson . backend . BUFSIZE ) : lexer = iter ( IncrementalJsonParser . lexer ( response , buf_size ) ) for value in ijson . backend . parse_value ( lexer ) : yield value try : next ( lexer ) except StopIteration : pass else : raise ijson . common . JSONError ( 'Additional data' )
Iterator yielding unprefixed events .
1,353
def drop_connection ( self , name , database = None ) : request_executor = self . _store . get_request_executor ( database ) command = DropSubscriptionConnectionCommand ( name ) request_executor . execute ( command )
Force the server to close the current client subscription connection.
1,354
def execute_from_command_line ( argv = None ) : os . environ . setdefault ( "DJANGO_SETTINGS_MODULE" , "colab.settings" ) from django . conf import settings if not hasattr ( settings , 'SECRET_KEY' ) and 'initconfig' in sys . argv : command = initconfig . Command ( ) command . handle ( ) else : utility = ManagementUtility ( argv ) utility . execute ( )
A simple method that runs a ManagementUtility .
1,355
def normalize ( v ) : if 0 == np . linalg . norm ( v ) : return v return v / np . linalg . norm ( v )
Normalize a vector based on its 2-norm.
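A restatement that computes the norm once, with the zero-vector pass-through shown explicitly:

```python
import numpy as np

def normalize(v):
    norm = np.linalg.norm(v)        # compute the 2-norm once
    return v if norm == 0 else v / norm

print(normalize(np.array([3.0, 4.0])))  # [0.6 0.8]
print(normalize(np.array([0.0, 0.0])))  # [0. 0.] -- zero vector passes through
```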
1,356
def rotation_matrix_from_axis_and_angle ( u , theta ) : x = u [ 0 ] y = u [ 1 ] z = u [ 2 ] s = np . sin ( theta ) c = np . cos ( theta ) return np . array ( [ [ c + x ** 2 * ( 1 - c ) , x * y * ( 1 - c ) - z * s , x * z * ( 1 - c ) + y * s ] , [ y * x * ( 1 - c ) + z * s , c + y ** 2 * ( 1 - c ) , y * z * ( 1 - c ) - x * s ] , [ z * x * ( 1 - c ) - y * s , z * y * ( 1 - c ) + x * s , c + z ** 2 * ( 1 - c ) ] ] )
Calculate a rotation matrix from an axis and an angle .
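This is Rodrigues' rotation formula in matrix form. Sanity check: rotating the x unit vector by 90 degrees about z should give the y unit vector:

```python
import numpy as np

def rotation_matrix_from_axis_and_angle(u, theta):
    x, y, z = u
    s, c = np.sin(theta), np.cos(theta)
    return np.array([
        [c + x*x*(1 - c), x*y*(1 - c) - z*s, x*z*(1 - c) + y*s],
        [y*x*(1 - c) + z*s, c + y*y*(1 - c), y*z*(1 - c) - x*s],
        [z*x*(1 - c) - y*s, z*y*(1 - c) + x*s, c + z*z*(1 - c)],
    ])

R = rotation_matrix_from_axis_and_angle(np.array([0.0, 0.0, 1.0]), np.pi / 2)
print(np.round(R @ np.array([1.0, 0.0, 0.0]), 6))  # [0. 1. 0.]
```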
1,357
def point_line_distance ( p , l_p , l_v ) : l_v = normalize ( l_v ) u = p - l_p return np . linalg . norm ( u - np . dot ( u , l_v ) * l_v )
Calculate the distance between a point and a line defined by a point and a direction vector .
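The distance is the norm of the point's offset after subtracting its component along the line direction. Check against a known value (distance from (1, 1, 0) to the z-axis is √2):

```python
import numpy as np

def point_line_distance(p, l_p, l_v):
    l_v = l_v / np.linalg.norm(l_v)   # unit direction
    u = p - l_p                       # offset from the line's anchor point
    return np.linalg.norm(u - np.dot(u, l_v) * l_v)

d = point_line_distance(np.array([1.0, 1.0, 0.0]),
                        np.array([0.0, 0.0, 0.0]),   # point on the z-axis
                        np.array([0.0, 0.0, 1.0]))   # z direction
print(d, np.sqrt(2))  # both ~1.41421
```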
1,358
def raw_query ( self , query , query_parameters = None ) : self . assert_no_raw_query ( ) if len ( self . _where_tokens ) != 0 or len ( self . _select_tokens ) != 0 or len ( self . _order_by_tokens ) != 0 or len ( self . _group_by_tokens ) != 0 : raise InvalidOperationException ( "You can only use raw_query on a new query, without applying any operations " "(such as where, select, order_by, group_by, etc)" ) if query_parameters : self . query_parameters = query_parameters self . _query = query return self
Get all the documents that match the raw query.
1,359
def where_equals ( self , field_name , value , exact = False ) : if field_name is None : raise ValueError ( "None field_name is invalid" ) field_name = Query . escape_if_needed ( field_name ) self . _add_operator_if_needed ( ) token = "equals" if self . negate : self . negate = False token = "not_equals" self . last_equality = { field_name : value } token = _Token ( field_name = field_name , value = self . add_query_parameter ( value ) , token = token , exact = exact ) token . write = self . rql_where_write ( token ) self . _where_tokens . append ( token ) return self
Get all the documents where the given field_name equals the value.
1,360
def where ( self , exact = False , ** kwargs ) : for field_name in kwargs : if isinstance ( kwargs [ field_name ] , list ) : self . where_in ( field_name , kwargs [ field_name ] , exact ) else : self . where_equals ( field_name , kwargs [ field_name ] , exact ) return self
Get all the documents where each field given in kwargs equals the corresponding value.
1,361
def search ( self , field_name , search_terms , operator = QueryOperator . OR ) : if field_name is None : raise ValueError ( "None field_name is invalid" ) field_name = Query . escape_if_needed ( field_name ) self . _add_operator_if_needed ( ) self . negate_if_needed ( field_name ) self . last_equality = { field_name : "(" + search_terms + ")" if ' ' in search_terms else search_terms } token = _Token ( field_name = field_name , token = "search" , value = self . add_query_parameter ( search_terms ) , search_operator = operator ) token . write = self . rql_where_write ( token ) self . _where_tokens . append ( token ) return self
For more complex text searching
1,362
def where_ends_with ( self , field_name , value ) : if field_name is None : raise ValueError ( "None field_name is invalid" ) field_name = Query . escape_if_needed ( field_name ) self . _add_operator_if_needed ( ) self . negate_if_needed ( field_name ) self . last_equality = { field_name : value } token = _Token ( field_name = field_name , token = "endsWith" , value = self . add_query_parameter ( value ) ) token . write = self . rql_where_write ( token ) self . _where_tokens . append ( token ) return self
Get all the documents where the given field_name ends with the value.
1,363
def where_in ( self , field_name , values , exact = False ) : field_name = Query . escape_if_needed ( field_name ) self . _add_operator_if_needed ( ) self . negate_if_needed ( field_name ) token = _Token ( field_name = field_name , value = self . add_query_parameter ( list ( Utils . unpack_iterable ( values ) ) ) , token = "in" , exact = exact ) token . write = self . rql_where_write ( token ) self . _where_tokens . append ( token ) return self
Check that the field has one of the specified values
1,364
def to_facets ( self , facets , start = 0 , page_size = None ) : if len ( facets ) == 0 : raise ValueError ( "Facets must contain at least one entry" , "facets" ) str_query = self . __str__ ( ) facet_query = FacetQuery ( str_query , None , facets , start , page_size , query_parameters = self . query_parameters , wait_for_non_stale_results = self . wait_for_non_stale_results , wait_for_non_stale_results_timeout = self . timeout , cutoff_etag = self . cutoff_etag ) command = GetFacetsCommand ( query = facet_query ) return self . session . requests_executor . execute ( command )
Query the facets results for this query using the specified list of facets with the given start and pageSize
1,365
def show_G_distribution ( data ) : Xs , t = fitting . preprocess_data ( data ) Theta , Phi = np . meshgrid ( np . linspace ( 0 , np . pi , 50 ) , np . linspace ( 0 , 2 * np . pi , 50 ) ) G = [ ] for i in range ( len ( Theta ) ) : G . append ( [ ] ) for j in range ( len ( Theta [ i ] ) ) : w = fitting . direction ( Theta [ i ] [ j ] , Phi [ i ] [ j ] ) G [ - 1 ] . append ( fitting . G ( w , Xs ) ) plt . imshow ( G , extent = [ 0 , np . pi , 0 , 2 * np . pi ] , origin = 'lower' ) plt . show ( )
Show the distribution of the G function .
1,366
def show_fit ( w_fit , C_fit , r_fit , Xs ) : fig = plt . figure ( ) ax = fig . gca ( projection = '3d' ) ax . scatter ( [ X [ 0 ] for X in Xs ] , [ X [ 1 ] for X in Xs ] , [ X [ 2 ] for X in Xs ] ) theta = np . arccos ( np . dot ( w_fit , np . array ( [ 0 , 0 , 1 ] ) ) ) phi = np . arctan2 ( w_fit [ 1 ] , w_fit [ 0 ] ) M = np . dot ( rotation_matrix_from_axis_and_angle ( np . array ( [ 0 , 0 , 1 ] ) , phi ) , rotation_matrix_from_axis_and_angle ( np . array ( [ 0 , 1 , 0 ] ) , theta ) ) delta = np . linspace ( - np . pi , np . pi , 20 ) z = np . linspace ( - 10 , 10 , 20 ) Delta , Z = np . meshgrid ( delta , z ) X = r_fit * np . cos ( Delta ) Y = r_fit * np . sin ( Delta ) for i in range ( len ( X ) ) : for j in range ( len ( X [ i ] ) ) : p = np . dot ( M , np . array ( [ X [ i ] [ j ] , Y [ i ] [ j ] , Z [ i ] [ j ] ] ) ) + C_fit X [ i ] [ j ] = p [ 0 ] Y [ i ] [ j ] = p [ 1 ] Z [ i ] [ j ] = p [ 2 ] ax . plot_surface ( X , Y , Z , alpha = 0.2 ) ax . quiver ( C_fit [ 0 ] , C_fit [ 1 ] , C_fit [ 2 ] , r_fit * w_fit [ 0 ] , r_fit * w_fit [ 1 ] , r_fit * w_fit [ 2 ] , color = 'red' ) plt . show ( )
Plot the fitting given the fitted axis direction, the fitted center, the fitted radius, and the data points.
1,367
def find_window ( self , highlight_locations ) : if len ( self . text_block ) <= self . max_length : return ( 0 , self . max_length ) num_chars_before = getattr ( settings , 'HIGHLIGHT_NUM_CHARS_BEFORE_MATCH' , 0 ) best_start , best_end = super ( ColabHighlighter , self ) . find_window ( highlight_locations ) if best_start <= num_chars_before : best_end -= best_start best_start = 0 else : best_start -= num_chars_before best_end -= num_chars_before return ( best_start , best_end )
Use the HIGHLIGHT_NUM_CHARS_BEFORE_MATCH setting to shift the window so that characters before the first matched word are included.
1,368
def login ( self ) : response = self . session . get ( self . base_url + '/login_sid.lua' , timeout = 10 ) xml = ET . fromstring ( response . text ) if xml . find ( 'SID' ) . text == "0000000000000000" : challenge = xml . find ( 'Challenge' ) . text url = self . base_url + "/login_sid.lua" response = self . session . get ( url , params = { "username" : self . username , "response" : self . calculate_response ( challenge , self . password ) , } , timeout = 10 ) xml = ET . fromstring ( response . text ) sid = xml . find ( 'SID' ) . text if xml . find ( 'SID' ) . text == "0000000000000000" : blocktime = int ( xml . find ( 'BlockTime' ) . text ) exc = Exception ( "Login failed, please wait {} seconds" . format ( blocktime ) ) exc . blocktime = blocktime raise exc self . sid = sid return sid
Try to login and set the internal session id .
1,369
def calculate_response ( self , challenge , password ) : to_hash = ( challenge + "-" + password ) . encode ( "UTF-16LE" ) hashed = hashlib . md5 ( to_hash ) . hexdigest ( ) return "{0}-{1}" . format ( challenge , hashed )
Calculate the response for the challenge-response authentication.
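A self-contained sketch of the scheme: MD5 over "<challenge>-<password>" encoded as UTF-16LE, joined back onto the challenge. The challenge and password values below are made up for illustration:

```python
import hashlib

def calculate_response(challenge, password):
    to_hash = (challenge + "-" + password).encode("UTF-16LE")
    return "{0}-{1}".format(challenge, hashlib.md5(to_hash).hexdigest())

print(calculate_response("1234567z", "mypassword"))
# '1234567z-<32-char md5 hex digest>'
```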
1,370
def get_actors ( self ) : devices = self . homeautoswitch ( "getdevicelistinfos" ) xml = ET . fromstring ( devices ) actors = [ ] for device in xml . findall ( 'device' ) : actors . append ( Actor ( fritzbox = self , device = device ) ) return actors
Returns a list of Actor objects for querying SmartHome devices .
1,371
def get_actor_by_ain ( self , ain ) : for actor in self . get_actors ( ) : if actor . actor_id == ain : return actor
Return an actor identified by its ain, or None.
1,372
def homeautoswitch ( self , cmd , ain = None , param = None ) : assert self . sid , "Not logged in" params = { 'switchcmd' : cmd , 'sid' : self . sid , } if param is not None : params [ 'param' ] = param if ain : params [ 'ain' ] = ain url = self . base_url + '/webservices/homeautoswitch.lua' response = self . session . get ( url , params = params , timeout = 10 ) response . raise_for_status ( ) return response . text . strip ( ) . encode ( 'utf-8' )
Call a switch method . Should only be used by internal library functions .
1,373
def get_switch_actors ( self ) : actors = { } for ain in self . homeautoswitch ( "getswitchlist" ) . split ( ',' ) : actors [ ain ] = { 'name' : self . homeautoswitch ( "getswitchname" , ain ) , 'state' : bool ( self . homeautoswitch ( "getswitchstate" , ain ) ) , 'present' : bool ( self . homeautoswitch ( "getswitchpresent" , ain ) ) , 'power' : self . homeautoswitch ( "getswitchpower" , ain ) , 'energy' : self . homeautoswitch ( "getswitchenergy" , ain ) , 'temperature' : self . homeautoswitch ( "getswitchtemperature" , ain ) , } return actors
Get information about all actors
1,374
def get_devices ( self ) : url = self . base_url + '/net/home_auto_query.lua' response = self . session . get ( url , params = { 'sid' : self . sid , 'command' : 'AllOutletStates' , 'xhr' : 0 , } , timeout = 15 ) response . raise_for_status ( ) data = response . json ( ) count = int ( data [ "Outlet_count" ] ) devices = [ ] for i in range ( 1 , count + 1 ) : device = Device ( int ( data [ "DeviceID_{0}" . format ( i ) ] ) , int ( data [ "DeviceConnectState_{0}" . format ( i ) ] ) , int ( data [ "DeviceSwitchState_{0}" . format ( i ) ] ) ) devices . append ( device ) return devices
Return a list of devices. Deprecated; use get_actors instead.
1,375
def get_consumption ( self , deviceid , timerange = "10" ) : tranges = ( "10" , "24h" , "month" , "year" ) if timerange not in tranges : raise ValueError ( "Unknown timerange. Possible values are: {0}" . format ( tranges ) ) url = self . base_url + "/net/home_auto_query.lua" response = self . session . get ( url , params = { 'sid' : self . sid , 'command' : 'EnergyStats_{0}' . format ( timerange ) , 'id' : deviceid , 'xhr' : 0 , } , timeout = 15 ) response . raise_for_status ( ) data = response . json ( ) result = { } values_map = { 'MM_Value_Amp' : 'mm_value_amp' , 'MM_Value_Power' : 'mm_value_power' , 'MM_Value_Volt' : 'mm_value_volt' , 'EnStats_average_value' : 'enstats_average_value' , 'EnStats_max_value' : 'enstats_max_value' , 'EnStats_min_value' : 'enstats_min_value' , 'EnStats_timer_type' : 'enstats_timer_type' , 'sum_Day' : 'sum_day' , 'sum_Month' : 'sum_month' , 'sum_Year' : 'sum_year' , } for avm_key , py_key in values_map . items ( ) : result [ py_key ] = int ( data [ avm_key ] ) count = int ( data [ "EnStats_count" ] ) watt_values = [ None for i in range ( count ) ] volt_values = [ None for i in range ( count ) ] for i in range ( 1 , count + 1 ) : watt_values [ i - 1 ] = int ( data [ "EnStats_watt_value_{}" . format ( i ) ] ) volt_values [ i - 1 ] = int ( data [ "EnStats_volt_value_{}" . format ( i ) ] ) result [ 'watt_values' ] = watt_values result [ 'volt_values' ] = volt_values return result
Return all available energy consumption data for the device. You need to divide watt_values by 100 and volt_values by 1000 to get the real values.
1,376
def get_logs ( self ) : assert BeautifulSoup , "Please install bs4 to use this method" url = self . base_url + "/system/syslog.lua" response = self . session . get ( url , params = { 'sid' : self . sid , 'stylemode' : 'print' , } , timeout = 15 ) response . raise_for_status ( ) entries = [ ] tree = BeautifulSoup ( response . text ) rows = tree . find ( 'table' ) . find_all ( 'tr' ) for row in rows : columns = row . find_all ( "td" ) date = columns [ 0 ] . string time = columns [ 1 ] . string message = columns [ 2 ] . find ( "a" ) . string merged = "{} {} {}" . format ( date , time , message . encode ( "UTF-8" ) ) msg_hash = hashlib . md5 ( merged ) . hexdigest ( ) entries . append ( LogEntry ( date , time , message , msg_hash ) ) return entries
Return the system logs since the last reboot .
1,377
def seen_nonce ( id , nonce , timestamp ) : key = '{id}:{n}:{ts}' . format ( id = id , n = nonce , ts = timestamp ) if cache . get ( key ) : log . warning ( 'replay attack? already processed nonce {k}' . format ( k = key ) ) return True else : log . debug ( 'caching nonce {k}' . format ( k = key ) ) cache . set ( key , True , timeout = getattr ( settings , 'HAWK_MESSAGE_EXPIRATION' , default_message_expiration ) + 5 ) return False
Returns True if the Hawk nonce has been seen already .
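A minimal sketch of the replay check with a plain dict standing in for the Django cache; expiration handling is omitted for brevity:

```python
_cache = {}  # stand-in for django.core.cache.cache

def seen_nonce(id_, nonce, timestamp):
    key = '{id}:{n}:{ts}'.format(id=id_, n=nonce, ts=timestamp)
    if key in _cache:
        return True        # replay: this nonce was already processed
    _cache[key] = True     # first sighting: remember it
    return False

print(seen_nonce('client-1', 'abc', 1000))  # False -- first use
print(seen_nonce('client-1', 'abc', 1000))  # True  -- replay detected
```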
1,378
def cli ( context , host , username , password ) : context . obj = FritzBox ( host , username , password )
FritzBox SmartHome Tool
1,379
def actors ( context ) : fritz = context . obj fritz . login ( ) for actor in fritz . get_actors ( ) : click . echo ( "{} ({} {}; AIN {} )" . format ( actor . name , actor . manufacturer , actor . productname , actor . actor_id , ) ) if actor . has_temperature : click . echo ( "Temp: act {} target {}; battery (low): {}" . format ( actor . temperature , actor . target_temperature , actor . battery_low , ) ) click . echo ( "Temp (via get): act {} target {}" . format ( actor . get_temperature ( ) , actor . get_target_temperature ( ) , ) )
Display a list of actors
1,380
def switch_on ( context , ain ) : context . obj . login ( ) actor = context . obj . get_actor_by_ain ( ain ) if actor : click . echo ( "Switching {} on" . format ( actor . name ) ) actor . switch_on ( ) else : click . echo ( "Actor not found: {}" . format ( ain ) )
Switch an actor's power to ON.
1,381
def switch_state ( context , ain ) : context . obj . login ( ) actor = context . obj . get_actor_by_ain ( ain ) if actor : click . echo ( "State for {} is: {}" . format ( ain , 'ON' if actor . get_state ( ) else 'OFF' ) ) else : click . echo ( "Actor not found: {}" . format ( ain ) )
Get an actor's power state.
1,382
def switch_toggle ( context , ain ) : context . obj . login ( ) actor = context . obj . get_actor_by_ain ( ain ) if actor : if actor . get_state ( ) : actor . switch_off ( ) click . echo ( "State for {} is now OFF" . format ( ain ) ) else : actor . switch_on ( ) click . echo ( "State for {} is now ON" . format ( ain ) ) else : click . echo ( "Actor not found: {}" . format ( ain ) )
Toggle an actor's power state.
1,383
def logs ( context , format ) : fritz = context . obj fritz . login ( ) messages = fritz . get_logs ( ) if format == "plain" : for msg in messages : merged = "{} {} {}" . format ( msg . date , msg . time , msg . message . encode ( "UTF-8" ) ) click . echo ( merged ) if format == "json" : entries = [ msg . _asdict ( ) for msg in messages ] click . echo ( json . dumps ( { "entries" : entries , } ) )
Show system logs since last reboot
1,384
def set_temperature ( self , temp ) : param = 16 + ( ( temp - 8 ) * 2 ) if param < 16 : param = 253 logger . info ( "Actor " + self . name + ": Temperature control set to off" ) elif param >= 56 : param = 254 logger . info ( "Actor " + self . name + ": Temperature control set to on" ) else : logger . info ( "Actor " + self . name + ": Temperature control set to " + str ( temp ) ) return self . box . homeautoswitch ( "sethkrtsoll" , self . actor_id , param )
Set the temperature in Celsius.
1,385
def get_builder_openshift_url ( self ) : key = "builder_openshift_url" url = self . _get_deprecated ( key , self . conf_section , key ) if url is None : logger . warning ( "%r not found, falling back to get_openshift_base_uri()" , key ) url = self . get_openshift_base_uri ( ) return url
URL of the OpenShift instance the builder will connect to.
1,386
def load ( self ) : self . _validate ( ) self . _logger . logging_load ( ) self . encoding = get_file_encoding ( self . source , self . encoding ) if six . PY3 : self . _csv_reader = csv . reader ( io . open ( self . source , "r" , encoding = self . encoding ) , delimiter = self . delimiter , quotechar = self . quotechar , strict = True , skipinitialspace = True , ) else : self . _csv_reader = csv . reader ( _utf_8_encoder ( io . open ( self . source , "r" , encoding = self . encoding ) ) , delimiter = self . delimiter , quotechar = self . quotechar , strict = True , skipinitialspace = True , ) formatter = CsvTableFormatter ( self . _to_data_matrix ( ) ) formatter . accept ( self ) return formatter . to_table_data ( )
Extract tabular data as |TableData| instances from a CSV file . |load_source_desc_file|
1,387
def load ( self ) : self . _validate ( ) self . _logger . logging_load ( ) self . _csv_reader = csv . reader ( six . StringIO ( self . source . strip ( ) ) , delimiter = self . delimiter , quotechar = self . quotechar , strict = True , skipinitialspace = True , ) formatter = CsvTableFormatter ( self . _to_data_matrix ( ) ) formatter . accept ( self ) return formatter . to_table_data ( )
Extract tabular data as |TableData| instances from a CSV text object . |load_source_desc_text|
1,388
def set_params ( self , ** kwargs ) : self . scratch = kwargs . pop ( 'scratch' , False ) self . is_auto = kwargs . pop ( 'is_auto' , False ) self . isolated = kwargs . pop ( 'isolated' , False ) self . validate_build_variation ( ) self . base_image = kwargs . get ( 'base_image' ) self . platform_node_selector = kwargs . get ( 'platform_node_selector' , { } ) self . platform_descriptors = kwargs . get ( 'platform_descriptors' , { } ) self . scratch_build_node_selector = kwargs . get ( 'scratch_build_node_selector' , { } ) self . explicit_build_node_selector = kwargs . get ( 'explicit_build_node_selector' , { } ) self . auto_build_node_selector = kwargs . get ( 'auto_build_node_selector' , { } ) self . isolated_build_node_selector = kwargs . get ( 'isolated_build_node_selector' , { } ) logger . debug ( "setting params '%s' for %s" , kwargs , self . spec ) self . spec . set_params ( ** kwargs ) self . osbs_api = kwargs . pop ( 'osbs_api' )
set parameters according to specification
1,389
def has_ist_trigger ( self ) : triggers = self . template [ 'spec' ] . get ( 'triggers' , [ ] ) if not triggers : return False for trigger in triggers : if trigger [ 'type' ] == 'ImageChange' and trigger [ 'imageChange' ] [ 'from' ] [ 'kind' ] == 'ImageStreamTag' : return True return False
Return True if this BuildConfig has an ImageStreamTag trigger.
1,390
def set_secret_for_plugin ( self , secret , plugin = None , mount_path = None ) : has_plugin_conf = False if plugin is not None : has_plugin_conf = self . dj . dock_json_has_plugin_conf ( plugin [ 0 ] , plugin [ 1 ] ) if 'secrets' in self . template [ 'spec' ] [ 'strategy' ] [ 'customStrategy' ] : if not plugin or has_plugin_conf : custom = self . template [ 'spec' ] [ 'strategy' ] [ 'customStrategy' ] if mount_path : secret_path = mount_path else : secret_path = os . path . join ( SECRETS_PATH , secret ) logger . info ( "Configuring %s secret at %s" , secret , secret_path ) existing = [ secret_mount for secret_mount in custom [ 'secrets' ] if secret_mount [ 'secretSource' ] [ 'name' ] == secret ] if existing : logger . debug ( "secret %s already set" , secret ) else : custom [ 'secrets' ] . append ( { 'secretSource' : { 'name' : secret , } , 'mountPath' : secret_path , } ) if plugin and plugin [ 2 ] is not None : self . dj . dock_json_set_arg ( * ( plugin + ( secret_path , ) ) ) else : logger . debug ( "not setting secret for unused plugin %s" , plugin [ 1 ] )
Set the secret for a plugin; if no plugin is specified, also set the general secret.
1,391
def adjust_for_triggers ( self ) : triggers = self . template [ 'spec' ] . get ( 'triggers' , [ ] ) remove_plugins = [ ( "prebuild_plugins" , "check_and_set_rebuild" ) , ( "prebuild_plugins" , "stop_autorebuild_if_disabled" ) , ] should_remove = False if triggers and ( self . is_custom_base_image ( ) or self . is_from_scratch_image ( ) ) : if self . is_custom_base_image ( ) : msg = "removing %s from request because custom base image" elif self . is_from_scratch_image ( ) : msg = 'removing %s from request because FROM scratch image' del self . template [ 'spec' ] [ 'triggers' ] should_remove = True elif not triggers : msg = "removing %s from request because there are no triggers" should_remove = True if should_remove : for when , which in remove_plugins : logger . info ( msg , which ) self . dj . remove_plugin ( when , which )
Remove trigger-related plugins when needed.
1,392
def adjust_for_custom_base_image ( self ) : plugins = [ ] if self . is_custom_base_image ( ) : plugins . append ( ( "prebuild_plugins" , "pull_base_image" ) ) plugins . append ( ( "prebuild_plugins" , "koji_parent" ) ) plugins . append ( ( "prebuild_plugins" , "inject_parent_image" ) ) msg = "removing %s from custom image build request" else : plugins . append ( ( "prebuild_plugins" , "add_filesystem" ) ) msg = "removing %s from non custom image build request" for when , which in plugins : logger . info ( msg , which ) self . dj . remove_plugin ( when , which )
Disable plugins to handle builds depending on whether or not this is a build from a custom base image .
1,393
def render_koji ( self ) : phase = 'prebuild_plugins' plugin = 'koji' if not self . dj . dock_json_has_plugin_conf ( phase , plugin ) : return if self . spec . yum_repourls . value : logger . info ( "removing koji from request " "because there is yum repo specified" ) self . dj . remove_plugin ( phase , plugin ) elif not ( self . spec . koji_target . value and self . spec . kojiroot . value and self . spec . kojihub . value ) : logger . info ( "removing koji from request as not specified" ) self . dj . remove_plugin ( phase , plugin ) else : self . dj . dock_json_set_arg ( phase , plugin , "target" , self . spec . koji_target . value ) self . dj . dock_json_set_arg ( phase , plugin , "root" , self . spec . kojiroot . value ) self . dj . dock_json_set_arg ( phase , plugin , "hub" , self . spec . kojihub . value ) if self . spec . proxy . value : self . dj . dock_json_set_arg ( phase , plugin , "proxy" , self . spec . proxy . value )
If a yum repo is specified, don't pick packages from koji.
1,394
def render_sendmail ( self ) : phase = 'exit_plugins' plugin = 'sendmail' if not self . dj . dock_json_has_plugin_conf ( phase , plugin ) : return if self . spec . smtp_host . value and self . spec . smtp_from . value : self . dj . dock_json_set_arg ( phase , plugin , 'url' , self . spec . builder_openshift_url . value ) self . dj . dock_json_set_arg ( phase , plugin , 'smtp_host' , self . spec . smtp_host . value ) self . dj . dock_json_set_arg ( phase , plugin , 'from_address' , self . spec . smtp_from . value ) else : logger . info ( "removing sendmail from request, " "requires smtp_host and smtp_from" ) self . dj . remove_plugin ( phase , plugin ) return if self . spec . kojihub . value and self . spec . kojiroot . value : self . dj . dock_json_set_arg ( phase , plugin , 'koji_hub' , self . spec . kojihub . value ) self . dj . dock_json_set_arg ( phase , plugin , "koji_root" , self . spec . kojiroot . value ) if self . spec . smtp_to_submitter . value : self . dj . dock_json_set_arg ( phase , plugin , 'to_koji_submitter' , self . spec . smtp_to_submitter . value ) if self . spec . smtp_to_pkgowner . value : self . dj . dock_json_set_arg ( phase , plugin , 'to_koji_pkgowner' , self . spec . smtp_to_pkgowner . value ) if self . spec . smtp_additional_addresses . value : self . dj . dock_json_set_arg ( phase , plugin , 'additional_addresses' , self . spec . smtp_additional_addresses . value ) if self . spec . smtp_error_addresses . value : self . dj . dock_json_set_arg ( phase , plugin , 'error_addresses' , self . spec . smtp_error_addresses . value ) if self . spec . smtp_email_domain . value : self . dj . dock_json_set_arg ( phase , plugin , 'email_domain' , self . spec . smtp_email_domain . value )
If we have smtp_host and smtp_from, configure the sendmail plugin; otherwise remove it.
1,395
def render_fetch_maven_artifacts ( self ) : phase = 'prebuild_plugins' plugin = 'fetch_maven_artifacts' if not self . dj . dock_json_has_plugin_conf ( phase , plugin ) : return koji_hub = self . spec . kojihub . value koji_root = self . spec . kojiroot . value if not koji_hub and not koji_root : logger . info ( 'Removing %s because kojihub and kojiroot were not specified' , plugin ) self . dj . remove_plugin ( phase , plugin ) return self . dj . dock_json_set_arg ( phase , plugin , 'koji_hub' , koji_hub ) self . dj . dock_json_set_arg ( phase , plugin , "koji_root" , koji_root ) if self . spec . artifacts_allowed_domains . value : self . dj . dock_json_set_arg ( phase , plugin , 'allowed_domains' , self . spec . artifacts_allowed_domains . value )
Configure fetch_maven_artifacts plugin
1,396
def render_pulp_pull ( self ) : phases = ( 'postbuild_plugins' , 'exit_plugins' ) plugin = 'pulp_pull' for phase in phases : if not self . dj . dock_json_has_plugin_conf ( phase , plugin ) : continue pulp_registry = self . spec . pulp_registry . value if not pulp_registry : logger . info ( "removing %s from request, requires pulp_registry" , plugin ) self . dj . remove_plugin ( phase , plugin ) continue if not self . spec . kojihub . value : logger . info ( 'Removing %s because no kojihub was specified' , plugin ) self . dj . remove_plugin ( phase , plugin ) continue if self . spec . prefer_schema1_digest . value is not None : self . dj . dock_json_set_arg ( phase , 'pulp_pull' , 'expect_v2schema2' , not self . spec . prefer_schema1_digest . value )
If a pulp registry is specified, use the pulp_pull plugin.
1,397
def render_pulp_sync ( self ) : if not self . dj . dock_json_has_plugin_conf ( 'postbuild_plugins' , 'pulp_sync' ) : return pulp_registry = self . spec . pulp_registry . value docker_registry = None registry_secret = None registries = zip_longest ( self . spec . registry_uris . value , self . spec . registry_secrets . value ) for registry , secret in registries : if registry . version == 'v2' : docker_registry = registry . uri registry_secret = secret logger . info ( "using docker v2 registry %s for pulp_sync" , docker_registry ) break if pulp_registry and docker_registry : self . dj . dock_json_set_arg ( 'postbuild_plugins' , 'pulp_sync' , 'pulp_registry_name' , pulp_registry ) self . dj . dock_json_set_arg ( 'postbuild_plugins' , 'pulp_sync' , 'docker_registry' , docker_registry ) if registry_secret : self . set_secret_for_plugin ( registry_secret , plugin = ( 'postbuild_plugins' , 'pulp_sync' , 'registry_secret_path' ) ) if self . spec . pulp_secret . value is None : raise OsbsValidationException ( "Pulp registry specified " "but no auth config" ) source_registry = self . spec . source_registry_uri . value perform_delete = ( source_registry is None or source_registry . docker_uri != registry . docker_uri ) if perform_delete : push_conf = self . dj . dock_json_get_plugin_conf ( 'exit_plugins' , 'delete_from_registry' ) args = push_conf . setdefault ( 'args' , { } ) delete_registries = args . setdefault ( 'registries' , { } ) placeholder = '{{REGISTRY_URI}}' if placeholder in delete_registries : regdict = delete_registries [ placeholder ] . copy ( ) del delete_registries [ placeholder ] else : regdict = { } if registry_secret : regdict [ 'secret' ] = os . path . join ( SECRETS_PATH , registry_secret ) delete_registries [ docker_registry ] = regdict self . dj . dock_json_set_arg ( 'exit_plugins' , 'delete_from_registry' , 'registries' , delete_registries ) else : logger . info ( "removing delete_from_registry from request, " "source and target registry are identical" ) self . dj . remove_plugin ( "exit_plugins" , "delete_from_registry" ) else : logger . info ( "removing pulp_sync+delete_from_registry from request, " "requires pulp_registry and a v2 registry" ) self . dj . remove_plugin ( "postbuild_plugins" , "pulp_sync" ) self . dj . remove_plugin ( "exit_plugins" , "delete_from_registry" )
If a pulp registry is specified, use the pulp_sync plugin as well as delete_from_registry to delete the image after the sync.
1,398
def render_pulp_tag ( self ) : if not self . dj . dock_json_has_plugin_conf ( 'postbuild_plugins' , 'pulp_tag' ) : return pulp_registry = self . spec . pulp_registry . value if pulp_registry : self . dj . dock_json_set_arg ( 'postbuild_plugins' , 'pulp_tag' , 'pulp_registry_name' , pulp_registry ) if self . spec . pulp_secret . value is None : conf = self . dj . dock_json_get_plugin_conf ( 'postbuild_plugins' , 'pulp_tag' ) args = conf . get ( 'args' , { } ) if 'username' not in args : raise OsbsValidationException ( "Pulp registry specified " "but no auth config" ) else : logger . info ( "removing pulp_tag from request, " "requires pulp_registry" ) self . dj . remove_plugin ( "postbuild_plugins" , "pulp_tag" )
Configure the pulp_tag plugin .
1,399
def render_group_manifests ( self ) : if not self . dj . dock_json_has_plugin_conf ( 'postbuild_plugins' , 'group_manifests' ) : return push_conf = self . dj . dock_json_get_plugin_conf ( 'postbuild_plugins' , 'group_manifests' ) args = push_conf . setdefault ( 'args' , { } ) registries = args . setdefault ( 'registries' , { } ) placeholder = '{{REGISTRY_URI}}' if placeholder in registries : for registry , secret in zip_longest ( self . spec . registry_uris . value , self . spec . registry_secrets . value ) : if not registry . uri : continue regdict = registries [ placeholder ] . copy ( ) regdict [ 'version' ] = registry . version if secret : regdict [ 'secret' ] = os . path . join ( SECRETS_PATH , secret ) registries [ registry . docker_uri ] = regdict del registries [ placeholder ] self . dj . dock_json_set_arg ( 'postbuild_plugins' , 'group_manifests' , 'group' , self . spec . group_manifests . value ) goarch = { } for platform , architecture in self . platform_descriptors . items ( ) : goarch [ platform ] = architecture [ 'architecture' ] self . dj . dock_json_set_arg ( 'postbuild_plugins' , 'group_manifests' , 'goarch' , goarch )
Configure the group_manifests plugin . Group is always set to false for now .