idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars)
---|---|---|
1,500 |
def pause(self, unique_id, configs=None):
    pids = self.get_pid(unique_id, configs)
    if pids != constants.PROCESS_NOT_RUNNING_PID:
        pid_str = ' '.join(str(pid) for pid in pids)
        hostname = self.processes[unique_id].hostname
        with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
            better_exec_command(ssh, "kill -SIGSTOP {0}".format(pid_str), "PAUSING PROCESS {0}".format(unique_id))
|
Issues a sigstop for the specified process
|
1,501 |
def _send_signal(self, unique_id, signalno, configs):
    pids = self.get_pid(unique_id, configs)
    if pids != constants.PROCESS_NOT_RUNNING_PID:
        pid_str = ' '.join(str(pid) for pid in pids)
        hostname = self.processes[unique_id].hostname
        msg = Deployer._signalnames.get(signalno, "SENDING SIGNAL %s TO" % signalno)
        with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
            better_exec_command(ssh, "kill -{0} {1}".format(signalno, pid_str), "{0} PROCESS {1}".format(msg, unique_id))
|
Issues a signal for the specified process
|
1,502 |
def resume(self, unique_id, configs=None):
    self._send_signal(unique_id, signal.SIGCONT, configs)
|
Issues a sigcont for the specified process
|
1,503 |
def terminate(self, unique_id, configs=None):
    self._send_signal(unique_id, signal.SIGTERM, configs)
|
Issues a kill -15 (SIGTERM) to the specified process
|
1,504 |
def hangup(self, unique_id, configs=None):
    self._send_signal(unique_id, signal.SIGHUP, configs)
|
Issues a signal to hang up the specified process
|
1,505 |
def get_logs(self, unique_id, logs, directory, pattern=constants.FILTER_NAME_ALLOW_NONE):
    self.fetch_logs(unique_id, logs, directory, pattern)
|
deprecated name for fetch_logs
|
1,506 |
def fetch_logs(self, unique_id, logs, directory, pattern=constants.FILTER_NAME_ALLOW_NONE):
    hostname = self.processes[unique_id].hostname
    install_path = self.processes[unique_id].install_path
    self.fetch_logs_from_host(hostname, install_path, unique_id, logs, directory, pattern)
|
Copies logs from the remote host that the process is running on to the provided directory
|
1,507 |
def fetch_logs_from_host(hostname, install_path, prefix, logs, directory, pattern):
    if hostname is not None:
        with get_sftp_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ftp:
            for f in logs:
                try:
                    mode = ftp.stat(f).st_mode  # existence probe; IOError means the file is absent
                except IOError as e:
                    if e.errno == errno.ENOENT:
                        logger.error("Log file " + f + " does not exist on " + hostname)
                else:
                    copy_dir(ftp, f, directory, prefix)
            if install_path is not None:
                copy_dir(ftp, install_path, directory, prefix, pattern)
|
Static method. Copies logs from the specified host at the specified install path
|
1,508 |
def get_pid(self, unique_id, configs=None):
    RECV_BLOCK_SIZE = 16
    if configs is None:
        configs = {}
    tmp = self.default_configs.copy()
    tmp.update(configs)
    configs = tmp
    if unique_id in self.processes:
        hostname = self.processes[unique_id].hostname
    else:
        return constants.PROCESS_NOT_RUNNING_PID
    if self.processes[unique_id].start_command is None:
        return constants.PROCESS_NOT_RUNNING_PID
    if self.processes[unique_id].pid_file is not None:
        with open_remote_file(hostname, self.processes[unique_id].pid_file,
                              username=runtime.get_username(), password=runtime.get_password()) as pid_file:
            full_output = pid_file.read()
    elif 'pid_file' in configs.keys():
        with open_remote_file(hostname, configs['pid_file'],
                              username=runtime.get_username(), password=runtime.get_password()) as pid_file:
            full_output = pid_file.read()
    else:
        pid_keyword = self.processes[unique_id].start_command
        if self.processes[unique_id].args is not None:
            pid_keyword = "{0} {1}".format(pid_keyword, ' '.join(self.processes[unique_id].args))
        pid_keyword = configs.get('pid_keyword', pid_keyword)
        pid_command = "ps aux | grep '{0}' | grep -v grep | tr -s ' ' | cut -d ' ' -f 2 | grep -Eo '[0-9]+'".format(pid_keyword)
        pid_command = configs.get('pid_command', pid_command)
        non_failing_command = "{0}; if [ $? -le 1 ]; then true;  else false; fi;".format(pid_command)
        env = configs.get("env", {})
        with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
            chan = exec_with_env(ssh, non_failing_command, msg="Failed to get PID", env=env)
            output = chan.recv(RECV_BLOCK_SIZE)
            full_output = output
            while len(output) > 0:
                output = chan.recv(RECV_BLOCK_SIZE)
                full_output += output
    if len(full_output) > 0:
        pids = [int(pid_str) for pid_str in full_output.split('\n') if pid_str.isdigit()]
        if len(pids) > 0:
            return pids
    return constants.PROCESS_NOT_RUNNING_PID
|
Gets the pid of the process with unique_id. If the deployer does not know of a process with unique_id then it should return a value of constants.PROCESS_NOT_RUNNING_PID
|
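For illustration, the shell pipeline get_pid builds in its fallback branch can be previewed on its own; a minimal sketch, assuming a hypothetical start command of `java -jar app.jar`:

```python
# hypothetical pid_keyword; in get_pid this comes from the process's start_command and args
pid_keyword = "java -jar app.jar"
pid_command = ("ps aux | grep '{0}' | grep -v grep | tr -s ' ' "
               "| cut -d ' ' -f 2 | grep -Eo '[0-9]+'").format(pid_keyword)
# grep exits 1 when nothing matches; the wrapper keeps that from being reported as a failure
non_failing_command = "{0}; if [ $? -le 1 ]; then true; else false; fi;".format(pid_command)
print(non_failing_command)
```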
1,509 |
def get_host(self, unique_id):
    if unique_id in self.processes:
        return self.processes[unique_id].hostname
    logger.error("{0} not a known process".format(unique_id))
    raise NameError("{0} not a known process".format(unique_id))
|
Gets the host of the process with unique_id. If the deployer does not know of a process with unique_id then it should return a value of SOME_SENTINAL_VALUE
|
1,510 |
def kill_all_process(self):
    if runtime.get_active_config("cleanup_pending_process", False):
        for process in self.get_processes():
            self.terminate(process.unique_id)
|
Terminates all running processes. The cleanup_pending_process config defaults to false; users can set it to true once get_pid is deterministic, either via a pid_file or an accurate keyword
|
1,511 |
def string_to_level(log_level):
    level = log_level.strip().upper()
    if level == "DEBUG":
        return logging.DEBUG
    if level == "INFO":
        return logging.INFO
    if level == "WARNING":
        return logging.WARNING
    if level == "ERROR":
        return logging.ERROR
|
Converts a string to the corresponding log level
|
1,512 |
def execute(self, conn, dataset, dataset_access_type, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-failed-connect2host",
                            "Oracle/Dataset/UpdateType. Expects db connection from upper layer.",
                            self.logger.exception)
    binds = {"dataset": dataset, "dataset_access_type": dataset_access_type,
             "myuser": dbsUtils().getCreateBy(), "mydate": dbsUtils().getTime()}
    result = self.dbi.processData(self.sql, binds, conn, transaction)
|
Updates the dataset_access_type for a given dataset
|
1,513 |
def validateStringInput(input_key, input_data, read=False):
    log = clog.error_log
    func = None
    if '*' in input_data or '%' in input_data:
        func = validationFunctionWildcard.get(input_key)
        if func is None:
            func = searchstr
    elif input_key == 'migration_input':
        if input_data.find('#') != -1:
            func = block
        else:
            func = dataset
    else:
        if not read:
            func = validationFunction.get(input_key)
            if func is None:
                func = namestr
        else:
            if input_key == 'dataset':
                func = reading_dataset_check
            elif input_key == 'block_name':
                func = reading_block_check
            elif input_key == 'logical_file_name':
                func = reading_lfn_check
            else:
                func = namestr
    try:
        func(input_data)
    except AssertionError as ae:
        serverLog = str(ae) + " key-value pair (%s, %s) cannot pass input checking" % (input_key, input_data)
        dbsExceptionHandler("dbsException-invalid-input2",
                            message="Invalid Input Data %s...: Not Match Required Format" % input_data[:10],
                            logger=log.error, serverError=serverLog)
    return input_data
|
To check if a string has the required format. This is only used for POST APIs.
|
1,514 |
def jsonstreamer(func):
    def wrapper(self, *args, **kwds):
        gen = func(self, *args, **kwds)
        yield "["
        firstItem = True
        for item in gen:
            if not firstItem:
                yield ","
            else:
                firstItem = False
            yield cjson.encode(item)
        yield "]"
    return wrapper
|
JSON streamer decorator
|
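Since the decorator above only depends on a generator method, its behavior is easy to reproduce standalone; a minimal sketch using the standard json module in place of the legacy cjson (the Demo class is hypothetical):

```python
import json

def jsonstreamer(func):
    # stream a generator's items as one comma-separated JSON array
    def wrapper(self, *args, **kwds):
        gen = func(self, *args, **kwds)
        yield "["
        first = True
        for item in gen:
            if not first:
                yield ","
            first = False
            yield json.dumps(item)
        yield "]"
    return wrapper

class Demo(object):
    @jsonstreamer
    def list_items(self):
        for i in range(3):
            yield {"id": i}

print("".join(Demo().list_items()))  # [{"id": 0},{"id": 1},{"id": 2}]
```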
1,515 |
def listDatasetAccessTypes(self, dataset_access_type=""):
    if isinstance(dataset_access_type, basestring):
        try:
            dataset_access_type = str(dataset_access_type)
        except:
            dbsExceptionHandler('dbsException-invalid-input',
                                'dataset_access_type given is not valid : %s' % dataset_access_type)
    else:
        dbsExceptionHandler('dbsException-invalid-input',
                            'dataset_access_type given is not valid : %s' % dataset_access_type)
    conn = self.dbi.connection()
    try:
        plist = self.datasetAccessType.execute(conn, dataset_access_type.upper())
        result = [{}]
        if plist:
            t = []
            for i in plist:
                for k, v in i.iteritems():
                    t.append(v)
            result[0]['dataset_access_type'] = t
        return result
    finally:
        if conn:
            conn.close()
|
List dataset access types
|
1,516 |
def block_before(self):
    if request.path.startswith(url_for('static', filename='')):
        return
    ignored_extensions = ('ico', 'png', 'txt', 'xml')
    if request.path.rsplit('.', 1)[-1] in ignored_extensions:
        return
    ips = request.headers.getlist('X-Forwarded-For')
    if not ips:
        return
    ip = ips[0].strip()
    if ip[-1] == ',':
        ip = ip[:-1]
    ip = ip.rsplit(',', 1)[-1].strip()
    if self.matches_ip(ip):
        if self.logger is not None:
            self.logger.info("IPBlock: matched {}, {}".format(ip, self.block_msg))
        if self.blocking_enabled:
            return 'IP Blocked', 200
|
Check the current request and block it if the IP address it's coming from is blacklisted.
|
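The X-Forwarded-For handling above keeps the right-most address of the first header value; a standalone sketch of just that parsing step (header values are made up):

```python
def extract_ip(xff_values):
    # mirror block_before: take the first header value, drop a trailing comma,
    # then keep the right-most address
    ip = xff_values[0].strip()
    if ip[-1] == ',':
        ip = ip[:-1]
    return ip.rsplit(',', 1)[-1].strip()

print(extract_ip(["203.0.113.5, 198.51.100.7"]))  # 198.51.100.7
print(extract_ip(["203.0.113.5,"]))               # 203.0.113.5
```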
1,517 |
def matches_ip(self, ip):
    if self.cache is not None:
        matches_ip = self.cache.get(ip)
        if matches_ip is not None:
            return matches_ip
    matches_ip = IPNetwork.matches_ip(ip, read_preference=self.read_preference)
    if self.cache is not None:
        self.cache[ip] = matches_ip
    return matches_ip
|
Return True if the given IP is blacklisted, False otherwise.
|
1,518 |
def processDatasetBlocks(self, url, conn, inputdataset, order_counter):
    ordered_dict = {}
    srcblks = self.getSrcBlocks(url, dataset=inputdataset)
    if len(srcblks) == 0:  # len() is never negative; the original "< 0" test could never fire
        e = "DBSMigration: No blocks in the required dataset %s found at source %s." % (inputdataset, url)
        dbsExceptionHandler('dbsException-invalid-input2', e, self.logger.exception, e)
    dstblks = self.blocklist.execute(conn, dataset=inputdataset)
    self.logger.debug("******* dstblks for dataset %s ***********" % inputdataset)
    self.logger.debug(dstblks)
    blocksInSrcNames = [y['block_name'] for y in srcblks]
    blocksInDstNames = []
    for item in dstblks:
        blocksInDstNames.append(item['block_name'])
    ordered_dict[order_counter] = []
    for ablk in blocksInSrcNames:
        if not ablk in blocksInDstNames:
            ordered_dict[order_counter].append(ablk)
    if ordered_dict[order_counter] != []:
        self.logger.debug("**** ordered_dict dict length ****")
        self.logger.debug(len(ordered_dict))
        return ordered_dict
    else:
        return {}
|
Utility function that compares blocks of a dataset at source and dst and returns an ordered list of blocks not already at dst for migration
|
1,519 |
def removeMigrationRequest(self, migration_rqst):
    conn = self.dbi.connection()
    try:
        tran = conn.begin()
        self.mgrremove.execute(conn, migration_rqst)
        tran.commit()
    except dbsException as he:
        if conn:
            conn.close()
        raise
    except Exception as ex:
        if conn:
            conn.close()
        raise
    if conn:
        conn.close()
|
Method to remove pending or failed migration request from the queue .
|
1,520 |
def listMigrationBlocks(self, migration_request_id=""):
    conn = self.dbi.connection()
    try:
        return self.mgrblklist.execute(conn, migration_request_id=migration_request_id)
    finally:
        if conn:
            conn.close()
|
Gets everything for blocks that have status = 0 and migration_request_id as specified.
|
1,521 |
def getSrcBlocks(self, url, dataset="", block=""):
    if block:
        params = {'block_name': block, 'open_for_writing': 0}
    elif dataset:
        params = {'dataset': dataset, 'open_for_writing': 0}
    else:
        m = 'DBSMigration: Invalid input. Either block or dataset name has to be provided'
        e = 'DBSMigrate/getSrcBlocks: Invalid input. Either block or dataset name has to be provided'
        dbsExceptionHandler('dbsException-invalid-input2', m, self.logger.exception, e)
    return cjson.decode(self.callDBSService(url, 'blocks', params, {}))
|
Needs to list all blocks of the dataset and its parents, starting from the top. For now just lists the blocks from this dataset. Client-type call...
|
1,522 |
def executeSingle(self, conn, daoinput, tablename, transaction=False):
    sql1 = " insert into %s%s( " % (self.owner, tablename)
    sql2 = " values("
    # Loop over all the input keys. We need to check that all the keys are valid!
    for key in daoinput:
        sql1 += "%s," % key.upper()
        sql2 += ":%s," % key.lower()
    sql = sql1.strip(',') + ') ' + sql2.strip(',') + ' )'
    self.dbi.processData(sql, daoinput, conn, transaction)
|
build dynamic sql based on daoinput
|
1,523 |
def parse_requirements(requirements_file):
    if os.path.exists(requirements_file):
        # context manager ensures the file handle is closed (the original leaked it)
        with open(requirements_file, 'r') as f:
            return f.read().splitlines()
    else:
        print("ERROR: requirements file " + requirements_file + " not found.")
        sys.exit(1)
|
Create a list for the install_requires component of the setup function by parsing a requirements file
|
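As a usage sketch, parse_requirements typically feeds install_requires in a setup.py; the package name and file path below are placeholders:

```python
from setuptools import setup

setup(
    name="example-package",  # hypothetical package name
    version="0.1.0",
    install_requires=parse_requirements("requirements.txt"),
)
```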
1,524 |
def execute(self, conn, dsType="", dataset="", transaction=False):
    sql = self.sql
    binds = {}
    if not dsType and not dataset:
        pass
    elif dsType and dataset in ("", None, '%'):
        op = ("=", "like")["%" in dsType]
        sql += "WHERE PDT.PRIMARY_DS_TYPE %s :primdstype" % op
        binds = {"primdstype": dsType}
    elif dataset and dsType in ("", None, '%'):
        op = ("=", "like")["%" in dataset]
        sql += ("JOIN %sPRIMARY_DATASETS PDS ON PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID "
                "JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID "
                "WHERE DS.DATASET %s :dataset") % (self.owner, self.owner, op)
        binds = {"dataset": dataset}
    elif dataset and dsType:
        op = ("=", "like")["%" in dsType]
        op1 = ("=", "like")["%" in dataset]
        sql += ("JOIN %sPRIMARY_DATASETS PDS ON PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID "
                "JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID "
                "WHERE DS.DATASET %s :dataset AND PDT.PRIMARY_DS_TYPE %s :primdstype") % (self.owner, self.owner, op1, op)
        binds = {"primdstype": dsType, "dataset": dataset}
    else:
        dbsExceptionHandler('dbsException-invalid-input',
                            "DAO Primary_DS_TYPE List accepts no input, or dataset, primary_ds_type as input.",
                            self.logger.exception)
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    result = []
    for c in cursors:
        result.extend(self.formatCursor(c, size=100))
    return result
|
Lists all primary dataset types if no user input is provided.
|
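The `("=", "like")["%" in dsType]` expression above is a compact operator selector: it indexes a two-tuple with a boolean (False is 0, True is 1). A standalone sketch:

```python
def pick_op(value):
    # False -> index 0 -> '=', True -> index 1 -> 'like'
    return ("=", "like")["%" in value]

print(pick_op("bt%"))   # like (wildcard present)
print(pick_op("btev"))  # =
```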
1,525 |
def listReleaseVersions(self, release_version="", dataset='', logical_file_name=''):
    if dataset and ('%' in dataset or '*' in dataset):
        dbsExceptionHandler('dbsException-invalid-input',
                            " DBSReleaseVersion/listReleaseVersions. No wildcards are allowed in dataset.\n.")
    if logical_file_name and ('%' in logical_file_name or '*' in logical_file_name):
        dbsExceptionHandler('dbsException-invalid-input',
                            " DBSReleaseVersion/listReleaseVersions. No wildcards are allowed in logical_file_name.\n.")
    conn = self.dbi.connection()
    try:
        plist = self.releaseVersion.execute(conn, release_version.upper(), dataset, logical_file_name)
        result = [{}]
        if plist:
            t = []
            for i in plist:
                for k, v in i.iteritems():
                    t.append(v)
            result[0]['release_version'] = t
        return result
    finally:
        if conn:
            conn.close()
|
List release versions
|
1,526 |
def __search_ca_path(self):
    if "X509_CERT_DIR" in os.environ:
        self._ca_path = os.environ['X509_CERT_DIR']
    elif os.path.exists('/etc/grid-security/certificates'):
        self._ca_path = '/etc/grid-security/certificates'
    else:
        raise ClientAuthException("Could not find a valid CA path")
|
Get CA Path to check the validity of the server host certificate on the client side
|
1,527 |
def authInsert(user, role, group, site):
    if not role:
        return True
    for k, v in user['roles'].iteritems():
        for g in v['group']:
            if k in role.get(g, '').split(':'):
                return True
    return False
|
Authorization function for general insert
|
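To make the nested lookup in authInsert concrete, here is a toy payload in the shape the function expects (all names hypothetical; Python 2, matching the surrounding code):

```python
user = {"roles": {"operator": {"group": ["dataops"]}}}
role = {"dataops": "admin:operator"}  # group -> colon-separated role names

# 'operator' appears in role['dataops'].split(':'), so insertion is authorized
print(authInsert(user, role, group=None, site=None))                      # True
print(authInsert(user, {"physics": "admin"}, group=None, site=None))      # False: no matching group
# note: an empty role dict means "no restriction" and returns True immediately
```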
1,528 |
def listDatasetParents(self, dataset=""):
    if dataset == "":
        dbsExceptionHandler("dbsException-invalid-input",
                            "DBSDataset/listDatasetParents. Child Dataset name is required.")
    conn = self.dbi.connection()
    try:
        result = self.datasetparentlist.execute(conn, dataset)
        return result
    finally:
        if conn:
            conn.close()
|
Takes the required dataset parameter and returns only parent dataset names
|
1,529 |
def listDatasetChildren(self, dataset):
    if dataset == "":
        dbsExceptionHandler("dbsException-invalid-input",
                            "DBSDataset/listDatasetChildren. Parent Dataset name is required.")
    conn = self.dbi.connection()
    try:
        result = self.datasetchildlist.execute(conn, dataset)
        return result
    finally:
        if conn:
            conn.close()
|
Takes the required dataset parameter and returns only children dataset names
|
1,530 |
def listDatasets(self, dataset="", parent_dataset="", is_dataset_valid=1, release_version="", pset_hash="",
                 app_name="", output_module_label="", global_tag="", processing_version=0, acquisition_era="",
                 run_num=-1, physics_group_name="", logical_file_name="", primary_ds_name="", primary_ds_type="",
                 processed_ds_name="", data_tier_name="", dataset_access_type="VALID", prep_id="", create_by='',
                 last_modified_by='', min_cdate=0, max_cdate=0, min_ldate=0, max_ldate=0, cdate=0, ldate=0,
                 detail=False, dataset_id=-1):
    if logical_file_name and logical_file_name.find("%") != -1:
        dbsExceptionHandler('dbsException-invalid-input',
                            'DBSDataset/listDatasets API requires fully qualified logical_file_name. '
                            'NO wildcard is allowed in logical_file_name.')
    if dataset and dataset.find("/%/%/%") != -1:
        dataset = ''
    with self.dbi.connection() as conn:
        dao = (self.datasetbrieflist, self.datasetlist)[detail]
        if dataset_access_type:
            dataset_access_type = dataset_access_type.upper()
        if data_tier_name:
            data_tier_name = data_tier_name.upper()
        for item in dao.execute(conn, dataset, is_dataset_valid, parent_dataset, release_version, pset_hash,
                                app_name, output_module_label, global_tag, processing_version, acquisition_era,
                                run_num, physics_group_name, logical_file_name, primary_ds_name, primary_ds_type,
                                processed_ds_name, data_tier_name, dataset_access_type, prep_id, create_by,
                                last_modified_by, min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate,
                                dataset_id):
            yield item
|
Lists all datasets if the dataset parameter is not given. The dataset parameter can include the % character; all other parameters are not wildcard ones.
|
1,531 |
def execute(self, conn, block_name, origin_site_name, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-failed-connect2host",
                            "Oracle/Block/UpdateStatus. Expects db connection from upper layer.",
                            self.logger.exception)
    binds = {"block_name": block_name, "origin_site_name": origin_site_name,
             "mtime": dbsUtils().getTime(), "myuser": dbsUtils().getCreateBy()}
    self.dbi.processData(self.sql, binds, conn, transaction)
|
Update origin_site_name for a given block_name
|
1,532 |
def increment(self, conn, seqName, transaction=False, incCount=1):
    try:
        seqTable = "%sS" % seqName
        tlock = "lock tables %s write" % seqTable
        self.dbi.processData(tlock, [], conn, transaction)
        sql = "select ID from %s" % seqTable
        result = self.dbi.processData(sql, [], conn, transaction)
        resultlist = self.formatDict(result)
        newSeq = resultlist[0]['id'] + incCount
        sql = "UPDATE %s SET ID=:seq_count" % seqTable
        seqparms = {"seq_count": newSeq}
        self.dbi.processData(sql, seqparms, conn, transaction)
        tunlock = "unlock tables"
        self.dbi.processData(tunlock, [], conn, transaction)
        return newSeq
    except:
        # always release the table lock, even on failure
        tunlock = "unlock tables"
        self.dbi.processData(tunlock, [], conn, transaction)
        raise
|
Increments the sequence seqName (by one, by default) and returns its new value
|
1,533 |
def listRuns(self, run_num=-1, logical_file_name="", block_name="", dataset=""):
    if '%' in logical_file_name or '%' in block_name or '%' in dataset:
        dbsExceptionHandler('dbsException-invalid-input',
                            " DBSDatasetRun/listRuns. No wildcards are allowed in logical_file_name, block_name or dataset.\n.")
    conn = self.dbi.connection()
    tran = False
    try:
        ret = self.runlist.execute(conn, run_num, logical_file_name, block_name, dataset, tran)
        result = []
        rnum = []
        for i in ret:
            rnum.append(i['run_num'])
        result.append({'run_num': rnum})
        return result
    finally:
        if conn:
            conn.close()
|
Lists runs known to DBS.
|
1,534 |
def insertPrimaryDataset(self):
    try:
        body = request.body.read()
        indata = cjson.decode(body)
        indata = validateJSONInputNoCopy("primds", indata)
        indata.update({"creation_date": dbsUtils().getTime(), "create_by": dbsUtils().getCreateBy()})
        self.dbsPrimaryDataset.insertPrimaryDataset(indata)
    except cjson.DecodeError as dc:
        dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert PrimaryDataset input",
                            self.logger.exception, str(dc))
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
    except HTTPError as he:
        raise he
    except Exception as ex:
        sError = "DBSWriterModel/insertPrimaryDataset. %s\n Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to insert a primary dataset in DBS
|
1,535 |
def insertBlock(self):
    try:
        body = request.body.read()
        indata = cjson.decode(body)
        indata = validateJSONInputNoCopy("block", indata)
        self.dbsBlock.insertBlock(indata)
    except cjson.DecodeError as dc:
        dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert Block input",
                            self.logger.exception, str(dc))
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
    except Exception as ex:
        sError = "DBSWriterModel/insertBlock. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to insert a block into DBS
|
1,536 |
def updateFile(self, logical_file_name=[], is_file_valid=1, lost=0, dataset=''):
    if lost in [1, True, 'True', 'true', '1', 'y', 'yes']:
        lost = 1
        if is_file_valid in [1, True, 'True', 'true', '1', 'y', 'yes']:
            dbsExceptionHandler("dbsException-invalid-input2", dbsExceptionCode["dbsException-invalid-input2"],
                                self.logger.exception, "Lost file must set to invalid")
    else:
        lost = 0
    for f in logical_file_name, dataset:
        if '*' in f or '%' in f:
            dbsExceptionHandler("dbsException-invalid-input2", dbsExceptionCode["dbsException-invalid-input2"],
                                self.logger.exception, "No wildcard allowed in LFN or dataset for updatefile API.")
    try:
        self.dbsFile.updateStatus(logical_file_name, is_file_valid, lost, dataset)
    except HTTPError as he:
        raise he
    except Exception as ex:
        sError = "DBSWriterModel/updateFile. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to update file status
|
1,537 |
def qs_for_ip(cls, ip_str):
    ip = int(netaddr.IPAddress(ip_str))
    if ip > 4294967295:  # 2**32 - 1: outside the IPv4 address space
        return cls.objects.none()
    ip_range_query = {'start__lte': ip, 'stop__gte': ip}
    return cls.objects.filter(**ip_range_query)
|
Returns a queryset with matching IPNetwork objects for the given IP .
|
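The integer guard in qs_for_ip is an IPv4 bounds check; a quick sketch of the conversion it relies on (netaddr is the same library the method uses):

```python
import netaddr

ip = int(netaddr.IPAddress("192.0.2.10"))
print(ip)                # 3221225994
print(ip <= 4294967295)  # True: fits in the 32-bit IPv4 space
# the queryset then reduces to an integer range test: start <= ip <= stop
```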
1,538 |
def matches_ip(cls, ip_str, read_preference=None):
    qs = cls.qs_for_ip(ip_str).only('whitelist')
    if read_preference:
        qs = qs.read_preference(read_preference)
    # blocked only if the IP matched some network and none of the matches is whitelisted
    return bool(qs) and not any(obj.whitelist for obj in qs)
|
Return True if the provided IP exists in the blacklist and doesn't exist in the whitelist; otherwise return False.
|
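The final expression encodes a "blacklist unless whitelisted" rule; a toy illustration with plain objects standing in for IPNetwork documents:

```python
class Net(object):
    def __init__(self, whitelist):
        self.whitelist = whitelist

def decide(matches):
    # blocked only if something matched and nothing that matched is whitelisted
    return bool(matches) and not any(n.whitelist for n in matches)

print(decide([]))                       # False: IP matched nothing
print(decide([Net(False)]))             # True: blacklisted
print(decide([Net(False), Net(True)]))  # False: a whitelist entry wins
```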
1,539 |
def dbsExceptionHandler(eCode='', message='', logger=None, serverError=''):
    if logger:
        if eCode == "dbsException-invalid-input":
            raise HTTPError(400, message)
        elif eCode == "dbsException-missing-data":
            logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
            raise HTTPError(412, message)
        elif eCode == "dbsException-input-too-large":
            logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
            raise HTTPError(413, message)
        elif eCode == "dbsException-invalid-input2":
            logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
            raise HTTPError(400, message)
        elif eCode == "dbsException-conflict-data":
            logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
            raise HTTPError(409, message)
        elif eCode == "dbsException-failed-connect2host":
            logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
            raise HTTPError(443, message)
        else:
            logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
            raise HTTPError(500, message)
    else:
        raise dbsException(eCode, message, serverError)
|
This utility function handles all DBS exceptions. It logs and raises an exception based on the input condition, recording the traceback in the server log. It sends HTTPError 400 for invalid client input and HTTPError 412 when a required pre-existing condition is missing.
|
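A minimal sketch of the calling convention used throughout the rows above; dbsExceptionHandler is the function just shown, and the logger is a stand-in: callers pass a short client-facing message plus a verbose serverError, and the handler both logs and raises:

```python
import logging
logging.basicConfig()
logger = logging.getLogger("dbs")

# with a logger, this logs the serverError and raises HTTPError(400, message);
# without one, it raises dbsException(eCode, message, serverError) instead
dbsExceptionHandler(
    eCode="dbsException-invalid-input2",
    message="Invalid input data",            # what the client sees
    logger=logger.error,                     # logging callable, as in the callers above
    serverError="full traceback / details",  # what lands in the server log
)
```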
1,540 |
def configure_proxy(self, curl_object):
    curl_object.setopt(curl_object.PROXY, self._proxy_hostname)
    curl_object.setopt(curl_object.PROXYPORT, self._proxy_port)
    curl_object.setopt(curl_object.PROXYTYPE, curl_object.PROXYTYPE_SOCKS5)
    if self._proxy_user and self._proxy_passwd:
        # credentials are user:password; the original mistakenly interpolated the port here
        curl_object.setopt(curl_object.PROXYUSERPWD, '%s:%s' % (self._proxy_user, self._proxy_passwd))
|
configure pycurl proxy settings
|
1,541 |
def execute(self, conn, acquisition_era_name, end_date, transaction=False):
    if not conn:
        dbsExceptionHandler("dbsException-failed-connect2host",
                            "dbs/dao/Oracle/AcquisitionEra/updateEndDate expects db connection from upper layer.",
                            self.logger.exception)
    binds = {"acquisition_era_name": acquisition_era_name, "end_date": end_date}
    result = self.dbi.processData(self.sql, binds, conn, transaction)
|
Updates the end_date for a given acquisition era
|
1,542 |
def updateStatus(self, block_name="", open_for_writing=0):
    if open_for_writing not in [1, 0, '1', '0']:
        msg = "DBSBlock/updateStatus. open_for_writing can only be 0 or 1 : passed %s." % open_for_writing
        dbsExceptionHandler('dbsException-invalid-input', msg)
    conn = self.dbi.connection()
    trans = conn.begin()
    try:
        open_for_writing = int(open_for_writing)
        self.updatestatus.execute(conn, block_name, open_for_writing, dbsUtils().getTime(), trans)
        trans.commit()
        trans = None
    except Exception as ex:
        if trans:
            trans.rollback()
        if conn:
            conn.close()
        raise ex
    finally:
        if conn:
            conn.close()
|
Used to toggle the status of a block: open_for_writing = 1 (open for writing), open_for_writing = 0 (closed)
|
1,543 |
def updateSiteName(self, block_name, origin_site_name):
    if not origin_site_name:
        dbsExceptionHandler('dbsException-invalid-input',
                            "DBSBlock/updateSiteName. origin_site_name is mandatory.")
    conn = self.dbi.connection()
    trans = conn.begin()
    try:
        self.updatesitename.execute(conn, block_name, origin_site_name)
    except:
        if trans:
            trans.rollback()
        raise
    else:
        if trans:
            trans.commit()
    finally:
        if conn:
            conn.close()
|
Update the origin_site_name for a given block name
|
1,544 |
def listBlocks(self, dataset="", block_name="", data_tier_name="", origin_site_name="", logical_file_name="",
               run_num=-1, min_cdate=0, max_cdate=0, min_ldate=0, max_ldate=0, cdate=0, ldate=0,
               open_for_writing=-1, detail=False):
    if (not dataset) or re.search("['%','*']", dataset):
        if (not block_name) or re.search("['%','*']", block_name):
            if (not logical_file_name) or re.search("['%','*']", logical_file_name):
                if not data_tier_name or re.search("['%','*']", data_tier_name):
                    msg = ("DBSBlock/listBlock. You must specify at least one parameter (dataset, block_name, "
                           "data_tier_name, logical_file_name) with the listBlocks api")
                    dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)
    if data_tier_name:
        if not (min_cdate and max_cdate) or (max_cdate - min_cdate) > 32 * 24 * 3600:
            msg = ("min_cdate and max_cdate are mandatory parameters. If the data_tier_name parameter is used, "
                   "the maximal time range allowed is 31 days")
            dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)
        if detail:
            msg = "DBSBlock/listBlock. Detail parameter not allowed together with data_tier_name"
            dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)
    with self.dbi.connection() as conn:
        dao = (self.blockbrieflist, self.blocklist)[detail]
        for item in dao.execute(conn, dataset, block_name, data_tier_name, origin_site_name, logical_file_name,
                                run_num, min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate):
            yield item
|
dataset, block_name, data_tier_name, or logical_file_name must be passed.
|
1,545 |
def execute(self, conn, site_name="", transaction=False):
    sql = self.sql
    if site_name == "":
        result = self.dbi.processData(sql, conn=conn, transaction=transaction)
    else:
        sql += "WHERE S.SITE_NAME = :site_name"
        binds = {"site_name": site_name}
        result = self.dbi.processData(sql, binds, conn, transaction)
    return self.formatDict(result)
|
Lists all site types if site_name is not provided.
|
1,546 |
def getBlocks(self):
    try:
        conn = self.dbi.connection()
        result = self.buflistblks.execute(conn)
        return result
    finally:
        if conn:
            conn.close()
|
Get the blocks that need to be migrated
|
1,547 |
def getBufferedFiles(self, block_id):
    try:
        conn = self.dbi.connection()
        result = self.buflist.execute(conn, block_id)
        return result
    finally:
        if conn:
            conn.close()
|
Get some files from the insert buffer
|
1,548 |
def execute(self, conn, data_tier_name='', transaction=False, cache=None):
    if cache:
        ret = cache.get("DATA_TIERS")
        if ret is not None:
            return ret
    sql = self.sql
    binds = {}
    if data_tier_name:
        op = ('=', 'like')['%' in data_tier_name]
        sql += "WHERE DT.DATA_TIER_NAME %s :datatier" % op
        binds = {"datatier": data_tier_name}
    result = self.dbi.processData(sql, binds, conn, transaction)
    plist = self.formatDict(result)
    return plist
|
returns id for a given datatier name
|
1,549 |
def execute(self, conn, migration_url="", migration_input="", create_by="", migration_request_id="",
            transaction=False):
    binds = {}
    result = self.dbi.processData(self.sql, binds, conn, transaction)
    result = self.formatDict(result)
    if len(result) == 0:
        return []
    if result[0]["migration_request_id"] in ('', None):
        return []
    return result
|
Lists the oldest request queued
|
1,550 |
def listProcessingEras(self, processing_version=''):
    conn = self.dbi.connection()
    try:
        result = self.pelst.execute(conn, processing_version)
        return result
    finally:
        if conn:
            conn.close()
|
Returns all processing eras in dbs
|
1,551 |
def listPhysicsGroups(self, physics_group_name=""):
    if not isinstance(physics_group_name, basestring):
        dbsExceptionHandler('dbsException-invalid-input',
                            'physics group name given is not valid : %s' % physics_group_name)
    else:
        try:
            physics_group_name = str(physics_group_name)
        except:
            dbsExceptionHandler('dbsException-invalid-input',
                                'physics group name given is not valid : %s' % physics_group_name)
    conn = self.dbi.connection()
    try:
        result = self.pglist.execute(conn, physics_group_name)
        return result
    finally:
        if conn:
            conn.close()
|
Returns all physics groups if physics group names are not passed.
|
1,552 |
def getServices(self):
    try:
        conn = self.dbi.connection()
        result = self.serviceslist.execute(conn)
        return result
    except Exception as ex:
        msg = ("%s DBSServicesRegistry/getServices. %s\n. Exception trace: \n %s"
               % (DBSEXCEPTIONS['dbsException-3'], ex, traceback.format_exc()))
        self.logger.exception(msg)
        raise Exception("dbsException-3", msg)
    finally:
        conn.close()
|
Simple method that returns a list of all DBS instances known to this registry
|
1,553 |
def addService(self):
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        body = request.body.read()
        service = cjson.decode(body)
        addthis = {}
        addthis['service_id'] = self.sm.increment(conn, "SEQ_RS", tran)
        addthis['name'] = service.get('NAME', '')
        if addthis['name'] == '':
            msg = ("%s DBSServicesRegistry/addServices. Service Must be Named\n"
                   % DBSEXCEPTIONS['dbsException-3'])
            raise Exception("dbsException-3", msg)
        addthis['type'] = service.get('TYPE', 'GENERIC')
        addthis['location'] = service.get('LOCATION', 'HYPERSPACE')
        addthis['status'] = service.get('STATUS', 'UNKNOWN')
        addthis['admin'] = service.get('ADMIN', 'UNADMINISTRATED')
        addthis['uri'] = service.get('URI', '')
        if addthis['uri'] == '':
            msg = ("%s DBSServicesRegistry/addServices. Service URI must be provided.\n"
                   % DBSEXCEPTIONS['dbsException-3'])
            self.logger.exception(msg)
            raise Exception("dbsException-3", msg)
        addthis['db'] = service.get('DB', 'NO_DATABASE')
        addthis['version'] = service.get('VERSION', 'UNKNOWN')
        addthis['last_contact'] = dbsUtils().getTime()
        addthis['comments'] = service.get('COMMENTS', 'NO COMMENTS')
        addthis['alias'] = service.get('ALIAS', 'No Alias')
        self.servicesadd.execute(conn, addthis, tran)
        tran.commit()
    except exceptions.IntegrityError as ex:
        # duplicate entries become updates instead of inserts
        if str(ex).find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
            try:
                self.servicesupdate.execute(conn, addthis, tran)
                tran.commit()
            except Exception as ex:
                msg = ("%s DBSServiceRegistry/addServices. %s\n. Exception trace: \n %s"
                       % (DBSEXCEPTIONS['dbsException-3'], ex, traceback.format_exc()))
                self.logger.exception(msg)
                raise Exception("dbsException-3", msg)
    except Exception as ex:
        tran.rollback()
        msg = ("%s DBSServiceRegistry/addServices. %s\n. Exception trace: \n %s"
               % (DBSEXCEPTIONS['dbsException-3'], ex, traceback.format_exc()))
        self.logger.exception(msg)
        raise Exception("dbsException-3", msg)
    finally:
        conn.close()
|
Add a service to service registry
|
1,554 |
def execute(self, conn, migration_url="", migration_input="", create_by="", migration_request_id="",
            oldest=False, transaction=False):
    sql = self.sql
    binds = {}
    if migration_request_id:
        sql += " WHERE MR.MIGRATION_REQUEST_ID=:migration_request_id"
        binds['migration_request_id'] = migration_request_id
    elif oldest:
        # (the WHERE clause selecting the oldest pending request is elided in the source)
        binds['current_date'] = dbsUtils().getTime()
    else:
        if migration_url or migration_input or create_by:
            sql += " WHERE "
        if migration_url:
            sql += " MR.MIGRATION_URL=:migration_url"
            binds['migration_url'] = migration_url
        if migration_input:
            if migration_url:
                sql += " AND "
            op = ("=", "like")["%" in migration_input]
            sql += " MR.MIGRATION_INPUT %s :migration_input" % op
            binds['migration_input'] = migration_input
        if create_by:
            if migration_url or migration_input:
                sql += " AND "
            sql += " MR.CREATE_BY=:create_by"  # bind parameter; the original's stray "% create_by" was a bug
            binds['create_by'] = create_by
    cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
    result = []
    for c in cursors:
        result.extend(self.formatCursor(c, size=100))
    return result
|
Lists all requests if pattern is not provided .
|
1,555 |
def listPrimaryDatasets(self, primary_ds_name="", primary_ds_type=""):
    conn = self.dbi.connection()
    try:
        result = self.primdslist.execute(conn, primary_ds_name, primary_ds_type)
        if conn:
            conn.close()
        return result
    finally:
        if conn:
            conn.close()
|
Returns all primary datasets if primary_ds_name or primary_ds_type are not passed.
|
1,556 |
def listPrimaryDSTypes(self, primary_ds_type="", dataset=""):
    conn = self.dbi.connection()
    try:
        result = self.primdstypeList.execute(conn, primary_ds_type, dataset)
        if conn:
            conn.close()
        return result
    finally:
        if conn:
            conn.close()
|
Returns all primary dataset types if dataset or primary_ds_type are not passed .
|
1,557 |
def execute(self, conn, name='', transaction=False):
    binds = {}
    if name:
        op = ('=', 'like')['%' in name]
        sql = self.sql + " WHERE pg.physics_group_name %s :physicsgroup" % op
        binds = {"physicsgroup": name}
    else:
        sql = self.sql
    self.logger.debug(sql)
    result = self.dbi.processData(sql, binds, conn, transaction)
    plist = self.formatDict(result)
    self.logger.debug(plist)
    if len(plist) < 1:
        return []
    return plist
|
returns id for a given physics group name
|
1,558 |
def getHelp(self, call=""):
    if call:
        params = self.methods['GET'][call]['args']
        doc = self.methods['GET'][call]['call'].__doc__
        return dict(params=params, doc=doc)
    else:
        return self.methods['GET'].keys()
|
API to get a list of supported REST APIs. In the case a particular API is specified, the docstring of that API is displayed.
|
1,559 |
def listPrimaryDatasets(self, primary_ds_name="", primary_ds_type=""):
    primary_ds_name = primary_ds_name.replace("*", "%")
    primary_ds_type = primary_ds_type.replace("*", "%")
    try:
        return self.dbsPrimaryDataset.listPrimaryDatasets(primary_ds_name, primary_ds_type)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
    except Exception as ex:
        sError = "DBSReaderModel/listPrimaryDatasets. %s\n Exception trace: \n %s." % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list primary datasets
|
1,560 |
def listDatasetArray(self):
    ret = []
    try:
        body = request.body.read()
        if body:
            data = cjson.decode(body)
            data = validateJSONInputNoCopy("dataset", data, read=True)
            max_array_size = 1000
            if ('dataset' in data.keys() and isinstance(data['dataset'], list)
                    and len(data['dataset']) > max_array_size) \
               or ('dataset_id' in data.keys() and isinstance(data['dataset_id'], list)
                    and len(data['dataset_id']) > max_array_size):
                dbsExceptionHandler("dbsException-invalid-input",
                                    "The Max list length supported in listDatasetArray is %s." % max_array_size,
                                    self.logger.exception)
            ret = self.dbsDataset.listDatasetArray(data)
    except cjson.DecodeError as De:
        dbsExceptionHandler('dbsException-invalid-input2', "Invalid input", self.logger.exception, str(De))
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except HTTPError as he:
        raise he
    except Exception as ex:
        sError = "DBSReaderModel/listDatasetArray. %s \n Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
    for item in ret:
        yield item
|
API to list datasets in DBS. To be called by the datasetlist url with a POST call.
|
1,561 |
def listDataTiers(self, data_tier_name=""):
    data_tier_name = data_tier_name.replace("*", "%")
    try:
        conn = self.dbi.connection()
        return self.dbsDataTierListDAO.execute(conn, data_tier_name.upper())
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
    except ValueError as ve:
        dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input Data",
                            self.logger.exception, ve.message)
    except TypeError as te:
        dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input DataType",
                            self.logger.exception, te.message)
    except NameError as ne:
        dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input Searching Key",
                            self.logger.exception, ne.message)
    except Exception as ex:
        sError = "DBSReaderModel/listDataTiers. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
    finally:
        if conn:
            conn.close()
|
API to list data tiers known to DBS.
|
1,562 |
def listBlockOrigin(self, origin_site_name="", dataset="", block_name=""):
    try:
        return self.dbsBlock.listBlocksOrigin(origin_site_name, dataset, block_name)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listBlocks. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list blocks first generated in origin_site_name.
|
1,563 |
def listBlocksParents(self):
    try:
        body = request.body.read()
        data = cjson.decode(body)
        data = validateJSONInputNoCopy("block", data, read=True)
        max_array_size = 1000
        if ('block_names' in data.keys() and isinstance(data['block_names'], list)
                and len(data['block_names']) > max_array_size):
            dbsExceptionHandler("dbsException-invalid-input",
                                "The Max list length supported in listBlocksParents is %s." % max_array_size,
                                self.logger.exception)
        return self.dbsBlock.listBlockParents(data["block_name"])
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except cjson.DecodeError as de:
        sError = "DBSReaderModel/listBlockParents. %s\n. Exception trace: \n %s" % (de, traceback.format_exc())
        msg = "DBSReaderModel/listBlockParents. %s" % de
        dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, sError)
    except HTTPError as he:
        raise he
    except Exception as ex:
        sError = "DBSReaderModel/listBlockParents. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list block parents of multiple blocks. To be called by the blockparents url with a POST call.
|
1,564 |
def listBlockChildren(self, block_name=""):
    block_name = block_name.replace("*", "%")
    try:
        return self.dbsBlock.listBlockChildren(block_name)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listBlockChildren. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list block children.
|
1,565 |
def listBlockSummaries(self, block_name="", dataset="", detail=False):
    if bool(dataset) + bool(block_name) != 1:
        dbsExceptionHandler("dbsException-invalid-input2", dbsExceptionCode["dbsException-invalid-input2"],
                            self.logger.exception, "Dataset or block_names must be specified at a time.")
    if block_name and isinstance(block_name, basestring):
        try:
            block_name = [str(block_name)]
        except:
            dbsExceptionHandler("dbsException-invalid-input", "Invalid block_name for listBlockSummaries. ")
    for this_block_name in block_name:
        if re.search("[*, %]", this_block_name):
            dbsExceptionHandler("dbsException-invalid-input2", dbsExceptionCode["dbsException-invalid-input2"],
                                self.logger.exception, "No wildcards are allowed in block_name list")
    if re.search("[*, %]", dataset):
        dbsExceptionHandler("dbsException-invalid-input2", dbsExceptionCode["dbsException-invalid-input2"],
                            self.logger.exception, "No wildcards are allowed in dataset")
    data = []
    try:
        with self.dbi.connection() as conn:
            data = self.dbsBlockSummaryListDAO.execute(conn, block_name, dataset, detail)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listBlockSummaries. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
    for item in data:
        yield item
|
API that returns summary information like total size and total number of events in a dataset or a list of blocks
|
1,566 |
def listDatasetParents(self, dataset=''):
    try:
        return self.dbsDataset.listDatasetParents(dataset)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listDatasetParents. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list a dataset's parents in DBS.
|
1,567 |
def listOutputConfigs(self, dataset="", logical_file_name="", release_version="", pset_hash="", app_name="",
                      output_module_label="", block_id=0, global_tag=''):
    release_version = release_version.replace("*", "%")
    pset_hash = pset_hash.replace("*", "%")
    app_name = app_name.replace("*", "%")
    output_module_label = output_module_label.replace("*", "%")
    try:
        return self.dbsOutputConfig.listOutputConfigs(dataset, logical_file_name, release_version, pset_hash,
                                                      app_name, output_module_label, block_id, global_tag)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listOutputConfigs. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list OutputConfigs in DBS.
|
1,568 |
def listFileParents(self, logical_file_name='', block_id=0, block_name=''):
    try:
        r = self.dbsFile.listFileParents(logical_file_name, block_id, block_name)
        for item in r:
            yield item
    except HTTPError as he:
        raise he
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listFileParents. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', ex.message, self.logger.exception, sError)
|
API to list file parents
|
1,569 |
def listFileChildren(self, logical_file_name='', block_name='', block_id=0):
    if isinstance(logical_file_name, list):
        for f in logical_file_name:
            if '*' in f or '%' in f:
                dbsExceptionHandler("dbsException-invalid-input2", dbsExceptionCode["dbsException-invalid-input2"],
                                    self.logger.exception, "No wildcard allowed in LFN list")
    try:
        return self.dbsFile.listFileChildren(logical_file_name, block_name, block_id)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listFileChildren. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list file children. One of the parameters is mandatory.
|
1,570 |
def listFileLumis(self, logical_file_name="", block_name="", run_num=-1, validFileOnly=0):
    if run_num != -1 and logical_file_name == '':
        for r in parseRunRange(run_num):
            if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long):
                if r == 1 or r == '1':
                    dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                        self.logger.exception)
            elif isinstance(r, run_tuple):
                if r[0] == r[1]:
                    dbsExceptionHandler("dbsException-invalid-input", "DBS run range must be apart at least by 1.",
                                        self.logger.exception)
                elif r[0] <= 1 <= r[1]:
                    dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                        self.logger.exception)
    try:
        return self.dbsFile.listFileLumis(logical_file_name, block_name, run_num, validFileOnly)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listFileLumis. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list lumis for files. Either logical_file_name or block_name is required. No wildcard support in this API.
|
1,571 |
def listRuns(self, run_num=-1, logical_file_name="", block_name="", dataset=""):
    if run_num != -1 and logical_file_name == '':
        for r in parseRunRange(run_num):
            if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long):
                if r == 1 or r == '1':
                    dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                        self.logger.exception)
            elif isinstance(r, run_tuple):
                if r[0] == r[1]:
                    dbsExceptionHandler("dbsException-invalid-input", "DBS run range must be apart at least by 1.",
                                        self.logger.exception)
                elif r[0] <= 1 <= r[1]:
                    dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                        self.logger.exception)
    if run_num == -1 and not logical_file_name and not dataset and not block_name:
        dbsExceptionHandler("dbsException-invalid-input",
                            "run_num, logical_file_name, block_name or dataset parameter is mandatory",
                            self.logger.exception)
    try:
        if logical_file_name:
            logical_file_name = logical_file_name.replace("*", "%")
        if block_name:
            block_name = block_name.replace("*", "%")
        if dataset:
            dataset = dataset.replace("*", "%")
        return self.dbsRun.listRuns(run_num, logical_file_name, block_name, dataset)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listRun. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list all runs in DBS. At least one parameter is mandatory.
|
1,572 |
def dumpBlock(self, block_name):
    try:
        return self.dbsBlock.dumpBlock(block_name)
    except HTTPError as he:
        raise he
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/dumpBlock. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', ex.message, self.logger.exception, sError)
|
API to list all information related to the block_name
|
1,573 |
def listAcquisitionEras(self, acquisition_era_name=''):
    try:
        acquisition_era_name = acquisition_era_name.replace('*', '%')
        return self.dbsAcqEra.listAcquisitionEras(acquisition_era_name)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listAcquisitionEras. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list all Acquisition Eras in DBS.
|
1,574 |
def listProcessingEras(self, processing_version=0):
    try:
        return self.dbsProcEra.listProcessingEras(processing_version)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listProcessingEras. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list all Processing Eras in DBS.
|
1,575 |
def listReleaseVersions(self, release_version='', dataset='', logical_file_name=''):
    if release_version:
        release_version = release_version.replace("*", "%")
    try:
        return self.dbsReleaseVersion.listReleaseVersions(release_version, dataset, logical_file_name)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listReleaseVersions. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list all release versions in DBS
|
1,576 |
def listDatasetAccessTypes(self, dataset_access_type=''):
    if dataset_access_type:
        dataset_access_type = dataset_access_type.replace("*", "%")
    try:
        return self.dbsDatasetAccessType.listDatasetAccessTypes(dataset_access_type)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listDatasetAccessTypes. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list dataset access types.
|
1,577 |
def listPhysicsGroups(self, physics_group_name=''):
    if physics_group_name:
        physics_group_name = physics_group_name.replace('*', '%')
    try:
        return self.dbsPhysicsGroup.listPhysicsGroups(physics_group_name)
    except dbsException as de:
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
    except Exception as ex:
        sError = "DBSReaderModel/listPhysicsGroups. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                            self.logger.exception, sError)
|
API to list all physics groups .
|
1,578 |
def listRunSummaries ( self , dataset = "" , run_num = - 1 ) : if run_num == - 1 : dbsExceptionHandler ( "dbsException-invalid-input" , "The run_num parameter is mandatory" , self . logger . exception ) if re . search ( '[*,%]' , dataset ) : dbsExceptionHandler ( "dbsException-invalid-input" , "No wildcards are allowed in dataset" , self . logger . exception ) if ( ( run_num == - 1 or run_num == '-1' ) and dataset == '' ) : dbsExceptionHandler ( "dbsException-invalid-input" , "run_num=-1 is not a valid input when no dataset is present." , self . logger . exception ) conn = None try : conn = self . dbi . connection ( ) return self . dbsRunSummaryListDAO . execute ( conn , dataset , run_num ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . serverError ) except Exception as ex : sError = "DBSReaderModel/listRunSummaries. %s\n. Exception trace: \n %s" % ( ex , traceback . format_exc ( ) ) dbsExceptionHandler ( 'dbsException-server-error' , dbsExceptionCode [ 'dbsException-server-error' ] , self . logger . exception , sError ) finally : if conn : conn . close ( )
|
API to list run summaries, such as the maximal lumisection in a run.
|
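A sketch of the guards listRunSummaries applies before touching the database, restated as plain validation (exception type simplified here; the real code routes everything through dbsExceptionHandler):

    import re

    def validate_run_summary_input(dataset, run_num):
        if run_num == -1:
            raise ValueError("The run_num parameter is mandatory")
        if re.search('[*,%]', dataset):
            raise ValueError("No wildcards are allowed in dataset")
        if run_num in (-1, '-1') and dataset == '':
            raise ValueError("run_num=-1 is not valid when no dataset is present")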
1,579 |
def list ( ) : entries = lambder . list_events ( ) for e in entries : click . echo ( str ( e ) )
|
List all events
|
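The lambder entries here are click commands whose decorators were stripped in flattening. A hedged sketch of how `list` is presumably wired up (the group name and command registration are assumptions; the function is renamed to avoid shadowing the builtin):

    import click

    @click.group()
    def events():
        """Manage scheduled events."""

    @events.command('list')
    def list_events_cmd():
        """List all events."""
        for e in lambder.list_events():
            click.echo(str(e))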
1,580 |
def add ( name , function_name , cron ) : lambder . add_event ( name = name , function_name = function_name , cron = cron )
|
Create an event
|
1,581 |
def load ( file ) : with open ( file , 'r' ) as f : contents = f . read ( ) lambder . load_events ( contents )
|
Load events from a JSON file.
|
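load_events receives the raw file contents, so the file is presumably a JSON document; the exact schema is an assumption, with field names mirrored from the add command above:

    # events.json -- hypothetical contents
    # [
    #   {"name": "nightly-report", "function_name": "report",
    #    "cron": "cron(0 2 * * ? *)"}
    # ]
    with open('events.json', 'r') as f:
        lambder.load_events(f.read())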
1,582 |
def functions ( context ) : config_file = "./lambder.json" if os . path . isfile ( config_file ) : context . obj = FunctionConfig ( config_file ) pass
|
Manage AWS Lambda functions
|
1,583 |
def list ( ) : functions = lambder . list_functions ( ) output = json . dumps ( functions , sort_keys = True , indent = 4 , separators = ( ',' , ':' ) ) click . echo ( output )
|
List lambder functions
|
1,584 |
def new ( name , bucket , timeout , memory , description , subnet_ids , security_group_ids ) : config = { } if timeout : config [ 'timeout' ] = timeout if memory : config [ 'memory' ] = memory if description : config [ 'description' ] = description if subnet_ids : config [ 'subnet_ids' ] = subnet_ids if security_group_ids : config [ 'security_group_ids' ] = security_group_ids lambder . create_project ( name , bucket , config )
|
Create a new lambda project
|
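new only forwards the options the user actually set, so project defaults stay in effect. A compact equivalent of the option-by-option dict building (helper name hypothetical):

    def build_config(**options):
        # Keep only truthy values; unset CLI flags arrive as None.
        return {k: v for k, v in options.items() if v}

    config = build_config(timeout=timeout, memory=memory, description=description,
                          subnet_ids=subnet_ids, security_group_ids=security_group_ids)
    lambder.create_project(name, bucket, config)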
1,585 |
def rm ( config , name , bucket ) : myname = name or config . name mybucket = bucket or config . bucket click . echo ( 'Deleting {} from {}' . format ( myname , mybucket ) ) lambder . delete_function ( myname , mybucket )
|
Delete lambda function, role, and zipfile.
|
1,586 |
def invoke ( config , name , input ) : myname = name or config . name click . echo ( 'Invoking ' + myname ) output = lambder . invoke_function ( myname , input ) click . echo ( output )
|
Invoke function in AWS
|
1,587 |
def putBlock ( self , blockcontent , migration = False ) : try : self . logger . debug ( "insert configuration" ) configList = self . insertOutputModuleConfig ( blockcontent [ 'dataset_conf_list' ] , migration ) self . logger . debug ( "insert dataset" ) datasetId = self . insertDataset ( blockcontent , configList , migration ) self . logger . debug ( "insert block & files." ) self . insertBlockFile ( blockcontent , datasetId , migration ) except KeyError as ex : dbsExceptionHandler ( "dbsException-invalid-input2" , "DBSBlockInsert/putBlock: \ KeyError exception: %s. " % ex . args [ 0 ] , self . logger . exception , "DBSBlockInsert/putBlock: KeyError exception: %s. " % ex . args [ 0 ] ) except Exception as ex : raise
|
Insert the data in several steps and commit when each step finishes, or roll back if there is a problem.
|
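One way to get the per-step commit/rollback behaviour the docstring describes is a small transaction helper. This is a sketch, not the actual DBS implementation; the begin/commit/rollback method names on the dbi connection object are assumptions:

    from contextlib import contextmanager

    @contextmanager
    def step_transaction(dbi):
        # Commit when the step finishes cleanly, roll back if it raises.
        conn = dbi.connection()
        tran = conn.begin()
        try:
            yield conn
            tran.commit()
        except Exception:
            tran.rollback()
            raise
        finally:
            conn.close()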
1,588 |
def listSites ( self , block_name = "" , site_name = "" ) : conn = None try : conn = self . dbi . connection ( ) if block_name : result = self . blksitelist . execute ( conn , block_name ) else : result = self . sitelist . execute ( conn , site_name ) return result finally : if conn : conn . close ( )
|
Returns sites .
|
1,589 |
def checkInputParameter ( method , parameters , validParameters , requiredParameters = None ) : for parameter in parameters : if parameter not in validParameters : raise dbsClientException ( "Invalid input" , "API %s does not support parameter %s. Supported parameters are %s" % ( method , parameter , validParameters ) ) if requiredParameters is not None : if 'multiple' in requiredParameters : match = False for requiredParameter in requiredParameters [ 'multiple' ] : if requiredParameter != 'detail' and requiredParameter in parameters : match = True break if not match : raise dbsClientException ( "Invalid input" , "API %s does require one of the parameters %s" % ( method , requiredParameters [ 'multiple' ] ) ) if 'forced' in requiredParameters : for requiredParameter in requiredParameters [ 'forced' ] : if requiredParameter not in parameters : raise dbsClientException ( "Invalid input" , "API %s does require the parameter %s. Forced required parameters are %s" % ( method , requiredParameter , requiredParameters [ 'forced' ] ) ) if 'standalone' in requiredParameters : overlap = [ ] for requiredParameter in requiredParameters [ 'standalone' ] : if requiredParameter in parameters : overlap . append ( requiredParameter ) if len ( overlap ) != 1 : raise dbsClientException ( "Invalid input" , "API %s does requires only *one* of the parameters %s." % ( method , requiredParameters [ 'standalone' ] ) )
|
Helper function to check input parameters before they are sent to the server.
|
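The three flavours of requiredParameters, shown with illustrative calls (argument values are made up; behaviour follows the code above):

    # 'forced': every listed parameter must be present.
    checkInputParameter(method='listBlockParents', parameters=['block_name'],
                        validParameters=['block_name'],
                        requiredParameters={'forced': ['block_name']})  # passes

    # 'multiple': at least one listed parameter (other than 'detail') must be present.
    checkInputParameter(method='listDatasetArray', parameters=['detail'],
                        validParameters=['dataset', 'dataset_id', 'detail'],
                        requiredParameters={'multiple': ['dataset', 'dataset_id']})
    # raises dbsClientException: neither dataset nor dataset_id was given

    # 'standalone': exactly one of the listed parameters may appear.
    checkInputParameter(method='hypotheticalApi', parameters=['a', 'b'],
                        validParameters=['a', 'b'],
                        requiredParameters={'standalone': ['a', 'b']})
    # raises dbsClientException: both a and b were given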
1,590 |
def split_calls ( func ) : def wrapper ( * args , ** kwargs ) : size_limit = 8000 encoded_url = urllib . urlencode ( kwargs ) if len ( encoded_url ) > size_limit : for key , value in kwargs . iteritems ( ) : if key in ( 'logical_file_name' , 'block_name' , 'lumi_list' , 'run_num' ) and isinstance ( value , list ) : ret_val = [ ] for splitted_param in list_parameter_splitting ( data = dict ( kwargs ) , key = key , size_limit = size_limit ) : try : ret_val . extend ( func ( * args , ** splitted_param ) ) except ( TypeError , AttributeError ) : ret_val = [ ] return ret_val raise dbsClientException ( "Invalid input" , "The length of the urlencoded parameters to API %s exceeds %s bytes and cannot be split." % ( func . __name__ , size_limit ) ) else : return func ( * args , ** kwargs ) return wrapper
|
Decorator to split up server calls for methods using URL parameters, due to the length limitation of the URI in Apache (8190 bytes by default).
|
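split_calls delegates the actual chunking to list_parameter_splitting, which is not shown here. A minimal sketch of what such a generator could look like (the real helper may measure the encoded length of each slice instead of assuming a fixed per-item size):

    def list_parameter_splitting(data, key, size_limit):
        # Yield copies of the keyword dict with data[key] cut into smaller
        # slices, so each resulting call encodes below the URI limit.
        values = data[key]
        chunk = max(1, size_limit // 80)  # hypothetical: ~80 bytes per encoded item
        for start in range(0, len(values), chunk):
            part = dict(data)
            part[key] = values[start:start + chunk]
            yield part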
1,591 |
def __callServer ( self , method = "" , params = { } , data = { } , callmethod = 'GET' , content = 'application/json' ) : UserID = os . environ [ 'USER' ] + '@' + socket . gethostname ( ) try : UserAgent = "DBSClient/" + os . environ [ 'DBS3_CLIENT_VERSION' ] + "/" + self . userAgent except : UserAgent = "DBSClient/Unknown" + "/" + self . userAgent request_headers = { "Content-Type" : content , "Accept" : content , "UserID" : UserID , "User-Agent" : UserAgent } method_func = getattr ( self . rest_api , callmethod . lower ( ) ) data = cjson . encode ( data ) try : self . http_response = method_func ( self . url , method , params , data , request_headers ) except HTTPError as http_error : self . __parseForException ( http_error ) if content != "application/json" : return self . http_response . body try : json_ret = cjson . decode ( self . http_response . body ) except cjson . DecodeError : print ( "The server output is not a valid json, most probably you have a typo in the url.\n%s.\n" % self . url , file = sys . stderr ) raise dbsClientException ( "Invalid url" , "Possible urls are %s" % self . http_response . body ) return json_ret
|
A private method to make an HTTP call to the DBS server.
|
1,592 |
def __parseForException ( self , http_error ) : data = http_error . body try : if isinstance ( data , str ) : data = cjson . decode ( data ) except : raise http_error if isinstance ( data , dict ) and 'exception' in data : raise HTTPError ( http_error . url , data [ 'exception' ] , data [ 'message' ] , http_error . header , http_error . body ) raise http_error
|
An internal method; it should not be used by clients.
|
1,593 |
def requestTimingInfo ( self ) : try : return tuple ( item . split ( '=' ) [ 1 ] for item in self . http_response . header . get ( 'CMS-Server-Time' ) . split ( ) ) except AttributeError : return None , None
|
Returns the time needed by the frontend server to process the request, in microseconds, and the epoch timestamp of the request in microseconds.
|
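The parsing implies a CMS-Server-Time header shaped like 'D=<duration> t=<timestamp>'; the key names are an assumption, since the code only relies on whitespace splitting and one '=' per item:

    header = 'D=1327 t=1405694710459946'
    duration, timestamp = tuple(item.split('=')[1] for item in header.split())
    # duration == '1327', timestamp == '1405694710459946' (both strings, microseconds)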
1,594 |
def listFileParentsByLumi ( self , ** kwargs ) : validParameters = [ 'block_name' , 'logical_file_name' ] requiredParameters = { 'forced' : [ 'block_name' ] } checkInputParameter ( method = "listFileParentsByLumi" , parameters = kwargs . keys ( ) , validParameters = validParameters , requiredParameters = requiredParameters ) return self . __callServer ( "fileparentsbylumi" , data = kwargs , callmethod = 'POST' )
|
API to list file parents using lumi section info .
|
1,595 |
def listBlockParents ( self , ** kwargs ) : validParameters = [ 'block_name' ] requiredParameters = { 'forced' : validParameters } checkInputParameter ( method = "listBlockParents" , parameters = kwargs . keys ( ) , validParameters = validParameters , requiredParameters = requiredParameters ) if isinstance ( kwargs [ "block_name" ] , list ) : return self . __callServer ( "blockparents" , data = kwargs , callmethod = 'POST' ) else : return self . __callServer ( "blockparents" , params = kwargs )
|
API to list block parents .
|
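The transport switch matters because a long list of block names would overflow the GET URL limit that split_calls guards against, so lists go in a POST body. Illustrative calls, assuming api is a client instance exposing the method above (block names made up):

    api.listBlockParents(block_name='/Prim/Proc/RAW#abc-123')       # single name: GET
    api.listBlockParents(block_name=['/Prim/Proc/RAW#abc-123',
                                     '/Prim/Proc/RAW#def-456'])     # list: POST body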
1,596 |
def listDatasetArray ( self , ** kwargs ) : validParameters = [ 'dataset' , 'dataset_access_type' , 'detail' , 'dataset_id' ] requiredParameters = { 'multiple' : [ 'dataset' , 'dataset_id' ] } checkInputParameter ( method = "listDatasetArray" , parameters = kwargs . keys ( ) , validParameters = validParameters , requiredParameters = requiredParameters ) if 'detail' not in kwargs . keys ( ) : kwargs [ 'detail' ] = False return self . __callServer ( "datasetlist" , data = kwargs , callmethod = 'POST' )
|
API to list datasets in DBS .
|
1,597 |
def find_devices ( ) : num_devices = api . py_aa_find_devices ( 0 , array . array ( 'H' ) ) _raise_error_if_negative ( num_devices ) if num_devices == 0 : return list ( ) ports = array . array ( 'H' , ( 0 , ) * num_devices ) unique_ids = array . array ( 'I' , ( 0 , ) * num_devices ) num_devices = api . py_aa_find_devices_ext ( len ( ports ) , len ( unique_ids ) , ports , unique_ids ) _raise_error_if_negative ( num_devices ) if num_devices == 0 : return list ( ) del ports [ num_devices : ] del unique_ids [ num_devices : ] devices = list ( ) for port , uid in zip ( ports , unique_ids ) : in_use = bool ( port & PORT_NOT_FREE ) dev = dict ( port = port & ~ PORT_NOT_FREE , serial_number = _unique_id_str ( uid ) , in_use = in_use ) devices . append ( dev ) return devices
|
Return a list of dictionaries . Each dictionary represents one device .
|
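A usage sketch for find_devices; the dictionary keys come from the dict built above:

    for dev in find_devices():
        status = 'in use' if dev['in_use'] else 'free'
        print('port %d, serial %s (%s)' % (dev['port'], dev['serial_number'], status))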
1,598 |
def i2c_bitrate ( self ) : ret = api . py_aa_i2c_bitrate ( self . handle , 0 ) _raise_error_if_negative ( ret ) return ret
|
I2C bitrate in kHz. Not every bitrate is supported by the host adapter; therefore, the actual bitrate may be less than the value that is set.
|
1,599 |
def i2c_pullups ( self ) : ret = api . py_aa_i2c_pullup ( self . handle , I2C_PULLUP_QUERY ) _raise_error_if_negative ( ret ) return ret
|
Setting this to True will enable the I2C pullup resistors. If set to False, the pullup resistors will be disabled.
|