<SYSTEM_TASK:> Encode a list of arguments <END_TASK> <USER_TASK:> Description: def encode_args(args, extra=False): """ Encode a list of arguments """
if not args:
    return ''
methodargs = ', '.join([encode(a) for a in args])
if extra:
    methodargs += ', '
return methodargs
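A quick usage sketch of how the helper composes argument strings; the `encode` function below is a hypothetical stand-in for the module's real encoder, which is not shown in this snippet.

def encode(a):
    # stand-in for the module's real encoder; assume it quotes values
    return repr(a)

args = ['click', 42]
print(', '.join([encode(a) for a in args]))        # 'click', 42
# with extra=True a trailing ', ' is appended, ready for more arguments:
print(', '.join([encode(a) for a in args]) + ', ')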
<SYSTEM_TASK:> Fill a specified form field in the current document. <END_TASK> <USER_TASK:> Description: def fill(self, field, value): """ Fill a specified form field in the current document. :param field: an instance of :class:`zombie.dom.DOMNode` :param value: any string value :return: self to allow function chaining. """
self.client.nowait('browser.fill', (field, value))
return self
<SYSTEM_TASK:> Used to set the ``value`` of form elements. <END_TASK> <USER_TASK:> Description: def value(self, value): """ Used to set the ``value`` of form elements. """
self.client.nowait(
    'set_field', (Literal('browser'), self.element, value))
<SYSTEM_TASK:> Fires a specified DOM event on the current node. <END_TASK> <USER_TASK:> Description: def fire(self, event): """ Fires a specified DOM event on the current node. :param event: the name of the event to fire (e.g., 'click'). Returns the :class:`zombie.dom.DOMNode` to allow function chaining. """
self.browser.fire(self.element, event)
return self
<SYSTEM_TASK:> Ensures all values are encoded in UTF-8 and converts them to lowercase <END_TASK> <USER_TASK:> Description: def _utf8_encode(self, d): """ Ensures all values are encoded in UTF-8 and converts them to lowercase """
for k, v in d.items():
    if isinstance(v, str):
        d[k] = v.encode('utf8').lower()
    if isinstance(v, list):
        for index, item in enumerate(v):
            v[index] = item.encode('utf8').lower()
    if isinstance(v, dict):
        d[k] = self._utf8_encode(v)
return d
<SYSTEM_TASK:> Parse the home and away game rosters <END_TASK> <USER_TASK:> Description: def parse_rosters(self): """ Parse the home and away game rosters :returns: ``self`` on success, ``None`` otherwise """
lx_doc = self.html_doc()
if not self.__blocks:
    self.__pl_blocks(lx_doc)
for t in ['home', 'away']:
    self.rosters[t] = self.__clean_pl_block(self.__blocks[t])
return self if self.rosters else None
<SYSTEM_TASK:> Parse the home and away healthy scratches <END_TASK> <USER_TASK:> Description: def parse_scratches(self): """ Parse the home and away healthy scratches :returns: ``self`` on success, ``None`` otherwise """
lx_doc = self.html_doc()
if not self.__blocks:
    self.__pl_blocks(lx_doc)
for t in ['aw_scr', 'h_scr']:
    ix = 'away' if t == 'aw_scr' else 'home'
    self.scratches[ix] = self.__clean_pl_block(self.__blocks[t])
return self if self.scratches else None
<SYSTEM_TASK:> Parse the home and away coaches <END_TASK> <USER_TASK:> Description: def parse_coaches(self): """ Parse the home and away coaches :returns: ``self`` on success, ``None`` otherwise """
lx_doc = self.html_doc()
tr = lx_doc.xpath('//tr[@id="HeadCoaches"]')[0]
for i, td in enumerate(tr):
    txt = td.xpath('.//text()')
    txt = ex_junk(txt, ['\n', '\r'])
    team = 'away' if i == 0 else 'home'
    self.coaches[team] = txt[0]
return self if self.coaches else None
<SYSTEM_TASK:> Parse the officials <END_TASK> <USER_TASK:> Description: def parse_officials(self): """ Parse the officials :returns: ``self`` on success, ``None`` otherwise """
lx_doc = self.html_doc()
off_parser = opm(self.game_key.season)
self.officials = off_parser(lx_doc)
return self if self.officials else None
<SYSTEM_TASK:> Check whether a domain has a valid MX or A record. <END_TASK> <USER_TASK:> Description: def is_valid(self, domain, diagnose=False): """Check whether a domain has a valid MX or A record. Keyword arguments: domain --- the domain to check diagnose --- flag to report a diagnosis or a boolean (default False) """
return_status = [ValidDiagnosis()]
dns_checked = False

# http://tools.ietf.org/html/rfc5321#section-2.3.5
#   Names that can be resolved to MX RRs or address (i.e., A or AAAA)
#   RRs (as discussed in Section 5) are permitted, as are CNAME RRs
#   whose targets can be resolved, in turn, to MX or address RRs.
#
# http://tools.ietf.org/html/rfc5321#section-5.1
#   The lookup first attempts to locate an MX record associated with
#   the name. If a CNAME record is found, the resulting name is
#   processed as if it were the initial name. ... If an empty list of
#   MXs is returned, the address is treated as if it was associated
#   with an implicit MX RR, with a preference of 0, pointing to that
#   host.
#
# is_email() author's note: We will regard the existence of a CNAME to
# be sufficient evidence of the domain's existence. For performance
# reasons we will not repeat the DNS lookup for the CNAME's target, but
# we will raise a warning because we didn't immediately find an MX
# record.
try:
    dns.resolver.query(domain, 'MX')
    dns_checked = True
except (dns.resolver.NXDOMAIN, dns.name.NameTooLong):
    # Domain can't be found in DNS
    return_status.append(DNSDiagnosis('NO_RECORD'))
    # Since dns.resolver gives more information than the PHP analog, we
    # can say that TLDs that throw an NXDOMAIN or NameTooLong error
    # have been checked
    if len(domain.split('.')) == 1:
        dns_checked = True
except dns.resolver.NoAnswer:
    # MX-record for domain can't be found
    return_status.append(DNSDiagnosis('NO_MX_RECORD'))
    try:
        # TODO: See if we can/need to narrow to A / CNAME
        dns.resolver.query(domain)
    except dns.resolver.NoAnswer:
        # No usable records for the domain can be found
        return_status.append(DNSDiagnosis('NO_RECORD'))
    except dns.resolver.NoNameservers:
        return_status.append(DNSDiagnosis('NO_NAMESERVERS'))
    except (dns.exception.Timeout, dns.resolver.Timeout):
        return_status.append(DNSDiagnosis('DNS_TIMEDOUT'))

# Check for TLD addresses
# -----------------------
# TLD addresses are specifically allowed in RFC 5321 but they are
# unusual to say the least. We will allocate a separate status to these
# addresses on the basis that they are more likely to be typos than
# genuine addresses (unless we've already established that the domain
# does have an MX record).
#
# http://tools.ietf.org/html/rfc5321#section-2.3.5
#   In the case of a top-level domain used by itself in an address, a
#   single string is used without any dots. This makes the requirement,
#   described in more detail below, that only fully-qualified domain
#   names appear in SMTP transactions on the public Internet,
#   particularly important where top-level domains are involved.
#
# TLD format
# ----------
# The format of TLDs has changed a number of times. The standards used
# by IANA have been largely ignored by ICANN, leading to confusion over
# the standards being followed. These are not defined anywhere, except
# as a general component of a DNS host name (a label). However, this
# could potentially lead to 123.123.123.123 being a valid DNS name
# (rather than an IP address) and thereby creating an ambiguity. The
# most authoritative statement on TLD formats that the author can find
# is in a (rejected!) erratum to RFC 1123 submitted by John Klensin,
# the author of RFC 5321:
#
# http://www.rfc-editor.org/errata_search.php?rfc=1123&eid=1353
#   However, a valid host name can never have the dotted-decimal form
#   #.#.#.#, since this change does not permit the highest-level
#   component label to start with a digit even if it is not
#   all-numeric.
if not dns_checked:
    atom_list = domain.split(".")
    if len(atom_list) == 1:
        return_status.append(RFC5321Diagnosis('TLD'))
    try:
        float(atom_list[len(atom_list) - 1][0])
        return_status.append(RFC5321Diagnosis('TLDNUMERIC'))
    except ValueError:
        pass

final_status = max(return_status)
return final_status if diagnose else final_status == ValidDiagnosis()
<SYSTEM_TASK:> Generate and yield a stream of parsed plays. Useful for per play processing. <END_TASK> <USER_TASK:> Description: def parse_plays_stream(self): """Generate and yield a stream of parsed plays. Useful for per play processing."""
lx_doc = self.html_doc()
if lx_doc is not None:
    parser = PlayParser(self.game_key.season, self.game_key.game_type)
    plays = lx_doc.xpath('//tr[@class = "evenColor"]')
    for p in plays:
        p_obj = parser.build_play(p)
        self.plays.append(p_obj)
        yield p_obj
<SYSTEM_TASK:> This function takes a list of files as input and vstacks them <END_TASK> <USER_TASK:> Description: def stack_files(files, hemi, source, target): """ This function takes a list of files as input and vstacks them """
import csv
import os
import numpy as np

fname = "sdist_%s_%s_%s.csv" % (hemi, source, target)
filename = os.path.join(os.getcwd(), fname)
alldist = []
for dfile in files:
    alldist.append(np.genfromtxt(dfile, delimiter=','))
alldist = np.array(alldist)
alldist.tofile(filename, ",")
return filename
<SYSTEM_TASK:> Transforms the ASCII control character symbols to their real char. <END_TASK> <USER_TASK:> Description: def to_char(token): """Transforms the ASCII control character symbols to their real char. Note: If the token is not an ASCII control character symbol, just return the token. Keyword arguments: token -- the token to transform """
if ord(token) in _range(9216, 9229 + 1):
    token = _unichr(ord(token) - 9216)
return token
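The 9216 offset works because Unicode's Control Pictures block (U+2400 onward) mirrors the ASCII control characters in order. A minimal sketch using plain chr/ord in place of the module's _unichr/_range compatibility shims:

# U+2400 ('SYMBOL FOR NULL') is code point 9216; subtracting the offset
# yields the actual control character it depicts.
symbol_for_null = '\u2400'
assert ord(symbol_for_null) == 9216
assert chr(ord(symbol_for_null) - 9216) == '\x00'   # real NUL
symbol_for_cr = '\u240d'                            # SYMBOL FOR CARRIAGE RETURN
assert chr(ord(symbol_for_cr) - 9216) == '\r'       # real carriage return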
<SYSTEM_TASK:> Get source node list for a specified freesurfer label. <END_TASK> <USER_TASK:> Description: def load_freesurfer_label(annot_input, label_name, cortex=None): """ Get source node list for a specified freesurfer label. Inputs ------- annot_input : freesurfer annotation label file label_name : freesurfer label name cortex : not used """
if cortex is not None:
    print("Warning: cortex is not used to load the freesurfer label")

labels, color_table, names = nib.freesurfer.read_annot(annot_input)
names = [i.decode('utf-8') for i in names]
label_value = names.index(label_name)
label_nodes = np.array(np.where(np.in1d(labels, label_value)), dtype=np.int32)

return label_nodes
<SYSTEM_TASK:> Print freesurfer label names. <END_TASK> <USER_TASK:> Description: def get_freesurfer_label(annot_input, verbose = True): """ Print freesurfer label names. """
labels, color_table, names = nib.freesurfer.read_annot(annot_input)
if verbose:
    print(names)
return names
<SYSTEM_TASK:> Remove medial wall from cortical surface to ensure that shortest paths are only calculated through the cortex. <END_TASK> <USER_TASK:> Description: def surf_keep_cortex(surf, cortex): """ Remove medial wall from cortical surface to ensure that shortest paths are only calculated through the cortex. Inputs ------- surf : Tuple containing two numpy arrays of shape (n_nodes,3). Each node of the first array specifies the x, y, z coordinates one node of the surface mesh. Each node of the second array specifies the indices of the three nodes building one triangle of the surface mesh. (e.g. the output from nibabel.freesurfer.io.read_geometry) cortex : Array with indices of vertices included in within the cortex. (e.g. the output from nibabel.freesurfer.io.read_label) """
# split surface into vertices and triangles
vertices, triangles = surf

# keep only the vertices within the cortex label
cortex_vertices = np.array(vertices[cortex], dtype=np.float64)

# keep only the triangles within the cortex label
cortex_triangles = triangles_keep_cortex(triangles, cortex)

return cortex_vertices, cortex_triangles
<SYSTEM_TASK:> Remove triangles with nodes not contained in the cortex label array <END_TASK> <USER_TASK:> Description: def triangles_keep_cortex(triangles, cortex): """ Remove triangles with nodes not contained in the cortex label array """
# for each face/triangle, keep only those whose nodes all lie within the list of cortex nodes
input_shape = triangles.shape
triangle_is_in_cortex = np.all(np.reshape(np.in1d(triangles.ravel(), cortex), input_shape), axis=1)

cortex_triangles_old = np.array(triangles[triangle_is_in_cortex], dtype=np.int32)

# reassign node indices before outputting triangles
new_index = np.digitize(cortex_triangles_old.ravel(), cortex, right=True)
cortex_triangles = np.array(np.arange(len(cortex))[new_index].reshape(cortex_triangles_old.shape), dtype=np.int32)

return cortex_triangles
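The np.digitize call is an index-compression trick: each surviving node id is looked up in the sorted cortex array, and its position there becomes its new index. A worked sketch with hypothetical node ids:

import numpy as np

cortex = np.array([2, 5, 7])            # original mesh ids kept
old_triangle = np.array([[5, 2, 7]])    # a triangle in original ids

# position of each old id within the sorted cortex array = new id
new_index = np.digitize(old_triangle.ravel(), cortex, right=True)
new_triangle = np.arange(len(cortex))[new_index].reshape(old_triangle.shape)
print(new_triangle)  # [[1 0 2]] -> node 5 became 1, 2 became 0, 7 became 2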
<SYSTEM_TASK:> Calculate exact geodesic distance along cortical surface from set of source nodes. <END_TASK> <USER_TASK:> Description: def dist_calc(surf, cortex, source_nodes): """ Calculate exact geodesic distance along cortical surface from set of source nodes. "dist_type" specifies whether to calculate "min", "mean", "median", or "max" distance values from a region-of-interest. If running only on single node, defaults to "min". """
cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)
translated_source_nodes = translate_src(source_nodes, cortex)
data = gdist.compute_gdist(cortex_vertices, cortex_triangles, source_indices=translated_source_nodes)
dist = recort(data, surf, cortex)
del data

return dist
<SYSTEM_TASK:> Calculate closest nodes to each source node using exact geodesic distance along the cortical surface. <END_TASK> <USER_TASK:> Description: def zone_calc(surf, cortex, src): """ Calculate closest nodes to each source node using exact geodesic distance along the cortical surface. """
cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)

dist_vals = np.zeros((len(src), len(cortex_vertices)))
for x in range(len(src)):
    translated_source_nodes = translate_src(src[x], cortex)
    dist_vals[x, :] = gdist.compute_gdist(cortex_vertices, cortex_triangles, source_indices=translated_source_nodes)

data = np.argsort(dist_vals, axis=0)[0, :] + 1

zone = recort(data, surf, cortex)
del data

return zone
<SYSTEM_TASK:> Loads a module by filename <END_TASK> <USER_TASK:> Description: def load_module(filename): """ Loads a module by filename """
basename = os.path.basename(filename)
path = os.path.dirname(filename)
sys.path.append(path)
# TODO(tlan) need to figure out how to handle errors thrown here
return __import__(os.path.splitext(basename)[0])
<SYSTEM_TASK:> Convert the machine list argument from a list of names into a mapping of logical names to <END_TASK> <USER_TASK:> Description: def make_machine_mapping(machine_list): """ Convert the machine list argument from a list of names into a mapping of logical names to physical hosts. This is similar to the _parse_configs function but separated to provide the opportunity for extension and additional checking of machine access """
if machine_list is None:
    return {}
else:
    mapping = {}
    for pair in machine_list:
        if (constants.MACHINE_SEPARATOR not in pair) or (pair.count(constants.MACHINE_SEPARATOR) != 1):
            raise ValueError("machine pairs must be passed as two strings separated by a %s"
                             % constants.MACHINE_SEPARATOR)
        (logical, physical) = pair.split(constants.MACHINE_SEPARATOR)
        # add checks for reachability
        mapping[logical] = physical
    return mapping
<SYSTEM_TASK:> Parse a list of configuration properties separated by '=' <END_TASK> <USER_TASK:> Description: def parse_config_list(config_list): """ Parse a list of configuration properties separated by '=' """
if config_list is None:
    return {}
else:
    mapping = {}
    for pair in config_list:
        if (constants.CONFIG_SEPARATOR not in pair) or (pair.count(constants.CONFIG_SEPARATOR) != 1):
            raise ValueError("configs must be passed as two strings separated by a %s"
                             % constants.CONFIG_SEPARATOR)
        (config, value) = pair.split(constants.CONFIG_SEPARATOR)
        mapping[config] = value
    return mapping
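A usage sketch of both parsers' expected input/output shape, assuming the separators are '@' for machines and '=' for configs (the actual values live in the constants module):

# machine_list entries look like "logical<SEP>physical"
machine_list = ["web01@192.168.1.10", "db01@192.168.1.20"]
print(dict(pair.split("@") for pair in machine_list))
# {'web01': '192.168.1.10', 'db01': '192.168.1.20'}

# config_list entries look like "key<SEP>value"
config_list = ["retries=3", "timeout=30"]
print(dict(pair.split("=") for pair in config_list))
# {'retries': '3', 'timeout': '30'}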
<SYSTEM_TASK:> Deploys the service to the host. This should at least perform the same actions as install and start <END_TASK> <USER_TASK:> Description: def deploy(self, unique_id, configs=None): """Deploys the service to the host. This should at least perform the same actions as install and start but may perform additional tasks as needed. :Parameter unique_id: the name of the process :Parameter configs: a map of configs the deployer may use to modify the deployment """
self.install(unique_id, configs)
self.start(unique_id, configs)
<SYSTEM_TASK:> Undeploys the service. This should at least perform the same actions as stop and uninstall <END_TASK> <USER_TASK:> Description: def undeploy(self, unique_id, configs=None): """Undeploys the service. This should at least perform the same actions as stop and uninstall but may perform additional tasks as needed. :Parameter unique_id: the name of the process :Parameter configs: a map of configs the deployer may use """
self.stop(unique_id, configs)
self.uninstall(unique_id, configs)
<SYSTEM_TASK:> Pauses the process for the specified delay and then resumes it <END_TASK> <USER_TASK:> Description: def sleep(self, unique_id, delay, configs=None): """ Pauses the process for the specified delay and then resumes it :Parameter unique_id: the name of the process :Parameter delay: delay time in seconds """
self.pause(unique_id, configs)
time.sleep(delay)
self.resume(unique_id, configs)
<SYSTEM_TASK:> Issues a sigstop for the specified process <END_TASK> <USER_TASK:> Description: def pause(self, unique_id, configs=None): """ Issues a sigstop for the specified process :Parameter unique_id: the name of the process """
pids = self.get_pid(unique_id, configs)
if pids != constants.PROCESS_NOT_RUNNING_PID:
    pid_str = ' '.join(str(pid) for pid in pids)
    hostname = self.processes[unique_id].hostname
    with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        better_exec_command(ssh, "kill -SIGSTOP {0}".format(pid_str), "PAUSING PROCESS {0}".format(unique_id))
<SYSTEM_TASK:> Issues a signal for the specified process <END_TASK> <USER_TASK:> Description: def _send_signal(self, unique_id, signalno, configs): """ Issues a signal for the specified process :Parameter unique_id: the name of the process """
pids = self.get_pid(unique_id, configs)
if pids != constants.PROCESS_NOT_RUNNING_PID:
    pid_str = ' '.join(str(pid) for pid in pids)
    hostname = self.processes[unique_id].hostname
    msg = Deployer._signalnames.get(signalno, "SENDING SIGNAL %s TO" % signalno)
    with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        better_exec_command(ssh, "kill -{0} {1}".format(signalno, pid_str), "{0} PROCESS {1}".format(msg, unique_id))
<SYSTEM_TASK:> Issues a sigcont for the specified process <END_TASK> <USER_TASK:> Description: def resume(self, unique_id, configs=None): """ Issues a sigcont for the specified process :Parameter unique_id: the name of the process """
self._send_signal(unique_id, signal.SIGCONT, configs)
<SYSTEM_TASK:> Issues a kill -15 to the specified process <END_TASK> <USER_TASK:> Description: def terminate(self, unique_id, configs=None): """ Issues a kill -15 to the specified process :Parameter unique_id: the name of the process """
self._send_signal(unique_id, signal.SIGTERM, configs)
<SYSTEM_TASK:> Issue a signal to hangup the specified process <END_TASK> <USER_TASK:> Description: def hangup(self, unique_id, configs=None): """ Issue a signal to hangup the specified process :Parameter unique_id: the name of the process """
self._send_signal(unique_id, signal.SIGHUP, configs)
<SYSTEM_TASK:> deprecated name for fetch_logs <END_TASK> <USER_TASK:> Description: def get_logs(self, unique_id, logs, directory, pattern=constants.FILTER_NAME_ALLOW_NONE): """deprecated name for fetch_logs"""
self.fetch_logs(unique_id, logs, directory, pattern)
<SYSTEM_TASK:> Copies logs from the remote host that the process is running on to the provided directory <END_TASK> <USER_TASK:> Description: def fetch_logs(self, unique_id, logs, directory, pattern=constants.FILTER_NAME_ALLOW_NONE): """ Copies logs from the remote host that the process is running on to the provided directory :Parameter unique_id the unique_id of the process in question :Parameter logs a list of logs given by absolute path from the remote host :Parameter directory the local directory to store the copied logs :Parameter pattern a pattern to apply to files to restrict the set of logs copied """
hostname = self.processes[unique_id].hostname
install_path = self.processes[unique_id].install_path
self.fetch_logs_from_host(hostname, install_path, unique_id, logs, directory, pattern)
<SYSTEM_TASK:> Static method Copies logs from specified host on the specified install path <END_TASK> <USER_TASK:> Description: def fetch_logs_from_host(hostname, install_path, prefix, logs, directory, pattern): """ Static method Copies logs from specified host on the specified install path :Parameter hostname the remote host from where we need to fetch the logs :Parameter install_path path where the app is installed :Parameter prefix prefix used to copy logs. Generally the unique_id of the process :Parameter logs a list of logs given by absolute path from the remote host :Parameter directory the local directory to store the copied logs :Parameter pattern a pattern to apply to files to restrict the set of logs copied """
if hostname is not None:
    with get_sftp_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ftp:
        for f in logs:
            try:
                mode = ftp.stat(f).st_mode
            except IOError as e:
                if e.errno == errno.ENOENT:
                    logger.error("Log file " + f + " does not exist on " + hostname)
            else:
                copy_dir(ftp, f, directory, prefix)
        if install_path is not None:
            copy_dir(ftp, install_path, directory, prefix, pattern)
<SYSTEM_TASK:> Gets the pid of the process with `unique_id`. If the deployer does not know of a process <END_TASK> <USER_TASK:> Description: def get_pid(self, unique_id, configs=None): """Gets the pid of the process with `unique_id`. If the deployer does not know of a process with `unique_id` then it should return a value of constants.PROCESS_NOT_RUNNING_PID """
RECV_BLOCK_SIZE = 16

# The following is necessary to set the configs for this function as the
# combination of the default configurations and the parameter, with the
# parameter superseding the defaults but not modifying them.
if configs is None:
    configs = {}
tmp = self.default_configs.copy()
tmp.update(configs)
configs = tmp

if unique_id in self.processes:
    hostname = self.processes[unique_id].hostname
else:
    return constants.PROCESS_NOT_RUNNING_PID

if self.processes[unique_id].start_command is None:
    return constants.PROCESS_NOT_RUNNING_PID

if self.processes[unique_id].pid_file is not None:
    with open_remote_file(hostname, self.processes[unique_id].pid_file,
                          username=runtime.get_username(), password=runtime.get_password()) as pid_file:
        full_output = pid_file.read()
elif 'pid_file' in configs.keys():
    with open_remote_file(hostname, configs['pid_file'],
                          username=runtime.get_username(), password=runtime.get_password()) as pid_file:
        full_output = pid_file.read()
else:
    pid_keyword = self.processes[unique_id].start_command
    if self.processes[unique_id].args is not None:
        pid_keyword = "{0} {1}".format(pid_keyword, ' '.join(self.processes[unique_id].args))
    pid_keyword = configs.get('pid_keyword', pid_keyword)
    # TODO(jehrlich): come up with a simpler approach to this
    pid_command = "ps aux | grep '{0}' | grep -v grep | tr -s ' ' | cut -d ' ' -f 2 | grep -Eo '[0-9]+'".format(pid_keyword)
    pid_command = configs.get('pid_command', pid_command)
    non_failing_command = "{0}; if [ $? -le 1 ]; then true; else false; fi;".format(pid_command)
    env = configs.get("env", {})
    with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        chan = exec_with_env(ssh, non_failing_command, msg="Failed to get PID", env=env)
    output = chan.recv(RECV_BLOCK_SIZE)
    full_output = output
    while len(output) > 0:
        output = chan.recv(RECV_BLOCK_SIZE)
        full_output += output

if len(full_output) > 0:
    pids = [int(pid_str) for pid_str in full_output.split('\n') if pid_str.isdigit()]
    if len(pids) > 0:
        return pids

return constants.PROCESS_NOT_RUNNING_PID
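A worked sketch of the default pid_command composition, with a hypothetical start command; in practice pid_keyword comes from the process registry and can be overridden through configs:

pid_keyword = "java -jar myservice.jar"  # hypothetical start command
pid_command = ("ps aux | grep '{0}' | grep -v grep | tr -s ' ' "
               "| cut -d ' ' -f 2 | grep -Eo '[0-9]+'").format(pid_keyword)
print(pid_command)
# list processes, keep lines matching the start command, drop the grep
# itself, squeeze runs of spaces, take column 2 (the PID), digits only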
<SYSTEM_TASK:> Gets the host of the process with `unique_id`. If the deployer does not know of a process <END_TASK> <USER_TASK:> Description: def get_host(self, unique_id): """Gets the host of the process with `unique_id`. If the deployer does not know of a process with `unique_id` then it should return a value of SOME_SENTINAL_VALUE :Parameter unique_id: the name of the process :raises NameError if the name is not valid process """
if unique_id in self.processes:
    return self.processes[unique_id].hostname

logger.error("{0} not a known process".format(unique_id))
raise NameError("{0} not a known process".format(unique_id))
<SYSTEM_TASK:> Terminates all the running processes. By default it is set to false. <END_TASK> <USER_TASK:> Description: def kill_all_process(self): """ Terminates all the running processes. The cleanup_pending_process config is false by default; users can set it to true once get_pid works deterministically, either via a pid_file or an accurate keyword. """
if runtime.get_active_config("cleanup_pending_process", False):
    for process in self.get_processes():
        self.terminate(process.unique_id)
<SYSTEM_TASK:> Converts a string to the corresponding log level <END_TASK> <USER_TASK:> Description: def string_to_level(log_level): """ Converts a string to the corresponding log level """
log_level = log_level.strip().upper()
if log_level == "DEBUG":
    return logging.DEBUG
if log_level == "INFO":
    return logging.INFO
if log_level == "WARNING":
    return logging.WARNING
if log_level == "ERROR":
    return logging.ERROR
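Usage sketch: the string is normalized (stripped and upper-cased) before matching, and an unrecognized level falls through every branch and returns None:

import logging

assert string_to_level("  debug ") == logging.DEBUG
assert string_to_level("ERROR") == logging.ERROR
assert string_to_level("verbose") is None  # no matching branch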
<SYSTEM_TASK:> Update the dataset access type for a given dataset <END_TASK> <USER_TASK:> Description: def execute ( self, conn, dataset, dataset_access_type, transaction=False ): """ Update the dataset access type for a given dataset """
if not conn:
    dbsExceptionHandler("dbsException-failed-connect2host",
                        "Oracle/Dataset/UpdateType. Expects db connection from upper layer.",
                        self.logger.exception)

binds = {"dataset": dataset, "dataset_access_type": dataset_access_type,
         "myuser": dbsUtils().getCreateBy(), "mydate": dbsUtils().getTime()}
result = self.dbi.processData(self.sql, binds, conn, transaction)
<SYSTEM_TASK:> To check if a string has the required format. This is only used for POST APIs. <END_TASK> <USER_TASK:> Description: def validateStringInput(input_key,input_data, read=False): """ To check if a string has the required format. This is only used for POST APIs. """
log = clog.error_log
func = None
if '*' in input_data or '%' in input_data:
    func = validationFunctionWildcard.get(input_key)
    if func is None:
        func = searchstr
elif input_key == 'migration_input':
    if input_data.find('#') != -1:
        func = block
    else:
        func = dataset
else:
    if not read:
        func = validationFunction.get(input_key)
        if func is None:
            func = namestr
    else:
        if input_key == 'dataset':
            func = reading_dataset_check
        elif input_key == 'block_name':
            func = reading_block_check
        elif input_key == 'logical_file_name':
            func = reading_lfn_check
        else:
            func = namestr
try:
    func(input_data)
except AssertionError as ae:
    serverLog = str(ae) + " key-value pair (%s, %s) cannot pass input checking" % (input_key, input_data)
    dbsExceptionHandler("dbsException-invalid-input2",
                        message="Invalid Input Data %s...: Not Match Required Format" % input_data[:10],
                        logger=log.error, serverError=serverLog)
return input_data
<SYSTEM_TASK:> Check the current request and block it if the IP address it's <END_TASK> <USER_TASK:> Description: def block_before(self): """ Check the current request and block it if the IP address it's coming from is blacklisted. """
# To avoid unnecessary database queries, ignore the IP check for
# requests for static files
if request.path.startswith(url_for('static', filename='')):
    return

# Some static files might be served from the root path (e.g.
# favicon.ico, robots.txt, etc.). Ignore the IP check for the most
# common extensions of those files.
ignored_extensions = ('ico', 'png', 'txt', 'xml')
if request.path.rsplit('.', 1)[-1] in ignored_extensions:
    return

ips = request.headers.getlist('X-Forwarded-For')
if not ips:
    return

# If the X-Forwarded-For header contains multiple comma-separated
# IP addresses, we're only interested in the last one.
ip = ips[0].strip()
if ip[-1] == ',':
    ip = ip[:-1]
ip = ip.rsplit(',', 1)[-1].strip()

if self.matches_ip(ip):
    if self.logger is not None:
        self.logger.info("IPBlock: matched {}, {}".format(ip, self.block_msg))
    if self.blocking_enabled:
        return 'IP Blocked', 200
<SYSTEM_TASK:> Return True if the given IP is blacklisted, False otherwise. <END_TASK> <USER_TASK:> Description: def matches_ip(self, ip): """Return True if the given IP is blacklisted, False otherwise."""
# Check the cache if caching is enabled
if self.cache is not None:
    matches_ip = self.cache.get(ip)
    if matches_ip is not None:
        return matches_ip

# Query MongoDB to see if the IP is blacklisted
matches_ip = IPNetwork.matches_ip(ip, read_preference=self.read_preference)

# Cache the result if caching is enabled
if self.cache is not None:
    self.cache[ip] = matches_ip

return matches_ip
<SYSTEM_TASK:> Utility function that compares blocks of a dataset at source and dst <END_TASK> <USER_TASK:> Description: def processDatasetBlocks(self, url, conn, inputdataset, order_counter): """ Utility function that compares blocks of a dataset at source and dst and returns an ordered list of blocks not already at dst for migration """
ordered_dict = {}
srcblks = self.getSrcBlocks(url, dataset=inputdataset)
if len(srcblks) == 0:
    e = "DBSMigration: No blocks in the required dataset %s found at source %s." % (inputdataset, url)
    dbsExceptionHandler('dbsException-invalid-input2', e, self.logger.exception, e)
dstblks = self.blocklist.execute(conn, dataset=inputdataset)
self.logger.debug("******* dstblks for dataset %s ***********" % inputdataset)
self.logger.debug(dstblks)
blocksInSrcNames = [y['block_name'] for y in srcblks]
blocksInDstNames = []
for item in dstblks:
    blocksInDstNames.append(item['block_name'])
ordered_dict[order_counter] = []
for ablk in blocksInSrcNames:
    if ablk not in blocksInDstNames:
        ordered_dict[order_counter].append(ablk)
if ordered_dict[order_counter] != []:
    self.logger.debug("**** ordered_dict dict length ****")
    self.logger.debug(len(ordered_dict))
    return ordered_dict
else:
    return {}
<SYSTEM_TASK:> Method to remove pending or failed migration request from the queue. <END_TASK> <USER_TASK:> Description: def removeMigrationRequest(self, migration_rqst): """ Method to remove pending or failed migration request from the queue. """
conn = self.dbi.connection()
try:
    tran = conn.begin()
    self.mgrremove.execute(conn, migration_rqst)
    tran.commit()
except dbsException as he:
    if conn:
        conn.close()
    raise
except Exception as ex:
    if conn:
        conn.close()
    raise
if conn:
    conn.close()
<SYSTEM_TASK:> get everything of blocks that have status = 0 and migration_request_id as specified. <END_TASK> <USER_TASK:> Description: def listMigrationBlocks(self, migration_request_id=""): """ get everything of blocks that have status = 0 and migration_request_id as specified. """
conn = self.dbi.connection()
try:
    return self.mgrblklist.execute(conn, migration_request_id=migration_request_id)
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Need to list all blocks of the dataset and its parents starting from the top <END_TASK> <USER_TASK:> Description: def getSrcBlocks(self, url, dataset="", block=""): """ Need to list all blocks of the dataset and its parents starting from the top For now just list the blocks from this dataset. Client type call... """
if block:
    params = {'block_name': block, 'open_for_writing': 0}
elif dataset:
    params = {'dataset': dataset, 'open_for_writing': 0}
else:
    m = 'DBSMigration: Invalid input. Either block or dataset name has to be provided'
    e = 'DBSMigrate/getSrcBlocks: Invalid input. Either block or dataset name has to be provided'
    dbsExceptionHandler('dbsException-invalid-input2', m, self.logger.exception, e)
return cjson.decode(self.callDBSService(url, 'blocks', params, {}))
<SYSTEM_TASK:> build dynamic sql based on daoinput <END_TASK> <USER_TASK:> Description: def executeSingle( self, conn, daoinput, tablename, transaction = False): """build dynamic sql based on daoinput"""
sql1 = " insert into %s%s( " %(self.owner, tablename) sql2 =" values(" "Now loop over all the input keys. We need to check if all the keys are valid !!!" for key in daoinput: sql1 += "%s," %key.upper() sql2 += ":%s," %key.lower() sql = sql1.strip(',') + ') ' + sql2.strip(',') + ' )' self.dbi.processData(sql, daoinput, conn, transaction)
<SYSTEM_TASK:> Create a list for the 'install_requires' component of the setup function <END_TASK> <USER_TASK:> Description: def parse_requirements(requirements_file): """ Create a list for the 'install_requires' component of the setup function by parsing a requirements file """
if os.path.exists(requirements_file):
    # return a list that contains each line of the requirements file
    return open(requirements_file, 'r').read().splitlines()
else:
    print("ERROR: requirements file " + requirements_file + " not found.")
    sys.exit(1)
<SYSTEM_TASK:> Lists all primary dataset types if no user input is provided. <END_TASK> <USER_TASK:> Description: def execute(self, conn, dsType = "", dataset="", transaction = False): """ Lists all primary dataset types if no user input is provided. """
sql = self.sql
binds = {}
if not dsType and not dataset:
    pass
elif dsType and dataset in ("", None, '%'):
    op = ("=", "like")["%" in dsType]
    sql += "WHERE PDT.PRIMARY_DS_TYPE %s :primdstype" % op
    binds = {"primdstype": dsType}
elif dataset and dsType in ("", None, '%'):
    op = ("=", "like")["%" in dataset]
    sql += "JOIN %sPRIMARY_DATASETS PDS on PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID \
            JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID \
            WHERE DS.DATASET %s :dataset" % (self.owner, self.owner, op)
    binds = {"dataset": dataset}
elif dataset and dsType:
    op = ("=", "like")["%" in dsType]
    op1 = ("=", "like")["%" in dataset]
    sql += "JOIN %sPRIMARY_DATASETS PDS on PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID \
            JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID \
            WHERE DS.DATASET %s :dataset and PDT.PRIMARY_DS_TYPE %s :primdstype" \
            % (self.owner, self.owner, op1, op)
    binds = {"primdstype": dsType, "dataset": dataset}
else:
    dbsExceptionHandler('dbsException-invalid-input',
                        "DAO Primary_DS_TYPE List accepts no input, or dataset, primary_ds_type as input.",
                        self.logger.exception)

cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
result = []
for c in cursors:
    result.extend(self.formatCursor(c, size=100))
return result
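The recurring ("=", "like")["%" in value] construct in these DAOs indexes a two-element tuple with a boolean, switching the SQL operator when a wildcard is present:

for value in ("mc", "mc%"):
    op = ("=", "like")["%" in value]  # False -> index 0 -> "=", True -> index 1 -> "like"
    print(value, "->", op)
# mc -> =
# mc% -> like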
<SYSTEM_TASK:> List release versions <END_TASK> <USER_TASK:> Description: def listReleaseVersions(self, release_version="", dataset='', logical_file_name=''): """ List release versions """
if dataset and ('%' in dataset or '*' in dataset):
    dbsExceptionHandler('dbsException-invalid-input',
                        " DBSReleaseVersion/listReleaseVersions. No wildcards are allowed in dataset.\n.")
if logical_file_name and ('%' in logical_file_name or '*' in logical_file_name):
    dbsExceptionHandler('dbsException-invalid-input',
                        " DBSReleaseVersion/listReleaseVersions. No wildcards are allowed in logical_file_name.\n.")

conn = self.dbi.connection()
try:
    plist = self.releaseVersion.execute(conn, release_version.upper(), dataset, logical_file_name)
    result = [{}]
    if plist:
        t = []
        for i in plist:
            for k, v in i.iteritems():
                t.append(v)
        result[0]['release_version'] = t
    return result
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Get CA Path to check the validity of the server host certificate on the client side <END_TASK> <USER_TASK:> Description: def __search_ca_path(self): """ Get CA Path to check the validity of the server host certificate on the client side """
if "X509_CERT_DIR" in os.environ: self._ca_path = os.environ['X509_CERT_DIR'] elif os.path.exists('/etc/grid-security/certificates'): self._ca_path = '/etc/grid-security/certificates' else: raise ClientAuthException("Could not find a valid CA path")
<SYSTEM_TASK:> Authorization function for general insert <END_TASK> <USER_TASK:> Description: def authInsert(user, role, group, site): """ Authorization function for general insert """
if not role:
    return True
for k, v in user['roles'].iteritems():
    for g in v['group']:
        if k in role.get(g, '').split(':'):
            return True
return False
<SYSTEM_TASK:> takes required dataset parameter <END_TASK> <USER_TASK:> Description: def listDatasetParents(self, dataset=""): """ takes required dataset parameter returns only parent dataset name """
if( dataset == "" ): dbsExceptionHandler("dbsException-invalid-input", "DBSDataset/listDatasetParents. Child Dataset name is required.") conn = self.dbi.connection() try: result = self.datasetparentlist.execute(conn, dataset) return result finally: if conn: conn.close()
<SYSTEM_TASK:> takes required dataset parameter <END_TASK> <USER_TASK:> Description: def listDatasetChildren(self, dataset): """ takes required dataset parameter returns only children dataset name """
if( dataset == "" ): dbsExceptionHandler("dbsException-invalid-input", "DBSDataset/listDatasetChildren. Parent Dataset name is required.") conn = self.dbi.connection() try: result = self.datasetchildlist.execute(conn, dataset) return result finally: if conn: conn.close()
<SYSTEM_TASK:> lists all datasets if dataset parameter is not given. <END_TASK> <USER_TASK:> Description: def listDatasets(self, dataset="", parent_dataset="", is_dataset_valid=1, release_version="", pset_hash="", app_name="", output_module_label="", global_tag="", processing_version=0, acquisition_era="", run_num=-1, physics_group_name="", logical_file_name="", primary_ds_name="", primary_ds_type="", processed_ds_name="", data_tier_name="", dataset_access_type="VALID", prep_id="", create_by='', last_modified_by='', min_cdate=0, max_cdate=0, min_ldate=0, max_ldate=0, cdate=0, ldate=0, detail=False, dataset_id=-1): """ lists all datasets if dataset parameter is not given. The parameter can include % character. all other parameters are not wild card ones. """
if logical_file_name and logical_file_name.find("%") != -1:
    dbsExceptionHandler('dbsException-invalid-input',
                        'DBSDataset/listDatasets API requires a fully qualified logical_file_name. '
                        'No wildcard is allowed in logical_file_name.')
if dataset and dataset.find("/%/%/%") != -1:
    dataset = ''
with self.dbi.connection() as conn:
    dao = (self.datasetbrieflist, self.datasetlist)[detail]
    if dataset_access_type:
        dataset_access_type = dataset_access_type.upper()
    if data_tier_name:
        data_tier_name = data_tier_name.upper()
    #if processing_version: processing_version = processing_version.upper()
    #if acquisition_era: acquisition_era = acquisition_era.upper()
    for item in dao.execute(conn, dataset, is_dataset_valid, parent_dataset,
                            release_version, pset_hash, app_name, output_module_label,
                            global_tag, processing_version, acquisition_era, run_num,
                            physics_group_name, logical_file_name, primary_ds_name,
                            primary_ds_type, processed_ds_name, data_tier_name,
                            dataset_access_type, prep_id, create_by, last_modified_by,
                            min_cdate, max_cdate, min_ldate, max_ldate,
                            cdate, ldate, dataset_id):
        yield item
<SYSTEM_TASK:> Update origin_site_name for a given block_name <END_TASK> <USER_TASK:> Description: def execute(self, conn, block_name, origin_site_name, transaction=False): """ Update origin_site_name for a given block_name """
if not conn:
    dbsExceptionHandler("dbsException-failed-connect2host",
                        "Oracle/Block/UpdateStatus. Expects db connection from upper layer.",
                        self.logger.exception)
binds = {"block_name": block_name, "origin_site_name": origin_site_name,
         "mtime": dbsUtils().getTime(), "myuser": dbsUtils().getCreateBy()}
self.dbi.processData(self.sql, binds, conn, transaction)
<SYSTEM_TASK:> increments the sequence `seqName`, by default by one <END_TASK> <USER_TASK:> Description: def increment(self, conn, seqName, transaction = False, incCount=1): """ increments the sequence `seqName`, by default by one, and returns its value """
try:
    seqTable = "%sS" % seqName
    tlock = "lock tables %s write" % seqTable
    self.dbi.processData(tlock, [], conn, transaction)
    sql = "select ID from %s" % seqTable
    result = self.dbi.processData(sql, [], conn, transaction)
    resultlist = self.formatDict(result)
    newSeq = resultlist[0]['id'] + incCount
    sql = "UPDATE %s SET ID=:seq_count" % seqTable
    seqparms = {"seq_count": newSeq}
    self.dbi.processData(sql, seqparms, conn, transaction)
    tunlock = "unlock tables"
    self.dbi.processData(tunlock, [], conn, transaction)
    return newSeq
except:
    # FIXME: always release the table lock, then re-raise
    tunlock = "unlock tables"
    self.dbi.processData(tunlock, [], conn, transaction)
    raise
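A sketch of the statement sequence issued for a hypothetical sequence named SEQ_BK with incCount=1 (the table name is seqName plus a trailing S); the write lock makes the read-modify-write atomic across concurrent callers:

seqName, incCount, current_id = "SEQ_BK", 1, 41  # hypothetical values
seqTable = "%sS" % seqName
print("lock tables %s write" % seqTable)   # lock tables SEQ_BKS write
print("select ID from %s" % seqTable)      # suppose this returns 41
new_seq = current_id + incCount            # 42
print("UPDATE %s SET ID=:seq_count" % seqTable)  # bound with seq_count=42
print("unlock tables")                     # increment() then returns 42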
<SYSTEM_TASK:> List runs known to DBS. <END_TASK> <USER_TASK:> Description: def listRuns(self, run_num=-1, logical_file_name="", block_name="", dataset=""): """ List runs known to DBS. """
if '%' in logical_file_name or '%' in block_name or '%' in dataset:
    dbsExceptionHandler('dbsException-invalid-input',
                        " DBSDatasetRun/listRuns. No wildcards are allowed in logical_file_name, block_name or dataset.\n.")
conn = self.dbi.connection()
tran = False
try:
    ret = self.runlist.execute(conn, run_num, logical_file_name, block_name, dataset, tran)
    result = []
    rnum = []
    for i in ret:
        rnum.append(i['run_num'])
    result.append({'run_num': rnum})
    return result
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> API to update file status <END_TASK> <USER_TASK:> Description: def updateFile(self, logical_file_name=[], is_file_valid=1, lost=0, dataset=''): """ API to update file status :param logical_file_name: logical_file_name to update (optional), but must have either a fln or a dataset :type logical_file_name: str :param is_file_valid: valid=1, invalid=0 (Required) :type is_file_valid: bool :param lost: default lost=0 (optional) :type lost: bool :param dataset: default dataset='' (optional),but must have either a fln or a dataset :type dataset: basestring """
if lost in [1, True, 'True', 'true', '1', 'y', 'yes']:
    lost = 1
    if is_file_valid in [1, True, 'True', 'true', '1', 'y', 'yes']:
        dbsExceptionHandler("dbsException-invalid-input2",
                            dbsExceptionCode["dbsException-invalid-input2"],
                            self.logger.exception,
                            "Lost file must be set to invalid")
else:
    lost = 0

for f in logical_file_name, dataset:
    if '*' in f or '%' in f:
        dbsExceptionHandler("dbsException-invalid-input2",
                            dbsExceptionCode["dbsException-invalid-input2"],
                            self.logger.exception,
                            "No wildcard allowed in LFN or dataset for updatefile API.")
try:
    self.dbsFile.updateStatus(logical_file_name, is_file_valid, lost, dataset)
except HTTPError as he:
    raise he
except Exception as ex:
    sError = "DBSWriterModel/updateFile. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error',
                        dbsExceptionCode['dbsException-server-error'],
                        self.logger.exception, sError)
<SYSTEM_TASK:> Returns a queryset with matching IPNetwork objects for the given IP. <END_TASK> <USER_TASK:> Description: def qs_for_ip(cls, ip_str): """ Returns a queryset with matching IPNetwork objects for the given IP. """
ip = int(netaddr.IPAddress(ip_str))
# ignore IPv6 addresses for now (4294967295 is 0xffffffff, aka the
# biggest 32-bit number)
if ip > 4294967295:
    return cls.objects.none()
ip_range_query = {
    'start__lte': ip,
    'stop__gte': ip
}
return cls.objects.filter(**ip_range_query)
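A quick illustration of the integer conversion the range query relies on (netaddr is the same package the method uses):

import netaddr

ip = int(netaddr.IPAddress('10.0.0.1'))
print(ip)                 # 167772161 == 10 * 2**24 + 1
print(ip <= 0xffffffff)   # True: fits in 32 bits, so the range query runs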
<SYSTEM_TASK:> Return True if provided IP exists in the blacklist and doesn't exist <END_TASK> <USER_TASK:> Description: def matches_ip(cls, ip_str, read_preference=None): """ Return True if provided IP exists in the blacklist and doesn't exist in the whitelist. Otherwise, return False. """
qs = cls.qs_for_ip(ip_str).only('whitelist')
if read_preference:
    qs = qs.read_preference(read_preference)
# Return True if any docs match the IP and none of them represent
# a whitelist
return bool(qs) and not any(obj.whitelist for obj in qs)
<SYSTEM_TASK:> This utility function handles all dbs exceptions. It will log and raise exceptions <END_TASK> <USER_TASK:> Description: def dbsExceptionHandler(eCode='', message='', logger=None , serverError=''): """ This utility function handles all dbs exceptions. It will log and raise exceptions based on the input condition. It logs the traceback on the server log. Sends HTTPError 400 for invalid client input and HTTPError 404 for a NOT FOUND required pre-existing condition. """
if logger:
    # HTTP Error
    if eCode == "dbsException-invalid-input":
        raise HTTPError(400, message)
    elif eCode == "dbsException-missing-data":
        logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
        raise HTTPError(412, message)
    elif eCode == "dbsException-input-too-large":
        logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
        raise HTTPError(413, message)
    elif eCode == "dbsException-invalid-input2":
        logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
        raise HTTPError(400, message)
    elif eCode == "dbsException-conflict-data":
        logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
        raise HTTPError(409, message)
    elif eCode == "dbsException-failed-connect2host":
        logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
        raise HTTPError(443, message)
    else:
        # client gets HTTPError 500 for a server internal error
        logger(time.asctime(time.gmtime()) + " " + eCode + ": " + serverError)
        raise HTTPError(500, message)
else:
    # not an HTTP Error
    raise dbsException(eCode, message, serverError)
<SYSTEM_TASK:> configure pycurl proxy settings <END_TASK> <USER_TASK:> Description: def configure_proxy(self, curl_object): """configure pycurl proxy settings"""
curl_object.setopt(curl_object.PROXY, self._proxy_hostname)
curl_object.setopt(curl_object.PROXYPORT, self._proxy_port)
curl_object.setopt(curl_object.PROXYTYPE, curl_object.PROXYTYPE_SOCKS5)

if self._proxy_user and self._proxy_passwd:
    curl_object.setopt(curl_object.PROXYUSERPWD,
                       '%s:%s' % (self._proxy_user, self._proxy_passwd))
<SYSTEM_TASK:> Update the end date of a given acquisition era <END_TASK> <USER_TASK:> Description: def execute(self, conn, acquisition_era_name,end_date, transaction = False): """ Update the end date of a given acquisition era """
if not conn:
    dbsExceptionHandler("dbsException-failed-connect2host",
                        "dbs/dao/Oracle/AcquisitionEra/updateEndDate expects db connection from upper layer.",
                        self.logger.exception)

binds = {"acquisition_era_name": acquisition_era_name, "end_date": end_date}
result = self.dbi.processData(self.sql, binds, conn, transaction)
<SYSTEM_TASK:> Used to toggle the status of a block open_for_writing=1, open for writing, open_for_writing=0, closed <END_TASK> <USER_TASK:> Description: def updateStatus(self, block_name="", open_for_writing=0): """ Used to toggle the status of a block open_for_writing=1, open for writing, open_for_writing=0, closed """
if open_for_writing not in [1, 0, '1', '0']:
    msg = "DBSBlock/updateStatus. open_for_writing can only be 0 or 1 : passed %s." % open_for_writing
    dbsExceptionHandler('dbsException-invalid-input', msg)
conn = self.dbi.connection()
trans = conn.begin()
try:
    open_for_writing = int(open_for_writing)
    self.updatestatus.execute(conn, block_name, open_for_writing, dbsUtils().getTime(), trans)
    trans.commit()
    trans = None
except Exception as ex:
    if trans:
        trans.rollback()
    if conn:
        conn.close()
    raise ex
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Update the origin_site_name for a given block name <END_TASK> <USER_TASK:> Description: def updateSiteName(self, block_name, origin_site_name): """ Update the origin_site_name for a given block name """
if not origin_site_name:
    dbsExceptionHandler('dbsException-invalid-input',
                        "DBSBlock/updateSiteName. origin_site_name is mandatory.")

conn = self.dbi.connection()
trans = conn.begin()
try:
    self.updatesitename.execute(conn, block_name, origin_site_name)
except:
    if trans:
        trans.rollback()
    raise
else:
    if trans:
        trans.commit()
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> dataset, block_name, data_tier_name or logical_file_name must be passed. <END_TASK> <USER_TASK:> Description: def listBlocks(self, dataset="", block_name="", data_tier_name="", origin_site_name="", logical_file_name="", run_num=-1, min_cdate=0, max_cdate=0, min_ldate=0, max_ldate=0, cdate=0, ldate=0, open_for_writing=-1, detail=False): """ dataset, block_name, data_tier_name or logical_file_name must be passed. """
if (not dataset) or re.search("[%*]", dataset):
    if (not block_name) or re.search("[%*]", block_name):
        if (not logical_file_name) or re.search("[%*]", logical_file_name):
            if (not data_tier_name) or re.search("[%*]", data_tier_name):
                msg = ("DBSBlock/listBlock. You must specify at least one parameter (dataset, block_name, "
                       "data_tier_name, logical_file_name) with listBlocks api")
                dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)

if data_tier_name:
    if not (min_cdate and max_cdate) or (max_cdate - min_cdate) > 32 * 24 * 3600:
        msg = ("min_cdate and max_cdate are mandatory parameters. If the data_tier_name parameter is used, "
               "the maximal time range allowed is 31 days")
        dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)
    if detail:
        msg = "DBSBlock/listBlock. Detail parameter not allowed together with data_tier_name"
        dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)

with self.dbi.connection() as conn:
    dao = (self.blockbrieflist, self.blocklist)[detail]
    for item in dao.execute(conn, dataset, block_name, data_tier_name, origin_site_name,
                            logical_file_name, run_num, min_cdate, max_cdate,
                            min_ldate, max_ldate, cdate, ldate):
        yield item
<SYSTEM_TASK:> Lists all sites types if site_name is not provided. <END_TASK> <USER_TASK:> Description: def execute(self, conn, site_name= "", transaction = False): """ Lists all sites types if site_name is not provided. """
sql = self.sql
if site_name == "":
    result = self.dbi.processData(sql, conn=conn, transaction=transaction)
else:
    sql += "WHERE S.SITE_NAME = :site_name"
    binds = {"site_name": site_name}
    result = self.dbi.processData(sql, binds, conn, transaction)
return self.formatDict(result)
<SYSTEM_TASK:> Get the blocks that need to be migrated <END_TASK> <USER_TASK:> Description: def getBlocks(self): """ Get the blocks that need to be migrated """
try:
    conn = self.dbi.connection()
    result = self.buflistblks.execute(conn)
    return result
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Get some files from the insert buffer <END_TASK> <USER_TASK:> Description: def getBufferedFiles(self, block_id): """ Get some files from the insert buffer """
try:
    conn = self.dbi.connection()
    result = self.buflist.execute(conn, block_id)
    return result
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> returns id for a given datatier name <END_TASK> <USER_TASK:> Description: def execute(self, conn, data_tier_name='', transaction = False, cache=None): """ returns id for a given datatier name """
if cache:
    ret = cache.get("DATA_TIERS")
    if ret is not None:
        return ret

sql = self.sql
binds = {}
if data_tier_name:
    op = ('=', 'like')['%' in data_tier_name]
    sql += "WHERE DT.DATA_TIER_NAME %s :datatier" % op
    binds = {"datatier": data_tier_name}

result = self.dbi.processData(sql, binds, conn, transaction)
plist = self.formatDict(result)
return plist
<SYSTEM_TASK:> Lists the oldest request queued <END_TASK> <USER_TASK:> Description: def execute(self, conn, migration_url="", migration_input="", create_by="", migration_request_id="", transaction=False): """ Lists the oldest request queued """
binds = {}
result = self.dbi.processData(self.sql, binds, conn, transaction)
result = self.formatDict(result)
if len(result) == 0:
    return []
if result[0]["migration_request_id"] in ('', None):
    return []
return result
<SYSTEM_TASK:> Returns all physics groups if physics group names are not passed. <END_TASK> <USER_TASK:> Description: def listPhysicsGroups(self, physics_group_name=""): """ Returns all physics groups if physics group names are not passed. """
if not isinstance(physics_group_name, basestring):
    dbsExceptionHandler('dbsException-invalid-input',
                        'physics group name given is not valid : %s' % physics_group_name)
else:
    try:
        physics_group_name = str(physics_group_name)
    except:
        dbsExceptionHandler('dbsException-invalid-input',
                            'physics group name given is not valid : %s' % physics_group_name)

conn = self.dbi.connection()
try:
    result = self.pglist.execute(conn, physics_group_name)
    return result
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Simple method that returns a list of all known DBS instances, instances known to this registry <END_TASK> <USER_TASK:> Description: def getServices(self): """ Simple method that returns a list of all known DBS instances, instances known to this registry """
try:
    conn = self.dbi.connection()
    result = self.serviceslist.execute(conn)
    return result
except Exception as ex:
    msg = (("%s DBSServicesRegistry/getServices."
            " %s\n. Exception trace: \n %s")
           % (DBSEXCEPTIONS['dbsException-3'], ex, traceback.format_exc()))
    self.logger.exception(msg)
    raise Exception("dbsException-3", msg)
finally:
    conn.close()
<SYSTEM_TASK:> Add a service to service registry <END_TASK> <USER_TASK:> Description: def addService(self): """ Add a service to service registry """
conn = self.dbi.connection()
tran = conn.begin()
try:
    body = request.body.read()
    service = cjson.decode(body)
    addthis = {}
    addthis['service_id'] = self.sm.increment(conn, "SEQ_RS", tran)
    addthis['name'] = service.get('NAME', '')
    if addthis['name'] == '':
        msg = (("%s DBSServicesRegistry/addServices."
                " Service Must be Named\n")
               % DBSEXCEPTIONS['dbsException-3'])
        raise Exception("dbsException-3", msg)
    addthis['type'] = service.get('TYPE', 'GENERIC')
    addthis['location'] = service.get('LOCATION', 'HYPERSPACE')
    addthis['status'] = service.get('STATUS', 'UNKNOWN')
    addthis['admin'] = service.get('ADMIN', 'UNADMINISTRATED')
    addthis['uri'] = service.get('URI', '')
    if addthis['uri'] == '':
        msg = (("%s DBSServicesRegistry/addServices."
                " Service URI must be provided.\n")
               % DBSEXCEPTIONS['dbsException-3'])
        self.logger.exception(msg)
        raise Exception("dbsException-3", msg)
    addthis['db'] = service.get('DB', 'NO_DATABASE')
    addthis['version'] = service.get('VERSION', 'UNKNOWN')
    addthis['last_contact'] = dbsUtils().getTime()
    addthis['comments'] = service.get('COMMENTS', 'NO COMMENTS')
    addthis['alias'] = service.get('ALIAS', 'No Alias')
    self.servicesadd.execute(conn, addthis, tran)
    tran.commit()
except exceptions.IntegrityError as ex:
    if (str(ex).find("unique constraint") != -1
            or str(ex).lower().find("duplicate") != -1):
        # Update the service instead
        try:
            self.servicesupdate.execute(conn, addthis, tran)
            tran.commit()
        except Exception as ex:
            msg = (("%s DBSServiceRegistry/addServices."
                    " %s\n. Exception trace: \n %s")
                   % (DBSEXCEPTIONS['dbsException-3'], ex, traceback.format_exc()))
            self.logger.exception(msg)
            raise Exception("dbsException-3", msg)
except Exception as ex:
    tran.rollback()
    msg = (("%s DBSServiceRegistry/addServices."
            " %s\n. Exception trace: \n %s")
           % (DBSEXCEPTIONS['dbsException-3'], ex, traceback.format_exc()))
    self.logger.exception(msg)
    raise Exception("dbsException-3", msg)
finally:
    conn.close()
<SYSTEM_TASK:> Lists all requests if pattern is not provided. <END_TASK> <USER_TASK:> Description: def execute(self, conn, migration_url="", migration_input="", create_by="", migration_request_id="", oldest= False, transaction=False): """ Lists all requests if pattern is not provided. """
sql = self.sql
binds = {}
if migration_request_id:
    sql += " WHERE MR.MIGRATION_REQUEST_ID=:migration_request_id"
    binds['migration_request_id'] = migration_request_id
elif oldest:
    # FIXME: Need to write the sql. YG
    # We require a waiting time before retrying:
    #   retry_count=0: 1 minute
    #   retry_count=1: 2 minutes
    #   retry_count=2: 4 minutes
    sql += """
           WHERE MR.MIGRATION_STATUS=0
           or (MR.migration_status=3 and MR.retry_count=0 and MR.last_modification_date <= :current_date-60)
           or (MR.migration_status=3 and MR.retry_count=1 and MR.last_modification_date <= :current_date-120)
           or (MR.migration_status=3 and MR.retry_count=2 and MR.last_modification_date <= :current_date-240)
           ORDER BY MR.creation_date
           """
    binds['current_date'] = dbsUtils().getTime()
else:
    if migration_url or migration_input or create_by:
        sql += " WHERE "
    if migration_url:
        sql += " MR.MIGRATION_URL=:migration_url"
        binds['migration_url'] = migration_url
    if migration_input:
        if migration_url:
            sql += " AND "
        op = ("=", "like")["%" in migration_input]
        sql += " MR.MIGRATION_INPUT %s :migration_input" % op
        binds['migration_input'] = migration_input
    if create_by:
        if migration_url or migration_input:
            sql += " AND "
        sql += " MR.CREATE_BY=:create_by"
        binds['create_by'] = create_by

cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)
result = []
for c in cursors:
    result.extend(self.formatCursor(c, size=100))
return result
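A small sketch of the retry back-off encoded in the oldest-request WHERE clause above: a failed request (status 3) becomes eligible again after 60 * 2**retry_count seconds:

def retry_wait_seconds(retry_count):
    # waiting time before a failed migration request is retried
    return 60 * 2 ** retry_count

for rc in (0, 1, 2):
    print(rc, retry_wait_seconds(rc))  # 0 60 / 1 120 / 2 240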
<SYSTEM_TASK:> Returns all primary dataset if primary_ds_name or primary_ds_type are not passed. <END_TASK> <USER_TASK:> Description: def listPrimaryDatasets(self, primary_ds_name="", primary_ds_type=""): """ Returns all primary dataset if primary_ds_name or primary_ds_type are not passed. """
conn = self.dbi.connection()
try:
    result = self.primdslist.execute(conn, primary_ds_name, primary_ds_type)
    return result
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Returns all primary dataset types if neither dataset nor primary_ds_type is passed. <END_TASK> <USER_TASK:> Description: def listPrimaryDSTypes(self, primary_ds_type="", dataset=""): """ Returns all primary dataset types if neither dataset nor primary_ds_type is passed. """
conn = self.dbi.connection()
try:
    return self.primdstypeList.execute(conn, primary_ds_type, dataset)
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Returns the ID for a given physics group name. <END_TASK> <USER_TASK:> Description: def execute(self, conn, name='', transaction=False): """ Returns the ID for a given physics group name. """
binds={} if name: op = ('=', 'like')['%' in name] sql = self.sql + " WHERE pg.physics_group_name %s :physicsgroup" % (op) binds = {"physicsgroup": name} else: sql = self.sql self.logger.debug(sql) result = self.dbi.processData(sql, binds, conn, transaction) plist = self.formatDict(result) self.logger.debug(plist) if len(plist) < 1: return [] return plist
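The operator selection above, ('=', 'like')['%' in name], indexes a two-element tuple with a boolean: exact match for plain names, LIKE when the caller passed an SQL wildcard. A self-contained illustration:

def like_or_eq(pattern):
    # False -> 0 -> '=', True -> 1 -> 'like'
    return ('=', 'like')['%' in pattern]

assert like_or_eq('Tracker') == '='
assert like_or_eq('Track%') == 'like'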
<SYSTEM_TASK:> API to get a list of supported REST APIs. In the case a particular API is specified, <END_TASK> <USER_TASK:> Description: def getHelp(self, call=""): """ API to get a list of supported REST APIs. In the case a particular API is specified, the docstring of that API is displayed. :param call: call to get detailed information about (Optional) :type call: str :return: List of APIs or detailed information about a specific call (parameters and docstring) :rtype: List of strings or a dictionary containing params and doc keys depending on the input parameter """
if call: params = self.methods['GET'][call]['args'] doc = self.methods['GET'][call]['call'].__doc__ return dict(params=params, doc=doc) else: return self.methods['GET'].keys()
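getHelp is a lookup into the server's dispatch table. A standalone mock of that lookup; the table shape is inferred from the code above, and the endpoint name 'datatiers' is a hypothetical example:

def sample_handler():
    """List data tiers."""

# Assumed shape of self.methods, based on the attribute accesses above.
methods = {'GET': {'datatiers': {'args': ('data_tier_name',),
                                 'call': sample_handler}}}

def get_help(call=""):
    if call:
        entry = methods['GET'][call]
        return dict(params=entry['args'], doc=entry['call'].__doc__)
    return list(methods['GET'].keys())

assert get_help() == ['datatiers']
assert get_help('datatiers')['doc'] == 'List data tiers.'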
<SYSTEM_TASK:> API to list primary datasets <END_TASK> <USER_TASK:> Description: def listPrimaryDatasets(self, primary_ds_name="", primary_ds_type=""): """ API to list primary datasets :param primary_ds_type: List primary datasets with primary dataset type (Optional) :type primary_ds_type: str :param primary_ds_name: List only that primary dataset (Optional) :type primary_ds_name: str :returns: List of dictionaries containing the following keys (create_by, primary_ds_type, primary_ds_id, primary_ds_name, creation_date) :rtype: list of dicts """
primary_ds_name = primary_ds_name.replace("*", "%") primary_ds_type = primary_ds_type.replace("*", "%") try: return self.dbsPrimaryDataset.listPrimaryDatasets(primary_ds_name, primary_ds_type) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message) except Exception as ex: sError = "DBSReaderModel/listPrimaryDatasets. %s\n Exception trace: \n %s." \ % (ex, traceback.format_exc() ) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list datasets in DBS. To be called by the datasetlist url with a POST call. <END_TASK> <USER_TASK:> Description: def listDatasetArray(self): """ API to list datasets in DBS. To be called by the datasetlist url with a POST call. :param dataset: list of datasets [dataset1,dataset2,..,datasetn] (must have either a list of dataset or dataset_id), Max length 1000. :type dataset: list :param dataset_id: list of dataset ids [dataset_id1,dataset_id2,..,dataset_idn, "dsid_min-dsid_max"] (must have either a list of dataset or dataset_id) :type dataset_id: list :param dataset_access_type: List only datasets with that dataset access type (Optional) :type dataset_access_type: str :param detail: brief list or detailed list 1/0 :type detail: bool :returns: List of dictionaries containing the following keys (dataset). If the detail option is used, the dictionary contains the following keys (primary_ds_name, physics_group_name, acquisition_era_name, create_by, dataset_access_type, data_tier_name, last_modified_by, creation_date, processing_version, processed_ds_name, xtcrosssection, last_modification_date, dataset_id, dataset, prep_id, primary_ds_type) :rtype: list of dicts """
ret = []
try:
    body = request.body.read()
    if body:
        data = cjson.decode(body)
        data = validateJSONInputNoCopy("dataset", data, read=True)
        #Because CMSWEB has a 300-second response limit, we have to cap the array size
        #to make sure that the API can finish within 300 seconds.
        # YG Nov-05-2015
        max_array_size = 1000
        if ( 'dataset' in data.keys() and isinstance(data['dataset'], list) and len(data['dataset'])>max_array_size)\
            or ('dataset_id' in data.keys() and isinstance(data['dataset_id'], list) and len(data['dataset_id'])>max_array_size):
            dbsExceptionHandler("dbsException-invalid-input",
                                "The Max list length supported in listDatasetArray is %s." %max_array_size, self.logger.exception)
    ret = self.dbsDataset.listDatasetArray(data)
except cjson.DecodeError as De:
    dbsExceptionHandler('dbsException-invalid-input2', "Invalid input", self.logger.exception, str(De))
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except HTTPError as he:
    raise he
except Exception as ex:
    sError = "DBSReaderModel/listDatasetArray. %s \n Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
for item in ret:
    yield item
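A client calling this endpoint POSTs a JSON body with the fields documented above. A sketch of such a body using only the standard library; the dataset path is an invented example:

import json

payload = json.dumps({
    "dataset": ["/PrimaryDS/ProcessedDS-v1/TIER"],  # hypothetical path
    "dataset_access_type": "VALID",
    "detail": 1,
})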
<SYSTEM_TASK:> API to list data tiers known to DBS. <END_TASK> <USER_TASK:> Description: def listDataTiers(self, data_tier_name=""): """ API to list data tiers known to DBS. :param data_tier_name: List details on that data tier (Optional) :type data_tier_name: str :returns: List of dictionaries containing the following keys (data_tier_id, data_tier_name, create_by, creation_date) """
data_tier_name = data_tier_name.replace("*", "%")
conn = None
try:
    conn = self.dbi.connection()
    return self.dbsDataTierListDAO.execute(conn, data_tier_name.upper())
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
except ValueError as ve:
    dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input Data", self.logger.exception, ve.message)
except TypeError as te:
    dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input DataType", self.logger.exception, te.message)
except NameError as ne:
    dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input Searching Key", self.logger.exception, ne.message)
except Exception as ex:
    sError = "DBSReaderModel/listDataTiers. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> API to list blocks first generated in origin_site_name. <END_TASK> <USER_TASK:> Description: def listBlockOrigin(self, origin_site_name="", dataset="", block_name=""): """ API to list blocks first generated in origin_site_name. :param origin_site_name: Origin Site Name (Optional, No wildcards) :type origin_site_name: str :param dataset: dataset ( No wildcards, either dataset or block name needed) :type dataset: str :param block_name: :type block_name: str :returns: List of dictionaries containing the following keys (create_by, creation_date, open_for_writing, last_modified_by, dataset, block_name, file_count, origin_site_name, last_modification_date, block_size) :rtype: list of dicts """
try:
    return self.dbsBlock.listBlocksOrigin(origin_site_name, dataset, block_name)
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/listBlockOrigin. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list block parents of multiple blocks. To be called by the blockparents url with a POST call. <END_TASK> <USER_TASK:> Description: def listBlocksParents(self): """ API to list block parents of multiple blocks. To be called by the blockparents url with a POST call. :param block_names: list of block names [block_name1, block_name2, ...] (Required). Max length 1000. :type block_names: list """
try:
    body = request.body.read()
    data = cjson.decode(body)
    data = validateJSONInputNoCopy("block", data, read=True)
    #Because CMSWEB has a 300-second response limit, we have to cap the array size
    #to make sure that the API can finish within 300 seconds.
    # YG Nov-05-2015
    max_array_size = 1000
    if ( 'block_names' in data.keys() and isinstance(data['block_names'], list) and len(data['block_names'])>max_array_size):
        dbsExceptionHandler("dbsException-invalid-input",
                            "The Max list length supported in listBlocksParents is %s." %max_array_size, self.logger.exception)
    return self.dbsBlock.listBlockParents(data["block_names"])
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except cjson.DecodeError as de:
    sError = "DBSReaderModel/listBlockParents. %s\n. Exception trace: \n %s" \
            % (de, traceback.format_exc())
    msg = "DBSReaderModel/listBlockParents. %s" % de
    dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, sError)
except HTTPError as he:
    raise he
except Exception as ex:
    sError = "DBSReaderModel/listBlockParents. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
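A sketch of the JSON body this endpoint expects, per the docstring; the block name is an invented example following the usual dataset#hash convention:

import json

payload = json.dumps({
    "block_names": ["/PrimaryDS/ProcessedDS-v1/TIER#0123-abcd"]  # hypothetical
})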
<SYSTEM_TASK:> API to list block children. <END_TASK> <USER_TASK:> Description: def listBlockChildren(self, block_name=""): """ API to list block children. :param block_name: name of block whose children need to be found (Required) :type block_name: str :returns: List of dictionaries containing following keys (block_name) :rtype: list of dicts """
block_name = block_name.replace("*", "%") try: return self.dbsBlock.listBlockChildren(block_name) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listBlockChildren. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API that returns summary information like total size and total number of events in a dataset or a list of blocks <END_TASK> <USER_TASK:> Description: def listBlockSummaries(self, block_name="", dataset="", detail=False): """ API that returns summary information like total size and total number of events in a dataset or a list of blocks :param block_name: list block summaries for block_name(s) :type block_name: str, list :param dataset: list block summaries for all blocks in dataset :type dataset: str :param detail: list summary by block names if detail=True, default=False :type detail: str, bool :returns: list of dicts containing total block_sizes, file_counts and event_counts of dataset or blocks provided """
if bool(dataset) + bool(block_name) != 1:
    dbsExceptionHandler("dbsException-invalid-input2",
                        dbsExceptionCode["dbsException-invalid-input2"],
                        self.logger.exception,
                        "Either dataset or block_name must be specified, but not both.")

if block_name and isinstance(block_name, basestring):
    try:
        block_name = [str(block_name)]
    except Exception:
        dbsExceptionHandler("dbsException-invalid-input",
                            "Invalid block_name for listBlockSummaries.",
                            self.logger.exception)

for this_block_name in block_name:
    if re.search("[*, %]", this_block_name):
        dbsExceptionHandler("dbsException-invalid-input2",
                            dbsExceptionCode["dbsException-invalid-input2"],
                            self.logger.exception,
                            "No wildcards are allowed in block_name list")
if re.search("[*, %]", dataset):
    dbsExceptionHandler("dbsException-invalid-input2",
                        dbsExceptionCode["dbsException-invalid-input2"],
                        self.logger.exception,
                        "No wildcards are allowed in dataset")

data = []
try:
    with self.dbi.connection() as conn:
        data = self.dbsBlockSummaryListDAO.execute(conn, block_name, dataset, detail)
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/listBlockSummaries. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                        self.logger.exception, sError)

for item in data:
    yield item
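The guard bool(dataset) + bool(block_name) != 1 enforces that exactly one selector is present. The same exclusive-presence test, factored into a standalone helper:

def exactly_one(*args):
    # True when exactly one argument is non-empty, mirroring the guard above
    return sum(bool(a) for a in args) == 1

assert exactly_one("/A/B/C", "") is True
assert exactly_one("", "") is False
assert exactly_one("/A/B/C", "/A/B/C#blk1") is False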
<SYSTEM_TASK:> API to list a dataset's parents in DBS. <END_TASK> <USER_TASK:> Description: def listDatasetParents(self, dataset=''): """ API to list a dataset's parents in DBS. :param dataset: dataset (Required) :type dataset: str :returns: List of dictionaries containing the following keys (this_dataset, parent_dataset_id, parent_dataset) :rtype: list of dicts """
try: return self.dbsDataset.listDatasetParents(dataset) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listDatasetParents. %s\n. Exception trace: \n %s" \ % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list OutputConfigs in DBS. <END_TASK> <USER_TASK:> Description: def listOutputConfigs(self, dataset="", logical_file_name="", release_version="", pset_hash="", app_name="", output_module_label="", block_id=0, global_tag=''): """ API to list OutputConfigs in DBS. * You can use any combination of these parameters in this API * All parameters are optional, if you do not provide any parameter, all configs will be listed from DBS :param dataset: Full dataset (path) of the dataset :type dataset: str :param logical_file_name: logical_file_name of the file :type logical_file_name: str :param release_version: cmssw version :type release_version: str :param pset_hash: pset hash :type pset_hash: str :param app_name: Application name (generally it is cmsRun) :type app_name: str :param output_module_label: output_module_label :type output_module_label: str :param block_id: ID of the block :type block_id: int :param global_tag: Global Tag :type global_tag: str :returns: List of dictionaries containing the following keys (app_name, output_module_label, create_by, pset_hash, creation_date, release_version, global_tag, pset_name) :rtype: list of dicts """
release_version = release_version.replace("*", "%") pset_hash = pset_hash.replace("*", "%") app_name = app_name.replace("*", "%") output_module_label = output_module_label.replace("*", "%") try: return self.dbsOutputConfig.listOutputConfigs(dataset, logical_file_name, release_version, pset_hash, app_name, output_module_label, block_id, global_tag) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listOutputConfigs. %s\n. Exception trace: \n %s" \ % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list file parents <END_TASK> <USER_TASK:> Description: def listFileParents(self, logical_file_name='', block_id=0, block_name=''): """ API to list file parents :param logical_file_name: logical_file_name of file (Required) :type logical_file_name: str, list :param block_id: ID of a block whose files' parents should be listed :type block_id: int, str :param block_name: Name of a block whose files' parents should be listed :type block_name: str :returns: List of dictionaries containing the following keys (parent_logical_file_name, logical_file_name) :rtype: list of dicts """
try:
    r = self.dbsFile.listFileParents(logical_file_name, block_id, block_name)
    for item in r:
        yield item
except HTTPError as he:
    raise he
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/listFileParents. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list file children. One of the parameters is mandatory. <END_TASK> <USER_TASK:> Description: def listFileChildren(self, logical_file_name='', block_name='', block_id=0): """ API to list file children. One of the parameters is mandatory. :param logical_file_name: logical_file_name of file :type logical_file_name: str, list :param block_name: block_name :type block_name: str :param block_id: block_id :type block_id: str, int :returns: List of dictionaries containing the following keys (child_logical_file_name, logical_file_name) :rtype: List of dicts """
if isinstance(logical_file_name, list):
    for f in logical_file_name:
        if '*' in f or '%' in f:
            dbsExceptionHandler("dbsException-invalid-input2", dbsExceptionCode["dbsException-invalid-input2"],
                                self.logger.exception, "No wildcards are allowed in the LFN list")
try:
    return self.dbsFile.listFileChildren(logical_file_name, block_name, block_id)
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/listFileChildren. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
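The wildcard-rejection loop above, factored into a reusable standalone sketch (names are illustrative):

def reject_wildcards(lfns):
    # Mirrors the guard above: wildcard characters are not allowed
    # when a list of LFNs is supplied.
    bad = [f for f in lfns if '*' in f or '%' in f]
    if bad:
        raise ValueError("No wildcards are allowed in the LFN list: %s" % bad)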
<SYSTEM_TASK:> API to list lumi sections for files. Either logical_file_name or block_name is required. No wildcard support in this API. <END_TASK> <USER_TASK:> Description: def listFileLumis(self, logical_file_name="", block_name="", run_num=-1, validFileOnly=0): """ API to list lumi sections for files. Either logical_file_name or block_name is required. No wildcard support in this API. :param block_name: Name of the block :type block_name: str :param logical_file_name: logical_file_name of file :type logical_file_name: str, list :param run_num: List lumi sections for a given run number (Optional). Possible formats are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...]. run_num=1 is used for MC data and causes an almost full table scan, so run_num=1 will cause an input error. :type run_num: int, str, or list :param validFileOnly: optional valid file flag. Default = 0 (include all files) :type validFileOnly: int or str :returns: List of dictionaries containing the following keys (lumi_section_num, logical_file_name, run_num, event_count) :rtype: list of dicts """
# run_num=1 caused full table scans and CERN DBS reported that some queries ran for more than 50 hours.
# We therefore disable all run_num=1 calls in DBS. run_num=1 is OK when logical_file_name is given.
# YG Jan. 16 2019
if (run_num != -1 and logical_file_name == ''):
    for r in parseRunRange(run_num):
        if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long):
            if r == 1 or r == '1':
                dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                    self.logger.exception)
        elif isinstance(r, run_tuple):
            if r[0] == r[1]:
                dbsExceptionHandler("dbsException-invalid-input", "DBS run range must be apart at least by 1.",
                                    self.logger.exception)
            elif r[0] <= 1 <= r[1]:
                dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                    self.logger.exception)
try:
    return self.dbsFile.listFileLumis(logical_file_name, block_name, run_num, validFileOnly)
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/listFileLumis. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
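run_num accepts an int, a 'run_min-run_max' string, or a list mixing both; the DBS-side parsing is done by parseRunRange, which is not shown here. A toy parser illustrating just the accepted shapes (not DBS's own implementation):

def parse_run_range(run_num):
    """Return ints for single runs and (min, max) tuples for ranges.
    Illustrative only; error handling is omitted."""
    items = run_num if isinstance(run_num, list) else [run_num]
    out = []
    for item in items:
        if isinstance(item, str) and '-' in item:
            lo, hi = item.split('-', 1)
            out.append((int(lo), int(hi)))
        else:
            out.append(int(item))
    return out

# parse_run_range(['97-99', 150]) -> [(97, 99), 150]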
<SYSTEM_TASK:> API to list all runs in DBS. At least one parameter is mandatory. <END_TASK> <USER_TASK:> Description: def listRuns(self, run_num=-1, logical_file_name="", block_name="", dataset=""): """ API to list all runs in DBS. At least one parameter is mandatory. :param logical_file_name: List all runs in the file :type logical_file_name: str :param block_name: List all runs in the block :type block_name: str :param dataset: List all runs in that dataset :type dataset: str :param run_num: List all runs :type run_num: int, string or list """
# run_num=1 caused full table scans and CERN DBS reported that some queries ran for more than 50 hours.
# We therefore disable all run_num=1 calls in DBS. run_num=1 is OK when logical_file_name is given.
# YG Jan. 16 2019
if (run_num != -1 and logical_file_name == ''):
    for r in parseRunRange(run_num):
        if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long):
            if r == 1 or r == '1':
                dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                    self.logger.exception)
        elif isinstance(r, run_tuple):
            if r[0] == r[1]:
                dbsExceptionHandler("dbsException-invalid-input", "DBS run range must be apart at least by 1.",
                                    self.logger.exception)
            elif r[0] <= 1 <= r[1]:
                dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.",
                                    self.logger.exception)
if run_num == -1 and not logical_file_name and not dataset and not block_name:
    dbsExceptionHandler("dbsException-invalid-input",
                        "run_num, logical_file_name, block_name or dataset parameter is mandatory",
                        self.logger.exception)
try:
    if logical_file_name:
        logical_file_name = logical_file_name.replace("*", "%")
    if block_name:
        block_name = block_name.replace("*", "%")
    if dataset:
        dataset = dataset.replace("*", "%")
    return self.dbsRun.listRuns(run_num, logical_file_name, block_name, dataset)
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/listRuns. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list all information related to the block_name <END_TASK> <USER_TASK:> Description: def dumpBlock(self, block_name): """ API to list all information related to the block_name :param block_name: Name of block to be dumped (Required) :type block_name: str """
try:
    return self.dbsBlock.dumpBlock(block_name)
except HTTPError as he:
    raise he
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/dumpBlock. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list all Acquisition Eras in DBS. <END_TASK> <USER_TASK:> Description: def listAcquisitionEras(self, acquisition_era_name=''): """ API to list all Acquisition Eras in DBS. :param acquisition_era_name: Acquisition era name (Optional, wild cards allowed) :type acquisition_era_name: str :returns: List of dictionaries containing following keys (description, end_date, acquisition_era_name, create_by, creation_date and start_date) :rtype: list of dicts """
try: acquisition_era_name = acquisition_era_name.replace('*', '%') return self.dbsAcqEra.listAcquisitionEras(acquisition_era_name) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listAcquisitionEras. %s\n. Exception trace: \n %s" % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list all Processing Eras in DBS. <END_TASK> <USER_TASK:> Description: def listProcessingEras(self, processing_version=0): """ API to list all Processing Eras in DBS. :param processing_version: Processing Version (Optional). If provided just this processing_version will be listed :type processing_version: str :returns: List of dictionaries containing the following keys (create_by, processing_version, description, creation_date) :rtype: list of dicts """
try: #processing_version = processing_version.replace("*", "%") return self.dbsProcEra.listProcessingEras(processing_version) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listProcessingEras. %s\n. Exception trace: \n %s" \ % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list all release versions in DBS <END_TASK> <USER_TASK:> Description: def listReleaseVersions(self, release_version='', dataset='', logical_file_name=''): """ API to list all release versions in DBS :param release_version: List only that release version :type release_version: str :param dataset: List release version of the specified dataset :type dataset: str :param logical_file_name: List release version of the logical file name :type logical_file_name: str :returns: List of dictionaries containing following keys (release_version) :rtype: list of dicts """
if release_version: release_version = release_version.replace("*", "%") try: return self.dbsReleaseVersion.listReleaseVersions(release_version, dataset, logical_file_name ) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listReleaseVersions. %s\n. Exception trace: \n %s" \ % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list all physics groups. <END_TASK> <USER_TASK:> Description: def listPhysicsGroups(self, physics_group_name=''): """ API to list all physics groups. :param physics_group_name: List that specific physics group (Optional) :type physics_group_name: basestring :returns: List of dictionaries containing the following key (physics_group_name) :rtype: list of dicts """
if physics_group_name: physics_group_name = physics_group_name.replace('*', '%') try: return self.dbsPhysicsGroup.listPhysicsGroups(physics_group_name) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listPhysicsGroups. %s\n. Exception trace: \n %s" \ % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
<SYSTEM_TASK:> API to list run summaries, like the maximal lumisection in a run. <END_TASK> <USER_TASK:> Description: def listRunSummaries(self, dataset="", run_num=-1): """ API to list run summaries, like the maximal lumisection in a run. :param dataset: dataset name (Optional) :type dataset: str :param run_num: Run number (Required) :type run_num: str, long, int :rtype: list containing a dictionary with key max_lumi """
if run_num == -1:
    dbsExceptionHandler("dbsException-invalid-input", "The run_num parameter is mandatory",
                        self.logger.exception)
if re.search('[*,%]', dataset):
    dbsExceptionHandler("dbsException-invalid-input", "No wildcards are allowed in dataset",
                        self.logger.exception)
# run_num=1 caused full table scans and CERN DBS reported that some queries ran for more than 50 hours.
# We therefore disable all run_num=1 calls in DBS. run_num=1 is OK when a dataset is given in this API.
# YG Jan. 16 2019
if ((run_num == 1 or run_num == '1') and dataset == ''):
    dbsExceptionHandler("dbsException-invalid-input",
                        "Run_num=1 is not a valid input when no dataset is present.",
                        self.logger.exception)
conn = None
try:
    conn = self.dbi.connection()
    return self.dbsRunSummaryListDAO.execute(conn, dataset, run_num)
except dbsException as de:
    dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)
except Exception as ex:
    sError = "DBSReaderModel/listRunSummaries. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
    dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],
                        self.logger.exception, sError)
finally:
    if conn:
        conn.close()
<SYSTEM_TASK:> Load events from a json file <END_TASK> <USER_TASK:> Description: def load(file): """ Load events from a json file """
with open(file, 'r') as f: contents = f.read() lambder.load_events(contents)
<SYSTEM_TASK:> Create a new lambda project <END_TASK> <USER_TASK:> Description: def new( name, bucket, timeout, memory, description, subnet_ids, security_group_ids ): """ Create a new lambda project """
config = {} if timeout: config['timeout'] = timeout if memory: config['memory'] = memory if description: config['description'] = description if subnet_ids: config['subnet_ids'] = subnet_ids if security_group_ids: config['security_group_ids'] = security_group_ids lambder.create_project(name, bucket, config)
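Only the options the caller actually provided end up in the config dict; a standalone illustration of that folding (values are invented examples):

options = {'timeout': 30, 'memory': 128, 'description': None,
           'subnet_ids': None, 'security_group_ids': None}
config = {k: v for k, v in options.items() if v}
assert config == {'timeout': 30, 'memory': 128}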