text_prompt: string (lengths 100 – 17.7k)
code_prompt: string (lengths 7 – 9.86k)
<SYSTEM_TASK:> Grab the project name out of setup.py <END_TASK> <USER_TASK:> Description: def get_project_name(): """ Grab the project name out of setup.py """
setup_py_content = helpers.get_file_content('setup.py') ret = helpers.value_of_named_argument_in_function( 'name', 'setup', setup_py_content, resolve_varname=True ) if ret and ret[0] == ret[-1] in ('"', "'"): ret = ret[1:-1] return ret
<SYSTEM_TASK:> Get the version which is currently configured by the package <END_TASK> <USER_TASK:> Description: def get_version(package_name, ignore_cache=False): """ Get the version which is currently configured by the package """
    if ignore_cache:
        with microcache.temporarily_disabled():
            found = helpers.regex_in_package_file(
                VERSION_SET_REGEX, '_version.py', package_name, return_match=True
            )
    else:
        found = helpers.regex_in_package_file(
            VERSION_SET_REGEX, '_version.py', package_name, return_match=True
        )
    if found is None:
        raise ProjectError(
            'found _version.py for package {}, but __version__ is not defined'.format(package_name)
        )
    current_version = found['version']
    return current_version
<SYSTEM_TASK:> Set the version in _version.py to version_str <END_TASK> <USER_TASK:> Description: def set_version(package_name, version_str): """ Set the version in _version.py to version_str """
current_version = get_version(package_name) version_file_path = helpers.package_file_path('_version.py', package_name) version_file_content = helpers.get_file_content(version_file_path) version_file_content = version_file_content.replace(current_version, version_str) with open(version_file_path, 'w') as version_file: version_file.write(version_file_content)
<SYSTEM_TASK:> Check to see if the version specified is valid as far as the packaging library is concerned <END_TASK> <USER_TASK:> Description: def version_is_valid(version_str): """ Check to see if the version specified is valid as far as the packaging library is concerned >>> version_is_valid('blah') False >>> version_is_valid('1.2.3') True """
try: packaging.version.Version(version_str) except packaging.version.InvalidVersion: return False return True
<SYSTEM_TASK:> Query the pypi index at index_url using warehouse api to find all of the "releases" <END_TASK> <USER_TASK:> Description: def _get_uploaded_versions_warehouse(project_name, index_url, requests_verify=True): """ Query the pypi index at index_url using warehouse api to find all of the "releases" """
url = '/'.join((index_url, project_name, 'json')) response = requests.get(url, verify=requests_verify) if response.status_code == 200: return response.json()['releases'].keys() return None
<SYSTEM_TASK:> Query the pypi index at index_url using pypicloud api to find all versions <END_TASK> <USER_TASK:> Description: def _get_uploaded_versions_pypicloud(project_name, index_url, requests_verify=True): """ Query the pypi index at index_url using pypicloud api to find all versions """
api_url = index_url for suffix in ('/pypi', '/pypi/', '/simple', '/simple/'): if api_url.endswith(suffix): api_url = api_url[:len(suffix) * -1] + '/api/package' break url = '/'.join((api_url, project_name)) response = requests.get(url, verify=requests_verify) if response.status_code == 200: return [p['version'] for p in response.json()['packages']] return None
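A minimal illustration of the suffix rewrite above, with a made-up index URL and project name: a pypicloud index served at a /simple or /pypi endpoint is queried through its /api/package endpoint instead.

    # hypothetical values for illustration only
    index_url = 'https://pypi.example.com/simple'
    api_url = index_url
    for suffix in ('/pypi', '/pypi/', '/simple', '/simple/'):
        if api_url.endswith(suffix):
            api_url = api_url[:len(suffix) * -1] + '/api/package'
            break
    print('/'.join((api_url, 'mypackage')))
    # -> https://pypi.example.com/api/package/mypackage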
<SYSTEM_TASK:> Check to see if the version specified has already been uploaded to the configured index <END_TASK> <USER_TASK:> Description: def version_already_uploaded(project_name, version_str, index_url, requests_verify=True): """ Check to see if the version specified has already been uploaded to the configured index """
all_versions = _get_uploaded_versions(project_name, index_url, requests_verify) return version_str in all_versions
<SYSTEM_TASK:> Collect relative paths to all files which have already been packaged <END_TASK> <USER_TASK:> Description: def get_packaged_files(package_name): """ Collect relative paths to all files which have already been packaged """
if not os.path.isdir('dist'): return [] return [os.path.join('dist', filename) for filename in os.listdir('dist')]
<SYSTEM_TASK:> Look through built package directory and see if there are multiple versions there <END_TASK> <USER_TASK:> Description: def multiple_packaged_versions(package_name): """ Look through built package directory and see if there are multiple versions there """
    dist_files = os.listdir('dist')
    versions = set()
    for filename in dist_files:
        # escape the dots so the pattern only matches a literal '.tar.gz' suffix
        version = funcy.re_find(r'{}-(.+)\.tar\.gz'.format(package_name), filename)
        if version:
            versions.add(version)
    return len(versions) > 1
<SYSTEM_TASK:> Calculate the neighborhood probability over the full period of the forecast <END_TASK> <USER_TASK:> Description: def period_neighborhood_probability(self, radius, smoothing, threshold, stride, start_time, end_time): """ Calculate the neighborhood probability over the full period of the forecast Args: radius: circular radius from each point in km smoothing: width of Gaussian smoother in km threshold: intensity of exceedance stride: number of grid points to skip for reduced neighborhood grid start_time: first forecast hour of the evaluation period end_time: last forecast hour of the evaluation period Returns: (neighborhood probabilities) """
neighbor_x = self.x[::stride, ::stride] neighbor_y = self.y[::stride, ::stride] neighbor_kd_tree = cKDTree(np.vstack((neighbor_x.ravel(), neighbor_y.ravel())).T) neighbor_prob = np.zeros((self.data.shape[0], neighbor_x.shape[0], neighbor_x.shape[1])) print('Forecast Hours: {0}-{1}'.format(start_time, end_time)) for m in range(len(self.members)): period_max = self.data[m,start_time:end_time,:,:].max(axis=0) valid_i, valid_j = np.where(period_max >= threshold) print(self.members[m], len(valid_i)) if len(valid_i) > 0: var_kd_tree = cKDTree(np.vstack((self.x[valid_i, valid_j], self.y[valid_i, valid_j])).T) exceed_points = np.unique(np.concatenate(var_kd_tree.query_ball_tree(neighbor_kd_tree, radius))).astype(int) exceed_i, exceed_j = np.unravel_index(exceed_points, neighbor_x.shape) neighbor_prob[m][exceed_i, exceed_j] = 1 if smoothing > 0: neighbor_prob[m] = gaussian_filter(neighbor_prob[m], smoothing,mode='constant') return neighbor_prob
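The heart of the method above is a cKDTree ball search: every neighborhood grid point within radius of an exceedance point gets probability 1, optionally followed by Gaussian smoothing. A minimal standalone sketch of that idea on a toy grid (the field, threshold, and radii below are invented for illustration and are not part of the original class):

    import numpy as np
    from scipy.spatial import cKDTree
    from scipy.ndimage import gaussian_filter

    # toy 2D field on a unit grid; threshold, radius, and smoothing are illustrative only
    x, y = np.meshgrid(np.arange(50.0), np.arange(50.0))
    field = np.random.RandomState(0).gamma(2.0, 5.0, x.shape)
    threshold, radius, smoothing = 30.0, 5.0, 2.0

    grid_tree = cKDTree(np.vstack((x.ravel(), y.ravel())).T)
    exceed_i, exceed_j = np.where(field >= threshold)
    exceed_tree = cKDTree(np.vstack((x[exceed_i, exceed_j], y[exceed_i, exceed_j])).T)

    prob = np.zeros(field.shape)
    hits = np.unique(np.concatenate(exceed_tree.query_ball_tree(grid_tree, radius))).astype(int)
    prob.ravel()[hits] = 1                   # mark every point within radius of an exceedance
    prob = gaussian_filter(prob, smoothing)  # smoothed binary neighborhood probability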
<SYSTEM_TASK:> Load map projection information and create latitude, longitude, x, y, i, and j grids for the projection. <END_TASK> <USER_TASK:> Description: def load_map_info(self, map_file): """ Load map projection information and create latitude, longitude, x, y, i, and j grids for the projection. Args: map_file: File specifying the projection information. """
if self.ensemble_name.upper() == "SSEF": proj_dict, grid_dict = read_arps_map_file(map_file) self.dx = int(grid_dict["dx"]) mapping_data = make_proj_grids(proj_dict, grid_dict) for m, v in mapping_data.items(): setattr(self, m, v) self.i, self.j = np.indices(self.lon.shape) self.proj = get_proj_obj(proj_dict) elif self.ensemble_name.upper() in ["NCAR", "NCARSTORM", "HRRR", "VSE", "HREFV2"]: proj_dict, grid_dict = read_ncar_map_file(map_file) if self.member_name[0:7] == "1km_pbl": # Don't just look at the first 3 characters. You have to differentiate '1km_pbl1' and '1km_on_3km_pbl1' grid_dict["dx"] = 1000 grid_dict["dy"] = 1000 grid_dict["sw_lon"] = 258.697 grid_dict["sw_lat"] = 23.999 grid_dict["ne_lon"] = 282.868269206236 grid_dict["ne_lat"] = 36.4822338520542 self.dx = int(grid_dict["dx"]) mapping_data = make_proj_grids(proj_dict, grid_dict) for m, v in mapping_data.items(): setattr(self, m, v) self.i, self.j = np.indices(self.lon.shape) self.proj = get_proj_obj(proj_dict)
<SYSTEM_TASK:> Reads a geojson file containing an STObject and initializes a new STObject from the information in the file. <END_TASK> <USER_TASK:> Description: def read_geojson(filename): """ Reads a geojson file containing an STObject and initializes a new STObject from the information in the file. Args: filename: Name of the geojson file Returns: an STObject """
json_file = open(filename) data = json.load(json_file) json_file.close() times = data["properties"]["times"] main_data = dict(timesteps=[], masks=[], x=[], y=[], i=[], j=[]) attribute_data = dict() for feature in data["features"]: for main_name in main_data.keys(): main_data[main_name].append(np.array(feature["properties"][main_name])) for k, v in feature["properties"]["attributes"].items(): if k not in attribute_data.keys(): attribute_data[k] = [np.array(v)] else: attribute_data[k].append(np.array(v)) kwargs = {} for kw in ["dx", "step", "u", "v"]: if kw in data["properties"].keys(): kwargs[kw] = data["properties"][kw] sto = STObject(main_data["timesteps"], main_data["masks"], main_data["x"], main_data["y"], main_data["i"], main_data["j"], times[0], times[-1], **kwargs) for k, v in attribute_data.items(): sto.attributes[k] = v return sto
<SYSTEM_TASK:> Calculate the center of mass at a given timestep. <END_TASK> <USER_TASK:> Description: def center_of_mass(self, time): """ Calculate the center of mass at a given timestep. Args: time: Time at which the center of mass calculation is performed Returns: The x- and y-coordinates of the center of mass. """
if self.start_time <= time <= self.end_time: diff = time - self.start_time valid = np.flatnonzero(self.masks[diff] != 0) if valid.size > 0: com_x = 1.0 / self.timesteps[diff].ravel()[valid].sum() * np.sum(self.timesteps[diff].ravel()[valid] * self.x[diff].ravel()[valid]) com_y = 1.0 / self.timesteps[diff].ravel()[valid].sum() * np.sum(self.timesteps[diff].ravel()[valid] * self.y[diff].ravel()[valid]) else: com_x = np.mean(self.x[diff]) com_y = np.mean(self.y[diff]) else: com_x = None com_y = None return com_x, com_y
<SYSTEM_TASK:> Calculates the center of mass for each time step and outputs an array <END_TASK> <USER_TASK:> Description: def trajectory(self): """ Calculates the center of mass for each time step and outputs an array Returns: 2 x Ntimes array containing the x and y center-of-mass coordinates at each time step """
traj = np.zeros((2, self.times.size)) for t, time in enumerate(self.times): traj[:, t] = self.center_of_mass(time) return traj
<SYSTEM_TASK:> Gets the corner array indices of the STObject at a given time that corresponds <END_TASK> <USER_TASK:> Description: def get_corner(self, time): """ Gets the corner array indices of the STObject at a given time that corresponds to the upper left corner of the bounding box for the STObject. Args: time: time at which the corner is being extracted. Returns: corner index. """
if self.start_time <= time <= self.end_time: diff = time - self.start_time return self.i[diff][0, 0], self.j[diff][0, 0] else: return -1, -1
<SYSTEM_TASK:> Gets the size of the object at a given time. <END_TASK> <USER_TASK:> Description: def size(self, time): """ Gets the size of the object at a given time. Args: time: Time value being queried. Returns: size of the object in pixels """
if self.start_time <= time <= self.end_time: return self.masks[time - self.start_time].sum() else: return 0
<SYSTEM_TASK:> Calculate the maximum intensity found at a timestep. <END_TASK> <USER_TASK:> Description: def max_intensity(self, time): """ Calculate the maximum intensity found at a timestep. """
ti = np.where(time == self.times)[0][0] return self.timesteps[ti].max()
<SYSTEM_TASK:> Get coordinates of object boundary in counter-clockwise order <END_TASK> <USER_TASK:> Description: def boundary_polygon(self, time): """ Get coordinates of object boundary in counter-clockwise order """
ti = np.where(time == self.times)[0][0] com_x, com_y = self.center_of_mass(time) # If at least one point along perimeter of the mask rectangle is unmasked, find_boundaries() works. # But if all perimeter points are masked, find_boundaries() does not find the object. # Therefore, pad the mask with zeroes first and run find_boundaries on the padded array. padded_mask = np.pad(self.masks[ti], 1, 'constant', constant_values=0) chull = convex_hull_image(padded_mask) boundary_image = find_boundaries(chull, mode='inner', background=0) # Now remove the padding. boundary_image = boundary_image[1:-1,1:-1] boundary_x = self.x[ti].ravel()[boundary_image.ravel()] boundary_y = self.y[ti].ravel()[boundary_image.ravel()] r = np.sqrt((boundary_x - com_x) ** 2 + (boundary_y - com_y) ** 2) theta = np.arctan2((boundary_y - com_y), (boundary_x - com_x)) * 180.0 / np.pi + 360 polar_coords = np.array([(r[x], theta[x]) for x in range(r.size)], dtype=[('r', 'f4'), ('theta', 'f4')]) coord_order = np.argsort(polar_coords, order=['theta', 'r']) ordered_coords = np.vstack([boundary_x[coord_order], boundary_y[coord_order]]) return ordered_coords
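The counter-clockwise ordering above comes from sorting boundary points by polar angle about the center of mass, with radius as a tie-breaker. A small self-contained sketch of just that sorting step, using made-up points:

    import numpy as np

    # hypothetical boundary points of a blob and its center
    bx = np.array([2.0, 0.0, -2.0, 0.0, 1.5, -1.5])
    by = np.array([0.0, 2.0, 0.0, -2.0, 1.5, -1.5])
    cx, cy = bx.mean(), by.mean()

    r = np.hypot(bx - cx, by - cy)
    theta = np.degrees(np.arctan2(by - cy, bx - cx)) + 360
    order = np.lexsort((r, theta))               # last key (theta) is the primary sort key
    ordered = np.vstack([bx[order], by[order]])  # boundary traversed counter-clockwise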
<SYSTEM_TASK:> Estimate the motion of the object with cross-correlation on the intensity values from the previous time step. <END_TASK> <USER_TASK:> Description: def estimate_motion(self, time, intensity_grid, max_u, max_v): """ Estimate the motion of the object with cross-correlation on the intensity values from the previous time step. Args: time: time being evaluated. intensity_grid: 2D array of intensities used in cross correlation. max_u: Maximum x-component of motion. Used to limit search area. max_v: Maximum y-component of motion. Used to limit search area Returns: u, v, and the minimum error. """
ti = np.where(time == self.times)[0][0] mask_vals = np.where(self.masks[ti].ravel() == 1) i_vals = self.i[ti].ravel()[mask_vals] j_vals = self.j[ti].ravel()[mask_vals] obj_vals = self.timesteps[ti].ravel()[mask_vals] u_shifts = np.arange(-max_u, max_u + 1) v_shifts = np.arange(-max_v, max_v + 1) min_error = 99999999999.0 best_u = 0 best_v = 0 for u in u_shifts: j_shift = j_vals - u for v in v_shifts: i_shift = i_vals - v if np.all((0 <= i_shift) & (i_shift < intensity_grid.shape[0]) & (0 <= j_shift) & (j_shift < intensity_grid.shape[1])): shift_vals = intensity_grid[i_shift, j_shift] else: shift_vals = np.zeros(i_shift.shape) # This isn't correlation; it is mean absolute error. error = np.abs(shift_vals - obj_vals).mean() if error < min_error: min_error = error best_u = u * self.dx best_v = v * self.dx # 60 seems arbitrarily high #if min_error > 60: # best_u = 0 # best_v = 0 self.u[ti] = best_u self.v[ti] = best_v return best_u, best_v, min_error
<SYSTEM_TASK:> Counts the number of points that overlap between this STObject and another STObject. Used for tracking. <END_TASK> <USER_TASK:> Description: def count_overlap(self, time, other_object, other_time): """ Counts the number of points that overlap between this STObject and another STObject. Used for tracking. """
ti = np.where(time == self.times)[0][0] ma = np.where(self.masks[ti].ravel() == 1) oti = np.where(other_time == other_object.times)[0] obj_coords = np.zeros(self.masks[ti].sum(), dtype=[('x', int), ('y', int)]) other_obj_coords = np.zeros(other_object.masks[oti].sum(), dtype=[('x', int), ('y', int)]) obj_coords['x'] = self.i[ti].ravel()[ma] obj_coords['y'] = self.j[ti].ravel()[ma] other_obj_coords['x'] = other_object.i[oti][other_object.masks[oti] == 1] other_obj_coords['y'] = other_object.j[oti][other_object.masks[oti] == 1] return float(np.intersect1d(obj_coords, other_obj_coords).size) / np.maximum(self.masks[ti].sum(), other_object.masks[oti].sum())
<SYSTEM_TASK:> Extracts data from a 2D array that has the same dimensions as the grid used to identify the object. <END_TASK> <USER_TASK:> Description: def extract_attribute_array(self, data_array, var_name): """ Extracts data from a 2D array that has the same dimensions as the grid used to identify the object. Args: data_array: 2D numpy array var_name: name under which the extracted values are stored in the attributes dictionary """
if var_name not in self.attributes.keys(): self.attributes[var_name] = [] for t in range(self.times.size): self.attributes[var_name].append(data_array[self.i[t], self.j[t]])
<SYSTEM_TASK:> Extracts the difference in model outputs <END_TASK> <USER_TASK:> Description: def extract_tendency_grid(self, model_grid): """ Extracts the difference in model outputs Args: model_grid: ModelOutput or ModelGrid object. """
var_name = model_grid.variable + "-tendency" self.attributes[var_name] = [] timesteps = np.arange(self.start_time, self.end_time + 1) for ti, t in enumerate(timesteps): t_index = t - model_grid.start_hour self.attributes[var_name].append( model_grid.data[t_index, self.i[ti], self.j[ti]] - model_grid.data[t_index - 1, self.i[ti], self.j[ti]] )
<SYSTEM_TASK:> Calculate statistics from the primary attribute of the STObject. <END_TASK> <USER_TASK:> Description: def calc_timestep_statistic(self, statistic, time): """ Calculate statistics from the primary attribute of the STObject. Args: statistic: statistic being calculated time: Timestep being investigated Returns: Value of the statistic """
ti = np.where(self.times == time)[0][0] ma = np.where(self.masks[ti].ravel() == 1) if statistic in ['mean', 'max', 'min', 'std', 'ptp']: stat_val = getattr(self.timesteps[ti].ravel()[ma], statistic)() elif statistic == 'median': stat_val = np.median(self.timesteps[ti].ravel()[ma]) elif 'percentile' in statistic: per = int(statistic.split("_")[1]) stat_val = np.percentile(self.timesteps[ti].ravel()[ma], per) elif 'dt' in statistic: stat_name = statistic[:-3] if ti == 0: stat_val = 0 else: stat_val = self.calc_timestep_statistic(stat_name, time) -\ self.calc_timestep_statistic(stat_name, time - 1) else: stat_val = np.nan return stat_val
<SYSTEM_TASK:> Calculate shape statistics for a single time step <END_TASK> <USER_TASK:> Description: def calc_shape_step(self, stat_names, time): """ Calculate shape statistics for a single time step Args: stat_names: List of shape statistics calculated from region props time: Time being investigated Returns: List of shape statistics """
ti = np.where(self.times == time)[0][0] props = regionprops(self.masks[ti], self.timesteps[ti])[0] shape_stats = [] for stat_name in stat_names: if "moments_hu" in stat_name: hu_index = int(stat_name.split("_")[-1]) hu_name = "_".join(stat_name.split("_")[:-1]) hu_val = np.log(props[hu_name][hu_index]) if np.isnan(hu_val): shape_stats.append(0) else: shape_stats.append(hu_val) else: shape_stats.append(props[stat_name]) return shape_stats
<SYSTEM_TASK:> Output the data in the STObject to a geoJSON file. <END_TASK> <USER_TASK:> Description: def to_geojson(self, filename, proj, metadata=None): """ Output the data in the STObject to a geoJSON file. Args: filename: Name of the file proj: PyProj object for converting the x and y coordinates back to latitude and longitude values. metadata: Metadata describing the object to be included in the top-level properties. """
if metadata is None: metadata = {} json_obj = {"type": "FeatureCollection", "features": [], "properties": {}} json_obj['properties']['times'] = self.times.tolist() json_obj['properties']['dx'] = self.dx json_obj['properties']['step'] = self.step json_obj['properties']['u'] = self.u.tolist() json_obj['properties']['v'] = self.v.tolist() for k, v in metadata.items(): json_obj['properties'][k] = v for t, time in enumerate(self.times): feature = {"type": "Feature", "geometry": {"type": "Polygon"}, "properties": {}} boundary_coords = self.boundary_polygon(time) lonlat = np.vstack(proj(boundary_coords[0], boundary_coords[1], inverse=True)) lonlat_list = lonlat.T.tolist() if len(lonlat_list) > 0: lonlat_list.append(lonlat_list[0]) feature["geometry"]["coordinates"] = [lonlat_list] for attr in ["timesteps", "masks", "x", "y", "i", "j"]: feature["properties"][attr] = getattr(self, attr)[t].tolist() feature["properties"]["attributes"] = {} for attr_name, steps in self.attributes.items(): feature["properties"]["attributes"][attr_name] = steps[t].tolist() json_obj['features'].append(feature) file_obj = open(filename, "w") json.dump(json_obj, file_obj, indent=1, sort_keys=True) file_obj.close() return
<SYSTEM_TASK:> Tournament selection and when negative is True it performs negative <END_TASK> <USER_TASK:> Description: def tournament(self, negative=False): """Tournament selection and when negative is True it performs negative tournament selection"""
if self.generation <= self._random_generations and not negative: return self.random_selection() if not self._negative_selection and negative: return self.random_selection(negative=negative) vars = self.random() fit = [(k, self.population[x].fitness) for k, x in enumerate(vars)] if negative: fit = min(fit, key=lambda x: x[1]) else: fit = max(fit, key=lambda x: x[1]) index = fit[0] return vars[index]
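A minimal standalone sketch of the tournament idea used above: draw a few random candidates and keep the fittest, or the least fit when doing negative selection for replacement. The population size and fitness values are invented for the example:

    import random

    def tournament_select(fitness, k=2, negative=False):
        """Return the index of the winner of a k-way tournament over fitness values."""
        candidates = random.sample(range(len(fitness)), k)
        pick = min if negative else max
        return pick(candidates, key=lambda i: fitness[i])

    fitness = [0.1, 0.9, 0.4, 0.7]
    winner = tournament_select(fitness, k=2)                 # likely a fit individual
    victim = tournament_select(fitness, k=2, negative=True)  # candidate for replacement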
<SYSTEM_TASK:> Replace an individual selected by negative tournament selection with <END_TASK> <USER_TASK:> Description: def replace(self, v): """Replace an individual selected by negative tournament selection with individual v"""
if self.popsize < self._popsize: return self.add(v) k = self.tournament(negative=True) self.clean(self.population[k]) self.population[k] = v v.position = len(self._hist) self._hist.append(v) self.bsf = v self.estopping = v self._inds_replace += 1 self._density += self.get_density(v) if self._inds_replace == self._popsize: self._inds_replace = 0 self.generation += 1 gc.collect()
<SYSTEM_TASK:> Make the directory path, if needed. <END_TASK> <USER_TASK:> Description: def make_directory_if_needed(directory_path): """ Make the directory path, if needed. """
if os.path.exists(directory_path): if not os.path.isdir(directory_path): raise OSError("Path is not a directory:", directory_path) else: os.makedirs(directory_path)
<SYSTEM_TASK:> Main entry point for the hatchery program <END_TASK> <USER_TASK:> Description: def hatchery(): """ Main entry point for the hatchery program """
    args = docopt.docopt(__doc__)
    task_list = args['<task>']
    if not task_list or 'help' in task_list or args['--help']:
        print(__doc__.format(version=_version.__version__, config_files=config.CONFIG_LOCATIONS))
        return 0
    level_str = args['--log-level']
    try:
        level_const = getattr(logging, level_str.upper())
        logging.basicConfig(level=level_const)
        if level_const == logging.DEBUG:
            workdir.options.debug = True
    except AttributeError:
        # getattr raises AttributeError (not LookupError) for an unknown level name
        logging.basicConfig()
        logger.error('received invalid log level: ' + level_str)
        return 1
    for task in task_list:
        if task not in ORDERED_TASKS:
            logger.error('received invalid task: ' + task)
            return 1
    for task in CHECK_TASKS:
        if task in task_list:
            logger.info('starting task: check')
            task_check(args)
            break
    if 'package' in task_list and not args['--release-version']:
        logger.error('--release-version is required for the package task')
        return 1
    config_dict = _get_config_or_die(
        calling_task='hatchery',
        required_params=['auto_push_tag']
    )
    if config_dict['auto_push_tag'] and 'upload' in task_list:
        logger.info('adding task: tag (auto_push_tag==True)')
        task_list.append('tag')
    # all commands will raise a SystemExit if they fail
    # check will have already been run
    for task in ORDERED_TASKS:
        if task in task_list and task != 'check':
            logger.info('starting task: ' + task)
            globals()['task_' + task](args)
    logger.info("all's well that ends well...hatchery out")
    return 0
<SYSTEM_TASK:> Call an arbitrary command and return the exit value, stdout, and stderr as a tuple <END_TASK> <USER_TASK:> Description: def call(cmd_args, suppress_output=False): """ Call an arbitrary command and return the exit value, stdout, and stderr as a tuple Command can be passed in as either a string or iterable >>> result = call('hatchery', suppress_output=True) >>> result.exitval 0 >>> result = call(['hatchery', 'notreal']) >>> result.exitval 1 """
if not funcy.is_list(cmd_args) and not funcy.is_tuple(cmd_args): cmd_args = shlex.split(cmd_args) logger.info('executing `{}`'.format(' '.join(cmd_args))) call_request = CallRequest(cmd_args, suppress_output=suppress_output) call_result = call_request.run() if call_result.exitval: logger.error('`{}` returned error code {}'.format(' '.join(cmd_args), call_result.exitval)) return call_result
<SYSTEM_TASK:> Call a setup.py command or list of commands <END_TASK> <USER_TASK:> Description: def setup(cmd_args, suppress_output=False): """ Call a setup.py command or list of commands >>> result = setup('--name', suppress_output=True) >>> result.exitval 0 >>> result = setup('notreal') >>> result.exitval 1 """
if not funcy.is_list(cmd_args) and not funcy.is_tuple(cmd_args): cmd_args = shlex.split(cmd_args) cmd_args = [sys.executable, 'setup.py'] + [x for x in cmd_args] return call(cmd_args, suppress_output=suppress_output)
<SYSTEM_TASK:> Loads data files and stores the output in the data attribute. <END_TASK> <USER_TASK:> Description: def load_data(self): """ Loads data files and stores the output in the data attribute. """
data = [] valid_dates = [] mrms_files = np.array(sorted(os.listdir(self.path + self.variable + "/"))) mrms_file_dates = np.array([m_file.split("_")[-2].split("-")[0] for m_file in mrms_files]) old_mrms_file = None file_obj = None for t in range(self.all_dates.shape[0]): file_index = np.where(mrms_file_dates == self.all_dates[t].strftime("%Y%m%d"))[0] if len(file_index) > 0: mrms_file = mrms_files[file_index][0] if mrms_file is not None: if file_obj is not None: file_obj.close() file_obj = Dataset(self.path + self.variable + "/" + mrms_file) #old_mrms_file = mrms_file if "time" in file_obj.variables.keys(): time_var = "time" else: time_var = "date" file_valid_dates = pd.DatetimeIndex(num2date(file_obj.variables[time_var][:], file_obj.variables[time_var].units)) else: file_valid_dates = pd.DatetimeIndex([]) time_index = np.where(file_valid_dates.values == self.all_dates.values[t])[0] if len(time_index) > 0: data.append(file_obj.variables[self.variable][time_index[0]]) valid_dates.append(self.all_dates[t]) if file_obj is not None: file_obj.close() self.data = np.array(data) self.data[self.data < 0] = 0 self.data[self.data > 150] = 150 self.valid_dates = pd.DatetimeIndex(valid_dates)
<SYSTEM_TASK:> Rescale your input data so that it ranges over integer values, which will perform better in the watershed. <END_TASK> <USER_TASK:> Description: def rescale_data(data, data_min, data_max, out_min=0.0, out_max=100.0): """ Rescale your input data so that it ranges over integer values, which will perform better in the watershed. Args: data: 2D or 3D ndarray being rescaled data_min: minimum value of input data for scaling purposes data_max: maximum value of input data for scaling purposes out_min: minimum value of scaled data out_max: maximum value of scaled data Returns: Linearly scaled ndarray """
return (out_max - out_min) / (data_max - data_min) * (data - data_min) + out_min
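For instance, mapping values in an assumed input range of [0, 75] onto [0, 100] (the numbers are illustrative only):

    import numpy as np

    data = np.array([0.0, 25.0, 50.0, 75.0])
    rescale_data(data, data_min=0.0, data_max=75.0, out_min=0.0, out_max=100.0)
    # -> array([  0.        ,  33.33333333,  66.66666667, 100.        ])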
<SYSTEM_TASK:> Labels input grid using enhanced watershed algorithm. <END_TASK> <USER_TASK:> Description: def label(self, input_grid): """ Labels input grid using enhanced watershed algorithm. Args: input_grid (numpy.ndarray): Grid to be labeled. Returns: Array of labeled pixels """
marked = self.find_local_maxima(input_grid) marked = np.where(marked >= 0, 1, 0) # splabel returns two things in a tuple: an array and an integer # assign the first thing (array) to markers markers = splabel(marked)[0] return markers
<SYSTEM_TASK:> Finds the local maxima in the inputGrid and perform region growing to identify objects. <END_TASK> <USER_TASK:> Description: def find_local_maxima(self, input_grid): """ Finds the local maxima in the inputGrid and perform region growing to identify objects. Args: input_grid: Raw input data. Returns: array with labeled objects. """
pixels, q_data = self.quantize(input_grid) centers = OrderedDict() for p in pixels.keys(): centers[p] = [] marked = np.ones(q_data.shape, dtype=int) * self.UNMARKED MIN_INFL = int(np.round(1 + 0.5 * np.sqrt(self.max_size))) MAX_INFL = 2 * MIN_INFL marked_so_far = [] # Find the maxima. These are high-values with enough clearance # around them. # Work from high to low bins. The pixels in the highest bin mark their # neighborhoods first. If you did it from low to high the lowest maxima # would mark their neighborhoods first and interfere with the identification of higher maxima. for b in sorted(pixels.keys(),reverse=True): # Square starts large with high intensity bins and gets smaller with low intensity bins. infl_dist = MIN_INFL + int(np.round(float(b) / self.max_bin * (MAX_INFL - MIN_INFL))) for p in pixels[b]: if marked[p] == self.UNMARKED: ok = False del marked_so_far[:] # Temporarily mark unmarked points in square around point (keep track of them in list marked_so_far). # If none of the points in square were marked already from a higher intensity center, # this counts as a new center and ok=True and points will remain marked. # Otherwise ok=False and marked points that were previously unmarked will be unmarked. for (i, j), v in np.ndenumerate(marked[p[0] - infl_dist:p[0] + infl_dist + 1, p[1] - infl_dist:p[1]+ infl_dist + 1]): if v == self.UNMARKED: ok = True marked[i - infl_dist + p[0],j - infl_dist + p[1]] = b marked_so_far.append((i - infl_dist + p[0],j - infl_dist + p[1])) else: # neighborhood already taken ok = False break # ok if point and surrounding square were not marked already. if ok: # highest point in its neighborhood centers[b].append(p) else: for m in marked_so_far: marked[m] = self.UNMARKED # Erase marks and start over. You have a list of centers now. marked[:, :] = self.UNMARKED deferred_from_last = [] deferred_to_next = [] # delta (int): maximum number of increments the cluster is allowed to range over. Larger d results in clusters over larger scales. for delta in range(0, self.delta + 1): # Work from high to low bins. for b in sorted(centers.keys(), reverse=True): bin_lower = b - delta deferred_from_last[:] = deferred_to_next[:] del deferred_to_next[:] foothills = [] n_centers = len(centers[b]) tot_centers = n_centers + len(deferred_from_last) for i in range(tot_centers): # done this way to minimize memory overhead of maintaining two lists if i < n_centers: center = centers[b][i] else: center = deferred_from_last[i - n_centers] if bin_lower < 0: bin_lower = 0 if marked[center] == self.UNMARKED: captured = self.set_maximum(q_data, marked, center, bin_lower, foothills) if not captured: # decrement to lower value to see if it'll get big enough deferred_to_next.append(center) else: pass # this is the last one for this bin self.remove_foothills(q_data, marked, b, bin_lower, centers, foothills) del deferred_from_last[:] del deferred_to_next[:] return marked
<SYSTEM_TASK:> Grow a region at a certain bin level and check if the region has reached the maximum size. <END_TASK> <USER_TASK:> Description: def set_maximum(self, q_data, marked, center, bin_lower, foothills): """ Grow a region at a certain bin level and check if the region has reached the maximum size. Args: q_data: Quantized data array marked: Array marking points that are objects center: Coordinates of the center pixel of the region being grown bin_lower: Intensity level of lower bin being evaluated foothills: List of points that are associated with a center but fall outside the size or intensity criteria Returns: True if the object is finished growing and False if the object should be grown again at the next threshold level. """
as_bin = [] # pixels to be included in peak as_glob = [] # pixels to be globbed up as part of foothills marked_so_far = [] # pixels that have already been marked will_be_considered_again = False as_bin.append(center) center_data = q_data[center] while len(as_bin) > 0: p = as_bin.pop(-1) # remove and return last pixel in as_bin if marked[p] != self.UNMARKED: # already processed continue marked[p] = q_data[center] marked_so_far.append(p) # check neighbors for index,val in np.ndenumerate(marked[p[0] - 1:p[0] + 2, p[1] - 1:p[1] + 2]): # is neighbor part of peak or part of mountain? if val == self.UNMARKED: pixel = (index[0] - 1 + p[0],index[1] - 1 + p[1]) p_data = q_data[pixel] if (not will_be_considered_again) and (p_data >= 0) and (p_data < center_data): will_be_considered_again = True if p_data >= bin_lower and (np.abs(center_data - p_data) <= self.delta): as_bin.append(pixel) # Do not check that this is the closest: this way, a narrow channel of globbed pixels form elif p_data >= 0: as_glob.append(pixel) if bin_lower == 0: will_be_considered_again = False big_enough = len(marked_so_far) >= self.max_size if big_enough: # remove lower values within region of influence foothills.append((center, as_glob)) elif will_be_considered_again: # remove the check if you want to ignore regions smaller than max_size for m in marked_so_far: marked[m] = self.UNMARKED del as_bin[:] del as_glob[:] del marked_so_far[:] return big_enough or (not will_be_considered_again)
<SYSTEM_TASK:> Mark points determined to be foothills as globbed, so that they are not included in <END_TASK> <USER_TASK:> Description: def remove_foothills(self, q_data, marked, bin_num, bin_lower, centers, foothills): """ Mark points determined to be foothills as globbed, so that they are not included in future searches. Also searches neighboring points to foothill points to determine if they should also be considered foothills. Args: q_data: Quantized data marked: Marked bin_num: Current bin being searched bin_lower: Next bin being searched centers: dictionary of local maxima considered to be object centers foothills: List of foothill points being removed. """
hills = [] for foot in foothills: center = foot[0] hills[:] = foot[1][:] # remove all foothills while len(hills) > 0: # mark this point pt = hills.pop(-1) marked[pt] = self.GLOBBED for s_index, val in np.ndenumerate(marked[pt[0]-1:pt[0]+2,pt[1]-1:pt[1]+2]): index = (s_index[0] - 1 + pt[0], s_index[1] - 1 + pt[1]) # is neighbor part of peak or part of mountain? if val == self.UNMARKED: # will let in even minor peaks if (q_data[index] >= 0) and \ (q_data[index] < bin_lower) and \ ((q_data[index] <= q_data[pt]) or self.is_closest(index, center, centers, bin_num)): hills.append(index) del foothills[:]
<SYSTEM_TASK:> Quantize a grid into discrete steps based on input parameters. <END_TASK> <USER_TASK:> Description: def quantize(self, input_grid): """ Quantize a grid into discrete steps based on input parameters. Args: input_grid: 2-d array of values Returns: Dictionary of value pointing to pixel locations, and quantized 2-d array of data """
pixels = {} for i in range(self.max_bin+1): pixels[i] = [] data = (np.array(input_grid, dtype=int) - self.min_thresh) / self.data_increment data[data < 0] = -1 data[data > self.max_bin] = self.max_bin good_points = np.where(data >= 0) for g in np.arange(good_points[0].shape[0]): pixels[data[(good_points[0][g], good_points[1][g])]].append((good_points[0][g], good_points[1][g])) return pixels, data
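A small worked example of the binning arithmetic performed by quantize, with invented parameters (integer division is used here for clarity): values below min_thresh map to -1 and values above the top bin are clipped to max_bin.

    import numpy as np

    min_thresh, data_increment, max_bin = 10, 5, 10   # made-up parameters
    grid = np.array([[3, 12, 37], [60, 75, 999]])
    bins = (grid.astype(int) - min_thresh) // data_increment
    bins[grid < min_thresh] = -1     # below threshold -> ignored
    bins[bins > max_bin] = max_bin   # clip to the top bin
    # bins == [[-1  0  5]
    #          [10 10 10]]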
<SYSTEM_TASK:> Load neighborhood probability forecasts. <END_TASK> <USER_TASK:> Description: def load_forecasts(self): """ Load neighborhood probability forecasts. """
run_date_str = self.run_date.strftime("%Y%m%d") forecast_file = self.forecast_path + "{0}/{1}_{2}_{3}_consensus_{0}.nc".format(run_date_str, self.ensemble_name, self.model_name, self.forecast_variable) print("Forecast file: " + forecast_file) forecast_data = Dataset(forecast_file) for size_threshold in self.size_thresholds: for smoothing_radius in self.smoothing_radii: for neighbor_radius in self.neighbor_radii: hour_var = "neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}".format(neighbor_radius, smoothing_radius, self.forecast_variable, float(size_threshold)) period_var = "neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}".format(self.end_hour - self.start_hour + 1, neighbor_radius, smoothing_radius, self.forecast_variable, float(size_threshold)) print("Loading forecasts {0} {1} {2} {3} {4}".format(self.run_date, self.model_name, self.forecast_variable, size_threshold, smoothing_radius)) if hour_var in forecast_data.variables.keys(): self.hourly_forecasts[hour_var] = forecast_data.variables[hour_var][:] if period_var in forecast_data.variables.keys(): self.period_forecasts[period_var] = forecast_data.variables[period_var][:] forecast_data.close()
<SYSTEM_TASK:> Calculates ROC curves and Reliability scores for each forecast hour. <END_TASK> <USER_TASK:> Description: def evaluate_hourly_forecasts(self): """ Calculates ROC curves and Reliability scores for each forecast hour. Returns: A pandas DataFrame containing forecast metadata as well as DistributedROC and Reliability objects. """
score_columns = ["Run_Date", "Forecast_Hour", "Ensemble Name", "Model_Name", "Forecast_Variable", "Neighbor_Radius", "Smoothing_Radius", "Size_Threshold", "ROC", "Reliability"] all_scores = pd.DataFrame(columns=score_columns) for h, hour in enumerate(range(self.start_hour, self.end_hour + 1)): for neighbor_radius in self.neighbor_radii: n_filter = disk(neighbor_radius) for s, size_threshold in enumerate(self.size_thresholds): print("Eval hourly forecast {0:02d} {1} {2} {3} {4:d} {5:d}".format(hour, self.model_name, self.forecast_variable, self.run_date, neighbor_radius, size_threshold)) hour_obs = fftconvolve(self.raw_obs[self.mrms_variable][h] >= self.obs_thresholds[s], n_filter, mode="same") hour_obs[hour_obs > 1] = 1 hour_obs[hour_obs < 1] = 0 if self.obs_mask: hour_obs = hour_obs[self.raw_obs[self.mask_variable][h] > 0] for smoothing_radius in self.smoothing_radii: hour_var = "neighbor_prob_r_{0:d}_s_{1:d}_{2}_{3:0.2f}".format(neighbor_radius, smoothing_radius, self.forecast_variable, size_threshold) if self.obs_mask: hour_forecast = self.hourly_forecasts[hour_var][h][self.raw_obs[self.mask_variable][h] > 0] else: hour_forecast = self.hourly_forecasts[hour_var][h] roc = DistributedROC(thresholds=self.probability_levels, obs_threshold=0.5) roc.update(hour_forecast, hour_obs) rel = DistributedReliability(thresholds=self.probability_levels, obs_threshold=0.5) rel.update(hour_forecast, hour_obs) row = [self.run_date, hour, self.ensemble_name, self.model_name, self.forecast_variable, neighbor_radius, smoothing_radius, size_threshold, roc, rel] all_scores.loc[hour_var + "_{0:d}".format(hour)] = row return all_scores
<SYSTEM_TASK:> Evaluates ROC and Reliability scores for forecasts over the full period from start hour to end hour <END_TASK> <USER_TASK:> Description: def evaluate_period_forecasts(self): """ Evaluates ROC and Reliability scores for forecasts over the full period from start hour to end hour Returns: A pandas DataFrame with full-period metadata and verification statistics """
score_columns = ["Run_Date", "Ensemble Name", "Model_Name", "Forecast_Variable", "Neighbor_Radius", "Smoothing_Radius", "Size_Threshold", "ROC", "Reliability"] all_scores = pd.DataFrame(columns=score_columns) if self.coordinate_file is not None: coord_mask = np.where((self.coordinates["lon"] >= self.lon_bounds[0]) & (self.coordinates["lon"] <= self.lon_bounds[1]) & (self.coordinates["lat"] >= self.lat_bounds[0]) & (self.coordinates["lat"] <= self.lat_bounds[1]) & (self.period_obs[self.mask_variable] > 0)) else: coord_mask = None for neighbor_radius in self.neighbor_radii: n_filter = disk(neighbor_radius) for s, size_threshold in enumerate(self.size_thresholds): period_obs = fftconvolve(self.period_obs[self.mrms_variable] >= self.obs_thresholds[s], n_filter, mode="same") period_obs[period_obs > 1] = 1 if self.obs_mask and self.coordinate_file is None: period_obs = period_obs[self.period_obs[self.mask_variable] > 0] elif self.obs_mask and self.coordinate_file is not None: period_obs = period_obs[coord_mask[0], coord_mask[1]] else: period_obs = period_obs.ravel() for smoothing_radius in self.smoothing_radii: print("Eval period forecast {0} {1} {2} {3} {4} {5}".format(self.model_name, self.forecast_variable, self.run_date, neighbor_radius, size_threshold, smoothing_radius)) period_var = "neighbor_prob_{0:d}-hour_r_{1:d}_s_{2:d}_{3}_{4:0.2f}".format(self.end_hour - self.start_hour + 1, neighbor_radius, smoothing_radius, self.forecast_variable, size_threshold) if self.obs_mask and self.coordinate_file is None: period_forecast = self.period_forecasts[period_var][self.period_obs[self.mask_variable] > 0] elif self.obs_mask and self.coordinate_file is not None: period_forecast = self.period_forecasts[period_var][coord_mask[0], coord_mask[1]] else: period_forecast = self.period_forecasts[period_var].ravel() roc = DistributedROC(thresholds=self.probability_levels, obs_threshold=0.5) roc.update(period_forecast, period_obs) rel = DistributedReliability(thresholds=self.probability_levels, obs_threshold=0.5) rel.update(period_forecast, period_obs) row = [self.run_date, self.ensemble_name, self.model_name, self.forecast_variable, neighbor_radius, smoothing_radius, size_threshold, roc, rel] all_scores.loc[period_var] = row return all_scores
<SYSTEM_TASK:> Main function explicitly called from the C++ code. <END_TASK> <USER_TASK:> Description: def bootstrap_main(args): """ Main function explicitly called from the C++ code. Return the main application object. """
version_info = sys.version_info if version_info.major != 3 or version_info.minor < 6: return None, "python36" main_fn = load_module_as_package("nionui_app.nionswift") if main_fn: return main_fn(["nionui_app.nionswift"] + args, {"pyqt": None}), None return None, "main"
<SYSTEM_TASK:> Reads forecasts from json files and merges them with the input data from the step csv files. <END_TASK> <USER_TASK:> Description: def merge_input_csv_forecast_json(input_csv_file, forecast_json_path, condition_models, dist_models): """ Reads forecasts from json files and merges them with the input data from the step csv files. Args: input_csv_file: Name of the input data csv file being processed forecast_json_path: Path to the forecast json files toplevel directory condition_models: List of models used to forecast hail or no hail dist_models: List of models used to forecast the hail size distribution Returns: """
try: run_date = input_csv_file[:-4].split("_")[-1] print(run_date) ens_member = "_".join(input_csv_file.split("/")[-1][:-4].split("_")[3:-1]) ens_name = input_csv_file.split("/")[-1].split("_")[2] input_data = pd.read_csv(input_csv_file, index_col="Step_ID") full_json_path = forecast_json_path + "{0}/{1}/".format(run_date, ens_member) track_ids = sorted(input_data["Track_ID"].unique()) model_pred_cols = [] condition_models_ns = [] dist_models_ns = [] gamma_params = ["Shape", "Location", "Scale"] for condition_model in condition_models: model_pred_cols.append(condition_model.replace(" ", "-") + "_Condition") condition_models_ns.append(condition_model.replace(" ", "-")) for dist_model in dist_models: dist_models_ns.append(dist_model.replace(" ", "-")) for param in gamma_params: model_pred_cols.append(dist_model.replace(" ", "-") + "_" + param) pred_data = pd.DataFrame(index=input_data.index, columns=model_pred_cols, dtype=float) for track_id in track_ids: track_id_num = track_id.split("_")[-1] json_filename = full_json_path + "{0}_{1}_{2}_model_track_{3}.json".format(ens_name, run_date, ens_member, track_id_num) json_file = open(json_filename) json_data = json.load(json_file) json_file.close() for s, step in enumerate(json_data["features"]): step_id = track_id + "_{0:02d}".format(s) for cond_model in condition_models_ns: pred_data.loc[step_id, cond_model + "_Condition"] = step["properties"]["condition_" + cond_model] for dist_model in dist_models_ns: pred_data.loc[step_id, [dist_model + "_" + p for p in gamma_params]] = step["properties"]["dist_" + dist_model] out_data = input_data.merge(pred_data, left_index=True, right_index=True) return out_data, ens_name, ens_member except Exception as e: print(traceback.format_exc()) raise e
<SYSTEM_TASK:> Called from item to indicate its data or metadata has changed. <END_TASK> <USER_TASK:> Description: def mark_data_dirty(self): """ Called from item to indicate its data or metadata has changed."""
self.__cache.set_cached_value_dirty(self.__display_item, self.__cache_property_name) self.__initialize_cache() self.__cached_value_dirty = True
<SYSTEM_TASK:> Recompute the data on a thread, if necessary. <END_TASK> <USER_TASK:> Description: def recompute_if_necessary(self, ui): """Recompute the data on a thread, if necessary. If the data has recently been computed, this call will be rescheduled for the future. If the data is currently being computed, it does nothing."""
self.__initialize_cache() if self.__cached_value_dirty: with self.__is_recomputing_lock: is_recomputing = self.__is_recomputing self.__is_recomputing = True if is_recomputing: pass else: # the only way to get here is if we're not currently computing # this has the side effect of limiting the number of threads that # are sleeping. def recompute(): try: if self.__recompute_thread_cancel.wait(0.01): # helps tests run faster return minimum_time = 0.5 current_time = time.time() if current_time < self.__cached_value_time + minimum_time: if self.__recompute_thread_cancel.wait(self.__cached_value_time + minimum_time - current_time): return self.recompute_data(ui) finally: self.__is_recomputing = False self.__recompute_thread = None with self.__is_recomputing_lock: self.__recompute_thread = threading.Thread(target=recompute) self.__recompute_thread.start()
<SYSTEM_TASK:> Compute the data associated with this processor. <END_TASK> <USER_TASK:> Description: def recompute_data(self, ui): """Compute the data associated with this processor. This method is thread safe and may take a long time to return. It should not be called from the UI thread. Upon return, the results will be calculated with the latest data available and the cache will not be marked dirty. """
self.__initialize_cache() with self.__recompute_lock: if self.__cached_value_dirty: try: calculated_data = self.get_calculated_data(ui) except Exception as e: import traceback traceback.print_exc() traceback.print_stack() raise self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data) self.__cached_value = calculated_data self.__cached_value_dirty = False self.__cached_value_time = time.time() else: calculated_data = None if calculated_data is None: calculated_data = self.get_default_data() if calculated_data is not None: # if the default is not None, treat is as valid cached data self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data) self.__cached_value = calculated_data self.__cached_value_dirty = False self.__cached_value_time = time.time() else: # otherwise remove everything from the cache self.__cache.remove_cached_value(self.__display_item, self.__cache_property_name) self.__cached_value = None self.__cached_value_dirty = None self.__cached_value_time = 0 self.__recompute_lock.release() if callable(self.on_thumbnail_updated): self.on_thumbnail_updated() self.__recompute_lock.acquire()
<SYSTEM_TASK:> Close the document controller. <END_TASK> <USER_TASK:> Description: def close(self): """Close the document controller. This method must be called to shut down the document controller. There are several paths by which it can be called, though. * User quits application via menu item. The menu item will call back to Application.exit which will close each document controller by calling this method. * User quits application using dock menu item. The Qt application will call aboutToClose in the document windows * User closes document window via menu item. * User closes document window via close box. The main concept of closing is that it is always triggered by the document window closing. This can be initiated from within Python by calling request_close on the document window. When the window closes, either by explicit request or by the user clicking a close box, it will invoke the about_to_close method on the document window. At this point, the window would still be open, so the about_to_close message can be used to tell the document controller to save anything it needs to save and prepare for closing. """
assert self.__closed == False self.__closed = True self.finish_periodic() # required to finish periodic operations during tests # dialogs for weak_dialog in self.__dialogs: dialog = weak_dialog() if dialog: try: dialog.request_close() except Exception as e: pass # menus self._file_menu = None self._edit_menu = None self._processing_menu = None self._view_menu = None self._window_menu = None self._help_menu = None self._library_menu = None self._processing_arithmetic_menu = None self._processing_reduce_menu = None self._processing_transform_menu = None self._processing_filter_menu = None self._processing_fourier_menu = None self._processing_graphics_menu = None self._processing_sequence_menu = None self._processing_redimension_menu = None self._display_type_menu = None if self.__workspace_controller: self.__workspace_controller.close() self.__workspace_controller = None self.__call_soon_event_listener.close() self.__call_soon_event_listener = None self.__filtered_display_items_model.close() self.__filtered_display_items_model = None self.filter_controller.close() self.filter_controller = None self.__display_items_model.close() self.__display_items_model = None # document_model may be shared between several DocumentControllers, so use reference counting # to determine when to close it. self.document_model.remove_ref() self.document_model = None self.did_close_event.fire(self) self.did_close_event = None super().close()
<SYSTEM_TASK:> Add a listener function and return listener token. Token can be closed or deleted to unlisten. <END_TASK> <USER_TASK:> Description: def add_periodic(self, interval: float, listener_fn): """Add a listener function and return listener token. Token can be closed or deleted to unlisten."""
class PeriodicListener: def __init__(self, interval: float, listener_fn): self.interval = interval self.__listener_fn = listener_fn # the call function is very performance critical; make it fast by using a property # instead of a logic statement each time. if callable(listener_fn): self.call = self.__listener_fn else: def void(*args, **kwargs): pass self.call = void self.next_scheduled_time = time.time() + interval def close(self): self.__listener_fn = None def void(*args, **kwargs): pass self.call = void listener = PeriodicListener(interval, listener_fn) def remove_listener(weak_listener): with self.__weak_periodic_listeners_mutex: self.__weak_periodic_listeners.remove(weak_listener) weak_listener = weakref.ref(listener, remove_listener) with self.__weak_periodic_listeners_mutex: self.__weak_periodic_listeners.append(weak_listener) return listener
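A hedged usage sketch of the listener token pattern above; the window object and the callback are hypothetical, and the real call site depends on how the surrounding class drives its periodic loop:

    # hypothetical usage: poll some status roughly twice per second
    def poll_status():
        print("checking status...")

    listener = window.add_periodic(0.5, poll_status)  # 'window' is assumed to provide add_periodic
    # ... later, stop listening by closing (or dropping) the returned token
    listener.close()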
<SYSTEM_TASK:> Update the data item model with a new container, filter, and sorting. <END_TASK> <USER_TASK:> Description: def __update_display_items_model(self, display_items_model: ListModel.FilteredListModel, data_group: typing.Optional[DataGroup.DataGroup], filter_id: typing.Optional[str]) -> None: """Update the data item model with a new container, filter, and sorting. This is called when the data item model is created or when the user changes the data group or sorting settings. """
with display_items_model.changes(): # change filter and sort together if data_group is not None: display_items_model.container = data_group display_items_model.filter = ListModel.Filter(True) display_items_model.sort_key = None display_items_model.filter_id = None elif filter_id == "latest-session": display_items_model.container = self.document_model display_items_model.filter = ListModel.EqFilter("session_id", self.document_model.session_id) display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = filter_id elif filter_id == "temporary": display_items_model.container = self.document_model display_items_model.filter = ListModel.NotEqFilter("category", "persistent") display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = filter_id elif filter_id == "none": # not intended to be used directly display_items_model.container = self.document_model display_items_model.filter = ListModel.Filter(False) display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = filter_id else: # "all" display_items_model.container = self.document_model display_items_model.filter = ListModel.EqFilter("category", "persistent") display_items_model.sort_key = DataItem.sort_by_date_key display_items_model.sort_reverse = True display_items_model.filter_id = None
<SYSTEM_TASK:> Return the selected display item. <END_TASK> <USER_TASK:> Description: def selected_display_item(self) -> typing.Optional[DisplayItem.DisplayItem]: """Return the selected display item. The selected display item is the display item that has keyboard focus in the data panel or a display panel. """
# first check for the [focused] data browser display_item = self.focused_display_item if not display_item: selected_display_panel = self.selected_display_panel display_item = selected_display_panel.display_item if selected_display_panel else None return display_item
<SYSTEM_TASK:> Get two sensible data sources, which may be the same. <END_TASK> <USER_TASK:> Description: def _get_two_data_sources(self): """Get two sensible data sources, which may be the same."""
selected_display_items = self.selected_display_items if len(selected_display_items) < 2: selected_display_items = list() display_item = self.selected_display_item if display_item: selected_display_items.append(display_item) if len(selected_display_items) == 1: display_item = selected_display_items[0] data_item = display_item.data_item if display_item else None if display_item and len(display_item.graphic_selection.indexes) == 2: index1 = display_item.graphic_selection.anchor_index index2 = list(display_item.graphic_selection.indexes.difference({index1}))[0] graphic1 = display_item.graphics[index1] graphic2 = display_item.graphics[index2] if data_item: if data_item.is_datum_1d and isinstance(graphic1, Graphics.IntervalGraphic) and isinstance(graphic2, Graphics.IntervalGraphic): crop_graphic1 = graphic1 crop_graphic2 = graphic2 elif data_item.is_datum_2d and isinstance(graphic1, Graphics.RectangleTypeGraphic) and isinstance(graphic2, Graphics.RectangleTypeGraphic): crop_graphic1 = graphic1 crop_graphic2 = graphic2 else: crop_graphic1 = self.__get_crop_graphic(display_item) crop_graphic2 = crop_graphic1 else: crop_graphic1 = self.__get_crop_graphic(display_item) crop_graphic2 = crop_graphic1 else: crop_graphic1 = self.__get_crop_graphic(display_item) crop_graphic2 = crop_graphic1 return (display_item, crop_graphic1), (display_item, crop_graphic2) if len(selected_display_items) == 2: display_item1 = selected_display_items[0] crop_graphic1 = self.__get_crop_graphic(display_item1) display_item2 = selected_display_items[1] crop_graphic2 = self.__get_crop_graphic(display_item2) return (display_item1, crop_graphic1), (display_item2, crop_graphic2) return None
<SYSTEM_TASK:> Calculate origin and size for canvas size, data shape, and image display parameters. <END_TASK> <USER_TASK:> Description: def calculate_origin_and_size(canvas_size, data_shape, image_canvas_mode, image_zoom, image_position) -> typing.Tuple[typing.Any, typing.Any]: """Calculate origin and size for canvas size, data shape, and image display parameters."""
if data_shape is None: return None, None if image_canvas_mode == "fill": data_shape = data_shape scale_h = float(data_shape[1]) / canvas_size[1] scale_v = float(data_shape[0]) / canvas_size[0] if scale_v < scale_h: image_canvas_size = (canvas_size[0], canvas_size[0] * data_shape[1] / data_shape[0]) else: image_canvas_size = (canvas_size[1] * data_shape[0] / data_shape[1], canvas_size[1]) image_canvas_origin = (canvas_size[0] * 0.5 - image_canvas_size[0] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5) elif image_canvas_mode == "fit": image_canvas_size = canvas_size image_canvas_origin = (0, 0) elif image_canvas_mode == "1:1": image_canvas_size = data_shape image_canvas_origin = (canvas_size[0] * 0.5 - image_canvas_size[0] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5) elif image_canvas_mode == "2:1": image_canvas_size = (data_shape[0] * 0.5, data_shape[1] * 0.5) image_canvas_origin = (canvas_size[0] * 0.5 - image_canvas_size[0] * 0.5, canvas_size[1] * 0.5 - image_canvas_size[1] * 0.5) else: image_canvas_size = (canvas_size[0] * image_zoom, canvas_size[1] * image_zoom) canvas_rect = Geometry.fit_to_size(((0, 0), image_canvas_size), data_shape) image_canvas_origin_y = (canvas_size[0] * 0.5) - image_position[0] * canvas_rect[1][0] - canvas_rect[0][0] image_canvas_origin_x = (canvas_size[1] * 0.5) - image_position[1] * canvas_rect[1][1] - canvas_rect[0][1] image_canvas_origin = (image_canvas_origin_y, image_canvas_origin_x) return image_canvas_origin, image_canvas_size
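For example, placing a 100 x 200 image on a 400 x 400 canvas (values invented): "1:1" centers the data at native resolution, while "fit" makes the image canvas match the full canvas size.

    canvas_size = (400, 400)   # height, width in pixels
    data_shape = (100, 200)    # rows, columns of the image data

    calculate_origin_and_size(canvas_size, data_shape, "1:1", 1.0, (0.5, 0.5))
    # -> origin (150.0, 100.0), size (100, 200): centered at native resolution

    calculate_origin_and_size(canvas_size, data_shape, "fit", 1.0, (0.5, 0.5))
    # -> origin (0, 0), size (400, 400): image canvas matches the canvas size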
<SYSTEM_TASK:> Migrate items from the storage system to the object context. <END_TASK> <USER_TASK:> Description: def auto_migrate_storage_system(*, persistent_storage_system=None, new_persistent_storage_system=None, data_item_uuids=None, deletions: typing.List[uuid.UUID] = None, utilized_deletions: typing.Set[uuid.UUID] = None, ignore_older_files: bool = True): """Migrate items from the storage system to the object context. Files in data_item_uuids have already been loaded and are ignored (not migrated). Files in deletions have been deleted in the object context and are ignored (not migrated) and then added to the utilized deletions list. Data items will have persistent_object_context set upon return, but the caller will need to call finish_reading on each of the data items. """
storage_handlers = persistent_storage_system.find_data_items() ReaderInfo = collections.namedtuple("ReaderInfo", ["properties", "changed_ref", "large_format", "storage_handler", "identifier"]) reader_info_list = list() for storage_handler in storage_handlers: try: large_format = isinstance(storage_handler, HDF5Handler.HDF5Handler) properties = Migration.transform_to_latest(storage_handler.read_properties()) reader_info = ReaderInfo(properties, [False], large_format, storage_handler, storage_handler.reference) reader_info_list.append(reader_info) except Exception as e: logging.debug("Error reading %s", storage_handler.reference) import traceback traceback.print_exc() traceback.print_stack() library_storage_properties = persistent_storage_system.library_storage_properties for deletion in copy.deepcopy(library_storage_properties.get("data_item_deletions", list())): if not deletion in deletions: deletions.append(deletion) preliminary_library_updates = dict() library_updates = dict() if not ignore_older_files: Migration.migrate_to_latest(reader_info_list, preliminary_library_updates) good_reader_info_list = list() count = len(reader_info_list) for index, reader_info in enumerate(reader_info_list): storage_handler = reader_info.storage_handler properties = reader_info.properties try: version = properties.get("version", 0) if version == DataItem.DataItem.writer_version: data_item_uuid = uuid.UUID(properties["uuid"]) if not data_item_uuid in data_item_uuids: if str(data_item_uuid) in deletions: utilized_deletions.add(data_item_uuid) else: auto_migrate_data_item(reader_info, persistent_storage_system, new_persistent_storage_system, index, count) good_reader_info_list.append(reader_info) data_item_uuids.add(data_item_uuid) library_update = preliminary_library_updates.get(data_item_uuid) if library_update: library_updates[data_item_uuid] = library_update except Exception as e: logging.debug("Error reading %s", storage_handler.reference) import traceback traceback.print_exc() traceback.print_stack() return good_reader_info_list, library_updates
<SYSTEM_TASK:> Load configuration from .pypirc file, cached to only run once <END_TASK> <USER_TASK:> Description: def from_pypirc(pypi_repository): """ Load configuration from .pypirc file, cached to only run once """
ret = {}
    pypirc_locations = PYPIRC_LOCATIONS
    for pypirc_path in pypirc_locations:
        pypirc_path = os.path.expanduser(pypirc_path)
        if os.path.isfile(pypirc_path):
            # SafeConfigParser is deprecated (removed in Python 3.12); ConfigParser is the modern equivalent.
            parser = configparser.ConfigParser()
            parser.read(pypirc_path)
            if 'distutils' not in parser.sections():
                continue
            if 'index-servers' not in parser.options('distutils'):
                continue
            if pypi_repository not in parser.get('distutils', 'index-servers'):
                continue
            if pypi_repository in parser.sections():
                for option in parser.options(pypi_repository):
                    ret[option] = parser.get(pypi_repository, option)
    if not ret:
        raise ConfigError(
            'repository does not appear to be configured in pypirc ({})'.format(pypi_repository) +
            ', remember that it needs an entry in [distutils] and its own section'
        )
    return ret
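For reference, a sketch of the kind of .pypirc content this function can read; the repository name and URL are illustrative, not from the source. With such a file in place, from_pypirc('internal') returns the options of the [internal] section.

# Hypothetical ~/.pypirc contents (repository name, URL, and credentials are made up):
example_pypirc = """\
[distutils]
index-servers =
    internal

[internal]
repository = https://pypi.example.com/simple/
username = deployer
password = s3cret
"""
# from_pypirc('internal') would then return:
# {'repository': 'https://pypi.example.com/simple/', 'username': 'deployer', 'password': 's3cret'}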
<SYSTEM_TASK:> Create a temporary pypirc file for interaction with twine <END_TASK> <USER_TASK:> Description: def pypirc_temp(index_url): """ Create a temporary pypirc file for interaction with twine """
pypirc_file = tempfile.NamedTemporaryFile(suffix='.pypirc', delete=False) print(pypirc_file.name) with open(pypirc_file.name, 'w') as fh: fh.write(PYPIRC_TEMPLATE.format(index_name=PYPIRC_TEMP_INDEX_NAME, index_url=index_url)) return pypirc_file.name
<SYSTEM_TASK:> Get a versioned interface matching the given version and ui_version. <END_TASK> <USER_TASK:> Description: def get_api(version: str, ui_version: str=None) -> API_1: """Get a versioned interface matching the given version and ui_version. version is a string in the form "1.0.2". """
ui_version = ui_version if ui_version else "~1.0" return _get_api_with_app(version, ui_version, ApplicationModule.app)
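A short, hedged sketch of obtaining the versioned API object; the attribute access at the end assumes the returned API_1 object exposes the library as a property, as the rest of this facade suggests.

api = get_api(version="~1.0", ui_version="~1.0")
library = api.library  # assumption: API_1 exposes the library object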
<SYSTEM_TASK:> Return the mask created by this graphic as extended data. <END_TASK> <USER_TASK:> Description: def mask_xdata_with_shape(self, shape: DataAndMetadata.ShapeType) -> DataAndMetadata.DataAndMetadata: """Return the mask created by this graphic as extended data. .. versionadded:: 1.0 Scriptable: Yes """
mask = self._graphic.get_mask(shape) return DataAndMetadata.DataAndMetadata.from_data(mask)
<SYSTEM_TASK:> Set the data. <END_TASK> <USER_TASK:> Description: def data(self, data: numpy.ndarray) -> None: """Set the data. :param data: A numpy ndarray. .. versionadded:: 1.0 Scriptable: Yes """
self.__data_item.set_data(numpy.copy(data))
<SYSTEM_TASK:> Return the extended data of this data item display. <END_TASK> <USER_TASK:> Description: def display_xdata(self) -> DataAndMetadata.DataAndMetadata: """Return the extended data of this data item display. Display data will always be 1d or 2d and either int, float, or RGB data type. .. versionadded:: 1.0 Scriptable: Yes """
display_data_channel = self.__display_item.display_data_channel return display_data_channel.get_calculated_display_values(True).display_data_and_metadata
<SYSTEM_TASK:> Set the dimensional calibrations. <END_TASK> <USER_TASK:> Description: def set_dimensional_calibrations(self, dimensional_calibrations: typing.List[CalibrationModule.Calibration]) -> None: """Set the dimensional calibrations. :param dimensional_calibrations: A list of calibrations, must match the dimensions of the data. .. versionadded:: 1.0 Scriptable: Yes """
self.__data_item.set_dimensional_calibrations(dimensional_calibrations)
<SYSTEM_TASK:> Return the graphics attached to this data item. <END_TASK> <USER_TASK:> Description: def graphics(self) -> typing.List[Graphic]: """Return the graphics attached to this data item. .. versionadded:: 1.0 Scriptable: Yes """
return [Graphic(graphic) for graphic in self.__display_item.graphics]
<SYSTEM_TASK:> Add a point graphic to the data item. <END_TASK> <USER_TASK:> Description: def add_point_region(self, y: float, x: float) -> Graphic: """Add a point graphic to the data item. :param x: The x coordinate, in relative units [0.0, 1.0] :param y: The y coordinate, in relative units [0.0, 1.0] :return: The :py:class:`nion.swift.Facade.Graphic` object that was added. .. versionadded:: 1.0 Scriptable: Yes """
graphic = Graphics.PointGraphic() graphic.position = Geometry.FloatPoint(y, x) self.__display_item.add_graphic(graphic) return Graphic(graphic)
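A hedged usage sketch: because coordinates are relative to the data shape, (0.5, 0.5) marks the center of the image regardless of its pixel dimensions; the label assignment assumes the returned Graphic exposes a label property.

graphic = data_item.add_point_region(0.5, 0.5)  # y first, then x
graphic.label = "center"                        # assumption: Graphic has a label property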
<SYSTEM_TASK:> Return the mask by combining any mask graphics on this data item as extended data. <END_TASK> <USER_TASK:> Description: def mask_xdata(self) -> DataAndMetadata.DataAndMetadata: """Return the mask by combining any mask graphics on this data item as extended data. .. versionadded:: 1.0 Scriptable: Yes """
display_data_channel = self.__display_item.display_data_channel shape = display_data_channel.display_data_shape mask = numpy.zeros(shape) for graphic in self.__display_item.graphics: if isinstance(graphic, (Graphics.SpotGraphic, Graphics.WedgeGraphic, Graphics.RingGraphic, Graphics.LatticeGraphic)): mask = numpy.logical_or(mask, graphic.get_mask(shape)) return DataAndMetadata.DataAndMetadata.from_data(mask)
<SYSTEM_TASK:> Return the data item associated with this display panel. <END_TASK> <USER_TASK:> Description: def data_item(self) -> DataItem: """Return the data item associated with this display panel. .. versionadded:: 1.0 Scriptable: Yes """
display_panel = self.__display_panel if not display_panel: return None data_item = display_panel.data_item return DataItem(data_item) if data_item else None
<SYSTEM_TASK:> Set the data item associated with this display panel. <END_TASK> <USER_TASK:> Description: def set_data_item(self, data_item: DataItem) -> None: """Set the data item associated with this display panel. :param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add. This will replace whatever data item, browser, or controller is currently in the display panel with the single data item. .. versionadded:: 1.0 Scriptable: Yes """
display_panel = self.__display_panel if display_panel: display_item = data_item._data_item.container.get_display_item_for_data_item(data_item._data_item) if data_item._data_item.container else None display_panel.set_display_panel_display_item(display_item)
<SYSTEM_TASK:> Add a data item to the group. <END_TASK> <USER_TASK:> Description: def add_data_item(self, data_item: DataItem) -> None: """Add a data item to the group. :param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add. .. versionadded:: 1.0 Scriptable: Yes """
display_item = data_item._data_item.container.get_display_item_for_data_item(data_item._data_item) if data_item._data_item.container else None if display_item: self.__data_group.append_display_item(display_item)
<SYSTEM_TASK:> Close the task. <END_TASK> <USER_TASK:> Description: def close(self) -> None: """Close the task. .. versionadded:: 1.0 This method must be called when the task is no longer needed. """
self.__data_channel_buffer.stop() self.__data_channel_buffer.close() self.__data_channel_buffer = None if not self.__was_playing: self.__hardware_source.stop_playing()
<SYSTEM_TASK:> Record data and return a list of data_and_metadata objects. <END_TASK> <USER_TASK:> Description: def record(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, timeout: float=None) -> typing.List[DataAndMetadata.DataAndMetadata]: """Record data and return a list of data_and_metadata objects. .. versionadded:: 1.0 :param frame_parameters: The frame parameters for the record. Pass None for defaults. :type frame_parameters: :py:class:`FrameParameters` :param channels_enabled: The enabled channels for the record. Pass None for defaults. :type channels_enabled: List of booleans. :param timeout: The timeout in seconds. Pass None to use default. :return: The list of data and metadata items that were read. :rtype: list of :py:class:`DataAndMetadata` """
if frame_parameters: self.__hardware_source.set_record_frame_parameters(self.__hardware_source.get_frame_parameters_from_dict(frame_parameters)) if channels_enabled is not None: for channel_index, channel_enabled in enumerate(channels_enabled): self.__hardware_source.set_channel_enabled(channel_index, channel_enabled) self.__hardware_source.start_recording() return self.__hardware_source.get_next_xdatas_to_finish(timeout)
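A minimal sketch of a synchronous record, assuming hardware_source is the facade object documented here; the frame parameter name and the numeric values are illustrative, not taken from the source.

frame_parameters = {"exposure_ms": 50}  # hypothetical parameter name
xdatas = hardware_source.record(frame_parameters=frame_parameters,
                                channels_enabled=[True, False],
                                timeout=10.0)
for xdata in xdatas:
    print(xdata.data_shape)  # each entry is a DataAndMetadata object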
<SYSTEM_TASK:> Create a record task for this hardware source. <END_TASK> <USER_TASK:> Description: def create_record_task(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None) -> RecordTask: """Create a record task for this hardware source. .. versionadded:: 1.0 :param frame_parameters: The frame parameters for the record. Pass None for defaults. :type frame_parameters: :py:class:`FrameParameters` :param channels_enabled: The enabled channels for the record. Pass None for defaults. :type channels_enabled: List of booleans. :return: The :py:class:`RecordTask` object. :rtype: :py:class:`RecordTask` Callers should call close on the returned task when finished. See :py:class:`RecordTask` for examples of how to use. """
return RecordTask(self.__hardware_source, frame_parameters, channels_enabled)
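A sketch of the recommended lifetime handling: create the task, use it, and always close it. The grab call is an assumption about the RecordTask interface, not confirmed by this excerpt.

task = hardware_source.create_record_task()
try:
    xdatas = task.grab()  # assumption: RecordTask exposes a grab-style method returning DataAndMetadata items
finally:
    task.close()          # required, per the close() documentation above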
<SYSTEM_TASK:> Grabs the next frame to finish and returns it as data and metadata. <END_TASK> <USER_TASK:> Description: def grab_next_to_finish(self, timeout: float=None) -> typing.List[DataAndMetadata.DataAndMetadata]: """Grabs the next frame to finish and returns it as data and metadata. .. versionadded:: 1.0 :param timeout: The timeout in seconds. Pass None to use default. :return: The list of data and metadata items that were read. :rtype: list of :py:class:`DataAndMetadata` If the view is not already started, it will be started automatically. Scriptable: Yes """
self.start_playing() return self.__hardware_source.get_next_xdatas_to_finish(timeout)
<SYSTEM_TASK:> Set the value of a control asynchronously. <END_TASK> <USER_TASK:> Description: def set_control_output(self, name: str, value: float, *, options: dict=None) -> None: """Set the value of a control asynchronously. :param name: The name of the control (string). :param value: The control value (float). :param options: A dict of custom options to pass to the instrument for setting the value. Options are: value_type: local, delta, output. output is default. confirm, confirm_tolerance_factor, confirm_timeout: confirm value gets set. inform: True to keep dependent control outputs constant by adjusting their internal values. False is default. Default value of confirm is False. Default confirm_tolerance_factor is 1.0. A value of 1.0 is the nominal tolerance for that control. Passing a higher tolerance factor (for example 1.5) will increase the permitted error margin and passing lower tolerance factor (for example 0.5) will decrease the permitted error margin and consequently make a timeout more likely. The tolerance factor value 0.0 is a special value which removes all checking and only waits for any change at all and then returns. Default confirm_timeout is 16.0 (seconds). Raises exception if control with name doesn't exist. Raises TimeoutException if confirm is True and timeout occurs. .. versionadded:: 1.0 Scriptable: Yes """
self.__instrument.set_control_output(name, value, options)
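An illustrative call using the options described in the docstring; the control name "C10" and the numeric values are hypothetical examples, not taken from the source.

instrument.set_control_output("C10", 5e-7, options={
    "value_type": "delta",            # apply the value as a relative change
    "confirm": True,                  # wait until the control reports the new value
    "confirm_tolerance_factor": 1.5,  # widen the acceptance window
    "confirm_timeout": 8.0,           # seconds before a TimeoutException is raised
})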
<SYSTEM_TASK:> Return the value of a float property. <END_TASK> <USER_TASK:> Description: def get_property_as_float(self, name: str) -> float: """Return the value of a float property. :return: The property value (float). Raises exception if property with name doesn't exist. .. versionadded:: 1.0 Scriptable: Yes """
return float(self.__instrument.get_property(name))
<SYSTEM_TASK:> Set the value of a float property. <END_TASK> <USER_TASK:> Description: def set_property_as_float(self, name: str, value: float) -> None: """Set the value of a float property. :param name: The name of the property (string). :param value: The property value (float). Raises exception if property with name doesn't exist. .. versionadded:: 1.0 Scriptable: Yes """
self.__instrument.set_property(name, float(value))
<SYSTEM_TASK:> Return the list of data items. <END_TASK> <USER_TASK:> Description: def data_items(self) -> typing.List[DataItem]: """Return the list of data items. :return: The list of :py:class:`nion.swift.Facade.DataItem` objects. .. versionadded:: 1.0 Scriptable: Yes """
return [DataItem(data_item) for data_item in self.__document_model.data_items]
<SYSTEM_TASK:> Return the list of display items. <END_TASK> <USER_TASK:> Description: def display_items(self) -> typing.List[Display]: """Return the list of display items. :return: The list of :py:class:`nion.swift.Facade.Display` objects. .. versionadded:: 1.0 Scriptable: Yes """
return [Display(display_item) for display_item in self.__document_model.display_items]
<SYSTEM_TASK:> Return the list of data items that are data sources for the data item. <END_TASK> <USER_TASK:> Description: def get_source_data_items(self, data_item: DataItem) -> typing.List[DataItem]: """Return the list of data items that are data sources for the data item. :return: The list of :py:class:`nion.swift.Facade.DataItem` objects. .. versionadded:: 1.0 Scriptable: Yes """
return [DataItem(data_item) for data_item in self._document_model.get_source_data_items(data_item._data_item)] if data_item else None
<SYSTEM_TASK:>
Return the dependent data items of the data item argument.
<END_TASK>
<USER_TASK:>
Description:
def get_dependent_data_items(self, data_item: DataItem) -> typing.List[DataItem]:
    """Return the dependent data items of the data item argument.
    :return: The list of :py:class:`nion.swift.Facade.DataItem` objects.
    .. versionadded:: 1.0
    Scriptable: Yes
    """
return [DataItem(data_item) for data_item in self._document_model.get_dependent_data_items(data_item._data_item)] if data_item else None
<SYSTEM_TASK:> Create an empty data item in the library. <END_TASK> <USER_TASK:> Description: def create_data_item(self, title: str=None) -> DataItem: """Create an empty data item in the library. :param title: The title of the data item (optional). :return: The new :py:class:`nion.swift.Facade.DataItem` object. :rtype: :py:class:`nion.swift.Facade.DataItem` .. versionadded:: 1.0 Scriptable: Yes """
data_item = DataItemModule.DataItem() data_item.ensure_data_source() if title is not None: data_item.title = title self.__document_model.append_data_item(data_item) return DataItem(data_item)
<SYSTEM_TASK:> Create a data item in the library from an ndarray. <END_TASK> <USER_TASK:> Description: def create_data_item_from_data(self, data: numpy.ndarray, title: str=None) -> DataItem: """Create a data item in the library from an ndarray. The data for the data item will be written to disk immediately and unloaded from memory. If you wish to delay writing to disk and keep using the data, create an empty data item and use the data item methods to modify the data. :param data: The data (ndarray). :param title: The title of the data item (optional). :return: The new :py:class:`nion.swift.Facade.DataItem` object. :rtype: :py:class:`nion.swift.Facade.DataItem` .. versionadded:: 1.0 Scriptable: Yes """
return self.create_data_item_from_data_and_metadata(DataAndMetadata.DataAndMetadata.from_data(data), title)
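A minimal sketch of creating a titled data item from an ndarray; the library name below assumes a facade Library object obtained elsewhere.

import numpy

data = numpy.random.randn(256, 256)
data_item = library.create_data_item_from_data(data, title="Random Image")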
<SYSTEM_TASK:> Create a data item in the library from a data and metadata object. <END_TASK> <USER_TASK:> Description: def create_data_item_from_data_and_metadata(self, data_and_metadata: DataAndMetadata.DataAndMetadata, title: str=None) -> DataItem: """Create a data item in the library from a data and metadata object. The data for the data item will be written to disk immediately and unloaded from memory. If you wish to delay writing to disk and keep using the data, create an empty data item and use the data item methods to modify the data. :param data_and_metadata: The data and metadata. :param title: The title of the data item (optional). :return: The new :py:class:`nion.swift.Facade.DataItem` object. :rtype: :py:class:`nion.swift.Facade.DataItem` .. versionadded:: 1.0 Scriptable: Yes """
data_item = DataItemModule.new_data_item(data_and_metadata) if title is not None: data_item.title = title self.__document_model.append_data_item(data_item) return DataItem(data_item)
<SYSTEM_TASK:> Snapshot a data item. Similar to copy but with a data snapshot. <END_TASK> <USER_TASK:> Description: def snapshot_data_item(self, data_item: DataItem) -> DataItem: """Snapshot a data item. Similar to copy but with a data snapshot. .. versionadded:: 1.0 Scriptable: No """
data_item = data_item._data_item.snapshot() self.__document_model.append_data_item(data_item) return DataItem(data_item)
<SYSTEM_TASK:> Get the data item with the given UUID. <END_TASK> <USER_TASK:> Description: def get_data_item_by_uuid(self, data_item_uuid: uuid_module.UUID) -> DataItem: """Get the data item with the given UUID. .. versionadded:: 1.0 Status: Provisional Scriptable: Yes """
data_item = self._document_model.get_data_item_by_uuid(data_item_uuid) return DataItem(data_item) if data_item else None
<SYSTEM_TASK:> Get the graphic with the given UUID. <END_TASK> <USER_TASK:> Description: def get_graphic_by_uuid(self, graphic_uuid: uuid_module.UUID) -> Graphic: """Get the graphic with the given UUID. .. versionadded:: 1.0 Status: Provisional Scriptable: Yes """
for display_item in self._document_model.display_items: for graphic in display_item.graphics: if graphic.uuid == graphic_uuid: return Graphic(graphic) return None
<SYSTEM_TASK:> Return whether the library value for the given key exists. <END_TASK> <USER_TASK:> Description: def has_library_value(self, key: str) -> bool: """Return whether the library value for the given key exists. Please consult the developer documentation for a list of valid keys. .. versionadded:: 1.0 Scriptable: Yes """
desc = Metadata.session_key_map.get(key) if desc is not None: field_id = desc['path'][-1] return bool(getattr(ApplicationData.get_session_metadata_model(), field_id, None)) return False
<SYSTEM_TASK:> Get the library value for the given key. <END_TASK> <USER_TASK:> Description: def get_library_value(self, key: str) -> typing.Any: """Get the library value for the given key. Please consult the developer documentation for a list of valid keys. .. versionadded:: 1.0 Scriptable: Yes """
desc = Metadata.session_key_map.get(key) if desc is not None: field_id = desc['path'][-1] return getattr(ApplicationData.get_session_metadata_model(), field_id) raise KeyError()
<SYSTEM_TASK:> Set the library value for the given key. <END_TASK> <USER_TASK:> Description: def set_library_value(self, key: str, value: typing.Any) -> None: """Set the library value for the given key. Please consult the developer documentation for a list of valid keys. .. versionadded:: 1.0 Scriptable: Yes """
desc = Metadata.session_key_map.get(key) if desc is not None: field_id = desc['path'][-1] setattr(ApplicationData.get_session_metadata_model(), field_id, value) return raise KeyError()
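A hedged round-trip sketch of the library value accessors; "stem.session.sample" is a plausible session key, but consult the documented key list before relying on it.

key = "stem.session.sample"  # assumption: a valid session key
library.set_library_value(key, "graphene flake")
if library.has_library_value(key):
    print(library.get_library_value(key))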
<SYSTEM_TASK:> Delete the library value for the given key. <END_TASK> <USER_TASK:> Description: def delete_library_value(self, key: str) -> None: """Delete the library value for the given key. Please consult the developer documentation for a list of valid keys. .. versionadded:: 1.0 Scriptable: Yes """
desc = Metadata.session_key_map.get(key) if desc is not None: field_id = desc['path'][-1] setattr(ApplicationData.get_session_metadata_model(), field_id, None) return raise KeyError()
<SYSTEM_TASK:> Return the list of display panels currently visible. <END_TASK> <USER_TASK:> Description: def all_display_panels(self) -> typing.List[DisplayPanel]: """Return the list of display panels currently visible. .. versionadded:: 1.0 Scriptable: Yes """
return [DisplayPanel(display_panel) for display_panel in self.__document_controller.workspace_controller.display_panels]
<SYSTEM_TASK:> Return display panel with the identifier. <END_TASK> <USER_TASK:> Description: def get_display_panel_by_id(self, identifier: str) -> DisplayPanel: """Return display panel with the identifier. .. versionadded:: 1.0 Status: Provisional Scriptable: Yes """
display_panel = next( (display_panel for display_panel in self.__document_controller.workspace_controller.display_panels if display_panel.identifier.lower() == identifier.lower()), None) return DisplayPanel(display_panel) if display_panel else None
<SYSTEM_TASK:>
Display a new data item and give it keyboard focus. Use an existing display if the data item is already displayed.
<END_TASK>
<USER_TASK:>
Description:
def display_data_item(self, data_item: DataItem, source_display_panel=None, source_data_item=None):
    """Display a new data item and give it keyboard focus. Use an existing display if the data item is already displayed.
    .. versionadded:: 1.0
    Status: Provisional
    Scriptable: Yes
    """
for display_panel in self.__document_controller.workspace_controller.display_panels: if display_panel.data_item == data_item._data_item: display_panel.request_focus() return DisplayPanel(display_panel) result_display_panel = self.__document_controller.next_result_display_panel() if result_display_panel: display_item = self.__document_controller.document_model.get_display_item_for_data_item(data_item._data_item) result_display_panel.set_display_panel_display_item(display_item) result_display_panel.request_focus() return DisplayPanel(result_display_panel) return None
<SYSTEM_TASK:> Create a data item in the library from data. <END_TASK> <USER_TASK:> Description: def create_data_item_from_data(self, data: numpy.ndarray, title: str=None) -> DataItem: """Create a data item in the library from data. .. versionadded:: 1.0 .. deprecated:: 1.1 Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data` instead. Scriptable: No """
return DataItem(self.__document_controller.add_data(data, title))
<SYSTEM_TASK:> Create a utility panel that can be attached to a window. <END_TASK> <USER_TASK:> Description: def create_panel(self, panel_delegate): """Create a utility panel that can be attached to a window. .. versionadded:: 1.0 Scriptable: No The panel_delegate should respond to the following: (property, read-only) panel_id (property, read-only) panel_name (property, read-only) panel_positions (a list from "top", "bottom", "left", "right", "all") (property, read-only) panel_position (from "top", "bottom", "left", "right", "none") (method, required) create_panel_widget(ui), returns a widget (method, optional) close() """
panel_id = panel_delegate.panel_id panel_name = panel_delegate.panel_name panel_positions = getattr(panel_delegate, "panel_positions", ["left", "right"]) panel_position = getattr(panel_delegate, "panel_position", "none") properties = getattr(panel_delegate, "panel_properties", None) workspace_manager = Workspace.WorkspaceManager() def create_facade_panel(document_controller, panel_id, properties): panel = Panel(document_controller, panel_id, properties) ui = UserInterface(self.__ui_version, document_controller.ui) document_controller = DocumentWindow(document_controller) panel.widget = panel_delegate.create_panel_widget(ui, document_controller)._widget return panel class PanelReference: def __init__(self): self.__panel_delegate = panel_delegate workspace_manager.register_panel(create_facade_panel, panel_id, panel_name, panel_positions, panel_position, properties) def __del__(self): self.close() def close(self): if self.__panel_delegate: panel_delegate_close_fn = getattr(self.__panel_delegate, "close", None) if panel_delegate_close_fn: panel_delegate_close_fn() workspace_manager.unregister_panel(panel_id) self.__panel_delegate = None return PanelReference()
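A skeletal delegate that satisfies the protocol listed in the docstring; the panel id, name, and the widget factory calls are illustrative assumptions about the facade UI, not verified against it.

class ExamplePanelDelegate:
    panel_id = "example-panel"        # hypothetical identifier
    panel_name = "Example"
    panel_positions = ["left", "right"]
    panel_position = "right"

    def create_panel_widget(self, ui, document_controller):
        column = ui.create_column_widget()            # assumption: facade UI offers a column widget
        column.add(ui.create_label_widget("Hello"))   # assumption: and a label widget factory
        return column

    def close(self):
        pass  # optional; release any delegate resources here

# panel_ref = api.create_panel(ExamplePanelDelegate())  # keep the reference; call panel_ref.close() when done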
<SYSTEM_TASK:> Return the hardware source API matching the hardware_source_id and version. <END_TASK> <USER_TASK:> Description: def get_hardware_source_by_id(self, hardware_source_id: str, version: str): """Return the hardware source API matching the hardware_source_id and version. .. versionadded:: 1.0 Scriptable: Yes """
actual_version = "1.0.0" if Utility.compare_versions(version, actual_version) > 0: raise NotImplementedError("Hardware API requested version %s is greater than %s." % (version, actual_version)) hardware_source = HardwareSourceModule.HardwareSourceManager().get_hardware_source_for_hardware_source_id(hardware_source_id) return HardwareSource(hardware_source) if hardware_source else None
<SYSTEM_TASK:> Pad a possibly non-square matrix to make it square. <END_TASK> <USER_TASK:> Description: def pad_matrix(self, matrix, pad_value=0): """ Pad a possibly non-square matrix to make it square. :Parameters: matrix : list of lists matrix to pad pad_value : int value to use to pad the matrix :rtype: list of lists :return: a new, possibly padded, matrix """
max_columns = 0 total_rows = len(matrix) for row in matrix: max_columns = max(max_columns, len(row)) total_rows = max(max_columns, total_rows) new_matrix = [] for row in matrix: row_len = len(row) new_row = row[:] if total_rows > row_len: # Row too short. Pad it. new_row += [pad_value] * (total_rows - row_len) new_matrix += [new_row] while len(new_matrix) < total_rows: new_matrix += [[pad_value] * total_rows] return new_matrix
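A worked example of the padding behavior: a 2x3 matrix becomes 3x3 so the assignment algorithm can operate on a square cost matrix (the instance and class names below are illustrative).

m = Munkres()                      # assumption: the surrounding class is named Munkres
matrix = [[1, 2, 3],
          [4, 5, 6]]
padded = m.pad_matrix(matrix, pad_value=0)
# padded == [[1, 2, 3],
#            [4, 5, 6],
#            [0, 0, 0]]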
<SYSTEM_TASK:>
For each row of the matrix, find the smallest element and subtract it from every element in its row.
<END_TASK>
<USER_TASK:>
Description:
def __step1(self):
    """
    For each row of the matrix, find the smallest element and
    subtract it from every element in its row. Go to Step 2.
    """
C = self.C n = self.n for i in range(n): minval = min(self.C[i]) # Find the minimum value for this row and subtract that minimum # from every element in the row. for j in range(n): self.C[i][j] -= minval return 2
<SYSTEM_TASK:>
Cover each column containing a starred zero. If K columns are covered, the starred zeros describe a complete set of unique assignments.
<END_TASK>
<USER_TASK:>
Description:
def __step3(self):
    """
    Cover each column containing a starred zero. If K columns are
    covered, the starred zeros describe a complete set of unique
    assignments. In this case, Go to DONE, otherwise, Go to Step 4.
    """
n = self.n count = 0 for i in range(n): for j in range(n): if self.marked[i][j] == 1: self.col_covered[j] = True count += 1 if count >= n: step = 7 # done else: step = 4 return step
<SYSTEM_TASK:>
Find a noncovered zero and prime it. If there is no starred zero in the row containing this primed zero, go to Step 5; otherwise, cover this row and uncover the column containing the starred zero.
<END_TASK>
<USER_TASK:>
Description:
def __step4(self):
    """
    Find a noncovered zero and prime it. If there is no starred zero
    in the row containing this primed zero, Go to Step 5. Otherwise,
    cover this row and uncover the column containing the starred
    zero. Continue in this manner until there are no uncovered zeros
    left. Save the smallest uncovered value and Go to Step 6.
    """
step = 0 done = False row = -1 col = -1 star_col = -1 while not done: (row, col) = self.__find_a_zero() if row < 0: done = True step = 6 else: self.marked[row][col] = 2 star_col = self.__find_star_in_row(row) if star_col >= 0: col = star_col self.row_covered[row] = True self.col_covered[col] = False else: done = True self.Z0_r = row self.Z0_c = col step = 5 return step
<SYSTEM_TASK:>
Add the value found in Step 4 to every element of each covered row, and subtract it from every element of each uncovered column.
<END_TASK>
<USER_TASK:>
Description:
def __step6(self):
    """
    Add the value found in Step 4 to every element of each covered
    row, and subtract it from every element of each uncovered column.
    Return to Step 4 without altering any stars, primes, or covered
    lines.
    """
minval = self.__find_smallest() for i in range(self.n): for j in range(self.n): if self.row_covered[i]: self.C[i][j] += minval if not self.col_covered[j]: self.C[i][j] -= minval return 4
<SYSTEM_TASK:> Find the smallest uncovered value in the matrix. <END_TASK> <USER_TASK:> Description: def __find_smallest(self): """Find the smallest uncovered value in the matrix."""
minval = sys.maxsize for i in range(self.n): for j in range(self.n): if (not self.row_covered[i]) and (not self.col_covered[j]): if minval > self.C[i][j]: minval = self.C[i][j] return minval
<SYSTEM_TASK:> Find the first uncovered element with value 0 <END_TASK> <USER_TASK:> Description: def __find_a_zero(self): """Find the first uncovered element with value 0"""
row = -1 col = -1 i = 0 n = self.n done = False while not done: j = 0 while True: if (self.C[i][j] == 0) and \ (not self.row_covered[i]) and \ (not self.col_covered[j]): row = i col = j done = True j += 1 if j >= n: break i += 1 if i >= n: done = True return (row, col)
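An end-to-end sketch of what these steps compute, assuming the surrounding class is the Munkres class from the published munkres package and that it exposes a compute() method driving steps 1 through 6; the cost matrix is illustrative.

cost_matrix = [[4, 1, 3],
               [2, 0, 5],
               [3, 2, 2]]
m = Munkres()
indexes = m.compute(cost_matrix)   # e.g. [(0, 1), (1, 0), (2, 2)]
total = sum(cost_matrix[r][c] for r, c in indexes)
# total == 5, the minimum-cost assignment (1 + 2 + 2)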