docstring | function | __index_level_0__
---|---|---|
log function
Args:
msg: the text message to be logged
|
def log(self, msg):
time = self.get_time()
msg = "{:s}\t {:s}".format(time, msg)
self.history.append(msg)
self.history_model.insertRow(0, QtGui.QStandardItem(msg))
| 850,359 |
Calls the plot function of the script, and redraws both plots
Args:
script: script to be plotted
|
def plot_script(self, script):
script.plot([self.matplotlibwidget_1.figure, self.matplotlibwidget_2.figure])
self.matplotlibwidget_1.draw()
self.matplotlibwidget_2.draw()
| 850,364 |
receives a progress signal emitted from the script thread and updates the gui
Args:
progress: progress value in percent emitted by the script thread
Returns:
|
def update_status(self, progress):
# minimum interval (in seconds) between gui updates; requests that come in faster are ignored
update_interval = 0.2
now = datetime.datetime.now()
if self._last_progress_update is not None and now - self._last_progress_update < datetime.timedelta(seconds=update_interval):
return
self._last_progress_update = now
self.progressBar.setValue(progress)
script = self.current_script
# Estimate remaining time if progress has been made
if progress:
remaining_time = str(datetime.timedelta(seconds=script.remaining_time.seconds))
self.lbl_time_estimate.setText('time remaining: {:s}'.format(remaining_time))
# only plot when the scripts or instruments tab is active
current_tab = str(self.tabWidget.tabText(self.tabWidget.currentIndex())).lower()
if current_tab in ['scripts', 'instruments']:
self.plot_script(script)
| 850,365 |
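The throttling in update_status (drop refresh requests that arrive faster than update_interval) is a reusable pattern. A minimal standalone sketch of the same idea; RateLimiter is a hypothetical helper name, not part of the source:

import datetime

class RateLimiter:
    """Drops events that arrive faster than a fixed interval."""
    def __init__(self, interval_seconds=0.2):
        self._interval = datetime.timedelta(seconds=interval_seconds)
        self._last = None

    def ready(self):
        now = datetime.datetime.now()
        if self._last is not None and now - self._last < self._interval:
            return False  # too soon; the caller should skip this update
        self._last = now
        return True

limiter = RateLimiter(0.2)
accepted = [p for p in range(1000) if limiter.ready()]  # most get dropped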
checks the plottype of the script and plots it accordingly
Args:
script: script to be plotted
|
def plot_script_validate(self, script):
script.plot_validate([self.matplotlibwidget_1.figure, self.matplotlibwidget_2.figure])
self.matplotlibwidget_1.draw()
self.matplotlibwidget_2.draw()
| 850,367 |
updates the script based on the information provided in item
Args:
item: B26QTreeItem that contains the new settings of the script
|
def update_script_from_item(self, item):
script, path_to_script, script_item = item.get_script()
# build dictionary
# get full information from script
dictator = list(script_item.to_dict().values())[0] # there is only one item in the dictionary
for instrument in list(script.instruments.keys()):
# update instrument
script.instruments[instrument]['settings'] = dictator[instrument]['settings']
# remove instrument
del dictator[instrument]
for sub_script_name in list(script.scripts.keys()):
sub_script_item = script_item.get_subscript(sub_script_name)
self.update_script_from_item(sub_script_item)
del dictator[sub_script_name]
script.update(dictator)
# update data folder path
script.data_path = self.gui_settings['data_folder']
| 850,369 |
fills a QTreeWidget with nested parameters, in future replace QTreeWidget with QTreeView and call fill_treeview
Args:
tree: QtWidgets.QTreeWidget
parameters: dictionary or Parameter object
Returns:
|
def fill_treewidget(self, tree, parameters):
tree.clear()
assert isinstance(parameters, (dict, Parameter))
for key, value in parameters.items():
if isinstance(value, Parameter):
B26QTreeItem(tree, key, value, parameters.valid_values[key], parameters.info[key])
else:
B26QTreeItem(tree, key, value, type(value), '')
| 850,370 |
fills a treeview with nested parameters
Args:
tree: QtWidgets.QTreeView
input_dict: dictionary or Parameter object
Returns:
|
def fill_treeview(self, tree, input_dict):
tree.model().removeRows(0, tree.model().rowCount())
def add_element(item, key, value):
child_name = QtGui.QStandardItem(key)
if isinstance(value, dict):
for key_child, value_child in value.items():
add_element(child_name, key_child, value_child)
item.appendRow(child_name)
else:
child_value = QtGui.QStandardItem(str(value))
item.appendRow([child_name, child_value])
for index, (key, value) in enumerate(input_dict.items()):
if isinstance(value, dict):
item = QtWidgets.QStandardItem(key)
for sub_key, sub_value in value.items():
add_element(item, sub_key, sub_value)
tree.model().appendRow(item)
elif isinstance(value, str):
item = QtGui.QStandardItem(key)
item_value = QtGui.QStandardItem(value)
item_value.setEditable(True)
item_value.setSelectable(True)
tree.model().appendRow([item, item_value])
| 850,371 |
refresh trees with current settings
Args:
tree: a QtWidgets.QTreeWidget object or a QtWidgets.QTreeView object
items: dictionary or Parameter items with which to populate the tree
|
def refresh_tree(self, tree, items):
if tree == self.tree_scripts or tree == self.tree_settings:
tree.itemChanged.disconnect()
self.fill_treewidget(tree, items)
tree.itemChanged.connect(lambda: self.update_parameters(tree))
elif tree == self.tree_gui_settings:
self.fill_treeview(tree, items)
| 850,373 |
fills the tree with data sets
Args:
tree: a QtWidgets.QTreeView object
data_sets: dictionary of the form {time_tag: script}
Returns:
|
def fill_dataset_tree(self, tree, data_sets):
tree.model().removeRows(0, tree.model().rowCount())
for time_tag, script in data_sets.items():
name = script.settings['tag']
script_type = script.name
item_time = QtGui.QStandardItem(str(time_tag))
item_name = QtGui.QStandardItem(str(name))
item_type = QtGui.QStandardItem(str(script_type))
item_time.setSelectable(False)
item_time.setEditable(False)
item_type.setSelectable(False)
item_type.setEditable(False)
tree.model().appendRow([item_time, item_name, item_type])
| 850,374 |
loads the configuration from filepath, falling back to the default configuration if the file is invalid
Args:
filepath:
|
def load_config(self, filepath=None):
# load config or default if invalid
def load_settings(filepath):
instruments_loaded = {}
probes_loaded = {}
scripts_loaded = {}
if filepath and os.path.isfile(filepath):
in_data = load_b26_file(filepath)
instruments = in_data['instruments'] if 'instruments' in in_data else {}
scripts = in_data['scripts'] if 'scripts' in in_data else {}
probes = in_data['probes'] if 'probes' in in_data else {}
try:
instruments_loaded, failed = Instrument.load_and_append(instruments)
if len(failed) > 0:
print(('WARNING! Following instruments could not be loaded: ', failed))
scripts_loaded, failed, instruments_loaded = Script.load_and_append(
script_dict=scripts,
instruments=instruments_loaded,
log_function=self.log,
data_path=self.gui_settings['data_folder'])
if len(failed) > 0:
print(('WARNING! Following scripts could not be loaded: ', failed))
probes_loaded, failed, instruments_loaded = Probe.load_and_append(
probe_dict=probes,
probes=probes_loaded,
instruments=instruments_loaded)
self.log('Successfully loaded from previous save.')
except ImportError:
self.log('Could not load instruments or scripts from file.')
self.log('Opening with blank GUI.')
return instruments_loaded, scripts_loaded, probes_loaded
config = None
try:
config = load_b26_file(filepath)
config_settings = config['gui_settings']
if config_settings['gui_settings'] != filepath:
print((
'WARNING path to settings file ({:s}) in config file is different from path of settings file ({:s})'.format(
config_settings['gui_settings'], filepath)))
config_settings['gui_settings'] = filepath
except Exception as e:
if filepath:
self.log('The filepath was invalid --- could not load settings. Loading blank GUI.')
config_settings = self._DEFAULT_CONFIG
for x in self._DEFAULT_CONFIG.keys():
if x in config_settings:
if not os.path.exists(config_settings[x]):
try:
os.makedirs(config_settings[x])
except Exception:
config_settings[x] = self._DEFAULT_CONFIG[x]
os.makedirs(config_settings[x])
print('WARNING: failed validating or creating path: set to default path {:s}'.format(config_settings[x]))
else:
config_settings[x] = self._DEFAULT_CONFIG[x]
os.makedirs(config_settings[x])
print(('WARNING: path {:s} not specified set to default {:s}'.format(x, config_settings[x])))
# check if file_name is a valid filename
if filepath is not None and os.path.exists(os.path.dirname(filepath)):
config_settings['gui_settings'] = filepath
self.gui_settings = config_settings
if config:
self.gui_settings_hidden = config['gui_settings_hidden']
else:
self.gui_settings_hidden['script_source_folder'] = ''
self.instruments, self.scripts, self.probes = load_settings(filepath)
self.refresh_tree(self.tree_gui_settings, self.gui_settings)
self.refresh_tree(self.tree_scripts, self.scripts)
self.refresh_tree(self.tree_settings, self.instruments)
self._hide_parameters(filepath)
| 850,375 |
hide the parameters that had been hidden
Args:
file_name: config file that has the information about which parameters are hidden
|
def _hide_parameters(self, file_name):
try:
in_data = load_b26_file(file_name)
except Exception:
in_data = {}
def set_item_visible(item, is_visible):
if isinstance(is_visible, dict):
for child_id in range(item.childCount()):
child = item.child(child_id)
if child.name in is_visible:
set_item_visible(child, is_visible[child.name])
else:
item.visible = is_visible
if "scripts_hidden_parameters" in in_data:
# consistency check
if len(list(in_data["scripts_hidden_parameters"].keys())) == self.tree_scripts.topLevelItemCount():
for index in range(self.tree_scripts.topLevelItemCount()):
item = self.tree_scripts.topLevelItem(index)
# if item.name in in_data["scripts_hidden_parameters"]:
set_item_visible(item, in_data["scripts_hidden_parameters"][item.name])
else:
print('WARNING: settings for hiding parameters don\'t seem to match other settings')
| 850,376 |
saves gui configuration to out_file_name
Args:
filepath: name of file
|
def save_config(self, filepath):
def get_hidden_parameter(item):
num_sub_elements = item.childCount()
if num_sub_elements == 0:
dictator = {item.name : item.visible}
else:
dictator = {item.name:{}}
for child_id in range(num_sub_elements):
dictator[item.name].update(get_hidden_parameter(item.child(child_id)))
return dictator
try:
filepath = str(filepath)
if not os.path.exists(os.path.dirname(filepath)):
os.makedirs(os.path.dirname(filepath))
# build a dictionary for the configuration of the hidden parameters
dictator = {}
for index in range(self.tree_scripts.topLevelItemCount()):
script_item = self.tree_scripts.topLevelItem(index)
dictator.update(get_hidden_parameter(script_item))
dictator = {"gui_settings": self.gui_settings, "gui_settings_hidden": self.gui_settings_hidden, "scripts_hidden_parameters":dictator}
# update the internal dictionaries from the trees in the gui
for index in range(self.tree_scripts.topLevelItemCount()):
script_item = self.tree_scripts.topLevelItem(index)
self.update_script_from_item(script_item)
dictator.update({'instruments': {}, 'scripts': {}, 'probes': {}})
for instrument in self.instruments.values():
dictator['instruments'].update(instrument.to_dict())
for script in self.scripts.values():
dictator['scripts'].update(script.to_dict())
for instrument, probe_dict in self.probes.items():
dictator['probes'].update({instrument: ','.join(list(probe_dict.keys()))})
with open(filepath, 'w') as outfile:
json.dump(dictator, outfile, indent=4)
save_config_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'save_config.json'))
if os.path.isfile(save_config_path) and os.access(save_config_path, os.R_OK):
with open(save_config_path, 'w') as outfile:
json.dump({'last_save_path': filepath}, outfile, indent=4)
else:
with io.open(save_config_path, 'w') as save_config_file:
save_config_file.write(json.dumps({'last_save_path': filepath}))
self.log('Saved GUI configuration (location: {0})'.format(filepath))
except Exception:
msg = QtWidgets.QMessageBox()
msg.setText("Saving failed. Please use 'save as' to define a valid path for the gui.")
msg.exec_()
| 850,377 |
saves the current data sets into the folder out_file_name
Args:
out_file_name: target folder for the .b26s files
|
def save_dataset(self, out_file_name):
for time_tag, script in self.data_sets.items():
script.save(os.path.join(out_file_name, '{:s}.b26s'.format(time_tag)))
| 850,378 |
If there is not currently a selected NV within self.settings[patch_size] of pt, adds it to the selected list. If
there is, removes that point from the selected list.
Args:
pt: the point to add or remove from the selected list
Poststate: updates selected list
|
def toggle_NV(self, pt):
if not self.data['nv_locations']:  # if the list is empty, this is the first point
self.data['nv_locations'].append(pt)
self.data['image_data'] = None # clear image data
else:
# use KDTree to find NV closest to mouse click
tree = scipy.spatial.KDTree(self.data['nv_locations'])
#does a search with k=1, that is a search for the nearest neighbor, within distance_upper_bound
d, i = tree.query(pt,k = 1, distance_upper_bound = self.settings['patch_size'])
# removes NV if previously selected
if d != np.inf:
self.data['nv_locations'].pop(i)
# adds NV if not previously selected
else:
self.data['nv_locations'].append(pt)
# randomize
if self.settings['randomize']:
self.log('warning! randomize not available when manually selecting points')
# if type is not free we calculate the total points of locations from the first selected points
if self.settings['type'] == 'square' and len(self.data['nv_locations'])>1:
# here we create a rectangular grid, where points a and b define the top-left and bottom-right corners of the rectangle
Nx, Ny = self.settings['Nx'], self.settings['Ny']
pta = self.data['nv_locations'][0]
ptb = self.data['nv_locations'][1]
tmp = np.array([[[pta[0] + 1.0*i*(ptb[0]-pta[0])/(Nx-1), pta[1] + 1.0*j*(ptb[1]-pta[1])/(Ny-1)] for i in range(Nx)] for j in range(Ny)])
nv_pts = np.reshape(tmp, (Nx * Ny, 2))
# randomize
if self.settings['randomize']:
random.shuffle(nv_pts) # shuffles in place
self.data['nv_locations'] = nv_pts
self.stop()
elif self.settings['type'] == 'line' and len(self.data['nv_locations'])>1:
# here we create a straight line between points a and b
N = self.settings['Nx']
pta = self.data['nv_locations'][0]
ptb = self.data['nv_locations'][1]
nv_pts = [np.array([pta[0] + 1.0*i*(ptb[0]-pta[0])/(N-1), pta[1] + 1.0*i*(ptb[1]-pta[1])/(N-1)]) for i in range(N)]
# randomize
if self.settings['randomize']:
random.shuffle(nv_pts) # shuffles in place
self.data['nv_locations'] = nv_pts
self.stop()
elif self.settings['type'] == 'ring' and len(self.data['nv_locations'])>1:
# here we create a circular grid, where points a and b define the center and the outermost ring
Nx, Ny = self.settings['Nx'], self.settings['Ny']
pt_center = self.data['nv_locations'][0] # center
pt_outer = self.data['nv_locations'][1] # outermost ring
# radius of outermost ring:
rmax = np.sqrt((pt_center[0] - pt_outer[0]) ** 2 + (pt_center[1] - pt_outer[1]) ** 2)
# angles
angles = np.linspace(0, 2 * np.pi, Nx+1)[0:-1]
# create points on rings
nv_pts = []
for r in np.linspace(rmax, 0, Ny + 1)[0:-1]:
for theta in angles:
nv_pts += [[r * np.sin(theta)+pt_center[0], r * np.cos(theta)+pt_center[1]]]
# randomize
if self.settings['randomize']:
coarray = list(zip(nv_pts, np.tile(angles, Ny)))  # angles repeat once per ring; tile so zip does not truncate
random.shuffle(coarray) # shuffles in place
nv_pts, angles = zip(*coarray)
self.data['nv_locations'] = np.array(nv_pts)
self.data['angles'] = np.array(angles)* 180 / np.pi
self.data['ring_data'] = [pt_center, pt_outer]
self.stop()
elif self.settings['type'] == 'arc' and len(self.data['nv_locations']) > 3:
# here we create an arc grid, where the selected points define the center, radius, direction, and end angle
Nx, Ny = self.settings['Nx'], self.settings['Ny']
pt_center = self.data['nv_locations'][0] # center
pt_start = self.data['nv_locations'][1] # arc point one (radius)
pt_dir = self.data['nv_locations'][2] # arc point two (direction)
pt_end = self.data['nv_locations'][3] # arc point three (angle)
# radius of outermost ring:
rmax = np.sqrt((pt_center[0] - pt_start[0]) ** 2 + (pt_center[1] - pt_start[1]) ** 2)
angle_start = np.arctan((pt_start[1] - pt_center[1]) / (pt_start[0] - pt_center[0]))
# arctan always returns between -pi/2 and pi/2, so adjust to allow full range of angles
if ((pt_start[0] - pt_center[0]) < 0):
angle_start += np.pi
angle_end = np.arctan((pt_end[1] - pt_center[1]) / (pt_end[0] - pt_center[0]))
# arctan always returns between -pi/2 and pi/2, so adjust to allow full range of angles
if ((pt_end[0] - pt_center[0]) < 0):
angle_end += np.pi
if pt_dir[0] < pt_start[0]:
# counter-clockwise: invert the order of the angles
angle_start, angle_end = angle_end, angle_start
if angle_start > angle_end:
# make sure that start is the smaller
# (e.g. angle_start = 180 deg and angle_end = 10 deg, we want to go from 180 to 370 deg)
angle_end += 2 * np.pi
# create points on arcs
nv_pts = []
for r in np.linspace(rmax, 0, Ny + 1)[0:-1]:
for theta in np.linspace(angle_start, angle_end, Nx, endpoint=True):
nv_pts += [[r * np.cos(theta) + pt_center[0], r * np.sin(theta) + pt_center[1]]]
# randomize
if self.settings['randomize']:
coarray = list(zip(nv_pts, np.tile(np.linspace(angle_start, angle_end, Nx, endpoint=True), Ny)))  # tile so zip does not truncate
random.shuffle(coarray) # shuffles in place
nv_pts, angles = zip(*coarray)
else:
angles = np.linspace(angle_start, angle_end, Nx, endpoint=True)
self.data['nv_locations'] = np.array(nv_pts)
self.data['arc_data'] = [pt_center, pt_start, pt_end]
self.data['angles'] = np.array(angles) * 180 / np.pi
self.stop()
| 850,479 |
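The 'square' branch of toggle_NV interpolates an Nx-by-Ny grid between the first two selected points. An equivalent vectorized sketch; make_grid is a hypothetical standalone helper, not part of the class:

import numpy as np

def make_grid(pt_a, pt_b, nx, ny):
    # pt_a and pt_b are the two selected corner points
    xs = np.linspace(pt_a[0], pt_b[0], nx)
    ys = np.linspace(pt_a[1], pt_b[1], ny)
    xx, yy = np.meshgrid(xs, ys)  # each of shape (ny, nx)
    # same point ordering as the nested list comprehension above
    return np.column_stack([xx.ravel(), yy.ravel()])  # shape (nx*ny, 2)

pts = make_grid([0.0, 0.0], [1.0, 1.0], 3, 3)  # 9 evenly spaced points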
Compress D-inf flow direction to D8 direction with weight, following ArcGIS D8 codes.
Args:
angle: D-inf flow direction angle
nodata: NoData value
Returns:
1. Updated Dinf values
2. Compressed flow direction follows ArcGIS D8 codes rule
3. Weight of the first direction
|
def compress_dinf(angle, nodata):
if MathClass.floatequal(angle, nodata):
return DEFAULT_NODATA, DEFAULT_NODATA, DEFAULT_NODATA
taud, d = DinfUtil.check_orthogonal(angle)
if d != -1:
return taud, d, 1
if angle < FlowModelConst.ne:
a1 = angle
d = 129 # 1+128
elif angle < FlowModelConst.n:
a1 = angle - FlowModelConst.ne
d = 192 # 128+64
elif angle < FlowModelConst.nw:
a1 = angle - FlowModelConst.n
d = 96 # 64+32
elif angle < FlowModelConst.w:
a1 = angle - FlowModelConst.nw
d = 48 # 32+16
elif angle < FlowModelConst.sw:
a1 = angle - FlowModelConst.w
d = 24 # 16+8
elif angle < FlowModelConst.s:
a1 = angle - FlowModelConst.sw
d = 12 # 8+4
elif angle < FlowModelConst.se:
a1 = angle - FlowModelConst.s
d = 6 # 4+2
else:
a1 = angle - FlowModelConst.se
d = 3 # 2+1
return angle, d, a1 / PI * 4.0
| 850,573 |
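Each compound code returned by compress_dinf is the sum of two adjacent ArcGIS D8 powers of two (1=E, 2=SE, 4=S, 8=SW, 16=W, 32=NW, 64=N, 128=NE). A small sketch that splits a compound code back into its two component directions; split_d8_pair is a hypothetical helper:

def split_d8_pair(code):
    """Split a compound D8 code such as 129 (= 1 + 128) into its set bits."""
    return tuple(1 << b for b in range(8) if code & (1 << b))

print(split_d8_pair(129))  # (1, 128): flow shared between E and NE
print(split_d8_pair(48))   # (16, 32): flow shared between W and NW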
Output compressed Dinf flow direction and weight to raster file
Args:
dinfflowang: Dinf flow direction raster file
compdinffile: Output compressed D8 flow code raster file
weightfile: Output raster file with the weight of the first direction
|
def output_compressed_dinf(dinfflowang, compdinffile, weightfile):
dinf_r = RasterUtilClass.read_raster(dinfflowang)
data = dinf_r.data
xsize = dinf_r.nCols
ysize = dinf_r.nRows
nodata_value = dinf_r.noDataValue
cal_dir_code = frompyfunc(DinfUtil.compress_dinf, 2, 3)
updated_angle, dir_code, weight = cal_dir_code(data, nodata_value)
RasterUtilClass.write_gtiff_file(dinfflowang, ysize, xsize, updated_angle,
dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Float32)
RasterUtilClass.write_gtiff_file(compdinffile, ysize, xsize, dir_code,
dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Int16)
RasterUtilClass.write_gtiff_file(weightfile, ysize, xsize, weight,
dinf_r.geotrans, dinf_r.srs, DEFAULT_NODATA, GDT_Float32)
| 850,574 |
Get the downslope directions of a D-inf direction value
Args:
a: Dinf value
Returns:
downslope directions
|
def dinf_downslope_direction(a):
taud, d = DinfUtil.check_orthogonal(a)
if d != -1:
down = [d]
return down
else:
if a < FlowModelConst.ne: # 129 = 1+128
down = [1, 2]
elif a < FlowModelConst.n: # 192 = 128+64
down = [2, 3]
elif a < FlowModelConst.nw: # 96 = 64+32
down = [3, 4]
elif a < FlowModelConst.w: # 48 = 32+16
down = [4, 5]
elif a < FlowModelConst.sw: # 24 = 16+8
down = [5, 6]
elif a < FlowModelConst.s: # 12 = 8+4
down = [6, 7]
elif a < FlowModelConst.se: # 6 = 4+2
down = [7, 8]
else: # 3 = 2+1
down = [8, 1]
return down
| 850,575 |
Find downslope coordinates for D-inf of TauDEM
Args:
dinfdir_value: dinf direction value
i: current row
j: current col
Returns:
downstream (row, col)s
|
def downstream_index_dinf(dinfdir_value, i, j):
down_dirs = DinfUtil.dinf_downslope_direction(dinfdir_value)
down_coors = []
for dir_code in down_dirs:
row, col = D8Util.downstream_index(dir_code, i, j)
down_coors.append([row, col])
return down_coors
| 850,576 |
Eliminate reach with zero length and return the reach ID map.
Args:
streamnet_file: original stream net ESRI shapefile
output_reach_file: serialized stream net, ESRI shapefile
Returns:
id pairs {origin: newly assigned}
|
def serialize_streamnet(streamnet_file, output_reach_file):
FileClass.copy_files(streamnet_file, output_reach_file)
ds_reach = ogr_Open(output_reach_file, update=True)
layer_reach = ds_reach.GetLayer(0)
layer_def = layer_reach.GetLayerDefn()
i_link = layer_def.GetFieldIndex(FLD_LINKNO)
i_link_downslope = layer_def.GetFieldIndex(FLD_DSLINKNO)
i_len = layer_def.GetFieldIndex(REACH_LENGTH)
old_id_list = []
# there are some reaches with zero length.
# this program will remove these zero-length reaches
# output_dic is used to store the downstream reaches of these zero-length
# reaches
output_dic = {}
ft = layer_reach.GetNextFeature()
while ft is not None:
link_id = ft.GetFieldAsInteger(i_link)
reach_len = ft.GetFieldAsDouble(i_len)
if link_id not in old_id_list:
if reach_len < DELTA:
downstream_id = ft.GetFieldAsInteger(i_link_downslope)
output_dic[link_id] = downstream_id
else:
old_id_list.append(link_id)
ft = layer_reach.GetNextFeature()
old_id_list.sort()
id_map = {}
for i, old_id in enumerate(old_id_list):
id_map[old_id] = i + 1
# print(id_map)
# change old ID to new ID
layer_reach.ResetReading()
ft = layer_reach.GetNextFeature()
while ft is not None:
link_id = ft.GetFieldAsInteger(i_link)
if link_id not in id_map:
layer_reach.DeleteFeature(ft.GetFID())
ft = layer_reach.GetNextFeature()
continue
ds_id = ft.GetFieldAsInteger(i_link_downslope)
# apply the mapping twice to skip chains of up to two zero-length reaches
ds_id = output_dic.get(ds_id, ds_id)
ds_id = output_dic.get(ds_id, ds_id)
ft.SetField(FLD_LINKNO, id_map[link_id])
if ds_id in id_map:
ft.SetField(FLD_DSLINKNO, id_map[ds_id])
else:
# print(ds_id)
ft.SetField(FLD_DSLINKNO, -1)
layer_reach.SetFeature(ft)
ft = layer_reach.GetNextFeature()
ds_reach.ExecuteSQL(str('REPACK reach'))
layer_reach.SyncToDisk()
ds_reach.Destroy()
del ds_reach
return id_map
| 850,577 |
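The two consecutive output_dic.get() calls in serialize_streamnet skip chains of up to two zero-length reaches. The general pattern, as a hypothetical loop-based sketch:

def resolve_downstream(ds_id, zero_length_map, max_hops=1000):
    """Follow the map until the target reach is not zero-length."""
    hops = 0
    while ds_id in zero_length_map and hops < max_hops:
        ds_id = zero_length_map[ds_id]
        hops += 1
    return ds_id

zero_length = {5: 7, 7: 9}                 # reaches 5 and 7 have zero length
print(resolve_downstream(5, zero_length))  # 9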
Assign stream link ID according to subbasin ID.
Args:
stream_file: input stream raster file
subbasin_file: subbasin raster file
out_stream_file: output stream raster file
|
def assign_stream_id_raster(stream_file, subbasin_file, out_stream_file):
stream_raster = RasterUtilClass.read_raster(stream_file)
stream_data = stream_raster.data
nrows = stream_raster.nRows
ncols = stream_raster.nCols
nodata = stream_raster.noDataValue
subbasin_data = RasterUtilClass.read_raster(subbasin_file).data
nodata_array = ones((nrows, ncols)) * DEFAULT_NODATA
newstream_data = where((stream_data > 0) & (stream_data != nodata),
subbasin_data, nodata_array)
RasterUtilClass.write_gtiff_file(out_stream_file, nrows, ncols, newstream_data,
stream_raster.geotrans, stream_raster.srs,
DEFAULT_NODATA, GDT_Int16)
| 850,578 |
updates the internal dictionary and sends changed values to instrument
Args:
settings: parameters to be set
# maybe in the future:
# Returns: boolean that is true if update successful
|
def update(self, settings):
Instrument.update(self, settings)
for key, value in settings.items():
if key == 'test1':
self._internal_state = value
| 850,580 |
requests a value from the instrument and returns it
Args:
key: name of requested value
Returns: the value read from the instrument
|
def read_probes(self, key):
assert key in list(self._PROBES.keys())
import random
if key == 'value1':
value = random.random()
elif key == 'value2':
value = self.settings['output probe2']
elif key == 'internal':
value = self._internal_state
elif key == 'deep_internal':
value = self._internal_state_deep
return value
| 850,581 |
requests a value from the instrument and returns it
Args:
key: name of requested value
Returns: the value read from the instrument
|
def read_probes(self, key):
assert key in list(self._PROBES.keys())
if key == 'output':
value = self._output
return value
| 850,586 |
if value is valid sets the data to value
Args:
column: column of item
role: role of item (see Qt doc)
value: value to be set
|
def setData(self, column, role, value):
assert isinstance(column, int)
assert isinstance(role, int)
# make sure that the right row is selected, this is not always the case for checkboxes and
# combo boxes because they are items on top of the tree structure
if isinstance(value, (QtWidgets.QComboBox, QtWidgets.QCheckBox)):
self.treeWidget().setCurrentItem(self)
# role 2 is Qt.EditRole, i.e. a value has been entered in the value column (column 1)
if role == 2 and column == 1:
if isinstance(value, str):
value = self.cast_type(value) # cast into same type as valid values
if isinstance(value, QtCore.QVariant):
value = self.cast_type(value.toString()) # cast into same type as valid values
if isinstance(value, QtWidgets.QComboBox):
value = self.cast_type(value.currentText())
if isinstance(value, QtWidgets.QCheckBox):
value = bool(int(value.checkState())) # checkState() gives 2 (True) and 0 (False)
# save value in internal variable
self.value = value
elif column == 0:
# labels should not be changed so we set it back
value = self.name
if value is None:
value = self.value
# 180327(asafira) --- why do we need to do the following lines? Why not just always call super or always
# emitDataChanged()?
if not isinstance(value, bool):
super(B26QTreeItem, self).setData(column, role, value)
else:
self.emitDataChanged()
| 850,612 |
casts the value var into the type cast_type
if cast_type is not provided it defaults to self.valid_values
Args:
var: variable to be cast
cast_type: target type
Returns: the variable var cast into cast_type, or None if the cast fails
|
def cast_type(self, var, cast_type=None):
if cast_type is None:
cast_type = self.valid_values
try:
if cast_type == int:
return int(var)
elif cast_type == float:
return float(var)
elif cast_type == str:
return str(var)
elif isinstance(cast_type, list):
# cast var to be of the same type as those in the list
return type(cast_type[0])(var)
else:
return None
except ValueError:
return None
| 850,613 |
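The casting rules of cast_type, restated as a self-contained sketch with expected results; cast_like is a hypothetical name:

def cast_like(var, target):
    """target is a type (int/float/str) or a list whose first element fixes the type."""
    try:
        if target in (int, float, str):
            return target(var)
        if isinstance(target, list):
            return type(target[0])(var)
        return None
    except ValueError:
        return None

print(cast_like('42', int))          # 42
print(cast_like('abc', int))         # None (the ValueError is swallowed)
print(cast_like('3.5', [1.0, 2.0]))  # 3.5 (cast to the type of the first list element)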
finds the item that contains the sub_script with name sub_script_name
Args:
sub_script_name: name of subscript
Returns: B26QTreeItem in QTreeWidget which is a script
|
def get_subscript(self, sub_script_name):
# get tree of item
tree = self.treeWidget()
items = tree.findItems(sub_script_name, QtCore.Qt.MatchExactly | QtCore.Qt.MatchRecursive)
if len(items) >= 1:
# identify correct script by checking that it is a sub_element of the current script
subscript_item = [sub_item for sub_item in items if isinstance(sub_item.value, Script)
and sub_item.parent() is self]
subscript_item = subscript_item[0]
else:
raise ValueError('could not find element with name ' + sub_script_name)
return subscript_item
| 850,616 |
figures out if item is a point, that is if it has two subelements of type float
Args:
self:
Returns: if item is a point (True) or not (False)
|
def is_point(self):
if self.childCount() == 2:
if self.child(0).valid_values == float and self.child(1).valid_values == float:
return True
return False
| 850,617 |
Get raster value by (row, col).
Args:
row: row number.
col: col number.
Returns:
raster value, or None if the value equals NoData.
Raises:
ValueError: if the row or col is out of range.
|
def get_value_by_row_col(self, row, col):
if row < 0 or row >= self.nRows or col < 0 or col >= self.nCols:
raise ValueError("The row or col must be >=0 and less than "
"nRows (%d) or nCols (%d)!" % (self.nRows, self.nCols))
else:
value = self.data[int(round(row))][int(round(col))]
if value == self.noDataValue:
return None
else:
return value
| 850,697 |
Get raster value by xy coordinates.
Args:
x: X Coordinate.
y: Y Coordinate.
Returns:
raster value, or None if the coordinates fall outside the extent or the value equals NoData.
|
def get_value_by_xy(self, x, y):
if x < self.xMin or x > self.xMax or y < self.yMin or y > self.yMax:
return None
# raise ValueError("The x or y value must be within the Min and Max!")
else:
row = self.nRows - int(numpy.ceil((y - self.yMin) / self.dx))
col = int(numpy.floor((x - self.xMin) / self.dx))
value = self.data[row][col]
if value == self.noDataValue:
return None
else:
return value
| 850,698 |
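A worked example of the coordinate-to-index arithmetic in get_value_by_xy, with hypothetical raster parameters xMin=100.0, yMin=200.0, cell size dx=30.0 and nRows=10:

import math

n_rows, x_min, y_min, dx = 10, 100.0, 200.0, 30.0
x, y = 145.0, 410.0
row = n_rows - int(math.ceil((y - y_min) / dx))  # 10 - ceil(7.0) = 3
col = int(math.floor((x - x_min) / dx))          # floor(1.5)    = 1
print(row, col)  # 3 1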
Get the coordinates of central grid.
Args:
row: row number, range from 0 to (nRows - 1).
col: col number, range from 0 to (nCols - 1).
Returns:
XY coordinates of the cell center. Raises ValueError if the row or col is out of range.
|
def get_central_coors(self, row, col):
if row < 0 or row >= self.nRows or col < 0 or col >= self.nCols:
raise ValueError("The row (%d) or col (%d) must be >=0 and less than "
"nRows (%d) or nCols (%d)!" % (row, col, self.nRows, self.nCols))
else:
tmpx = self.xMin + (col + 0.5) * self.dx
tmpy = self.yMax - (row + 0.5) * self.dx
return tmpx, tmpy
| 850,699 |
Read raster by GDAL.
Args:
raster_file: raster file path.
Returns:
Raster object.
|
def read_raster(raster_file):
ds = gdal_Open(raster_file)
band = ds.GetRasterBand(1)
data = band.ReadAsArray()
xsize = band.XSize
ysize = band.YSize
nodata_value = band.GetNoDataValue()
geotrans = ds.GetGeoTransform()
dttype = band.DataType
srs = osr_SpatialReference()
srs.ImportFromWkt(ds.GetProjection())
# print(srs.ExportToProj4())
if nodata_value is None:
nodata_value = DEFAULT_NODATA
band = None
ds = None
return Raster(ysize, xsize, data, nodata_value, geotrans, srs, dttype)
| 850,700 |
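A minimal usage sketch for read_raster, assuming GDAL is installed and 'dem.tif' is a hypothetical single-band raster:

r = RasterUtilClass.read_raster('dem.tif')
print(r.nRows, r.nCols)  # grid dimensions
print(r.noDataValue)     # NoData marker (DEFAULT_NODATA if the band had none)
print(r.data.shape)      # r.data is a 2D numpy array (rows x cols)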
Generate mask data from a given raster data.
Args:
rasterfile: raster file path.
outmaskfile: output mask file path.
keep_nodata: if True keep the original extent; if False clip to the bounding box of valid data.
Returns:
Raster object of mask data.
|
def get_mask_from_raster(rasterfile, outmaskfile, keep_nodata=False):
raster_r = RasterUtilClass.read_raster(rasterfile)
xsize = raster_r.nCols
ysize = raster_r.nRows
nodata_value = raster_r.noDataValue
srs = raster_r.srs
x_min = raster_r.xMin
y_max = raster_r.yMax
dx = raster_r.dx
data = raster_r.data
if not keep_nodata:
i_min = ysize - 1
i_max = 0
j_min = xsize - 1
j_max = 0
for i in range(ysize):
for j in range(xsize):
if abs(data[i][j] - nodata_value) > DELTA:
i_min = min(i, i_min)
i_max = max(i, i_max)
j_min = min(j, j_min)
j_max = max(j, j_max)
# print(i_min, i_max, j_min, j_max)
y_size_mask = i_max - i_min + 1
x_size_mask = j_max - j_min + 1
x_min_mask = x_min + j_min * dx
y_max_mask = y_max - i_min * dx
else:
y_size_mask = ysize
x_size_mask = xsize
x_min_mask = x_min
y_max_mask = y_max
i_min = 0
j_min = 0
print('%dx%d -> %dx%d' % (xsize, ysize, x_size_mask, y_size_mask))
mask = numpy.zeros((y_size_mask, x_size_mask))
for i in range(y_size_mask):
for j in range(x_size_mask):
if abs(data[i + i_min][j + j_min] - nodata_value) > DELTA:
mask[i][j] = 1
else:
mask[i][j] = DEFAULT_NODATA
mask_geotrans = [x_min_mask, dx, 0, y_max_mask, 0, -dx]
RasterUtilClass.write_gtiff_file(outmaskfile, y_size_mask, x_size_mask, mask,
mask_geotrans, srs, DEFAULT_NODATA, GDT_Int32)
return Raster(y_size_mask, x_size_mask, mask, DEFAULT_NODATA, mask_geotrans, srs)
| 850,701 |
Reclassify raster by given classifier dict.
Args:
srcfile: source raster file.
v_dict: classifier dict.
dstfile: destination file path.
gdaltype (:obj:`pygeoc.raster.GDALDataType`): GDT_Float32 as default.
|
def raster_reclassify(srcfile, v_dict, dstfile, gdaltype=GDT_Float32):
src_r = RasterUtilClass.read_raster(srcfile)
src_data = src_r.data
dst_data = numpy.copy(src_data)
if gdaltype == GDT_Float32 and src_r.dataType != GDT_Float32:
gdaltype = src_r.dataType
no_data = src_r.noDataValue
new_no_data = DEFAULT_NODATA
if gdaltype in [GDT_Unknown, GDT_Byte, GDT_UInt16, GDT_UInt32]:
new_no_data = 0
if not MathClass.floatequal(new_no_data, src_r.noDataValue):
if src_r.noDataValue not in v_dict:
v_dict[src_r.noDataValue] = new_no_data
no_data = new_no_data
for (k, v) in iteritems(v_dict):
dst_data[src_data == k] = v
RasterUtilClass.write_gtiff_file(dstfile, src_r.nRows, src_r.nCols, dst_data,
src_r.geotrans, src_r.srs, no_data, gdaltype)
| 850,702 |
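A usage sketch for raster_reclassify with a hypothetical landuse raster and a classifier dict that collapses detailed codes into broad classes:

landuse_map = {11: 1, 12: 1,  # water codes -> class 1
               21: 2, 22: 2,  # urban codes -> class 2
               41: 3}         # forest code -> class 3
RasterUtilClass.raster_reclassify('landuse.tif', landuse_map,
                                  'landuse_broad.tif', gdaltype=GDT_Int16)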
Output Raster to GeoTiff format file.
Args:
f_name: output gtiff file name.
n_rows: Row count.
n_cols: Col count.
data: 2D array data.
geotransform: geographic transformation.
srs: coordinate system.
nodata_value: nodata value.
gdal_type (:obj:`pygeoc.raster.GDALDataType`): output raster data type,
GDT_Float32 as default.
|
def write_gtiff_file(f_name, n_rows, n_cols, data, geotransform, srs, nodata_value,
gdal_type=GDT_Float32):
UtilClass.mkdir(os.path.dirname(FileClass.get_file_fullpath(f_name)))
driver = gdal_GetDriverByName(str('GTiff'))
try:
ds = driver.Create(f_name, n_cols, n_rows, 1, gdal_type)
except Exception:
print('Cannot create output file %s' % f_name)
return
ds.SetGeoTransform(geotransform)
try:
ds.SetProjection(srs.ExportToWkt())
except AttributeError:  # srs is already a WKT string rather than an osr.SpatialReference
ds.SetProjection(srs)
ds.GetRasterBand(1).SetNoDataValue(nodata_value)
# if data contains numpy.nan, then replaced by nodata_value
if isinstance(data, numpy.ndarray) and data.dtype in [numpy.dtype('int'),
numpy.dtype('float')]:
data = numpy.where(numpy.isnan(data), nodata_value, data)
ds.GetRasterBand(1).WriteArray(data)
ds = None
| 850,703 |
Output Raster to ASCII file.
Args:
filename: output ASCII filename.
data: 2D array data.
xsize: Col count.
ysize: Row count.
geotransform: geographic transformation.
nodata_value: nodata_flow value.
|
def write_asc_file(filename, data, xsize, ysize, geotransform, nodata_value):
UtilClass.mkdir(os.path.dirname(FileClass.get_file_fullpath(filename)))
header = 'NCOLS %d\n' \
'NROWS %d\n' \
'XLLCENTER %f\n' \
'YLLCENTER %f\n' \
'CELLSIZE %f\n' \
'NODATA_VALUE %f\n' % (xsize, ysize, geotransform[0] + 0.5 * geotransform[1],
geotransform[3] - (ysize - 0.5) * geotransform[1],
geotransform[1], nodata_value)
with open(filename, 'w', encoding='utf-8') as f:
f.write(header)
for i in range(0, ysize):
for j in range(0, xsize):
f.write('%s\t' % repr(data[i][j]))
f.write('\n')
| 850,704 |
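For a concrete, hypothetical geotransform [500000.0, 30.0, 0, 4200000.0, 0, -30.0] and a 100x80 grid, the header written by write_asc_file works out as below. Note that the formula uses geotransform[1] for the y extent as well, i.e. it assumes square cells:

# NCOLS 100
# NROWS 80
# XLLCENTER 500015.000000   # 500000.0 + 0.5 * 30.0
# YLLCENTER 4197615.000000  # 4200000.0 - (80 - 0.5) * 30.0
# CELLSIZE 30.000000
# NODATA_VALUE -9999.000000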
Converting Raster format to GeoTIFF.
Args:
tif: source raster file path.
geotif: output raster file path.
change_nodata: change NoDataValue to -9999 or not.
change_gdal_type: If True, output the Float32 data type.
|
def raster_to_gtiff(tif, geotif, change_nodata=False, change_gdal_type=False):
rst_file = RasterUtilClass.read_raster(tif)
nodata = rst_file.noDataValue
if change_nodata:
if not MathClass.floatequal(rst_file.noDataValue, DEFAULT_NODATA):
nodata = DEFAULT_NODATA
rst_file.data[rst_file.data == rst_file.noDataValue] = DEFAULT_NODATA
gdal_type = rst_file.dataType
if change_gdal_type:
gdal_type = GDT_Float32
RasterUtilClass.write_gtiff_file(geotif, rst_file.nRows, rst_file.nCols, rst_file.data,
rst_file.geotrans, rst_file.srs, nodata,
gdal_type)
| 850,705 |
Converting Raster format to ASCII raster.
Args:
raster_f: raster file.
asc_f: output ASCII file.
|
def raster_to_asc(raster_f, asc_f):
raster_r = RasterUtilClass.read_raster(raster_f)
RasterUtilClass.write_asc_file(asc_f, raster_r.data, raster_r.nCols, raster_r.nRows,
raster_r.geotrans, raster_r.noDataValue)
| 850,706 |
Get basic statistics of raster data.
Args:
raster_file: raster file path.
Returns:
min, max, mean, std.
|
def raster_statistics(raster_file):
ds = gdal_Open(raster_file)
band = ds.GetRasterBand(1)
minv, maxv, meanv, std = band.ComputeStatistics(False)
return minv, maxv, meanv, std
| 850,707 |
Split raster by given shapefile and field name.
Args:
rs: list of origin raster files.
split_shp: boundary (ESRI Shapefile) used to split the raster.
field_name: field name that identifies the split value.
temp_dir: directory to store the split rasters.
|
def split_raster(rs, split_shp, field_name, temp_dir):
UtilClass.rmmkdir(temp_dir)
ds = ogr_Open(split_shp)
lyr = ds.GetLayer(0)
lyr.ResetReading()
ft = lyr.GetNextFeature()
while ft:
cur_field_name = ft.GetFieldAsString(field_name)
for r in rs:
cur_file_name = r.split(os.sep)[-1]
outraster = temp_dir + os.sep + \
cur_file_name.replace('.tif', '_%s.tif' %
cur_field_name.replace(' ', '_'))
subprocess.call(['gdalwarp', r, outraster, '-cutline', split_shp,
'-crop_to_cutline', '-cwhere',
"'%s'='%s'" % (field_name, cur_field_name), '-dstnodata',
'-9999'])
ft = lyr.GetNextFeature()
ds = None
| 850,708 |
Mask raster data.
Args:
in_raster: list or one raster
mask: Mask raster data
out_raster: list or one raster
|
def mask_raster(in_raster, mask, out_raster):
if is_string(in_raster) and is_string(out_raster):
in_raster = [str(in_raster)]
out_raster = [str(out_raster)]
if len(in_raster) != len(out_raster):
raise RuntimeError('input raster and output raster must have the same size.')
maskr = RasterUtilClass.read_raster(mask)
rows = maskr.nRows
cols = maskr.nCols
maskdata = maskr.data
temp = maskdata == maskr.noDataValue
for inr, outr in zip(in_raster, out_raster):
origin = RasterUtilClass.read_raster(inr)
if origin.nRows == rows and origin.nCols == cols:
masked = numpy.where(temp, origin.noDataValue, origin.data)
else:
masked = numpy.ones((rows, cols)) * origin.noDataValue
# TODO, the following loop should be optimized by numpy or numba
for i in range(rows):
for j in range(cols):
if maskdata[i][j] == maskr.noDataValue:
continue
# get the center point coordinate of current cell
tempx, tempy = maskr.get_central_coors(i, j)
tempv = origin.get_value_by_xy(tempx, tempy)
if tempv is None:
continue
masked[i][j] = tempv
RasterUtilClass.write_gtiff_file(outr, maskr.nRows, maskr.nCols, masked,
maskr.geotrans, maskr.srs,
origin.noDataValue, origin.dataType)
| 850,710 |
Binarize the raster.
The opening and closing operations are based on binary images, therefore we
need to binarize the raster first.
Args:
given_value: pixels with the given value are set to 1,
all other pixels are set to 0.
rasterfilename: the input raster filename.
Returns:
binary_raster: Raster after binarization.
|
def raster_binarization(given_value, rasterfilename):
origin_raster = RasterUtilClass.read_raster(rasterfilename)
binary_raster = numpy.where(origin_raster.data == given_value, 1, 0)
return binary_raster
| 850,711 |
Erode the raster image.
Find the min pixel's value in 8-neighborhood. Then change the compute
pixel's value into the min pixel's value.
Args:
rasterfile: input original raster image, type can be filename(string,
like "test1.tif"), rasterfile(class Raster) or numpy.ndarray.
Returns:
erosion_raster: raster image after erosion, type is numpy.ndarray.
|
def raster_erosion(rasterfile):
if is_string(rasterfile):
origin_raster = RasterUtilClass.read_raster(str(rasterfile)).data
elif isinstance(rasterfile, Raster):
origin_raster = rasterfile.data
elif isinstance(rasterfile, numpy.ndarray):
origin_raster = rasterfile
else:
raise TypeError("rasterfile must be a filename string, a pygeoc Raster, "
"or a numpy.ndarray.")
max_value_raster = origin_raster.max()
erosion_raster = numpy.zeros((origin_raster.shape[0], origin_raster.shape[1]))
# In order to compute the raster edges, we need to expand the original
# raster's rows and cols. We need to add the edges whose pixels' value is
# the max pixel's value in raster.
add_row = numpy.full((1, origin_raster.shape[1]), max_value_raster)
temp_origin_raster = numpy.vstack((numpy.vstack((add_row, origin_raster)), add_row))
add_col = numpy.full((origin_raster.shape[0] + 2, 1), max_value_raster)
expand_origin_raster = numpy.hstack((numpy.hstack((add_col, temp_origin_raster)), add_col))
# Erode the raster.
for i in range(origin_raster.shape[0]):
for j in range(origin_raster.shape[1]):
min_pixel_value = max_value_raster
# Find the min pixel value in the 8-neighborhood.
for k in range(3):
for l in range(3):
if expand_origin_raster[i + k, j + l] <= min_pixel_value:
min_pixel_value = expand_origin_raster[i + k, j + l]
# After this loop, we get the min pixel's value of the
# 8-neighborhood. Then we change the compute pixel's value into
# the min pixel's value.
erosion_raster[i, j] = min_pixel_value
# Return the result.
return erosion_raster
| 850,712 |
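The double loop in raster_erosion is a 3x3 minimum filter (grayscale erosion over the 8-neighborhood). scipy.ndimage provides the same operation vectorized; a hedged equivalent, assuming scipy is available:

import numpy as np
from scipy import ndimage

a = np.array([[1, 1, 1, 1],
              [1, 0, 1, 1],
              [1, 1, 1, 1]])
# mode='constant' with cval=a.max() mirrors the max-value edge padding above
eroded = ndimage.minimum_filter(a, size=3, mode='constant', cval=a.max())
print(eroded)  # the single 0 spreads to its 8-neighborhood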
Dilate the raster image.
Find the max pixel's value in 8-neighborhood. Then change the compute
pixel's value into the max pixel's value.
Args:
rasterfile: input original raster image, type can be filename(string,
like "test1.tif"), rasterfile(class Raster) or numpy.ndarray.
Returns:
dilation_raster: raster image after dilation, type is numpy.ndarray.
|
def raster_dilation(rasterfile):
if is_string(rasterfile):
origin_raster = RasterUtilClass.read_raster(str(rasterfile)).data
elif isinstance(rasterfile, Raster):
origin_raster = rasterfile.data
elif isinstance(rasterfile, numpy.ndarray):
origin_raster = rasterfile
else:
raise TypeError('rasterfile must be a filename string, a pygeoc Raster, '
'or a numpy.ndarray.')
min_value_raster = origin_raster.min()
dilation_raster = numpy.zeros((origin_raster.shape[0], origin_raster.shape[1]))
# In order to compute the raster edges, we need to expand the original
# raster's rows and cols. We need to add the edges whose pixels' value is
# the min pixel's value in raster.
add_row = numpy.full((1, origin_raster.shape[1]), min_value_raster)
temp_origin_raster = numpy.vstack((numpy.vstack((add_row, origin_raster)), add_row))
add_col = numpy.full((origin_raster.shape[0] + 2, 1), min_value_raster)
expand_origin_raster = numpy.hstack((numpy.hstack((add_col, temp_origin_raster)), add_col))
# Dilate the raster.
for i in range(origin_raster.shape[0]):
for j in range(origin_raster.shape[1]):
max_pixel_value = min_value_raster
# Find the max pixel value in the 8-neighborhood.
for k in range(3):
for l in range(3):
if expand_origin_raster[i + k, j + l] >= max_pixel_value:
max_pixel_value = expand_origin_raster[i + k, j + l]
# After this loop, we get the max pixel's value of the
# 8-neighborhood. Then we change the compute pixel's value into
# the max pixel's value.
dilation_raster[i, j] = max_pixel_value
# Return the result.
return dilation_raster
| 850,713 |
Do opening.
Opening: erode first, then dilate.
Args:
input_rasterfilename: input original raster image filename.
times: erode and dilate times.
Returns:
openning_raster: raster image after opening.
|
def openning(input_rasterfilename, times):
input_raster = RasterUtilClass.read_raster(input_rasterfilename)
openning_raster = input_raster
for i in range(times):
openning_raster = RasterUtilClass.raster_erosion(openning_raster)
for i in range(times):
openning_raster = RasterUtilClass.raster_dilation(openning_raster)
return openning_raster
| 850,714 |
Do closing.
Closing: dilate first, then erode.
Args:
input_rasterfilename: input original raster image filename.
times: Erode and Dilate times.
Returns:
closing_raster: raster image after close.
|
def closing(input_rasterfilename, times):
input_raster = RasterUtilClass.read_raster(input_rasterfilename)
closing_raster = input_raster
for i in range(times):
closing_raster = RasterUtilClass.raster_dilation(closing_raster)
for i in range(times):
closing_raster = RasterUtilClass.raster_erosion(closing_raster)
return closing_raster
| 850,715 |
When each subscript is called, uses its standard plotting
Args:
figure_list: list of figures passed from the gui
|
def plot(self, figure_list):
#TODO: be smarter about how we plot ScriptIterator
if self._current_subscript_stage is not None:
if self._current_subscript_stage['current_subscript'] is not None:
self._current_subscript_stage['current_subscript'].plot(figure_list)
if (self.is_running is False) and not (self.data == {} or self.data is None):
script_names = list(self.settings['script_order'].keys())
script_indices = [self.settings['script_order'][name] for name in script_names]
_, sorted_script_names = list(zip(*sorted(zip(script_indices, script_names))))
last_script = self.scripts[sorted_script_names[-1]]
last_script.force_update() # since we use the last script plot function we force it to refresh
axes_list = last_script.get_axes_layout(figure_list)
# catch the error if the script's _plot function doesn't take an optional data argument
try:
last_script._plot(axes_list, self.data)
except TypeError as err:
warnings.warn('can\'t plot average script data because the script\'s _plot function doesn\'t take data as an optional argument. Plotting the last data set instead')
print(err)
last_script.plot(figure_list)
| 850,760 |
assigning the actual script settings depending on the iterator type
this might be overridden by classes that inherit from ScriptIterator
Args:
sub_scripts: dictionary with the subscripts
script_order: execution order of subscripts
script_execution_freq: execution frequency of subscripts
Returns:
the default setting for the iterator
|
def get_default_settings(sub_scripts, script_order, script_execution_freq, iterator_type):
def populate_sweep_param(scripts, parameter_list, trace=''):
def get_parameter_from_dict(trace, dic, parameter_list, valid_values=None):
if valid_values is None and isinstance(dic, Parameter):
valid_values = dic.valid_values
for key, value in dic.items():
if isinstance(value, dict): # for nested parameters ex {point: {'x': int, 'y': int}}
parameter_list = get_parameter_from_dict(trace + '.' + key, value, parameter_list,
dic.valid_values[key])
elif (valid_values[key] in (float, int)) or \
(isinstance(valid_values[key], list) and valid_values[key][0] in (float, int)):
parameter_list.append(trace + '.' + key)
else: # once down to the form {key: value}
# in all other cases ignore parameter
print(('ignoring sweep parameter', key))
return parameter_list
for script_name in list(scripts.keys()):
from pylabcontrol.core import ScriptIterator
script_trace = trace
if script_trace == '':
script_trace = script_name
else:
script_trace = script_trace + '->' + script_name
if issubclass(scripts[script_name], ScriptIterator): # gets subscripts of ScriptIterator objects
populate_sweep_param(vars(scripts[script_name])['_SCRIPTS'], parameter_list=parameter_list,
trace=script_trace)
else:
# use inspect instead of vars to get _DEFAULT_SETTINGS also for classes that inherit _DEFAULT_SETTINGS from a superclass
for setting in \
[elem[1] for elem in inspect.getmembers(scripts[script_name]) if elem[0] == '_DEFAULT_SETTINGS'][0]:
parameter_list = get_parameter_from_dict(script_trace, setting, parameter_list)
return parameter_list
if iterator_type == 'loop':
script_default_settings = [
Parameter('script_order', script_order),
Parameter('script_execution_freq', script_execution_freq),
Parameter('num_loops', 0, int, 'times the subscripts will be executed'),
Parameter('run_all_first', True, bool, 'Run all scripts with nonzero frequency in first pass')
]
elif iterator_type == 'sweep':
sweep_params = populate_sweep_param(sub_scripts, [])
script_default_settings = [
Parameter('script_order', script_order),
Parameter('script_execution_freq', script_execution_freq),
Parameter('sweep_param', sweep_params[0], sweep_params, 'variable over which to sweep'),
Parameter('sweep_range',
[Parameter('min_value', 0, float, 'min parameter value'),
Parameter('max_value', 0, float, 'max parameter value'),
Parameter('N/value_step', 0, float,
'either number of steps or parameter value step, depending on mode')]),
Parameter('stepping_mode', 'N', ['N', 'value_step'],
'Switch between number of steps and step amount'),
Parameter('run_all_first', True, bool, 'Run all scripts with nonzero frequency in first pass')
]
else:
print(('unknown iterator type ' + iterator_type))
raise TypeError('unknown iterator type ' + iterator_type)
return script_default_settings
| 850,761 |
Initializes the log file in the proper format.
Arguments:
filename (str): Path to a file. Or None if logging is to
be disabled.
loglevel (str): Determines the level of the log output.
|
def init_log(logger, filename=None, loglevel=None):
formatter = logging.Formatter(
'[%(asctime)s] %(levelname)s: %(name)s: %(message)s'
)
if loglevel:
logger.setLevel(getattr(logging, loglevel))
# We will always print warnings and higher to stderr
ch = logging.StreamHandler()
ch.setLevel('WARNING')
ch.setFormatter(formatter)
logger.addHandler(ch)
if filename:
fi = logging.FileHandler(filename, encoding='utf-8')
if loglevel:
fi.setLevel(getattr(logging, loglevel))
fi.setFormatter(formatter)
logger.addHandler(fi)
# If no logfile is provided we print all log messages that the user has
# defined to stderr
else:
if loglevel:
ch.setLevel(getattr(logging, loglevel))
| 850,854 |
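A minimal usage sketch for init_log; 'myapp' and 'app.log' are placeholder names:

import logging

logger = logging.getLogger('myapp')
init_log(logger, filename='app.log', loglevel='DEBUG')
logger.debug('recorded in app.log only')
logger.warning('recorded in app.log and echoed to stderr')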
gets activated when the user clicks on a plot
Args:
mouse_event:
|
def plot_clicked(self, mouse_event):
if isinstance(self.current_script, SelectPoints) and self.current_script.is_running:
if mouse_event.xdata is not None:
if mouse_event.button == 1:
pt = np.array([mouse_event.xdata, mouse_event.ydata])
self.current_script.toggle_NV(pt)
self.current_script.plot([self.matplotlibwidget_1.figure])
self.matplotlibwidget_1.draw()
item = self.tree_scripts.currentItem()
if item is not None:
if item.is_point():
item_x = item.child(1)
if mouse_event.xdata is not None:
self.tree_scripts.setCurrentItem(item_x)
item_x.value = float(mouse_event.xdata)
item_x.setText(1, '{:0.3f}'.format(float(mouse_event.xdata)))
item_y = item.child(0)
if mouse_event.ydata is not None:
self.tree_scripts.setCurrentItem(item_y)
item_y.value = float(mouse_event.ydata)
item_y.setText(1, '{:0.3f}'.format(float(mouse_event.ydata)))
# focus back on item
self.tree_scripts.setCurrentItem(item)
else:
if item.parent() is not None:
if item.parent().is_point():
if item == item.parent().child(1):
if mouse_event.xdata is not None:
item.setData(1, 2, float(mouse_event.xdata))
if item == item.parent().child(0):
if mouse_event.ydata is not None:
item.setData(1, 2, float(mouse_event.ydata))
| 850,858 |
loads the configuration from file_name, falling back to the default configuration if the file is invalid
Args:
file_name:
|
def load_config(self, file_name):
# load config or default if invalid
def load_settings(file_name):
instruments_loaded = {}
probes_loaded = {}
scripts_loaded = {}
if os.path.isfile(file_name):
in_data = load_b26_file(file_name)
instruments = in_data['instruments'] if 'instruments' in in_data else {}
scripts = in_data['scripts'] if 'scripts' in in_data else {}
probes = in_data['probes'] if 'probes' in in_data else {}
instruments_loaded, failed = Instrument.load_and_append(instruments)
if len(failed) > 0:
print(('WARNING! Following instruments could not be loaded: ', failed))
scripts_loaded, failed, instruments_loaded = Script.load_and_append(
script_dict=scripts,
instruments=instruments_loaded,
log_function=self.log,
data_path=self.gui_settings['data_folder'])
if len(failed) > 0:
print(('WARNING! Following scripts could not be loaded: ', failed))
probes_loaded, failed, instruments_loaded = Probe.load_and_append(
probe_dict=probes,
probes=probes_loaded,
instruments=instruments_loaded)
return instruments_loaded, scripts_loaded, probes_loaded
print(('loading script/instrument/probes config from {:s}'.format(file_name)))
try:
config = load_b26_file(file_name)['gui_settings']
if config['settings_file'] != file_name:
print((
'WARNING path to settings file ({:s}) in config file is different from path of settings file ({:s})'.format(
config['settings_file'], file_name)))
config['settings_file'] = file_name
print(('loading of {:s} successful'.format(file_name)))
except Exception:
print('WARNING: path to settings file ({:s}) invalid; using default settings'.format(file_name))
config = self._DEFAULT_CONFIG
for x in list(self._DEFAULT_CONFIG.keys()):
if x in config:
if not os.path.exists(config[x]):
try:
os.makedirs(config[x])
except Exception:
config[x] = self._DEFAULT_CONFIG[x]
os.makedirs(config[x])
print('WARNING: failed validating or creating path: set to default path {:s}'.format(config[x]))
else:
config[x] = self._DEFAULT_CONFIG[x]
os.makedirs(config[x])
print(('WARNING: path {:s} not specified set to default {:s}'.format(x, config[x])))
# check if file_name is a valid filename
if os.path.exists(os.path.dirname(file_name)):
config['settings_file'] = file_name
self.gui_settings = config
self.instruments, self.scripts, self.probes = load_settings(file_name)
self.refresh_tree(self.tree_gui_settings, self.gui_settings)
self.refresh_tree(self.tree_scripts, self.scripts)
self.refresh_tree(self.tree_settings, self.instruments)
self._hide_parameters(file_name)
| 850,861 |
saves gui configuration to out_file_name
Args:
out_file_name: name of file
|
def save_config(self, out_file_name):
def get_hidden_parameter(item):
number_of_sub_elements = item.childCount()
if number_of_sub_elements == 0:
dictator = {item.name : item.visible}
else:
dictator = {item.name:{}}
for child_id in range(number_of_sub_elements):
dictator[item.name].update(get_hidden_parameter(item.child(child_id)))
return dictator
out_file_name = str(out_file_name)
if not os.path.exists(os.path.dirname(out_file_name)):
os.makedirs(os.path.dirname(out_file_name))
# build a dictionary for the configuration of the hidden parameters
dictator = {}
for index in range(self.tree_scripts.topLevelItemCount()):
script_item = self.tree_scripts.topLevelItem(index)
dictator.update(get_hidden_parameter(script_item))
dictator = {"gui_settings": self.gui_settings, "scripts_hidden_parameters":dictator}
# update the internal dictionaries from the trees in the gui
for index in range(self.tree_scripts.topLevelItemCount()):
script_item = self.tree_scripts.topLevelItem(index)
self.update_script_from_item(script_item)
dictator.update({'instruments': {}, 'scripts': {}, 'probes': {}})
for instrument in self.instruments.values():
dictator['instruments'].update(instrument.to_dict())
for script in self.scripts.values():
dictator['scripts'].update(script.to_dict())
for instrument, probe_dict in self.probes.items():
dictator['probes'].update({instrument: ','.join(list(probe_dict.keys()))})
with open(out_file_name, 'w') as outfile:
json.dump(dictator, outfile, indent=4)
| 850,862 |
returns the *single sided* power spectral density of the time trace x which is sampled at intervals time_step
Args:
x (array): timetrace
time_step (float): sampling interval of x
freq_range (array or tuple): frequency range in the form [f_min, f_max] to return only the spectrum within this range
Returns:
F (array): frequencies
P (array): single-sided power spectral density
|
def power_spectral_density(x, time_step, freq_range = None):
N = len(x)
P = 2 * np.abs(np.fft.rfft(x))**2 / N * time_step
F = np.fft.rfftfreq(len(x), time_step)
if freq_range is not None:
brange = np.all([F >= freq_range[0], F <= freq_range[1]], axis=0)
P = P[brange]
F = F[brange]
return F, P
| 850,911 |
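A quick sanity check of power_spectral_density: a 50 Hz sine sampled at 1 kHz should peak at 50 Hz:

import numpy as np

dt = 1e-3  # 1 kHz sampling
t = np.arange(0, 1, dt)
x = np.sin(2 * np.pi * 50 * t)
F, P = power_spectral_density(x, dt, freq_range=(0, 100))
print(F[np.argmax(P)])  # 50.0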
creates a probe...
Args:
instrument: Instrument object to which the probe is attached
probe_name: name of the probe as defined in instrument._PROBES
name (optional): name of probe, if not provided the probe_name is used
info (optional): description of the probe
buffer_length (optional): length of the internal ring buffer
|
def __init__(self, instrument, probe_name, name = None, info = None, buffer_length = 100):
assert isinstance(instrument, Instrument)
assert isinstance(probe_name, str)
assert probe_name in instrument._PROBES
if name is None:
name = probe_name
assert isinstance(name, str)
if info is None:
info = ''
assert isinstance(info, str)
self.name = name
self.info = info
self.instrument = instrument
self.probe_name = probe_name
self.buffer = deque(maxlen = buffer_length)
| 850,917 |
tries to instantiate all the instruments that are imported in /instruments/__init__.py
and saves instruments that could be instantiated into .b26 files in the target folder
Args:
target_folder: target path for .b26 files
|
def export_default_instruments(target_folder, source_folder = None, raise_errors = False, verbose=True):
print('export_def_instr called')
instruments_to_load = get_classes_in_folder(source_folder, Instrument, verbose = True)
print('instruments to load:')
print(instruments_to_load)
if verbose:
print(('attempt to load {:d} instruments: '.format(len(instruments_to_load))))
loaded_instruments, failed = Instrument.load_and_append(instruments_to_load, raise_errors = raise_errors)
print('loaded instruments:')
print(loaded_instruments, failed)
for name, value in loaded_instruments.items():
filename = os.path.join(target_folder, '{:s}.b26'.format(name))
value.save_b26(filename)
if verbose:
print('\n================================================')
print('================================================')
print(('saved {:d} instruments, {:d} failed'.format(len(loaded_instruments), len(failed))))
if failed != {}:
for error_name, error in failed.items():
print(('failed to create instruments: ', error_name, error))
| 850,971 |
Add arguments to the parser for collection in app.args.
Args:
parser:
`argparse.ArgumentParser`. Parser.
Arguments added here are available on
self.args.
|
def add_arguments(cls, parser):
parser.add_argument(
'-t', '--title',
action='store',
nargs='?',
const='',
dest='title',
help="[issue] task/issue title.",
)
parser.add_argument(
'-b', '--body',
action='store',
nargs='?',
const='',
dest='body',
help="[issue] task/issue body.",
)
| 851,105 |
Determine the format of an audio file.
Parameters:
data (bytes-like object, str, os.PathLike, or file-like object):
A bytes-like object, filepath, path-like object
or file-like object of an audio file.
extension (str): The file extension of the file.
Used as a tie-breaker for formats that can
be used in multiple containers (e.g. ID3).
|
def determine_format(data, extension=None):
if isinstance(data, (os.PathLike, str)):
data = open(data, 'rb')
data_reader = DataReader(data)
data_reader.seek(0, os.SEEK_SET)
d = data_reader.read(4)
if d.startswith((b'ID3', b'\xFF\xFB')): # TODO: Catch all MP3 possibilities.
if extension is None or extension.endswith('.mp3'):
return MP3
if d.startswith((b'fLaC', b'ID3')):
if extension is None or extension.endswith('.flac'):
return FLAC
if d.startswith(b'RIFF'):
if extension is None or extension.endswith('.wav'):
return WAV
return None
| 851,305 |
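A hypothetical usage sketch, assuming 'song.mp3' exists on disk; the return value is a format class (or None), not an instance.

fmt = determine_format('song.mp3', extension='.mp3')
if fmt is not None:
    print(fmt.__name__)                     # e.g. MP3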
Load audio metadata from filepath or file-like object.
Parameters:
f (str, os.PathLike, or file-like object):
A filepath, path-like object or file-like object of an audio file.
Returns:
Format: An audio format object.
Raises:
UnsupportedFormat: If file is not of a supported format.
ValueError: If filepath/file-like object is not valid or readable.
|
def load(f):
if isinstance(f, (os.PathLike, str)):
fileobj = open(f, 'rb')
else:
try:
f.read(0)
except AttributeError:
raise ValueError("Not a valid file-like object.")
except Exception:
raise ValueError("Can't read from file-like object.")
fileobj = f
parser_cls = determine_format(fileobj, os.path.splitext(fileobj.name)[1])
if parser_cls is None:
raise UnsupportedFormat("Supported format signature not found.")
else:
fileobj.seek(0, os.SEEK_SET)
return parser_cls.load(fileobj)
| 851,306 |
Load audio metadata from a bytes-like object.
Parameters:
b (bytes-like object): A bytes-like object of an audio file.
Returns:
Format: An audio format object.
Raises:
UnsupportedFormat: If file is not of a supported format.
|
def loads(b):
parser_cls = determine_format(b)
if parser_cls is None:
raise UnsupportedFormat("Supported format signature not found.")
return parser_cls.load(b)
| 851,307 |
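A sketch of the two entry points shown above, assuming 'track.flac' exists: load() takes a path or file-like object, loads() takes raw bytes.

metadata = load('track.flac')
with open('track.flac', 'rb') as f:
    metadata_from_bytes = loads(f.read())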
Add arguments to the parser for collection in app.args.
Args:
parser:
`argparse.ArgumentParser`. Parser.
Arguments added here are served on
self.args.
|
def add_arguments(cls, parser):
parser.add_argument(
'-as-api', '--asana-api',
action='store',
nargs='?',
const='',
dest='asana_api',
help="[setting] asana api key.",
)
parser.add_argument(
'-gh-api', '--github-api',
action='store',
nargs='?',
const='',
dest='github_api',
help="[setting] github api token.",
)
parser.add_argument(
'--first-issue',
type=int,
action='store',
nargs='?',
const='',
help="[setting] only sync issues [FIRST_ISSUE] and above"
)
| 851,472 |
Determines if the n-th bit of passed bytes is 1 or 0.
Arguments:
n - Index of the bit to check, counting from the most significant bit
of the first byte.
hash_bytes - List of hash byte values for which the n-th bit value
should be checked. Each element of the list should be an integer from
0 to 255.
Returns:
True if the bit is 1. False if the bit is 0.
|
def _get_bit(self, n, hash_bytes):
return (hash_bytes[n // 8] >> (7 - (n % 8))) & 1 == 1
| 851,616 |
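A standalone check of the same bit arithmetic, outside the class: bits are indexed from the most significant bit of the first byte.

hash_bytes = [0b10110000, 0b00000001]
for n in range(16):
    print(n, (hash_bytes[n // 8] >> (7 - (n % 8))) & 1 == 1)
# True for n = 0, 2, 3 and 15; False otherwise.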
Generates matrix that describes which blocks should be coloured.
Arguments:
hash_bytes - List of hash byte values for which the identicon is being
generated. Each element of the list should be an integer from 0 to
255.
Returns:
List of rows, where each element in a row is boolean. True means the
foreground colour should be used, False means a background colour
should be used.
|
def _generate_matrix(self, hash_bytes):
# Since the identicon needs to be symmetric, we'll need to work on half
# the columns (rounded-up), and reflect where necessary.
half_columns = self.columns // 2 + self.columns % 2
cells = self.rows * half_columns
# Initialise the matrix (list of rows) that will be returned.
matrix = [[False] * self.columns for _ in range(self.rows)]
# Process the cells one by one.
for cell in range(cells):
# If the bit from hash corresponding to this cell is 1, mark the
# cell as foreground one. Do not use the first byte (since that one
# is used for determining the foreground colour).
if self._get_bit(cell, hash_bytes[1:]):
# Determine the cell coordinates in matrix (column-major order,
# so divide by the number of rows).
column = cell // self.rows
row = cell % self.rows
# Mark the cell and its reflection. Central column may get
# marked twice, but we don't care.
matrix[row][column] = True
matrix[row][self.columns - column - 1] = True
return matrix
| 851,617 |
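A hypothetical rendering sketch, assuming generator is a configured instance and hash_bytes a sufficiently long list of byte values; it prints the matrix as ASCII art.

matrix = generator._generate_matrix(hash_bytes)
for row in matrix:
    print(''.join('#' if cell else '.' for cell in row))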
Saves issue data (tasks, etc.) to local data.
Args:
issue:
`int`. Github issue number.
task_id:
`int`. Asana task ID.
namespace:
`str`. Namespace for storing this issue.
|
def save_issue_data_task(self, issue, task_id, namespace='open'):
issue_data = self.get_saved_issue_data(issue, namespace)
if 'tasks' not in issue_data:
issue_data['tasks'] = [task_id]
elif task_id not in issue_data['tasks']:
issue_data['tasks'].append(task_id)
| 851,631 |
Returns issue data from local data.
Args:
issue:
`int`. Github issue number.
namespace:
`str`. Namespace for storing this issue.
|
def get_saved_issue_data(self, issue, namespace='open'):
if isinstance(issue, int):
issue_number = str(issue)
elif isinstance(issue, basestring):
issue_number = issue
else:
issue_number = issue.number
issue_data_key = self._issue_data_key(namespace)
issue_data = self.data.get(issue_data_key, {})
_data = issue_data.get(str(issue_number), {})
issue_data[str(issue_number)] = _data
return _data
| 851,633 |
Returns task data from local data.
Args:
task:
`int`. Asana task number.
|
def get_saved_task_data(self, task):
if isinstance(task, int):
task_number = str(task)
elif isinstance(task, basestring):
task_number = task
else:
task_number = task['id']
task_data_key = self._task_data_key()
task_data = self.data.get(task_data_key, {})
_data = task_data.get(str(task_number), {})
task_data[str(task_number)] = _data
return _data
| 851,636 |
Args:
filename:
Filename for database.
args:
Program arguments.
version:
Version of file.
|
def __init__(self, filename, args, version):
self.args = args
self.version = version
self.filename = filename
try:
with open(self.filename, 'rb') as file:
self.data = json.load(file)
except IOError:
self.data = {}
| 851,646 |
Add arguments to the parser for collection in app.args.
Args:
parser:
`argparse.ArgumentParser`. Parser.
Arguments added here are served on
self.args.
|
def add_arguments(cls, parser):
parser.add_argument(
'-i', '--issue',
action='store',
nargs='?',
const='',
dest='issue',
help="[pr] issue #",
)
parser.add_argument(
'-br', '--branch',
action='store',
nargs='?',
const='',
dest='branch',
help="[pr] branch",
)
parser.add_argument(
'-tbr', '--target-branch',
action='store',
nargs='?',
const='',
default='master',
dest='target_branch',
help="[pr] name of branch to pull changes into\n(defaults to: master)",
)
| 851,669 |
Send string to module level log
Args:
logstr (str): string to print.
priority (int): priority, supports 3 (default) and 4 (special).
|
def ekm_log(logstr, priority=3):
if priority <= ekmmeters_log_level:
stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
ekmmeters_log_func("[EKM Meter Debug Message: " + stamp + "] -> " + logstr)
| 851,860 |
Passthrough for pyserial Serial.write().
Args:
output (str): Block to write to port
|
def write(self, output):
view_str = output.encode('ascii', 'ignore')
if len(view_str) > 0:
self.m_ser.write(view_str)
self.m_ser.flush()
self.m_ser.reset_input_buffer()
time.sleep(self.m_force_wait)
| 851,863 |
Optional polling loop control.
Args:
max_waits (int): Maximum number of poll intervals to wait for a response.
wait_sleep (int): Sleep time per wait interval, in milliseconds.
|
def setPollingValues(self, max_waits, wait_sleep):
self.m_max_waits = max_waits
self.m_wait_sleep = wait_sleep
| 851,864 |
Poll for finished block or first byte ACK.
Args:
context (str): internal serial call context.
Returns:
string: Response, implict cast from byte array.
|
def getResponse(self, context=""):
waits = 0 # allowed interval counter
response_str = "" # returned bytes in string default
try:
while (waits < self.m_max_waits):
bytes_to_read = self.m_ser.inWaiting()
if bytes_to_read > 0:
next_chunk = str(self.m_ser.read(bytes_to_read)).encode('ascii', 'ignore')
response_str += next_chunk
if (len(response_str) == 255):
time.sleep(self.m_force_wait)
return response_str
if (len(response_str) == 1) and (response_str.encode('hex') == '06'):
time.sleep(self.m_force_wait)
return response_str
else: # hang out -- half shortest expected interval (50 ms)
waits += 1
time.sleep(self.m_force_wait)
response_str = ""
except:
ekm_log(traceback.format_exc(sys.exc_info()))
return response_str
| 851,865 |
Translate FieldType to portable SQL Type. Override if needful.
Args:
fld_type (int): :class:`~ekmmeters.FieldType` in serial block.
fld_len (int): Binary length in serial block
Returns:
string: Portable SQL type and length where appropriate.
|
def mapTypeToSql(fld_type=FieldType.NoType, fld_len=0):
if fld_type == FieldType.Float:
return "FLOAT"
elif fld_type == FieldType.String:
return "VARCHAR(" + str(fld_len) + ")"
elif fld_type == FieldType.Int:
return "INT"
elif fld_type == FieldType.Hex:
return "VARCHAR(" + str(fld_len * 2) + ")"
elif fld_type == FieldType.PowerFactor:
return "VARCHAR(" + str(fld_len) + ")"
else:
ekm_log("Type " + str(type) + " not handled by mapTypeToSql, returned VARCHAR(255)")
return "VARCHAR(255)"
| 851,868 |
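Example mappings, assuming the FieldType constants referenced above are in scope:

print(mapTypeToSql(FieldType.Float))        # FLOAT
print(mapTypeToSql(FieldType.String, 10))   # VARCHAR(10)
print(mapTypeToSql(FieldType.Hex, 4))       # VARCHAR(8), two hex chars per byte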
Return query portion below CREATE.
Args:
qry_str (str): String as built.
Returns:
string: Passed string with fields appended.
|
def fillCreate(self, qry_str):
count = 0
for fld in self.m_all_fields:
fld_type = self.m_all_fields[fld][MeterData.TypeValue]
fld_len = self.m_all_fields[fld][MeterData.SizeValue]
qry_spec = self.mapTypeToSql(fld_type, fld_len)
if count > 0:
qry_str += ", \n"
qry_str = qry_str + ' ' + fld + ' ' + qry_spec
count += 1
qry_str += (",\n\t" + Field.Time_Stamp + " BIGINT,\n\t" +
"Raw_A VARCHAR(512),\n\t" +
"Raw_B VARCHAR(512)\n)")
return qry_str
| 851,869 |
Reasonably portable SQL INSERT built from the combined read buffer.
Args:
def_buf (SerialBlock): Database only serial block of all fields.
raw_a (str): Raw A read as hex string.
raw_b (str): Raw B read (if exists, otherwise empty) as hex string.
Returns:
str: SQL insert for passed read buffer
|
def sqlInsert(def_buf, raw_a, raw_b):
count = 0
qry_str = "INSERT INTO Meter_Reads ( \n\t"
for fld in def_buf:
if count > 0:
qry_str += ", \n\t"
qry_str = qry_str + fld
count += 1
qry_str += (",\n\t" + Field.Time_Stamp + ", \n\t" +
"Raw_A,\n\t" +
"Raw_B\n) \n" +
"VALUES( \n\t")
count = 0
for fld in def_buf:
if count > 0:
qry_str += ", \n\t"
fld_type = def_buf[fld][MeterData.TypeValue]
fld_str_content = def_buf[fld][MeterData.StringValue]
delim = ""
if (fld_type == FieldType.Hex) or \
(fld_type == FieldType.String) or \
(fld_type == FieldType.PowerFactor):
delim = "'"
qry_str = qry_str + delim + fld_str_content + delim
count += 1
time_val = int(time.time() * 1000)
qry_str = (qry_str + ",\n\t" + str(time_val) + ",\n\t'" +
binascii.b2a_hex(raw_a) + "'" + ",\n\t'" +
binascii.b2a_hex(raw_b) + "'\n);")
ekm_log(qry_str, 4)
return qry_str
| 851,871 |
Call overridden dbExec() with built insert statement.
Args:
def_buf (SerialBlock): Block of read buffer fields to write.
raw_a (str): Hex string of raw A read.
raw_b (str): Hex string of raw B read or empty.
|
def dbInsert(self, def_buf, raw_a, raw_b):
self.dbExec(self.sqlInsert(def_buf, raw_a, raw_b))
| 851,872 |
Required override of dbExec() from MeterDB(), run query.
Args:
query_str (str): query to run
|
def dbExec(self, query_str):
try:
connection = sqlite3.connect(self.m_connection_string)
cursor = connection.cursor()
cursor.execute(query_str)
connection.commit()
cursor.close()
connection.close()
return True
except:
ekm_log(traceback.format_exc(sys.exc_info()))
return False
| 851,873 |
Sqlite callback accepting the cursor and the original row as a tuple.
Simple return of JSON safe types.
Args:
cursor (sqlite cursor): Original cursor.
row (sqlite row tuple): Original row.
Returns:
dict: modified row.
|
def dict_factory(self, cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
val = row[idx]
name = col[0]
if name == Field.Time_Stamp:
d[col[0]] = str(val)
continue
if name == "Raw_A" or name == "Raw_B": # or name == Field.Meter_Time:
continue
if name not in self.m_all_fields:
continue
if (str(val) != "None") and ((val > 0) or (val < 0)):
d[name] = str(val)
return d
| 851,874 |
Sqlite callback accepting the cursor and the original row as a tuple.
Simple return of JSON safe types, including raw read hex strings.
Args:
cursor (sqlite cursor): Original cursor.
row (sqlite row tuple): Original row.
Returns:
dict: modified row.
|
def raw_dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
val = row[idx]
name = col[0]
if name == Field.Time_Stamp or name == Field.Meter_Address:
d[name] = str(val)
continue
if name == "Raw_A" or name == "Raw_B":
d[name] = str(val)
continue
return d
| 851,875 |
Simple query for reads since Time_Stamp, returned as JSON records.
Args:
timestamp (int): Epoch time in seconds.
meter (str): 12 character meter address to query
Returns:
str: JSON rendered read records.
|
def renderJsonReadsSince(self, timestamp, meter):
result = ""
try:
connection = sqlite3.connect(self.m_connection_string)
connection.row_factory = self.dict_factory
select_cursor = connection.cursor()
select_cursor.execute("select * from Meter_Reads where " + Field.Time_Stamp +
" > " + str(timestamp) + " and " + Field.Meter_Address +
"= '" + meter + "';")
reads = select_cursor.fetchall()
result = json.dumps(reads, indent=4)
except:
ekm_log(traceback.format_exc(sys.exc_info()))
return result
| 851,876 |
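The query above interpolates timestamp and meter directly into the SQL string; a minimal sketch of a parameterized equivalent (same assumed Meter_Reads schema) sidesteps quoting problems:

select_cursor.execute(
    "select * from Meter_Reads where " + Field.Time_Stamp + " > ? and " +
    Field.Meter_Address + " = ?;", (timestamp, meter))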
Set context string for serial command. Private setter.
Args:
context_str (str): Command specific string.
|
def setContext(self, context_str):
if (len(self.m_context) == 0) and (len(context_str) >= 7):
if context_str[0:7] != "request":
ekm_log("Context: " + context_str)
self.m_context = context_str
| 851,878 |
Drop in pure python replacement for ekmcrc.c extension.
Args:
buf (bytes): String or byte array (implicit Python 2.7 cast)
Returns:
str: 16 bit CRC per EKM Omnimeters formatted as hex string.
|
def calc_crc16(buf):
crc_table = [0x0000, 0xc0c1, 0xc181, 0x0140, 0xc301, 0x03c0, 0x0280, 0xc241,
0xc601, 0x06c0, 0x0780, 0xc741, 0x0500, 0xc5c1, 0xc481, 0x0440,
0xcc01, 0x0cc0, 0x0d80, 0xcd41, 0x0f00, 0xcfc1, 0xce81, 0x0e40,
0x0a00, 0xcac1, 0xcb81, 0x0b40, 0xc901, 0x09c0, 0x0880, 0xc841,
0xd801, 0x18c0, 0x1980, 0xd941, 0x1b00, 0xdbc1, 0xda81, 0x1a40,
0x1e00, 0xdec1, 0xdf81, 0x1f40, 0xdd01, 0x1dc0, 0x1c80, 0xdc41,
0x1400, 0xd4c1, 0xd581, 0x1540, 0xd701, 0x17c0, 0x1680, 0xd641,
0xd201, 0x12c0, 0x1380, 0xd341, 0x1100, 0xd1c1, 0xd081, 0x1040,
0xf001, 0x30c0, 0x3180, 0xf141, 0x3300, 0xf3c1, 0xf281, 0x3240,
0x3600, 0xf6c1, 0xf781, 0x3740, 0xf501, 0x35c0, 0x3480, 0xf441,
0x3c00, 0xfcc1, 0xfd81, 0x3d40, 0xff01, 0x3fc0, 0x3e80, 0xfe41,
0xfa01, 0x3ac0, 0x3b80, 0xfb41, 0x3900, 0xf9c1, 0xf881, 0x3840,
0x2800, 0xe8c1, 0xe981, 0x2940, 0xeb01, 0x2bc0, 0x2a80, 0xea41,
0xee01, 0x2ec0, 0x2f80, 0xef41, 0x2d00, 0xedc1, 0xec81, 0x2c40,
0xe401, 0x24c0, 0x2580, 0xe541, 0x2700, 0xe7c1, 0xe681, 0x2640,
0x2200, 0xe2c1, 0xe381, 0x2340, 0xe101, 0x21c0, 0x2080, 0xe041,
0xa001, 0x60c0, 0x6180, 0xa141, 0x6300, 0xa3c1, 0xa281, 0x6240,
0x6600, 0xa6c1, 0xa781, 0x6740, 0xa501, 0x65c0, 0x6480, 0xa441,
0x6c00, 0xacc1, 0xad81, 0x6d40, 0xaf01, 0x6fc0, 0x6e80, 0xae41,
0xaa01, 0x6ac0, 0x6b80, 0xab41, 0x6900, 0xa9c1, 0xa881, 0x6840,
0x7800, 0xb8c1, 0xb981, 0x7940, 0xbb01, 0x7bc0, 0x7a80, 0xba41,
0xbe01, 0x7ec0, 0x7f80, 0xbf41, 0x7d00, 0xbdc1, 0xbc81, 0x7c40,
0xb401, 0x74c0, 0x7580, 0xb541, 0x7700, 0xb7c1, 0xb681, 0x7640,
0x7200, 0xb2c1, 0xb381, 0x7340, 0xb101, 0x71c0, 0x7080, 0xb041,
0x5000, 0x90c1, 0x9181, 0x5140, 0x9301, 0x53c0, 0x5280, 0x9241,
0x9601, 0x56c0, 0x5780, 0x9741, 0x5500, 0x95c1, 0x9481, 0x5440,
0x9c01, 0x5cc0, 0x5d80, 0x9d41, 0x5f00, 0x9fc1, 0x9e81, 0x5e40,
0x5a00, 0x9ac1, 0x9b81, 0x5b40, 0x9901, 0x59c0, 0x5880, 0x9841,
0x8801, 0x48c0, 0x4980, 0x8941, 0x4b00, 0x8bc1, 0x8a81, 0x4a40,
0x4e00, 0x8ec1, 0x8f81, 0x4f40, 0x8d01, 0x4dc0, 0x4c80, 0x8c41,
0x4400, 0x84c1, 0x8581, 0x4540, 0x8701, 0x47c0, 0x4680, 0x8641,
0x8201, 0x42c0, 0x4380, 0x8341, 0x4100, 0x81c1, 0x8081, 0x4040]
crc = 0xffff
for c in buf:
index = (crc ^ ord(c)) & 0xff
crct = crc_table[index]
crc = (crc >> 8) ^ crct
crc = (crc << 8) | (crc >> 8)
crc &= 0x7F7F
return "%04x" % crc
| 851,879 |
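A quick usage sketch (Python 2 string semantics, as in the surrounding code); the return value is a four-character lowercase hex string.

crc = calc_crc16("0123456789")              # str is iterated byte-wise via ord()
print(len(crc), crc)                        # 4, plus the hex digits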
Simple wrapper to calculate the legacy push PF value.
Args:
pf: meter power factor reading
Returns:
int: legacy push pf
|
def calcPF(pf):
pf_y = pf[:1]
pf_x = pf[1:]
result = 100
if pf_y == CosTheta.CapacitiveLead:
result = 200 - int(pf_x)
elif pf_y == CosTheta.InductiveLag:
result = int(pf_x)
return result
| 851,880 |
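Hypothetical examples, assuming CosTheta.CapacitiveLead and CosTheta.InductiveLag are the single-character prefixes 'C' and 'L':

print(calcPF("C80"))                        # 200 - 80 = 120
print(calcPF("L95"))                        # 95
print(calcPF("X99"))                        # neither prefix: default 100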
Serial call to set max demand period.
Args:
period (int): Max demand period: 1 = 15 minute, 2 = 30 minute, 3 = hour.
password (str): Optional password.
Returns:
bool: True on completion with ACK.
|
def setMaxDemandPeriod(self, period, password="00000000"):
result = False
self.setContext("setMaxDemandPeriod")
try:
if period < 1 or period > 3:
self.writeCmdMsg("Correct parameter: 1 = 15 minute, 2 = 30 minute, 3 = hour")
self.setContext("")
return result
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not self.serialCmdPwdAuth(password):
self.writeCmdMsg("Password failure")
else:
req_str = "015731023030353028" + binascii.hexlify(str(period)).zfill(2) + "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setMaxDemandPeriod): 06 returned.")
result = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return result
| 851,881 |
Serial Call to set meter password. USE WITH CAUTION.
Args:
new_pwd (str): 8 digit numeric password to set
pwd (str): Old 8 digit numeric password.
Returns:
bool: True on completion with ACK.
|
def setMeterPassword(self, new_pwd, pwd="00000000"):
result = False
self.setContext("setMeterPassword")
try:
if len(new_pwd) != 8 or len(pwd) != 8:
self.writeCmdMsg("Passwords must be exactly eight characters.")
self.setContext("")
return result
if not self.request(False):
self.writeCmdMsg("Pre command read failed: check serial line.")
else:
if not self.serialCmdPwdAuth(pwd):
self.writeCmdMsg("Password failure")
else:
req_pwd = binascii.hexlify(new_pwd.zfill(8))
req_str = "015731023030323028" + req_pwd + "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setMeterPassword): 06 returned.")
result = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return result
| 851,882 |
Wrapper for struct.unpack with SerialBlock buffer definitions.
Args:
data (str): Implicit cast bytes to str, serial port return.
def_buf (SerialBlock): Block object holding field lengths.
Returns:
tuple: parsed result of struct.unpack() with field definitions.
|
def unpackStruct(self, data, def_buf):
struct_str = "="
for fld in def_buf:
if not def_buf[fld][MeterData.CalculatedFlag]:
struct_str = struct_str + str(def_buf[fld][MeterData.SizeValue]) + "s"
if len(data) == 255:
contents = struct.unpack(struct_str, str(data))
else:
self.writeCmdMsg("Length error. Len() size = " + str(len(data)))
contents = ()
return contents
| 851,883 |
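A minimal illustration (Python 2 string semantics, as in the surrounding code) of the format string the loop builds for three non-calculated fields of 1, 12 and 2 bytes:

import struct

struct_str = "=" + "1s" + "12s" + "2s"
print(struct.unpack(struct_str, "A" + "0" * 12 + "XY"))
# -> ('A', '000000000000', 'XY')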
Move data from raw tuple into scaled and converted values.
Args:
contents (tuple): Breakout of passed block from unpackStruct().
def_buf (SerialBlock): Read buffer destination.
kwh_scale (int): :class:`~ekmmeters.ScaleKWH` as int, from Field.kWhScale`
Returns:
bool: True on completion.
|
def convertData(self, contents, def_buf, kwh_scale=ScaleKWH.EmptyScale):
log_str = ""
count = 0
# getting scale does not require a full read. It does require that the
# reads have the scale value in the first block read. This requirement
# is filled by default in V3 and V4 requests
if kwh_scale == ScaleKWH.EmptyScale:
scale_offset = int(def_buf.keys().index(Field.kWh_Scale))
self.m_kwh_precision = kwh_scale = int(contents[scale_offset])
for fld in def_buf:
if def_buf[fld][MeterData.CalculatedFlag]:
count += 1
continue
if len(contents) == 0:
count += 1
continue
try: # scrub up messes on a field by field basis
raw_data = contents[count]
fld_type = def_buf[fld][MeterData.TypeValue]
fld_scale = def_buf[fld][MeterData.ScaleValue]
if fld_type == FieldType.Float:
float_data = float(str(raw_data))
divisor = 1
if fld_scale == ScaleType.KWH:
divisor = 1
if kwh_scale == ScaleKWH.Scale10:
divisor = 10
elif kwh_scale == ScaleKWH.Scale100:
divisor = 100
elif (kwh_scale != ScaleKWH.NoScale) and (kwh_scale != ScaleKWH.EmptyScale):
ekm_log("Unrecognized kwh scale.")
elif fld_scale == ScaleType.Div10:
divisor = 10
elif fld_scale == ScaleType.Div100:
divisor = 100
elif fld_scale != ScaleType.No:
ekm_log("Unrecognized float scale.")
float_data /= divisor
float_data_str = str(float_data)
def_buf[fld][MeterData.StringValue] = float_data_str
def_buf[fld][MeterData.NativeValue] = float_data
elif fld_type == FieldType.Hex:
hex_data = raw_data.encode('hex')
def_buf[fld][MeterData.StringValue] = hex_data
def_buf[fld][MeterData.NativeValue] = hex_data
elif fld_type == FieldType.Int:
integer_data = int(raw_data)
integer_data_str = str(integer_data)
if len(integer_data_str) == 0:
integer_data_str = str(0)
def_buf[fld][MeterData.StringValue] = integer_data_str
def_buf[fld][MeterData.NativeValue] = integer_data
elif fld_type == FieldType.String:
string_data = str(raw_data)
def_buf[fld][MeterData.StringValue] = string_data
def_buf[fld][MeterData.NativeValue] = string_data
elif fld_type == FieldType.PowerFactor:
def_buf[fld][MeterData.StringValue] = str(raw_data)
def_buf[fld][MeterData.NativeValue] = str(raw_data)
else:
ekm_log("Unrecognized field type")
log_str = log_str + '"' + fld + '": "' + def_buf[fld][MeterData.StringValue] + '"\n'
except:
ekm_log("Exception on Field:" + str(fld))
ekm_log(traceback.format_exc(sys.exc_info()))
self.writeCmdMsg("Exception on Field:" + str(fld))
count += 1
return True
| 851,884 |
Translate the passed serial block into string only JSON.
Args:
def_buf (SerialBlock): Any :class:`~ekmmeters.SerialBlock` object.
Returns:
str: JSON rendering of meter record.
|
def jsonRender(self, def_buf):
try:
ret_dict = SerialBlock()
ret_dict[Field.Meter_Address] = self.getMeterAddress()
for fld in def_buf:
compare_fld = fld.upper()
if not "RESERVED" in compare_fld and not "CRC" in compare_fld:
ret_dict[str(fld)] = def_buf[fld][MeterData.StringValue]
except:
ekm_log(traceback.format_exc(sys.exc_info()))
return ""
return json.dumps(ret_dict, indent=4)
| 851,885 |
Internal read CRC wrapper.
Args:
raw_read (str): Bytes with implicit string cast from serial read
def_buf (SerialBlock): Populated read buffer.
Returns:
bool: True if passed CRC equals calculated CRC.
|
def crcMeterRead(self, raw_read, def_buf):
try:
if len(raw_read) == 0:
ekm_log("(" + self.m_context + ") Empty return read.")
return False
sent_crc = self.calc_crc16(raw_read[1:-2])
logstr = "(" + self.m_context + ")CRC sent = " + str(def_buf["crc16"][MeterData.StringValue])
logstr += " CRC calc = " + sent_crc
ekm_log(logstr)
if int(def_buf["crc16"][MeterData.StringValue], 16) == int(sent_crc, 16):
return True
# Across simple test lines on a USB serial adapter, these occur every
# 1000 to 2000 reads, and they show up here as a bad unpack or
# a bad crc type call. In either case, we suppress them or the log
# would become quite large. ekmcrc errors come through as type errors.
# Failures of int type conversion in 16 bit conversion occur as value
# errors.
except struct.error:
ekm_log(str(sys.exc_info()))
for frame in traceback.extract_tb(sys.exc_info()[2]):
fname, lineno, fn, text = frame
ekm_log("Error in %s on line %d" % (fname, lineno))
return False
except TypeError:
ekm_log(str(sys.exc_info()))
for frame in traceback.extract_tb(sys.exc_info()[2]):
fname, lineno, fn, text = frame
ekm_log("Error in %s on line %d" % (fname, lineno))
return False
except ValueError:
ekm_log(str(sys.exc_info()))
for frame in traceback.extract_tb(sys.exc_info()[2]):
fname, lineno, fn, text = frame
ekm_log("Error in %s on line %d" % (fname, lineno))
return False
return False
| 851,886 |
Remove an observer from the meter update() chain.
Args:
observer (MeterObserver): Subclassed MeterObserver.
|
def unregisterObserver(self, observer):
if observer in self.m_observers:
self.m_observers.remove(observer)
| 851,888 |
Return the requested tariff schedule :class:`~ekmmeters.SerialBlock` for meter.
Args:
period_group (int): A :class:`~ekmmeters.ReadSchedules` value.
Returns:
SerialBlock: The requested tariff schedules for meter.
|
def getSchedulesBuffer(self, period_group):
empty_return = SerialBlock()
if period_group == ReadSchedules.Schedules_1_To_4:
return self.m_schd_1_to_4
elif period_group == ReadSchedules.Schedules_5_To_6:
return self.m_schd_5_to_6
else:
return empty_return
| 851,891 |
Get the months tariff SerialBlock for meter.
Args:
direction (int): A :class:`~ekmmeters.ReadMonths` value.
Returns:
SerialBlock: Requested months tariffs buffer.
|
def getMonthsBuffer(self, direction):
if direction == ReadMonths.kWhReverse:
return self.m_rev_mons
# default direction == ReadMonths.kWh
return self.m_mons
| 851,895 |
Serial set time with day of week calculation.
Args:
yy (int): Last two digits of year.
mm (int): Month 1-12.
dd (int): Day 1-31
hh (int): Hour 0 to 23.
minutes (int): Minutes 0 to 59.
ss (int): Seconds 0 to 59.
password (str): Optional password.
Returns:
bool: True on completion and ACK.
|
def setTime(self, yy, mm, dd, hh, minutes, ss, password="00000000"):
result = False
self.setContext("setTime")
try:
if mm < 1 or mm > 12:
self.writeCmdMsg("Month must be between 1 and 12")
self.setContext("")
return result
if dd < 1 or dd > 31:
self.writeCmdMsg("Day must be between 1 and 31")
self.setContext("")
return result
if hh < 0 or hh > 23:
self.writeCmdMsg("Hour must be between 0 and 23, inclusive")
self.setContext("")
return result
if minutes < 0 or minutes > 59:
self.writeCmdMsg("Minutes must be between 0 and 59, inclusive")
self.setContext("")
return result
if ss < 0 or ss > 59:
self.writeCmdMsg("Seconds must be between 0 and 59, inclusive")
self.setContext("")
return result
if len(password) != 8:
self.writeCmdMsg("Invalid password length.")
self.setContext("")
return result
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not self.serialCmdPwdAuth(password):
self.writeCmdMsg("Password failure")
else:
dt_buf = datetime.datetime(int(yy), int(mm), int(dd), int(hh), int(minutes), int(ss))
ekm_log("Writing Date and Time " + dt_buf.strftime("%Y-%m-%d %H:%M"))
dayofweek = dt_buf.date().isoweekday()
ekm_log("Calculated weekday " + str(dayofweek))
req_str = "015731023030363028"
req_str += binascii.hexlify(str(yy).zfill(2)[-2:])
req_str += binascii.hexlify(str(mm).zfill(2))
req_str += binascii.hexlify(str(dd).zfill(2))
req_str += binascii.hexlify(str(dayofweek).zfill(2))
req_str += binascii.hexlify(str(hh).zfill(2))
req_str += binascii.hexlify(str(minutes).zfill(2))
req_str += binascii.hexlify(str(ss).zfill(2))
req_str += "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setTime): 06 returned.")
result = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return result
| 851,896 |
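A hypothetical usage sketch, setting the meter clock from the host clock (assumes a connected meter object):

now = datetime.datetime.now()
meter.setTime(now.year % 100, now.month, now.day,
              now.hour, now.minute, now.second)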
Serial call to set CT ratio for attached inductive pickup.
Args:
new_ct (int): A :class:`~ekmmeters.CTRatio` value, a legal amperage setting.
password (str): Optional password.
Returns:
bool: True on completion with ACK.
|
def setCTRatio(self, new_ct, password="00000000"):
ret = False
self.setContext("setCTRatio")
try:
self.clearCmdMsg()
if new_ct not in (CTRatio.Amps_100, CTRatio.Amps_200, CTRatio.Amps_400,
CTRatio.Amps_600, CTRatio.Amps_800, CTRatio.Amps_1000,
CTRatio.Amps_1200, CTRatio.Amps_1500, CTRatio.Amps_2000,
CTRatio.Amps_3000, CTRatio.Amps_4000, CTRatio.Amps_5000):
self.writeCmdMsg("Legal CT Ratios: 100, 200, 400, 600, " +
"800, 1000, 1200, 1500, 2000, 3000, 4000 and 5000")
self.setContext("")
return ret
if len(password) != 8:
self.writeCmdMsg("Invalid password length.")
self.setContext("")
return ret
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not self.serialCmdPwdAuth(password):
self.writeCmdMsg("Password failure")
else:
req_str = "015731023030443028" + binascii.hexlify(str(new_ct).zfill(4)) + "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setCTRatio): 06 returned.")
ret = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return ret
| 851,897 |
Assign one schedule tariff period to meter buffer.
Args:
schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extents.Schedules).
period (int): A :class:`~ekmmeters.Tariffs` value or in range(Extents.Tariffs).
hour (int): Hour from 0-23.
minute (int): Minute from 0-59.
tariff (int): Rate value.
Returns:
bool: True on completed assignment.
|
def assignSchedule(self, schedule, period, hour, minute, tariff):
if ((schedule not in range(Extents.Schedules)) or
(period not in range(Extents.Tariffs)) or
(hour < 0) or (hour > 23) or (minute < 0) or
(minute > 59) or (tariff < 0)):
ekm_log("Out of bounds in Schedule_" + str(schedule + 1))
return False
period += 1
idx_min = "Min_" + str(period)
idx_hour = "Hour_" + str(period)
idx_rate = "Tariff_" + str(period)
if idx_min not in self.m_schedule_params:
ekm_log("Incorrect index: " + idx_min)
return False
if idx_hour not in self.m_schedule_params:
ekm_log("Incorrect index: " + idx_hour)
return False
if idx_rate not in self.m_schedule_params:
ekm_log("Incorrect index: " + idx_rate)
return False
self.m_schedule_params[idx_rate] = tariff
self.m_schedule_params[idx_hour] = hour
self.m_schedule_params[idx_min] = minute
self.m_schedule_params['Schedule'] = schedule
return True
| 851,898 |
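A hypothetical usage sketch (assumes a meter object): assign period 2 (zero-based index 1) of schedule 1 to switch to tariff rate 3 at 08:30; the buffered table can then be pushed with the corresponding set call.

ok = meter.assignSchedule(Schedules.Schedule_1, 1, 8, 30, 3)
print(ok)                                   # True on a successful buffer update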
Define a single season and assign a schedule
Args:
season (int): A :class:`~ekmmeters.Seasons` value or in range(Extents.Seasons).
month (int): Month 1-12.
day (int): Day 1-31.
schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extents.Schedules).
Returns:
bool: True on completion and ACK.
|
def assignSeasonSchedule(self, season, month, day, schedule):
season += 1
schedule += 1
if ((season < 1) or (season > Extents.Seasons) or (schedule < 1) or
(schedule > Extents.Schedules) or (month > 12) or (month < 0) or
(day < 0) or (day > 31)):
ekm_log("Out of bounds: month " + str(month) + " day " + str(day) +
" schedule " + str(schedule) + " season " + str(season))
return False
idx_mon = "Season_" + str(season) + "_Start_Day"
idx_day = "Season_" + str(season) + "_Start_Month"
idx_schedule = "Season_" + str(season) + "_Schedule"
if idx_mon not in self.m_seasons_sched_params:
ekm_log("Incorrect index: " + idx_mon)
return False
if idx_day not in self.m_seasons_sched_params:
ekm_log("Incorrect index: " + idx_day)
return False
if idx_schedule not in self.m_seasons_sched_params:
ekm_log("Incorrect index: " + idx_schedule)
return False
self.m_seasons_sched_params[idx_mon] = month
self.m_seasons_sched_params[idx_day] = day
self.m_seasons_sched_params[idx_schedule] = schedule
return True
| 851,899 |
Serial command to set seasons table.
If no dictionary is passed, the meter object buffer is used.
Args:
cmd_dict (dict): Optional dictionary of season schedules.
password (str): Optional password
Returns:
bool: True on completion and ACK.
|
def setSeasonSchedules(self, cmd_dict=None, password="00000000"):
result = False
self.setContext("setSeasonSchedules")
if not cmd_dict:
cmd_dict = self.m_seasons_sched_params
try:
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not self.serialCmdPwdAuth(password):
self.writeCmdMsg("Password failure")
else:
req_table = ""
req_table += binascii.hexlify(str(cmd_dict["Season_1_Start_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_1_Start_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_1_Schedule"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_2_Start_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_2_Start_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_2_Schedule"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_3_Start_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_3_Start_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_3_Schedule"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_4_Start_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_4_Start_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Season_4_Schedule"]).zfill(2))
req_table += binascii.hexlify(str(0).zfill(24))
req_str = "015731023030383028" + req_table + "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setSeasonSchedules): 06 returned.")
result = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return result
| 851,900 |
Set a single holiday day and month in the object buffer.
There is no class style enum for holidays.
Args:
holiday (int): 0-19 or range(Extents.Holidays).
month (int): Month 1-12.
day (int): Day 1-31
Returns:
bool: True on completion.
|
def assignHolidayDate(self, holiday, month, day):
holiday += 1
if (month > 12) or (month < 0) or (day > 31) or (day < 0) or (holiday < 1) or (holiday > Extents.Holidays):
ekm_log("Out of bounds: month " + str(month) + " day " + str(day) + " holiday " + str(holiday))
return False
day_str = "Holiday_" + str(holiday) + "_Day"
mon_str = "Holiday_" + str(holiday) + "_Month"
if day_str not in self.m_holiday_date_params:
ekm_log("Incorrect index: " + day_str)
return False
if mon_str not in self.m_holiday_date_params:
ekm_log("Incorrect index: " + mon_str)
return False
self.m_holiday_date_params[day_str] = day
self.m_holiday_date_params[mon_str] = month
return True
| 851,901 |
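A hypothetical usage sketch (assumes a connected meter object): buffer July 4th as the first holiday, then push the whole table with setHolidayDates(), shown in the next record.

if meter.assignHolidayDate(0, 7, 4):        # holiday index 0, July 4th
    meter.setHolidayDates()                 # push buffered table to the meter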
Serial call to set holiday list.
If a buffer dictionary is not supplied, the method will use
the class object buffer populated with assignHolidayDate.
Args:
cmd_dict (dict): Optional dictionary of holidays.
password (str): Optional password.
Returns:
bool: True on completion.
|
def setHolidayDates(self, cmd_dict=None, password="00000000"):
result = False
self.setContext("setHolidayDates")
if not cmd_dict:
cmd_dict = self.m_holiday_date_params
try:
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not self.serialCmdPwdAuth(password):
self.writeCmdMsg("Password failure")
else:
req_table = ""
req_table += binascii.hexlify(str(cmd_dict["Holiday_1_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_1_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_2_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_2_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_3_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_3_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_4_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_4_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_5_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_5_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_6_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_6_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_7_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_7_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_8_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_8_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_9_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_9_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_10_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_10_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_11_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_11_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_12_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_12_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_13_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_13_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_14_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_14_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_15_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_15_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_16_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_16_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_17_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_17_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_18_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_18_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_19_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_19_Day"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_20_Month"]).zfill(2))
req_table += binascii.hexlify(str(cmd_dict["Holiday_20_Day"]).zfill(2))
req_str = "015731023030423028" + req_table + "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setHolidayDates: 06 returned.")
result = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return result
| 851,902 |
Serial call to set weekend and holiday :class:`~ekmmeters.Schedules`.
Args:
new_wknd (int): :class:`~ekmmeters.Schedules` value to assign.
new_hldy (int): :class:`~ekmmeters.Schedules` value to assign.
password (str): Optional password..
Returns:
bool: True on completion and ACK.
|
def setWeekendHolidaySchedules(self, new_wknd, new_hldy, password="00000000"):
result = False
self.setContext("setWeekendHolidaySchedules")
try:
if not self.request(False):
self.writeCmdMsg("Bad read CRC on setting")
else:
if not self.serialCmdPwdAuth(password):
self.writeCmdMsg("Password failure")
else:
req_wkd = binascii.hexlify(str(new_wknd).zfill(2))
req_hldy = binascii.hexlify(str(new_hldy).zfill(2))
req_str = "015731023030433028" + req_wkd + req_hldy + "2903"
req_str += self.calc_crc16(req_str[2:].decode("hex"))
self.m_serial_port.write(req_str.decode("hex"))
if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
self.writeCmdMsg("Success(setWeekendHolidaySchedules): 06 returned.")
result = True
self.serialPostEnd()
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return result
| 851,903 |
Serial call to read schedule tariffs buffer
Args:
tableset (int): :class:`~ekmmeters.ReadSchedules` buffer to return.
Returns:
bool: True on completion and ACK.
|
def readSchedules(self, tableset):
self.setContext("readSchedules")
try:
req_table = binascii.hexlify(str(tableset).zfill(1))
req_str = "01523102303037" + req_table + "282903"
self.request(False)
req_crc = self.calc_crc16(req_str[2:].decode("hex"))
req_str += req_crc
self.m_serial_port.write(req_str.decode("hex"))
raw_ret = self.m_serial_port.getResponse(self.getContext())
self.serialPostEnd()
return_crc = self.calc_crc16(raw_ret[1:-2])
if tableset == ReadSchedules.Schedules_1_To_4:
unpacked_read = self.unpackStruct(raw_ret, self.m_schd_1_to_4)
self.convertData(unpacked_read, self.m_schd_1_to_4, self.m_kwh_precision)
if str(return_crc) == str(self.m_schd_1_to_4["crc16"][MeterData.StringValue]):
ekm_log("Schedules 1 to 4 CRC success (06 return")
self.setContext("")
return True
elif tableset == ReadSchedules.Schedules_5_To_6:
unpacked_read = self.unpackStruct(raw_ret, self.m_schd_5_to_6)
self.convertData(unpacked_read, self.m_schd_5_to_6, self.m_kwh_precision)
if str(return_crc) == str(self.m_schd_5_to_6["crc16"][MeterData.StringValue]):
ekm_log("Schedules 5 to 8 CRC success (06 return)")
self.setContext("")
return True
except:
ekm_log(traceback.format_exc(sys.exc_info()))
self.setContext("")
return False
| 851,904 |
Read a single schedule tariff from meter object buffer.
Args:
schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extents.Schedules).
period (int): A :class:`~ekmmeters.Tariffs` value or in range(Extents.Tariffs).
Returns:
bool: True on completion.
|
def extractSchedule(self, schedule, period):
ret = namedtuple("ret", ["Hour", "Min", "Tariff", "Period", "Schedule"])
work_table = self.m_schd_1_to_4
if Schedules.Schedule_5 <= schedule <= Schedules.Schedule_6:
work_table = self.m_schd_5_to_6
period += 1
schedule += 1
ret.Period = str(period)
ret.Schedule = str(schedule)
if (schedule < 1) or (schedule > Extents.Schedules) or (period < 0) or (period > Extents.Periods):
ekm_log("Out of bounds: tariff " + str(period) + " for schedule " + str(schedule))
ret.Hour = ret.Min = ret.Tariff = str(0)
return ret
idxhr = "Schedule_" + str(schedule) + "_Period_" + str(period) + "_Hour"
idxmin = "Schedule_" + str(schedule) + "_Period_" + str(period) + "_Min"
idxrate = "Schedule_" + str(schedule) + "_Period_" + str(period) + "_Tariff"
if idxhr not in work_table:
ekm_log("Incorrect index: " + idxhr)
ret.Hour = ret.Min = ret.Tariff = str(0)
return ret
if idxmin not in work_table:
ekm_log("Incorrect index: " + idxmin)
ret.Hour = ret.Min = ret.Tariff = str(0)
return ret
if idxrate not in work_table:
ekm_log("Incorrect index: " + idxrate)
ret.Hour = ret.Min = ret.Tariff = str(0)
return ret
ret.Hour = work_table[idxhr][MeterData.StringValue]
ret.Min = work_table[idxmin][MeterData.StringValue].zfill(2)
ret.Tariff = work_table[idxrate][MeterData.StringValue]
return ret
| 851,905 |